code | repo_name | path | language | license | size
---|---|---|---|---|---|
stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Linked}
import uk.gov.hmrc.ct.computations.CP257
case class B160(value: Option[Int]) extends CtBoxIdentifier(name = "(Pre reform) Trading losses brought forward set against trading profits") with CtOptionalInteger
object B160 extends Linked[CP257, B160] {
override def apply(source: CP257): B160 = B160(source.value)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B160.scala | Scala | apache-2.0 | 1,025 |
package org.jetbrains.plugins.scala.lang.typeConformance.generated
import org.jetbrains.plugins.scala.DependencyManagerBase._
import org.jetbrains.plugins.scala.ScalaVersion
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader}
import org.jetbrains.plugins.scala.lang.typeConformance.TypeConformanceTestBase
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel
class TypeConformanceZioTest extends TypeConformanceTestBase {
override protected def supportedIn(version: ScalaVersion): Boolean =
version.languageLevel == ScalaLanguageLevel.Scala_2_13
override protected def librariesLoaders: Seq[LibraryLoader] =
super.librariesLoaders :+ IvyManagedLoader("dev.zio" %% "zio" % "1.0.0-RC18-2")
def testSCL17210(): Unit = {
doTest(
s"""import zio.{Has, ZLayer}
|import zio.console.Console
|import zio.random.Random
|
|type Example = Has[Example.Service]
|object Example {
| trait Service
|}
|
|val live: ZLayer[Console with Random, Nothing, Example] =
| ZLayer.fromServices[Console.Service, Random.Service, Example.Service] { (console, random) =>
| new Example.Service {}
| }
|//true
""".stripMargin)
}
def testSCL17210_differentOrder(): Unit = {
doTest(
s"""import zio.{Has, ZLayer}
|import zio.console.Console
|import zio.random.Random
|
|type Example = Has[Example.Service]
|object Example {
| trait Service
|}
|
|val live: ZLayer[Random with Console, Nothing, Example] =
| ZLayer.fromServices[Console.Service, Random.Service, Example.Service] { (console, random) =>
| new Example.Service {}
| }
|//true
""".stripMargin)
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/typeConformance/generated/TypeConformanceZioTest.scala | Scala | apache-2.0 | 1,874 |
import java.util.TimeZone.getAvailableIDs
var sortedIDs: Array[String] = getAvailableIDs.map(_.split('/').last).sorted
for (city <- sortedIDs) println(city)
| demiazz/scala-impatient | chapter-03/exercise-09/main.scala | Scala | unlicense | 159 |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bqhiveloader
import java.util
import java.util.Map
import com.google.cloud.RetryOption
import com.google.cloud.bigquery.JobInfo.{CreateDisposition, WriteDisposition}
import com.google.cloud.bigquery.QueryJobConfiguration.Priority
import com.google.cloud.bigquery.{BigQuery, Clustering, Job, JobId, JobInfo, QueryJobConfiguration, RangePartitioningUtil, StandardTableDefinition, Table, TableId, TableInfo, TableResult, TimePartitioning}
import com.google.cloud.bqhiveloader.ExternalTableManager.jobid
import org.apache.spark.sql.types.{DateType, StructType}
import org.threeten.bp.Duration
import scala.util.{Failure, Success}
object NativeTableManager extends Logging {
def getExistingPartitions(tableId: TableId, bigQuery: BigQuery): TableResult = {
    // Query the legacy-SQL partitions summary meta-table for this table.
    val tableSpec = tableId.getProject + ":" + tableId.getDataset + "." + tableId.getTable + "$__PARTITIONS_SUMMARY__"
bigQuery.query(QueryJobConfiguration.newBuilder(
s"""SELECT
| partition_id,
| TIMESTAMP(creation_time/1000) AS creation_time
|FROM [$tableSpec]""".stripMargin)
.setUseLegacySql(true)
.setPriority(Priority.INTERACTIVE)
.build())
}
def createTableIfNotExists(project: String, dataset: String, table: String, c: Config, schema: StructType, bigquery: BigQuery, bql: com.google.api.services.bigquery.Bigquery, expirationMs: scala.Option[Long] = None): Boolean = {
val tableId = TableId.of(project, dataset, table)
createTableIfNotExistsWithId(c, schema, tableId, bigquery, bql, expirationMs)
}
def createTableIfNotExistsWithId(c: Config, schema: StructType, tableId: TableId, bigquery: BigQuery, bql: com.google.api.services.bigquery.Bigquery, expirationMs: scala.Option[Long] = None): Boolean = {
if (!ExternalTableManager.tableExists(tableId, bigquery)) {
createTable(c, schema, tableId, bigquery, bql, expirationMs)
true
} else false
}
def copyOnto(srcProject: String, srcDataset: String, srcTable: String, destProject: String, destDataset: String, destTable: String, destPartition: scala.Option[String] = None, bq: BigQuery, dryRun: Boolean, batch: Boolean): scala.util.Try[Job] = {
val tableWithPartition = destPartition.map(partId => destTable + "$" + partId).getOrElse(destTable)
val srcTableId = TableId.of(srcProject, srcDataset, srcTable)
val destTableId = TableId.of(destProject, destDataset, tableWithPartition)
val job = selectInto(srcTableId, destTableId, bq, dryRun, batch)
if (!dryRun){
job.map{_.waitFor(
RetryOption.initialRetryDelay(Duration.ofSeconds(8)),
RetryOption.maxRetryDelay(Duration.ofSeconds(60)),
RetryOption.retryDelayMultiplier(2.0d),
RetryOption.totalTimeout(Duration.ofMinutes(120)))
} match {
case Some(j) =>
val error = scala.Option(j.getStatus).flatMap(x => scala.Option(x.getError))
error
.map(e => Failure(new RuntimeException(e.toString)))
.getOrElse(Success(j))
case _ =>
Failure(new RuntimeException("Job doesn't exist"))
}
} else Success(null)
}
def deletePartition(tbl: TableId, partitionId: String, bq: BigQuery): Boolean = {
    require(partitionId.matches("""^\d{8}$"""), "partitionId must match format YYYYMMDD")
bq.delete(TableId.of(tbl.getProject, tbl.getDataset, tbl.getTable + "$" + partitionId))
}
def selectInto(src: TableId, dest: TableId, bq: BigQuery, dryRun: Boolean = false, batch: Boolean = false): scala.Option[Job] = {
val query = s"select * from `${src.getProject}.${src.getDataset}.${src.getTable}`"
val jobConfig = QueryJobConfiguration
.newBuilder(query)
.setCreateDisposition(CreateDisposition.CREATE_NEVER)
.setWriteDisposition(WriteDisposition.WRITE_TRUNCATE)
.setDestinationTable(dest)
.setAllowLargeResults(true)
.setDryRun(dryRun)
.setUseLegacySql(false)
.setUseQueryCache(false)
.setPriority(if (batch) Priority.BATCH else Priority.INTERACTIVE)
.build()
val jobId = JobId.newBuilder()
.setProject(src.getProject)
.setJob(jobid(dest))
.build()
val jobInfo = JobInfo.of(jobId, jobConfig)
logger.info(jobInfo.toString)
if (dryRun) None
else scala.Option(ExternalTableManager.createJob(bq, jobId, jobConfig))
}
def createTable(c: Config, schema: StructType, destTableId: TableId, bigquery: BigQuery, bql: com.google.api.services.bigquery.Bigquery, expirationMs: scala.Option[Long] = None): Table ={
require(c.clusterColumns.nonEmpty, "destination table does not exist, clusterColumns must not be empty")
require(c.partitionColumn.nonEmpty, "destination table does not exist, partitionColumn must not be empty")
val destTableSchema = if (c.partitionColumn.map(_.toLowerCase).contains("none")) {
Mapping.convertStructType(schema.add(c.unusedColumnName, DateType))
} else {
val hasPartCol = schema
.find(_.name.equalsIgnoreCase(c.partitionColumn.get))
.exists(_.dataType == DateType)
if (hasPartCol)
Mapping.convertStructType(schema)
else {
val fieldsWithoutPartCol = schema
.filterNot(_.name.equalsIgnoreCase(c.partitionColumn.get))
val withPartCol = StructType(fieldsWithoutPartCol)
.add(c.partitionColumn.get, DateType)
Mapping.convertStructType(withPartCol)
}
}
val destTableDefBuilder = StandardTableDefinition.newBuilder()
.setLocation(c.bqLocation)
.setSchema(destTableSchema)
if (c.clusterColumns.map(_.toLowerCase) != Seq("none")) {
import scala.collection.JavaConverters.seqAsJavaListConverter
destTableDefBuilder.setClustering(Clustering.newBuilder()
.setFields(c.clusterColumns.map(_.toLowerCase).asJava).build())
}
if (c.partitionColumn.contains("none") && c.clusterColumns.exists(_ != "none")) {
      // Partition on the placeholder (unused) column when no real partition column is given but cluster columns are specified
destTableDefBuilder
.setTimePartitioning(TimePartitioning
.newBuilder(TimePartitioning.Type.DAY)
.setField(c.unusedColumnName)
.build())
} else if (c.partitionType == "DAY") {
c.partitionColumn match {
case Some(partitionCol) if partitionCol != "none" =>
// Only set partition column if partition column is set
destTableDefBuilder
.setTimePartitioning(TimePartitioning
.newBuilder(TimePartitioning.Type.DAY)
.setField(partitionCol)
.build())
case _ =>
// Don't set a partition column if partition column is none
}
}
val tableInfoBuilder = TableInfo.newBuilder(destTableId, destTableDefBuilder.build())
expirationMs.foreach(x => tableInfoBuilder.setExpirationTime(System.currentTimeMillis() + x))
if (c.partitionType == "RANGE" && c.partitionColumn.isDefined) {
val tableInfo = tableInfoBuilder.build()
RangePartitioningUtil.createTable(destTableId.getProject, destTableId.getDataset, tableInfo, bql, c.partitionColumn.get, c.partitionRangeStart, c.partitionRangeEnd, c.partitionRangeInterval)
bigquery.getTable(tableInfo.getTableId)
} else {
bigquery.create(tableInfoBuilder.build())
}
}
}
| CloudVLab/professional-services | tools/bigquery-hive-external-table-loader/src/main/scala/com/google/cloud/bqhiveloader/NativeTableManager.scala | Scala | apache-2.0 | 7,931 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.controllers.application.status
import iht.config.AppConfig
import iht.connector.{CachingConnector, IhtConnector}
import javax.inject.Inject
import play.api.mvc.{MessagesControllerComponents, Request}
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import iht.views.html.application.status.in_review_application
class ApplicationInReviewControllerImpl @Inject()(val ihtConnector: IhtConnector,
val cachingConnector: CachingConnector,
val authConnector: AuthConnector,
val inReviewApplicationView: in_review_application,
implicit val appConfig: AppConfig,
val cc: MessagesControllerComponents) extends FrontendController(cc) with ApplicationInReviewController
trait ApplicationInReviewController extends ApplicationStatusController {
val inReviewApplicationView: in_review_application
def getView = (ihtReference, deceasedName, probateDetails) => (request: Request[_]) => {
implicit val req = request
inReviewApplicationView(ihtReference, deceasedName, probateDetails)
}
}
| hmrc/iht-frontend | app/iht/controllers/application/status/ApplicationInReviewController.scala | Scala | apache-2.0 | 1,833 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.lang._
import leon.collection._
import leon._
object LambdaCalculus {
abstract class Term
case class Var(x: BigInt) extends Term
case class Abs(x: BigInt, body: Term) extends Term
case class App(func: Term, arg: Term) extends Term
def fv(t: Term): Set[BigInt] = t match {
case Var(x) => Set(x)
case Abs(x, body) => fv(body) ++ Set(x)
case App(func, arg) => fv(func) ++ fv(arg)
}
// [x->u]t
def subst(x: BigInt, u: Term, t: Term): Term = t match {
case Var(y) => if (x == y) u else t
case Abs(y, body) => if (x == y) t else Abs(y, subst(x, u, body))
case App(f, a) => App(subst(x, u, f), subst(x, u, a))
}
/* Termination checker (LoopProcessor) says:
✗ Non-terminating for call: looping_eval(App(Abs(0, App(Var(0), Var(0))), Abs(0, App(Var(0), Var(0)))))
i.e.
(λx. x x)(λx. x x)
This is the well-known "omega".
*/
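  // Illustrative only (not in the original file): the looping "omega" term described above,
  // built with this file's AST; looping_eval(omega) never terminates.
  def omega: Term = App(Abs(0, App(Var(0), Var(0))), Abs(0, App(Var(0), Var(0))))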
// big step call-by-value looping_evaluation
def looping_eval(t: Term): Option[Term] = (t match {
case App(t1, t2) => looping_eval(t1) match {
case Some(Abs(x, body)) => looping_eval(t2) match {
case Some(v2) => looping_eval(subst(x, v2, body))
case None() => None[Term]()
}
case _ => None[Term]() // stuck
}
case _ => Some(t) // Abs or Var, already a value
}) ensuring { res => res match {
case Some(t) => isValue(t)
case None() => true
}}
def isValue(t: Term): Boolean = t match {
case Var(x) => true
case Abs(x, body) => true
case App(f, a) => false
}
}
| regb/leon | src/test/resources/regression/termination/looping/LambdaCalculus.scala | Scala | gpl-3.0 | 1,572 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.util
import org.joda.time.DateTime
import org.easymock.internal.ArgumentToString
import org.easymock.{IAnswer, IArgumentMatcher, EasyMock}
/**
* Utilities for making EasyMock work nicely with Scala.
*/
object EasyMockScalaUtils {
/**
* Replacement for EasyMock.anyObject when working with a Function1 that returns Unit.
*/
def anyUnitF1[A] = {
EasyMock.anyObject
(a:A) => {}
}
/**
* Replacement for EasyMock.anyObject when working with a Function3 that returns Unit.
*/
def anyUnitF3[A, B, C] = {
EasyMock.anyObject
(a:A, b:B, c:C) => {}
}
/**
* Replacement for EasyMock.anyObject when working with a Function4 that returns Unit.
*/
def anyUnitF4[A, B, C, D] = {
EasyMock.anyObject
(a:A, b:B, c:C, d:D) => {}
}
/**
* Replacement for EasyMock.anyObject when working with a Function5 that returns Unit.
*/
def anyUnitF5[A, B, C, D, E] = {
EasyMock.anyObject
(a:A, b:B, c:C, d:D, e:E) => {}
}
def anyString = {
EasyMock.anyObject
""
}
def anyTimestamp = {
EasyMock.anyObject()
new DateTime
}
/**
* Allows a DateTime to be validated by being between two values.
*/
def between(start:DateTime, end:DateTime):DateTime = {
// We create our own matcher here that confirms the value is between the two points. Note that we can't
// use EasyMock.and(EasyMock.geq(start), EasyMock.leq(end)) because DateTime isn't Comparable[DateTime] - it is just
// Comparable, and this makes the Scala type-checker unhappy.
EasyMock.reportMatcher(new IArgumentMatcher() {
def matches(argument: Any) = {
argument match {
case d:DateTime => d.compareTo(start) >= 0 && d.compareTo(end) <= 0
case _ => false
}
}
def appendTo(buffer: StringBuffer) = buffer.append("between " + start + " and " + end)
})
null
}
def asUnorderedList[T](expected:Seq[T]):Seq[T] = {
EasyMock.reportMatcher(new IArgumentMatcher() {
def matches(argument: Any):Boolean = {
val argSeq = argument.asInstanceOf[Seq[T]]
argSeq.toSet == expected.toSet
}
def appendTo(buffer: StringBuffer) {
ArgumentToString.appendArgument(expected, buffer)
}
})
null
}
val emptyAnswer = new IAnswer[Unit] {
def answer() {}
}
} | 0x6e6562/diffa | kernel/src/test/scala/net/lshift/diffa/kernel/util/EasyMockScalaUtils.scala | Scala | apache-2.0 | 2,988 |
package vep.app.common.page
import vep.Configuration
import vep.app.common.verifications.CommonVerifications
import vep.app.user.UserService
import scala.concurrent.ExecutionContext
trait PageIntegrationModule {
def userService: UserService
def commonVerifications: CommonVerifications
def executionContext: ExecutionContext
def configuration: Configuration
lazy val pageServices = new PageService()
lazy val pageVerifications = new PageVerifications(
commonVerifications,
pageServices
)
lazy val pageRouter = new PageRouter(
pageVerifications,
pageServices,
configuration,
userService,
executionContext
)
}
| kneelnrise/vep | src/main/scala/vep/app/common/page/PageIntegrationModule.scala | Scala | mit | 658 |
package com.zobot.ai.spark.helpers
import org.apache.spark.ml.classification.{LogisticRegression, LogisticRegressionModel}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.sql.{DataFrame, Row}
object LogisticRegressionHelpers {
case class ModelTestResults (
features: Vector,
label: Double,
probability: Vector,
prediction: Double
)
def trainModel(estimator: LogisticRegression, trainingSet: DataFrame): LogisticRegressionModel = estimator.fit(trainingSet)
def testModel(transformer: LogisticRegressionModel, testSet: DataFrame): LogisticRegressionModel = {
transformer.transform(testSet).select("features", "label", "myProbability", "prediction").collect().foreach {
case Row(features: Vector, label: Double, prob: Vector, prediction: Double) =>
ModelTestResults(features, label, prob, prediction)
}
transformer
}
}
| BecauseNoReason/zobot | src/main/scala/com/zobot/ai/spark/helpers/LogisticRegressionHelpers.scala | Scala | mit | 889 |
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.disk
import java.nio.file.Path
import com.treode.async.Async
import com.treode.notify.Notification
/** The disk controller.
*
* All the admin-y things you can do with the disk system.
*/
trait DiskController {
/** This disk system. */
implicit def disk: Disk
/** Change the disk drives (or files) that are attached to the system.
*
* When this method returns, The newly attached drives will be an active part of the disk
* system, and the newly draining drives will have begun draining. Draining drives will
* eventually be detached.
*
* @param change The change to apply.
*/
def change (change: DriveChange): Async [Notification [Unit]]
/** Summary information. */
def digest: Async [DiskSystemDigest]
def shutdown(): Async [Unit]
/** Attach new drives.
*
* When this method returns, the drives are a part of the disk system.
*
* @param attaches The drives to attach.
*/
def attach (attaches: Seq [DriveAttachment]): Async [Notification [Unit]] =
change (DriveChange (attaches, Seq.empty))
/** Attach new drives.
*
* When this method returns, the drives are a part of the disk system.
*
* @param geometry The physical properties of the drives. The same geometry is used for every path.
* @param paths The paths to attach.
*/
def attach (geometry: DriveGeometry, paths: Path*): Async [Notification [Unit]] =
attach (paths map (DriveAttachment (_, geometry)))
/** Drain drives that are attached.
*
* The disk system drains drives by copying all live data on them to someplace else. When this
* method returns, the drain has begun, but it may not complete until later. When the drives
* have been drained, the disk system will detach them.
*
* @param drains The drives to drain.
*/
def drain (drains: Path*): Async [Notification [Unit]] =
change (DriveChange (Seq.empty, drains))
}
object DiskController {
trait Proxy extends DiskController {
protected def _disk: DiskController
implicit def disk: Disk =
_disk.disk
def digest: Async [DiskSystemDigest] =
_disk.digest
def change (change: DriveChange): Async [Notification [Unit]] =
_disk.change (change)
}}
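/** Hypothetical usage sketch, not part of the original file: attach two drives that share one
  * geometry, then begin draining another. The paths and the geometry value are illustrative. */
object DiskControllerUsageSketch {
  import java.nio.file.Paths

  // Attach two drives with the same geometry; the result reports any attach problems.
  def attachPair (controller: DiskController, geometry: DriveGeometry): Async [Notification [Unit]] =
    controller.attach (geometry, Paths.get ("/disks/a"), Paths.get ("/disks/b"))

  // Begin draining a drive; it is detached once its live data has been copied elsewhere.
  def retire (controller: DiskController): Async [Notification [Unit]] =
    controller.drain (Paths.get ("/disks/old"))
}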
| Treode/store | disk/src/com/treode/disk/DiskController.scala | Scala | apache-2.0 | 2,867 |
package org.cloudio.morpheus.mail.traditional
import org.cloudio.morpheus.mail.MailOwner
/**
* Created by zslajchrt on 24/08/15.
*/
trait RegisteredUserAdapter extends MailOwner {
this: RegisteredUser =>
override def isMale: Boolean = male
}
| zslajchrt/morpheus-tutor | src/main/scala/org/cloudio/morpheus/mail/traditional/RegisteredUserAdapter.scala | Scala | apache-2.0 | 251 |
package com.michalrus.nofatty.ui.utils
import java.awt.event._
import javax.swing.table.TableModel
import javax.swing._
class BetterTable(model: TableModel, isInstantlyEditable: (Int, Int) ⇒ Boolean, cellPopup: ⇒ JPopupMenu) extends JTable(model) {
setCellSelectionEnabled(true)
setSurrendersFocusOnKeystroke(true)
putClientProperty("terminateEditOnFocusLost", true)
putClientProperty("JTable.autoStartsEdit", false)
val _ = getInputMap(JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT).
put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), "selectNextColumnCell")
lazy val PassThrough: Set[KeyStroke] = {
import JComponent._
Set(WHEN_FOCUSED, WHEN_ANCESTOR_OF_FOCUSED_COMPONENT, WHEN_IN_FOCUSED_WINDOW)
.flatMap(c ⇒ Option(getInputMap(c)).toSet)
.flatMap(im ⇒ Option(im.allKeys()).toSet)
.flatMap(_.toSet)
}
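  // Keystrokes already bound in any of the table's input maps are passed through to Swing as
  // usual; any other printable KEY_TYPED event starts editing the lead cell immediately and
  // seeds the editor with the typed character.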
override def processKeyBinding(ks: KeyStroke, e: KeyEvent, condition: Int, pressed: Boolean): Boolean =
if (PassThrough contains ks)
super.processKeyBinding(ks, e, condition, pressed)
else {
val leadRow = getSelectionModel.getLeadSelectionIndex
val leadColumn = getColumnModel.getSelectionModel.getLeadSelectionIndex
if (condition == JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT
&& isFocusOwner
&& ks.getKeyEventType == KeyEvent.KEY_TYPED
&& !ks.getKeyChar.isControl
&& isInstantlyEditable(leadRow, leadColumn)
&& editCellAt(leadRow, leadColumn)) {
Option(getEditorComponent) match {
case Some(ec: JTextField with TextFieldUsableAsCellEditor) ⇒
ec.reset(ks.getKeyChar.toString)
ec.setSelectAllOnFocus(false)
ec.requestFocus()
edt {
ec.setSelectAllOnFocus(true)
}
true
case _ ⇒ false
}
}
else false
}
addFocusListener(new FocusListener {
override def focusGained(e: FocusEvent): Unit =
if (getSelectionModel.isSelectionEmpty) {
setRowSelectionInterval(0, 0)
setColumnSelectionInterval(0, 0)
}
override def focusLost(e: FocusEvent): Unit = ()
})
addMouseListener(new MouseAdapter {
def showPopup(e: MouseEvent): Unit = {
val r = rowAtPoint(e.getPoint)
val c = columnAtPoint(e.getPoint)
if (r >= 0 && r < getRowCount && c >= 0 && c < getColumnCount) {
if (!(isRowSelected(r) && isColumnSelected(c))) {
setRowSelectionInterval(r, r)
setColumnSelectionInterval(c, c)
}
}
else clearSelection()
e.getComponent match {
case t: JTable ⇒ cellPopup.show(t, e.getX, e.getY)
}
}
override def mousePressed(e: MouseEvent): Unit = if (e.isPopupTrigger) showPopup(e)
override def mouseReleased(e: MouseEvent): Unit = if (e.isPopupTrigger) showPopup(e)
})
}
| michalrus/nofatty | src/main/scala/com/michalrus/nofatty/ui/utils/BetterTable.scala | Scala | apache-2.0 | 2,870 |
package drt.shared
import uk.gov.homeoffice.drt.auth.Roles.LHR
import drt.shared.Terminals.{T1, Terminal}
import org.specs2.mutable.Specification
import scala.collection.immutable.SortedMap
class AirportConfigsSpec extends Specification {
"AirportConfigs" should {
"have a list size of 24 of min and max desks by terminal and queue for all ports" in {
for {
port <- AirportConfigs.allPortConfigs
terminalName <- port.minMaxDesksByTerminalQueue24Hrs.keySet
queueName <- port.minMaxDesksByTerminalQueue24Hrs(terminalName).keySet
(minDesks, maxDesks) = port.minMaxDesksByTerminalQueue24Hrs(terminalName)(queueName)
} yield {
minDesks.size.aka(s"minDesk-> ${port.portCode} -> $terminalName -> $queueName") mustEqual 24
maxDesks.size.aka(s"maxDesk-> ${port.portCode} -> $terminalName -> $queueName") mustEqual 24
}
}
"Queue names in min max desks by terminal and queues should be defined in Queues" in {
for {
port <- AirportConfigs.allPortConfigs
terminalName <- port.minMaxDesksByTerminalQueue24Hrs.keySet
queueName <- port.minMaxDesksByTerminalQueue24Hrs(terminalName).keySet
} yield {
Queues.displayName(queueName).aka(s"$queueName not found in Queues") mustNotEqual None
}
}
"All Airport config queues must be defined in Queues" in {
for {
port <- AirportConfigs.allPortConfigs
queueName <- port.queuesByTerminal.values.flatten
} yield {
Queues.displayName(queueName).aka(s"$queueName not found in Queues") mustNotEqual None
}
}
"A cloned Airport config should return the portcode of the port it is cloned from when calling feedPortCode" in {
import AirportConfigDefaults._
val clonedConfig = AirportConfig(
portCode = PortCode("LHR_Clone"),
cloneOfPortCode = Option(PortCode("LHR")),
queuesByTerminal = SortedMap(),
slaByQueue = Map(),
timeToChoxMillis = 0L,
firstPaxOffMillis = 0L,
defaultWalkTimeMillis = Map(),
terminalPaxSplits = Map(),
terminalProcessingTimes = Map(),
minMaxDesksByTerminalQueue24Hrs = Map(),
eGateBankSizes = Map(),
role = LHR,
terminalPaxTypeQueueAllocation = Map(T1 -> defaultQueueRatios),
desksByTerminal = Map[Terminal, Int](),
feedSources = Seq(ApiFeedSource, LiveBaseFeedSource, LiveFeedSource, AclFeedSource)
)
val result = clonedConfig.feedPortCode
val expected = PortCode("LHR")
result === expected
}
"All configurations should be valid with no missing queues or terminals" in {
AirportConfigs.allPortConfigs.foreach(_.assertValid())
success
}
}
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/test/scala/drt/shared/AirportConfigsSpec.scala | Scala | apache-2.0 | 2,766 |
object Test {
class A {
val foo: String => String = null
def foo(x: Int) = 1
}
(new A)./* line: 3 */foo("")
(new A)./* line: 4 */foo(4)
} | ilinum/intellij-scala | testdata/resolve2/overloading/hardOverloadings/ValueFunction1.scala | Scala | apache-2.0 | 154 |
package com.datastax.spark.connector.rdd.typeTests
import java.time.LocalTime
import com.datastax.spark.connector._
import com.datastax.oss.driver.api.core.DefaultProtocolVersion
import com.datastax.oss.driver.api.core.cql.Row
import com.datastax.spark.connector.cluster.DefaultCluster
class TimeTypeTest extends AbstractTypeTest[LocalTime, LocalTime] with DefaultCluster {
override val minPV = DefaultProtocolVersion.V4
override def getDriverColumn(row: Row, colName: String): LocalTime = row.getLocalTime(colName)
override protected val typeName: String = "time"
override protected val typeData: Seq[LocalTime] = (1L to 5L).map(LocalTime.ofNanoOfDay)
override protected val addData: Seq[LocalTime] = (6L to 10L).map(LocalTime.ofNanoOfDay)
"Time Types" should "be writable as dates" in skipIfProtocolVersionLT(minPV) {
val times = (100 to 500 by 100).map(LocalTime.ofNanoOfDay(_))
sc.parallelize(times.map(x => (x, x, x, x))).saveToCassandra(keyspaceName, typeNormalTable)
val results = sc.cassandraTable[(LocalTime, LocalTime, LocalTime, LocalTime)](keyspaceName, typeNormalTable).collect
checkNormalRowConsistency(times, results)
}
}
| datastax/spark-cassandra-connector | connector/src/it/scala/com/datastax/spark/connector/rdd/typeTests/TimeTypeTest.scala | Scala | apache-2.0 | 1,177 |
package org.shade.common.types
import org.scalatest.{Matchers, WordSpec}
class PositiveIntSpec extends WordSpec with Matchers {
"Construction" should {
"succeed for positive ints" in {
(1 to 100).foreach { i =>
PositiveInt(i).value shouldBe i
}
(Int.MaxValue - 100 to Int.MaxValue).foreach { i =>
PositiveInt(i).value shouldBe i
}
}
"fail for zero" in {
an[IllegalArgumentException] should be thrownBy PositiveInt(0)
}
"fail for anything else" in {
(Int.MinValue to Int.MinValue + 100).foreach { i =>
an[IllegalArgumentException] should be thrownBy PositiveInt(i)
}
(-100 to -1).foreach { i =>
an[IllegalArgumentException] should be thrownBy PositiveInt(i)
}
}
}
}
| jamesshade/common | src/test/scala/org/shade/common/types/PositiveIntSpec.scala | Scala | apache-2.0 | 790 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.calcite
import org.apache.flink.table.types.logical.{ArrayType, BigIntType, BooleanType, DateType, DecimalType, DoubleType, FloatType, IntType, LogicalType, MapType, RowType, SmallIntType, TimeType, TimestampType, TinyIntType, VarBinaryType, VarCharType}
import org.junit.{Assert, Test}
class FlinkTypeFactoryTest {
@Test
def testInternalToRelType(): Unit = {
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
def test(t: LogicalType): Unit = {
Assert.assertEquals(
t.copy(true),
FlinkTypeFactory.toLogicalType(
typeFactory.createFieldTypeFromLogicalType(t.copy(true)))
)
Assert.assertEquals(
t.copy(false),
FlinkTypeFactory.toLogicalType(
typeFactory.createFieldTypeFromLogicalType(t.copy(false)))
)
// twice for cache.
Assert.assertEquals(
t.copy(true),
FlinkTypeFactory.toLogicalType(
typeFactory.createFieldTypeFromLogicalType(t.copy(true)))
)
Assert.assertEquals(
t.copy(false),
FlinkTypeFactory.toLogicalType(
typeFactory.createFieldTypeFromLogicalType(t.copy(false)))
)
}
test(new BooleanType())
test(new TinyIntType())
test(new VarCharType(VarCharType.MAX_LENGTH))
test(new DoubleType())
test(new FloatType())
test(new IntType())
test(new BigIntType())
test(new SmallIntType())
test(new VarBinaryType(VarBinaryType.MAX_LENGTH))
test(new DateType())
test(new TimeType())
test(new TimestampType(3))
test(new ArrayType(new DoubleType()))
test(new MapType(new DoubleType(), new VarCharType(VarCharType.MAX_LENGTH)))
test(RowType.of(new DoubleType(), new VarCharType(VarCharType.MAX_LENGTH)))
}
@Test def testDecimalInferType(): Unit = {
Assert.assertEquals(new DecimalType(20, 13), FlinkTypeSystem.inferDivisionType(5, 2, 10, 4))
Assert.assertEquals(new DecimalType(7, 0), FlinkTypeSystem.inferIntDivType(5, 2, 4))
Assert.assertEquals(new DecimalType(38, 5), FlinkTypeSystem.inferAggSumType(5))
Assert.assertEquals(new DecimalType(38, 6), FlinkTypeSystem.inferAggAvgType(5))
Assert.assertEquals(new DecimalType(8, 2), FlinkTypeSystem.inferRoundType(10, 5, 2))
Assert.assertEquals(new DecimalType(8, 2), FlinkTypeSystem.inferRoundType(10, 5, 2))
}
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/calcite/FlinkTypeFactoryTest.scala | Scala | apache-2.0 | 3,176 |
package net.scalax.cpoi.style
import org.apache.poi.ss.usermodel.Workbook
case class StyleKeyWrap(workbook: Workbook, styleTrans: List[StyleTransform])
| scalax/poi-collection | src/main/scala/net/scalax/cpoi/style/StyleKeyWrap.scala | Scala | mit | 154 |
package HackerRank.Training.DataStructures.Stacks
import java.io.{ByteArrayInputStream, IOException, InputStream, PrintWriter}
import java.util.InputMismatchException
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable
import scala.language.higherKinds
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <[email protected]> on 6/7/2017
*/
private[this] object BalancedBrackets {
import Reader._
import Writer._
private[this] val TEST_INPUT: Option[String] = None
//------------------------------------------------------------------------------------------//
// Solution
//------------------------------------------------------------------------------------------//
private[this] def solve(): Unit = {
val alphabet = Map('}' -> '{', ']' -> '[', ')' -> '(')
val n = nextInt()
val input = nextString[Vector](n)
input.foreach(x => println(if (isBalanced(x, alphabet)) "YES" else "NO"))
}
private[this] def isBalanced(input: String, alphabet: Map[Char, Char]): Boolean = {
if (input.length % 2 != 0) {
false
} else {
val stack = mutable.ArrayStack[Char]()
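      // Scan with takeWhile: push opening brackets, pop and compare on closing ones. takeWhile
      // stops at the first mismatch, so the input is balanced only if the whole string was
      // consumed and the stack is empty at the end.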
val balanced = input.takeWhile {
case char if alphabet.values.toSet.contains(char) =>
stack.push(char)
true
case char if alphabet.keySet.contains(char) =>
stack.headOption.fold(false)(_ =>
stack.pop() == alphabet(char)
)
case _ => false
}
balanced == input && stack.isEmpty
}
}
//------------------------------------------------------------------------------------------//
// Run
//------------------------------------------------------------------------------------------//
@throws[Exception]
def main(args: Array[String]): Unit = {
val s = System.currentTimeMillis
solve()
flush()
if (TEST_INPUT.isDefined) System.out.println(System.currentTimeMillis - s + "ms")
}
//------------------------------------------------------------------------------------------//
// Input
//------------------------------------------------------------------------------------------//
private[this] final object Reader {
private[this] implicit val in: InputStream = TEST_INPUT.fold(System.in)(s => new ByteArrayInputStream(s.getBytes))
def nextSeq[T, Coll[_]](reader: => Seq[T], n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder ++= reader
}
builder.result()
}
def next[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += reader
}
builder.result()
}
def nextWithIndex[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((reader, i))
}
builder.result()
}
def nextDouble[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Double], Double, Coll[Double]]): Coll[Double] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextDouble()
}
builder.result()
}
def nextDoubleWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Double, Int)], (Double, Int), Coll[(Double, Int)]]): Coll[(Double, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextDouble(), i))
}
builder.result()
}
def nextChar[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Char], Char, Coll[Char]]): Coll[Char] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += b.toChar
p += 1
b = readByte().toInt
}
builder.result()
}
def nextCharWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Char, Int)], (Char, Int), Coll[(Char, Int)]]): Coll[(Char, Int)] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += ((b.toChar, p))
p += 1
b = readByte().toInt
}
builder.result()
}
def nextInt[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Int], Int, Coll[Int]]): Coll[Int] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextInt()
}
builder.result()
}
def nextIntWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Int, Int)], (Int, Int), Coll[(Int, Int)]]): Coll[(Int, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextInt(), i))
}
builder.result()
}
def nextLong[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Long], Long, Coll[Long]]): Coll[Long] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextLong()
}
builder.result()
}
def nextLongWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Long, Int)], (Long, Int), Coll[(Long, Int)]]): Coll[(Long, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextLong(), i))
}
builder.result()
}
def nextString[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[String], String, Coll[String]]): Coll[String] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextString()
}
builder.result()
}
def nextStringWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(String, Int)], (String, Int), Coll[(String, Int)]]): Coll[(String, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextString(), i))
}
builder.result()
}
def nextMultiLine(n: Int, m: Int): Array[Array[Char]] = {
val map = new Array[Array[Char]](n)
var i = 0
while (i < n) {
map(i) = nextChar[Array](m)
i += 1
}
map
}
def nextDouble(): Double = nextString().toDouble
def nextChar(): Char = skip.toChar
def nextString(): String = {
var b = skip
val sb = new java.lang.StringBuilder
while (!isSpaceChar(b)) {
sb.appendCodePoint(b)
b = readByte().toInt
}
sb.toString
}
def nextInt(): Int = {
var num = 0
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Int")
}
def nextLong(): Long = {
var num = 0L
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Long")
}
private[this] val inputBuffer = new Array[Byte](1024)
private[this] var lenBuffer = 0
private[this] var ptrBuffer = 0
private[this] def readByte()(implicit in: java.io.InputStream): Byte = {
if (lenBuffer == -1) throw new InputMismatchException
if (ptrBuffer >= lenBuffer) {
ptrBuffer = 0
try {
lenBuffer = in.read(inputBuffer)
} catch {
case _: IOException =>
throw new InputMismatchException
}
if (lenBuffer <= 0) return -1
}
inputBuffer({
ptrBuffer += 1
ptrBuffer - 1
})
}
private[this] def isSpaceChar(c: Int) = !(c >= 33 && c <= 126)
private[this] def skip = {
var b = 0
while ( {
b = readByte().toInt
b != -1 && isSpaceChar(b)
}) {}
b
}
}
//------------------------------------------------------------------------------------------//
// Output
//------------------------------------------------------------------------------------------//
private[this] final object Writer {
private[this] val out = new PrintWriter(System.out)
def flush(): Unit = out.flush()
def println(x: Any): Unit = out.println(x)
def print(x: Any): Unit = out.print(x)
}
} | robertoFischer/hackerrank | src/main/scala/HackerRank/Training/DataStructures/Stacks/BalancedBrackets.scala | Scala | mit | 9,236 |
package com.dwolla.cloudflare.domain.dto.logpush
import io.circe.Codec
import io.circe.generic.semiauto.deriveCodec
case class LogpushOwnershipDTO(
filename: String,
message: String,
valid: Boolean
)
object LogpushOwnershipDTO {
implicit val logpushOwnershipDTOCodec: Codec[LogpushOwnershipDTO] = deriveCodec
}
case class CreateOwnershipDTO(destination_conf: String)
object CreateOwnershipDTO {
implicit val createOwnershipDTOCodec: Codec[CreateOwnershipDTO] = deriveCodec
}
| Dwolla/scala-cloudflare | dto/src/main/scala/com/dwolla/cloudflare/domain/dto/logpush/LogpushOwnershipDTO.scala | Scala | mit | 490 |
package com.scalegray
import io.jvm.uuid._
import scalaz._
import Scalaz._
import scalaz.Validation
import scalaz.Validation.FlatMap._
/*
* A separate UID object so that UID generation can later be extended (e.g. to integrate Twitter Snowflake) for other use cases.
*/
object UID {
def getUID: ValidationNel[Throwable, Long] = {
(Validation.fromTryCatchThrowable[Long, Throwable] {
io.jvm.uuid.UUID.random.leastSigBits.abs
} leftMap { t: Throwable =>
new Throwable("""UID failed to generate""")
}).toValidationNel.flatMap { id: Long => Validation.success[Throwable, Long](id).toValidationNel}
}
}
| scalegray/concorde | src/main/scala/com/scalegray/Uid.scala | Scala | mit | 612 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.yggdrasil
trait IdSource {
def nextIdBlock(n: Int): Long
def nextId(): Long
}
final class FreshAtomicIdSource extends IdSource {
private val source = new java.util.concurrent.atomic.AtomicLong
def nextId() = source.getAndIncrement
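  // Reserve a contiguous block of n ids: CAS-advance the counter past the block and return the
  // first id in it.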
def nextIdBlock(n: Int): Long = {
var nextId = source.get()
while (!source.compareAndSet(nextId, nextId + n)) {
nextId = source.get()
}
nextId
}
}
| drostron/quasar | yggdrasil/src/main/scala/quasar/yggdrasil/IdSource.scala | Scala | apache-2.0 | 1,035 |
class B {
bar() // error
}
| lampepfl/dotty | tests/neg/multi-file-error/B.scala | Scala | apache-2.0 | 29 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.submit
import java.util.{Collections, UUID}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.control.NonFatal
import io.fabric8.kubernetes.api.model._
import io.fabric8.kubernetes.client.KubernetesClient
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkApplication
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.SparkKubernetesClientFactory
import org.apache.spark.deploy.k8s.submit.steps.DriverConfigurationStep
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
* Encapsulates arguments to the submission client.
*
* @param mainAppResource the main application resource if any
* @param mainClass the main class of the application to run
* @param driverArgs arguments to the driver
*/
private[spark] case class ClientArguments(
mainAppResource: Option[MainAppResource],
mainClass: String,
driverArgs: Array[String])
private[spark] object ClientArguments {
def fromCommandLineArgs(args: Array[String]): ClientArguments = {
var mainAppResource: Option[MainAppResource] = None
var mainClass: Option[String] = None
val driverArgs = mutable.ArrayBuffer.empty[String]
args.sliding(2, 2).toList.foreach {
case Array("--primary-java-resource", primaryJavaResource: String) =>
mainAppResource = Some(JavaMainAppResource(primaryJavaResource))
case Array("--main-class", clazz: String) =>
mainClass = Some(clazz)
case Array("--arg", arg: String) =>
driverArgs += arg
case other =>
val invalid = other.mkString(" ")
throw new RuntimeException(s"Unknown arguments: $invalid")
}
require(mainClass.isDefined, "Main class must be specified via --main-class")
ClientArguments(
mainAppResource,
mainClass.get,
driverArgs.toArray)
}
}
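// Illustrative sketch, not part of the original file: how the "--flag value" pairs produced by
// the launcher are parsed. The jar path, class name and argument are hypothetical values.
private[spark] object ClientArgumentsExample {
  def example: ClientArguments = ClientArguments.fromCommandLineArgs(Array(
    "--primary-java-resource", "local:///opt/spark/examples/jars/spark-examples.jar",
    "--main-class", "org.apache.spark.examples.SparkPi",
    "--arg", "100"))
}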
/**
* Submits a Spark application to run on Kubernetes by creating the driver pod and starting a
* watcher that monitors and logs the application status. Waits for the application to terminate if
* spark.kubernetes.submission.waitAppCompletion is true.
*
* @param submissionSteps steps that collectively configure the driver
* @param sparkConf the submission client Spark configuration
* @param kubernetesClient the client to talk to the Kubernetes API server
* @param waitForAppCompletion a flag indicating whether the client should wait for the application
* to complete
* @param appName the application name
* @param watcher a watcher that monitors and logs the application status
*/
private[spark] class Client(
submissionSteps: Seq[DriverConfigurationStep],
sparkConf: SparkConf,
kubernetesClient: KubernetesClient,
waitForAppCompletion: Boolean,
appName: String,
watcher: LoggingPodStatusWatcher) extends Logging {
private val driverJavaOptions = sparkConf.get(
org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
/**
* Run command that initializes a DriverSpec that will be updated after each
* DriverConfigurationStep in the sequence that is passed in. The final KubernetesDriverSpec
* will be used to build the Driver Container, Driver Pod, and Kubernetes Resources
*/
def run(): Unit = {
var currentDriverSpec = KubernetesDriverSpec.initialSpec(sparkConf)
    // submissionSteps contains the steps needed to resolve the client arguments that were
    // passed in; the sequence is built by the DriverConfigOrchestrator.
for (nextStep <- submissionSteps) {
currentDriverSpec = nextStep.configureDriver(currentDriverSpec)
}
val resolvedDriverJavaOpts = currentDriverSpec
.driverSparkConf
// Remove this as the options are instead extracted and set individually below using
// environment variables with prefix SPARK_JAVA_OPT_.
.remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
.getAll
.map {
case (confKey, confValue) => s"-D$confKey=$confValue"
} ++ driverJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
val driverJavaOptsEnvs: Seq[EnvVar] = resolvedDriverJavaOpts.zipWithIndex.map {
case (option, index) =>
new EnvVarBuilder()
.withName(s"$ENV_JAVA_OPT_PREFIX$index")
.withValue(option)
.build()
}
val resolvedDriverContainer = new ContainerBuilder(currentDriverSpec.driverContainer)
.addAllToEnv(driverJavaOptsEnvs.asJava)
.build()
val resolvedDriverPod = new PodBuilder(currentDriverSpec.driverPod)
.editSpec()
.addToContainers(resolvedDriverContainer)
.endSpec()
.build()
Utils.tryWithResource(
kubernetesClient
.pods()
.withName(resolvedDriverPod.getMetadata.getName)
.watch(watcher)) { _ =>
val createdDriverPod = kubernetesClient.pods().create(resolvedDriverPod)
try {
if (currentDriverSpec.otherKubernetesResources.nonEmpty) {
val otherKubernetesResources = currentDriverSpec.otherKubernetesResources
addDriverOwnerReference(createdDriverPod, otherKubernetesResources)
kubernetesClient.resourceList(otherKubernetesResources: _*).createOrReplace()
}
} catch {
case NonFatal(e) =>
kubernetesClient.pods().delete(createdDriverPod)
throw e
}
if (waitForAppCompletion) {
logInfo(s"Waiting for application $appName to finish...")
watcher.awaitCompletion()
logInfo(s"Application $appName finished.")
} else {
logInfo(s"Deployed Spark application $appName into Kubernetes.")
}
}
}
  // Add an OwnerReference to the given resources, making the driver pod their owner, so that when
// the driver pod is deleted, the resources are garbage collected.
private def addDriverOwnerReference(driverPod: Pod, resources: Seq[HasMetadata]): Unit = {
val driverPodOwnerReference = new OwnerReferenceBuilder()
.withName(driverPod.getMetadata.getName)
.withApiVersion(driverPod.getApiVersion)
.withUid(driverPod.getMetadata.getUid)
.withKind(driverPod.getKind)
.withController(true)
.build()
resources.foreach { resource =>
val originalMetadata = resource.getMetadata
originalMetadata.setOwnerReferences(Collections.singletonList(driverPodOwnerReference))
}
}
}
/**
* Main class and entry point of application submission in KUBERNETES mode.
*/
private[spark] class KubernetesClientApplication extends SparkApplication {
override def start(args: Array[String], conf: SparkConf): Unit = {
val parsedArguments = ClientArguments.fromCommandLineArgs(args)
run(parsedArguments, conf)
}
private def run(clientArguments: ClientArguments, sparkConf: SparkConf): Unit = {
val namespace = sparkConf.get(KUBERNETES_NAMESPACE)
// For constructing the app ID, we can't use the Spark application name, as the app ID is going
// to be added as a label to group resources belonging to the same application. Label values are
// considerably restrictive, e.g. must be no longer than 63 characters in length. So we generate
// a unique app ID (captured by spark.app.id) in the format below.
val kubernetesAppId = s"spark-${UUID.randomUUID().toString.replaceAll("-", "")}"
val launchTime = System.currentTimeMillis()
val waitForAppCompletion = sparkConf.get(WAIT_FOR_APP_COMPLETION)
val appName = sparkConf.getOption("spark.app.name").getOrElse("spark")
// The master URL has been checked for validity already in SparkSubmit.
// We just need to get rid of the "k8s://" prefix here.
val master = sparkConf.get("spark.master").substring("k8s://".length)
val loggingInterval = if (waitForAppCompletion) Some(sparkConf.get(REPORT_INTERVAL)) else None
val watcher = new LoggingPodStatusWatcherImpl(kubernetesAppId, loggingInterval)
val orchestrator = new DriverConfigOrchestrator(
kubernetesAppId,
launchTime,
clientArguments.mainAppResource,
appName,
clientArguments.mainClass,
clientArguments.driverArgs,
sparkConf)
Utils.tryWithResource(SparkKubernetesClientFactory.createKubernetesClient(
master,
Some(namespace),
KUBERNETES_AUTH_SUBMISSION_CONF_PREFIX,
sparkConf,
None,
None)) { kubernetesClient =>
val client = new Client(
orchestrator.getAllConfigurationSteps,
sparkConf,
kubernetesClient,
waitForAppCompletion,
appName,
watcher)
client.run()
}
}
}
| saltstar/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala | Scala | apache-2.0 | 9,470 |
package info.armado.ausleihe.admin.transport.responses
import javax.xml.bind.annotation.{XmlAccessType, XmlAccessorType, XmlRootElement}
object AddEnvelopesResponseDTO {
def apply(success: Boolean): AddEnvelopesResponseDTO = new AddEnvelopesResponseDTO(success)
def apply(alreadyExistingBarcodes: Array[String], duplicateBarcodes: Array[String]): AddEnvelopesResponseDTO =
AddEnvelopesResponseDTO(alreadyExistingBarcodes.isEmpty && duplicateBarcodes.isEmpty, alreadyExistingBarcodes, duplicateBarcodes)
}
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
case class AddEnvelopesResponseDTO(var success: Boolean,
var alreadyExistingBarcodes: Array[String],
var duplicateBarcodes: Array[String]) {
def this() = this(false, Array(), Array())
def this(success: Boolean) = this(success, Array(), Array())
}
| Spielekreis-Darmstadt/lending | lending-admin-interfaces/src/main/scala/info/armado/ausleihe/admin/transport/responses/AddEnvelopesResponseDTO.scala | Scala | apache-2.0 | 893 |
/*
* Copyright (c) 2013 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eigengo.monitor.output.statsd
import akka.actor.{Actor, ActorRef}
import akka.io.{IO, Udp}
import java.net.InetSocketAddress
import akka.util.ByteString
/**
* Companion object for the ``StatsdActor``, containing the messages that it can handle
*/
object StatsdActor {
/**
* Common supertype for all ``StatsdStatistic``s
*/
sealed trait StatsdStatistic
/**
* A counter increments or decrements the specified ``aspect`` by ``delta``, optionally containing
* non-empty ``tags``.
*
* @param aspect the aspect identifying the counter
* @param delta the delta
* @param tags the optional tags (DD extension)
*/
case class Counter(aspect: String, delta: Int, tags: Seq[String] = Nil) extends StatsdStatistic
/**
* A gauge records a ``value`` identified by ``aspect``, optionally containing
* non-empty ``tags``.
*
* @param aspect the aspect identifying the gauge
* @param value the gauge value
* @param tags the optional tags (DD extension)
*/
case class Gauge(aspect: String, value: Int, tags: Seq[String] = Nil) extends StatsdStatistic
/**
* An execution time statistic records the time in milliseconds for the given ``aspect``, with
* optional ``tags``.
*
* @param aspect the aspect identifying the execution time
* @param timeMs the time in milliseconds
* @param tags the optional tags (DD extension)
*/
case class ExecutionTime(aspect: String, timeMs: Int, tags: Seq[String] = Nil) extends StatsdStatistic
}
/**
* Turns the ``StatsdStatistic`` and some ``prefix`` into a ``ByteString``
*/
trait StatisticMarshaller {
import StatsdActor._
/**
* Formats the ``statistic`` into the appropriate ``ByteString``
*
* @param statistic the statistic to format
* @param prefix the prefix, including the trailing ``.``
* @return the formatted value that can be sent to the statsd server
*/
def toByteString(statistic: StatsdStatistic, prefix: String): ByteString
}
/**
* Implements the DataDog extensions
*/
trait DataDogStatisticMarshaller extends StatisticMarshaller {
import StatsdActor._
def constantTags: Seq[String]
private def tagString(tags: Seq[String]): String =
if (tags.isEmpty) "" else {
// mutable, but we need speed
val b = new StringBuilder("|#")
tags.foreach { tag => if (b.length > 2) b.append(','); b.append(tag) }
b.toString()
}
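  // Resulting wire format (DataDog-flavoured statsd): "<prefix><aspect>:<value>|<type>[|#tag1,tag2,...]".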
private def format(prefix: String, aspect: String, value: Int, clazz: String, tags: Seq[String]): ByteString =
ByteString("%s%s:%d|%s%s".format(prefix, aspect, value, clazz, tagString(tags)))
override def toByteString(statistic: StatsdStatistic, prefix: String): ByteString = statistic match {
case Counter(aspect, delta, tags) => format(prefix, aspect, delta, "c", constantTags ++ tags)
case Gauge(aspect, value, tags) => format(prefix, aspect, value, "g", constantTags ++ tags)
case ExecutionTime(aspect, timeMs, tags) => format(prefix, aspect, timeMs, "ms", constantTags ++ tags)
}
}
/**
* Sends the received ``StatsdStatistic`` messages to the statsd server.
*
* @param remote the address of the statsd server
* @param prefix the constant prefix for all messages. Must be empty or end with ``.``
*/
class StatsdActor(remote: InetSocketAddress, prefix: String) extends Actor {
this: StatisticMarshaller =>
require(prefix.isEmpty || prefix.endsWith("."), "Prefix must be empty or end with '.'")
import context.system
import StatsdActor._
IO(Udp) ! Udp.SimpleSender
def receive: Receive = {
case Udp.SimpleSenderReady =>
context.become(ready(sender))
}
def ready(send: ActorRef): Receive = {
case stat: StatsdStatistic =>
val payload = toByteString(stat, prefix)
send ! Udp.Send(payload, remote)
}
}
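/** Hypothetical wiring sketch, not part of the original file: create a sender that reports to a
  * local statsd agent with DataDog tags. Host, port, prefix and tag values are illustrative. */
object StatsdActorExample {
  import akka.actor.{ActorSystem, Props}
  import StatsdActor.Counter

  // Creates the sender; statistics sent before the underlying UDP sender is ready are dropped.
  def start(system: ActorSystem): ActorRef =
    system.actorOf(Props(new StatsdActor(new InetSocketAddress("localhost", 8125), "myapp.")
      with DataDogStatisticMarshaller { val constantTags = Seq("env:dev") }))

  // Later, from anywhere that holds the ref:
  def recordHit(statsd: ActorRef): Unit =
    statsd ! Counter("requests", 1, Seq("endpoint:index"))
}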
| eigengo/monitor | output-statsd/src/main/scala/org/eigengo/monitor/output/statsd/statsd.scala | Scala | apache-2.0 | 4,409 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.handler
import java.util.UUID
import org.apache.toree.kernel.protocol.v5
import akka.actor.{Props, ActorRef, ActorSystem}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import org.apache.toree.Main
import org.apache.toree.kernel.protocol.v5.content.{CommClose, ClearOutput, CommOpen}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.comm._
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout
class CommOpenHandlerSpec extends TestKit(
ActorSystem("CommOpenHandlerSpec", None, Some(Main.getClass.getClassLoader))
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar
with BeforeAndAfter
{
private val TestCommId = UUID.randomUUID().toString
private val TestTargetName = "some test target"
private var kmBuilder: KMBuilder = _
private var spyCommStorage: CommStorage = _
private var mockCommCallbacks: CommCallbacks = _
private var mockCommRegistrar: CommRegistrar = _
private var mockActorLoader: ActorLoader = _
private var commOpenHandler: ActorRef = _
private var kernelMessageRelayProbe: TestProbe = _
private var statusDispatchProbe: TestProbe = _
before {
kmBuilder = KMBuilder()
mockCommCallbacks = mock[CommCallbacks]
spyCommStorage = spy(new CommStorage())
mockCommRegistrar = mock[CommRegistrar]
mockActorLoader = mock[ActorLoader]
commOpenHandler = system.actorOf(Props(
classOf[CommOpenHandler],
mockActorLoader, mockCommRegistrar, spyCommStorage
))
// Used to intercept responses
kernelMessageRelayProbe = TestProbe()
when(mockActorLoader.load(SystemActorType.KernelMessageRelay))
.thenReturn(system.actorSelection(kernelMessageRelayProbe.ref.path.toString))
// Used to intercept busy/idle messages
statusDispatchProbe = new TestProbe(system)
when(mockActorLoader.load(SystemActorType.StatusDispatch))
.thenReturn(system.actorSelection(statusDispatchProbe.ref.path.toString))
}
describe("CommOpenHandler") {
describe("#process") {
it("should execute open callbacks if the target exists") {
// Mark our target as registered
doReturn(Some(mockCommCallbacks)).when(spyCommStorage)
.getTargetCallbacks(TestTargetName)
// Send a comm_open message with the test target
commOpenHandler ! kmBuilder
.withHeader(CommOpen.toTypeString)
.withContentString(CommOpen(TestCommId, TestTargetName, v5.MsgData.Empty))
.build
// Should receive a busy and an idle message
statusDispatchProbe.receiveN(2, MaxAkkaTestTimeout)
// Verify that the open callbacks were triggered along the way
verify(mockCommCallbacks).executeOpenCallbacks(
any[CommWriter], any[v5.UUID], anyString(), any[v5.MsgData])
}
it("should close the comm connection if the target does not exist") {
// Mark our target as not registered
doReturn(None).when(spyCommStorage).getTargetCallbacks(TestTargetName)
// Send a comm_open message with the test target
commOpenHandler ! kmBuilder
.withHeader(CommOpen.toTypeString)
.withContentString(CommOpen(TestCommId, TestTargetName, v5.MsgData.Empty))
.build
// Should receive a close message as a result of the target missing
kernelMessageRelayProbe.expectMsgPF(MaxAkkaTestTimeout) {
case KernelMessage(_, _, header, _, _, _) =>
header.msg_type should be (CommClose.toTypeString)
}
}
it("should do nothing if there is a parsing error") {
// Send a comm_open message with an invalid content string
commOpenHandler ! kmBuilder
.withHeader(CommOpen.toTypeString)
.withContentString(ClearOutput(_wait = true))
.build
// TODO: Is there a better way to test for this without an upper time
// limit? Is there a different logical approach?
kernelMessageRelayProbe.expectNoMsg(MaxAkkaTestTimeout)
}
it("should include the parent's header in the parent header of " +
"outgoing messages"){
// Register a callback that sends a message using the comm writer
val openCallback: CommCallbacks.OpenCallback =
new CommCallbacks.OpenCallback() {
def apply(v1: CommWriter, v2: v5.UUID, v3: String, v4: v5.MsgData) =
v1.writeMsg(MsgData.Empty)
}
val callbacks = (new CommCallbacks).addOpenCallback(openCallback)
doReturn(Some(callbacks)).when(spyCommStorage)
.getCommIdCallbacks(TestCommId)
// Send a comm_open message
val msg = kmBuilder
.withHeader(CommOpen.toTypeString)
.withContentString(
CommOpen(TestCommId, TestTargetName, v5.MsgData.Empty)
)
.build
commOpenHandler ! msg
// Verify that the message sent by the handler has the desired property
kernelMessageRelayProbe.fishForMessage(MaxAkkaTestTimeout) {
case KernelMessage(_, _, _, parentHeader, _, _) =>
parentHeader == msg.header
}
}
}
}
}
| Myllyenko/incubator-toree | kernel/src/test/scala/org/apache/toree/kernel/protocol/v5/handler/CommOpenHandlerSpec.scala | Scala | apache-2.0 | 6,199 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.e2.fixture
import org.apache.spark.mllib.linalg.distributed.MatrixEntry
trait MarkovChainFixture {
def twoByTwoMatrix = {
new {
val matrixEntries = Seq(
MatrixEntry(0, 0, 3),
MatrixEntry(0, 1, 7),
MatrixEntry(1, 0, 10),
MatrixEntry(1, 1, 10)
)
}
}
def fiveByFiveMatrix = {
new {
val matrixEntries = Seq(
MatrixEntry(0, 1, 12),
MatrixEntry(0, 2, 8),
MatrixEntry(1, 0, 3),
MatrixEntry(1, 1, 3),
MatrixEntry(1, 2, 9),
MatrixEntry(1, 3, 2),
MatrixEntry(1, 4, 8),
MatrixEntry(2, 1, 10),
MatrixEntry(2, 2, 8),
MatrixEntry(2, 4, 10),
MatrixEntry(3, 0, 2),
MatrixEntry(3, 3, 3),
MatrixEntry(3, 4, 4),
MatrixEntry(4, 1, 7),
MatrixEntry(4, 3, 8),
MatrixEntry(4, 4, 10)
)
}
}
}
| pferrel/PredictionIO | e2/src/test/scala/org/apache/predictionio/e2/fixture/MarkovChainFixture.scala | Scala | apache-2.0 | 1,714 |
import sbt._
import Keys._
trait BaseModule {
def moduleName: String
def location: String
lazy val moduleSettings = baseSettings ++ settings
def settings: Seq[Project.Setting[_]]
lazy val baseSettings =
MyDefaults.settings ++
Seq (
name := moduleName
) ++ Seq (
libraryDependencies ++= libraries
)
def libraries: Seq[ModuleID]
def baseProject = Project(
id = moduleName,
base = file(location),
settings = moduleSettings
)
def project: Project
}
| jeffmay/angular-play-multimodule-seed | project/BaseModule.scala | Scala | apache-2.0 | 511 |
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
// TODO this is clearly more general than summingbird, and should be extended to be a ring (add union, etc...)
/**
* Represents a single interval on a T with an Ordering
*/
sealed trait Interval[T] extends java.io.Serializable {
def contains(t: T): Boolean
def intersect(that: Interval[T]): Interval[T]
def apply(t: T) = contains(t)
def &&(that: Interval[T]) = intersect(that)
/**
* Map the Interval with a non-decreasing function.
* If you use a non-monotonic function (like x^2)
* then the result is meaningless.
* TODO: It might be good to have types for these properties in algebird.
*/
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U]
}
case class Universe[T]() extends Interval[T] {
def contains(t: T): Boolean = true
def intersect(that: Interval[T]): Interval[T] = that
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] = Universe()
}
case class Empty[T]() extends Interval[T] {
def contains(t: T): Boolean = false
def intersect(that: Interval[T]): Interval[T] = this
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] = Empty()
}
object Interval extends java.io.Serializable {
type GenIntersection[T] = Intersection[Lower, Upper, T]
type InLowExUp[T] = Intersection[InclusiveLower, ExclusiveUpper, T]
type InLowInUp[T] = Intersection[InclusiveLower, InclusiveUpper, T]
type ExLowExUp[T] = Intersection[ExclusiveLower, ExclusiveUpper, T]
type ExLowInUp[T] = Intersection[ExclusiveLower, InclusiveUpper, T]
implicit def monoid[T]: Monoid[Interval[T]] = Monoid.from[Interval[T]](Universe[T]()) { _ && _ }
// Automatically convert from an either
implicit def fromEither[L[t] <: Interval[t], R[t] <: Interval[t], T](either: Either[L[T], R[T]]): Interval[T] =
either match {
case Right(i) => i
case Left(i) => i
}
def leftClosedRightOpen[T: Ordering](lower: T, upper: T): Either[Empty[T], InLowExUp[T]] =
if (Ordering[T].lt(lower, upper))
Right(Intersection(InclusiveLower(lower), ExclusiveUpper(upper)))
else Left(Empty())
def leftOpenRightClosed[T: Ordering](lower: T, upper: T): Either[Empty[T], ExLowInUp[T]] =
if (Ordering[T].lt(lower, upper))
Right(Intersection(ExclusiveLower(lower), InclusiveUpper(upper)))
else Left(Empty())
}
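/**
 * Editor's hedged example (not part of the original file): exercising the helper
 * constructors above. The numeric bounds are arbitrary illustrative values.
 */
private[algebird] object IntervalConstructionExample {
  def example(): Unit = {
    Interval.leftClosedRightOpen(3, 6) match {
      case Right(interval) => assert(interval.contains(3) && !interval.contains(6))
      case Left(_) => sys.error("unreachable: 3 < 6, so the interval is non-empty")
    }
    val halfOpen: Interval[Int] = InclusiveLower(0) && ExclusiveUpper(10)
    assert(halfOpen.contains(0) && halfOpen.contains(9) && !halfOpen.contains(10))
  }
}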
// Marker traits to keep lower on the left in Intersection
sealed trait Lower[T] extends Interval[T] {
def ordering: Ordering[T]
/**
* This may give a false positive (but should try not to).
* Note the case of (0,1) for the integers. If they were doubles,
* this would intersect, but since there are no members of the
* set Int that are bigger than 0 and less than 1, they don't really
* intersect. So, ordering is not enough here. You need a stronger
* notion, which we don't have a typeclass for.
*/
def intersects(u: Upper[T]): Boolean
/**
* The smallest value that is contained here
* This is an Option, because of cases like ExclusiveLower(Int.MaxValue)
* which are pathological and equivalent to Empty
*/
def least(implicit s: Successible[T]): Option[T]
def strictLowerBound(implicit p: Predecessible[T]): Option[T]
/**
* Iterates all the items in this Lower[T] from lowest to highest
*/
def toIterable(implicit s: Successible[T]): Iterable[T] =
least match {
case Some(l) => s.iterateNext(l)
case None => Iterable.empty
}
}
sealed trait Upper[T] extends Interval[T] {
def ordering: Ordering[T]
/**
   * The largest value that is contained here
* This is an Option, because of cases like ExclusiveUpper(Int.MinValue),
* which are pathological and equivalent to Empty
*/
def greatest(implicit p: Predecessible[T]): Option[T]
// The smallest value that is not present
def strictUpperBound(implicit s: Successible[T]): Option[T]
/**
* Iterates all the items in this Upper[T] from highest to lowest
*/
def toIterable(implicit p: Predecessible[T]): Iterable[T] =
greatest match {
case Some(g) => p.iteratePrev(g)
case None => Iterable.empty
}
}
case class InclusiveLower[T](lower: T)(implicit val ordering: Ordering[T]) extends Interval[T] with Lower[T] {
def contains(t: T): Boolean = ordering.lteq(lower, t)
def intersect(that: Interval[T]): Interval[T] = that match {
case Universe() => this
case Empty() => that
case ub @ InclusiveUpper(upper) =>
if (intersects(ub)) Intersection(this, ub) else Empty()
case ub @ ExclusiveUpper(upper) =>
if (intersects(ub)) Intersection(this, ub) else Empty()
case lb @ InclusiveLower(thatlb) => if (lb.ordering.gt(lower, thatlb)) this else that
case lb @ ExclusiveLower(thatlb) => if (lb.ordering.gt(lower, thatlb)) this else that
case Intersection(thatL, thatU) => (this && thatL) && thatU
}
def intersects(u: Upper[T]): Boolean = u match {
case InclusiveUpper(upper) => ordering.lteq(lower, upper)
case ExclusiveUpper(upper) => ordering.lt(lower, upper)
}
def least(implicit s: Successible[T]): Option[T] = Some(lower)
def strictLowerBound(implicit p: Predecessible[T]): Option[T] = p.prev(lower)
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] = InclusiveLower(fn(lower))
}
case class ExclusiveLower[T](lower: T)(implicit val ordering: Ordering[T]) extends Interval[T] with Lower[T] {
def contains(t: T): Boolean = ordering.lt(lower, t)
def intersect(that: Interval[T]): Interval[T] = that match {
case Universe() => this
case Empty() => that
case ub @ InclusiveUpper(upper) =>
if (intersects(ub)) Intersection(this, ub) else Empty()
case ub @ ExclusiveUpper(upper) =>
if (intersects(ub)) Intersection(this, ub) else Empty()
case lb @ InclusiveLower(thatlb) => if (lb.ordering.gteq(lower, thatlb)) this else that
case lb @ ExclusiveLower(thatlb) => if (lb.ordering.gteq(lower, thatlb)) this else that
case Intersection(thatL, thatU) => (this && thatL) && thatU
}
def intersects(u: Upper[T]): Boolean = u match {
case InclusiveUpper(upper) => ordering.lt(lower, upper)
case ExclusiveUpper(upper) => ordering.lt(lower, upper) // This is a false positive for (x, next(x))
}
def least(implicit s: Successible[T]): Option[T] = s.next(lower)
def strictLowerBound(implicit p: Predecessible[T]): Option[T] = Some(lower)
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] = ExclusiveLower(fn(lower))
}
case class InclusiveUpper[T](upper: T)(implicit val ordering: Ordering[T]) extends Interval[T] with Upper[T] {
def contains(t: T): Boolean = ordering.lteq(t, upper)
def greatest(implicit p: Predecessible[T]): Option[T] = Some(upper)
// The smallest value that is not present
def strictUpperBound(implicit s: Successible[T]): Option[T] = s.next(upper)
def intersect(that: Interval[T]): Interval[T] = that match {
case Universe() => this
case Empty() => that
case lb @ InclusiveLower(lower) =>
if (lb.intersects(this)) Intersection(lb, this) else Empty()
case lb @ ExclusiveLower(lower) =>
if (lb.intersects(this)) Intersection(lb, this) else Empty()
case ub @ InclusiveUpper(thatub) =>
if (ub.ordering.lt(upper, thatub)) this else that
case ub @ ExclusiveUpper(thatub) =>
if (ub.ordering.lt(upper, thatub)) this else that
case Intersection(thatL, thatU) => thatL && (this && thatU)
}
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] = InclusiveUpper(fn(upper))
}
case class ExclusiveUpper[T](upper: T)(implicit val ordering: Ordering[T]) extends Interval[T] with Upper[T] {
def contains(t: T): Boolean = ordering.lt(t, upper)
def greatest(implicit p: Predecessible[T]): Option[T] = p.prev(upper)
// The smallest value that is not present
def strictUpperBound(implicit s: Successible[T]): Option[T] = Some(upper)
def intersect(that: Interval[T]): Interval[T] = that match {
case Universe() => this
case Empty() => that
case lb @ InclusiveLower(lower) =>
if (lb.intersects(this)) Intersection(lb, this) else Empty()
case lb @ ExclusiveLower(lower) =>
if (lb.intersects(this)) Intersection(lb, this) else Empty()
case ub @ InclusiveUpper(thatub) =>
if (ub.ordering.lteq(upper, thatub)) this else that
case ub @ ExclusiveUpper(thatub) =>
if (ub.ordering.lteq(upper, thatub)) this else that
case Intersection(thatL, thatU) => thatL && (this && thatU)
}
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] = ExclusiveUpper(fn(upper))
}
case class Intersection[L[t] <: Lower[t], U[t] <: Upper[t], T](lower: L[T], upper: U[T]) extends Interval[T] {
require(lower.intersects(upper), "Intersections must be non-empty: (%s, %s)".format(lower, upper))
def contains(t: T): Boolean = lower.contains(t) && upper.contains(t)
def intersect(that: Interval[T]): Interval[T] = that match {
case Universe() => this
case Empty() => that
case lb @ InclusiveLower(_) => (lb && lower) && upper
case lb @ ExclusiveLower(_) => (lb && lower) && upper
case ub @ InclusiveUpper(_) => lower && (ub && upper)
case ub @ ExclusiveUpper(_) => lower && (ub && upper)
case Intersection(thatL, thatU) => (lower && thatL) && (upper && thatU)
}
def mapNonDecreasing[U: Ordering](fn: T => U): Interval[U] =
lower.mapNonDecreasing(fn) && upper.mapNonDecreasing(fn)
/**
* Goes from lowest to highest for all items
* that are contained in this Intersection
*/
def leastToGreatest(implicit s: Successible[T]): Iterable[T] = {
val self = this
// TODO https://github.com/twitter/algebird/issues/263
new AbstractIterable[T] {
// We have to do this because the normal takeWhile causes OOM on big intervals
def iterator = lower.toIterable.iterator.takeWhile(self.upper.contains(_))
}
}
/**
* Goes from highest to lowest for all items
* that are contained in this Intersection
*/
def greatestToLeast(implicit p: Predecessible[T]): Iterable[T] = {
val self = this
// TODO https://github.com/twitter/algebird/issues/263
new AbstractIterable[T] {
// We have to do this because the normal takeWhile causes OOM on big intervals
def iterator = upper.toIterable.iterator.takeWhile(self.lower.contains(_))
}
}
/**
* Some intervals can actually be synonyms for empty:
* (0,0) for instance, contains nothing. This cannot be normalized to
* [a, b) form, thus we return an option
   * Also, there are cases like [Int.MinValue, Int.MaxValue] that cannot be
   * expressed this way but are actually equivalent to Universe.
* The bottom line: if this returns None, it just means you can't express
* it this way, it does not mean it is empty or universe, etc... (there
* are other cases).
*/
def toLeftClosedRightOpen(implicit s: Successible[T]): Option[Intersection[InclusiveLower, ExclusiveUpper, T]] = {
implicit val ord = lower.ordering
for {
l <- lower.least
g <- upper.strictUpperBound if lower.ordering.lt(l, g)
} yield Intersection(InclusiveLower(l), ExclusiveUpper(g))
}
}
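/**
 * Editor's hedged example (not part of the original file): iterating the members of
 * a small bounded intersection via leastToGreatest, relying on the Successible[Int]
 * instance provided elsewhere in algebird.
 */
private[algebird] object IntersectionIterationExample {
  def oneToThree: List[Int] =
    Intersection(InclusiveLower(1), InclusiveUpper(3)).leastToGreatest.toList // List(1, 2, 3)
}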
| avibryant/algebird | algebird-core/src/main/scala/com/twitter/algebird/Interval.scala | Scala | apache-2.0 | 11,611 |
/*
* Copyright 2015 TouchType Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.databricks.spark.redshift
import java.sql.Timestamp
import java.util.Calendar
import org.apache.spark.sql.types._
/**
* Helpers for Redshift tests that require common mocking
*/
object TestUtils {
/**
* Makes a field for the test schema
*/
def makeField(name: String, typ: DataType) = {
val md = (new MetadataBuilder).putString("name", name).build()
StructField(name, typ, nullable = true, metadata = md)
}
/**
* Simple schema that includes all data types we support
*/
lazy val testSchema =
StructType(
Seq(
makeField("testByte", ByteType),
makeField("testBool", BooleanType),
makeField("testDate", DateType),
makeField("testDouble", DoubleType),
makeField("testFloat", FloatType),
makeField("testInt", IntegerType),
makeField("testLong", LongType),
makeField("testShort", ShortType),
makeField("testString", StringType),
makeField("testTimestamp", TimestampType)))
/**
* Convert date components to a millisecond timestamp
*/
def toMillis(year: Int, zeroBasedMonth: Int, date: Int, hour: Int, minutes: Int, seconds: Int, millis: Int = 0) = {
val calendar = Calendar.getInstance()
calendar.set(year, zeroBasedMonth, date, hour, minutes, seconds)
calendar.set(Calendar.MILLISECOND, millis)
calendar.getTime.getTime
}
/**
* Convert date components to a SQL Timestamp
*/
def toTimestamp(year: Int, zeroBasedMonth: Int, date: Int, hour: Int, minutes: Int, seconds: Int, millis: Int = 0) = {
new Timestamp(toMillis(year, zeroBasedMonth, date, hour, minutes, seconds, millis))
}
}
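/**
 * Editor's hedged example (not in the original file): note that the month argument
 * is zero-based, so 6 below means July.
 */
object TestUtilsUsageExample {
  val julyFirst2015: Timestamp = TestUtils.toTimestamp(2015, 6, 1, 0, 0, 0)
}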
| methodmill/spark-redshift | src/test/scala/com/databricks/spark/redshift/TestUtils.scala | Scala | apache-2.0 | 2,258 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.metrics.core
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{ConsoleReporter, MetricRegistry, ScheduledReporter}
import com.typesafe.config.Config
class ConsoleReporterFactory extends ReporterFactory {
override def apply(
conf: Config,
registry: MetricRegistry,
rates: TimeUnit,
durations: TimeUnit): Option[ScheduledReporter] = {
if (!conf.hasPath("type") || !conf.getString("type").equalsIgnoreCase("console")) { None } else {
val reporter =
ConsoleReporter.forRegistry(registry)
.convertRatesTo(rates)
.convertDurationsTo(durations)
.build()
Some(reporter)
}
}
}
| locationtech/geomesa | geomesa-metrics/geomesa-metrics-core/src/main/scala/org/locationtech/geomesa/metrics/core/ConsoleReporterFactory.scala | Scala | apache-2.0 | 1,174 |
/*
* Copyright 2015 ligaDATA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ligadata.metadataapiservice
import akka.actor.{Actor, ActorRef}
import akka.event.Logging
import akka.io.IO
import com.ligadata.kamanja.metadata._
import spray.routing.RequestContext
import spray.httpx.SprayJsonSupport
import spray.client.pipelining._
import scala.util.{ Success, Failure }
import com.ligadata.MetadataAPI._
object GetHeartbeatService {
case class Process(nodeIds:String)
}
/**
* @author danielkozin
*/
class GetHeartbeatService(requestContext: RequestContext, userid:Option[String], password:Option[String], cert:Option[String]) extends Actor {
import GetHeartbeatService._
import system.dispatcher
implicit val system = context.system
val log = Logging(system, getClass)
val APIName = "GetHeartbeatService"
def receive = {
case Process(nodeId) =>
process(nodeId)
context.stop(self)
}
def process(nodeIds:String): Unit = {
// NodeIds is a JSON array of nodeIds.
    if (nodeIds == null || nodeIds.isEmpty) {
      // An empty body is invalid: complete once and skip further processing,
      // since completing the same request twice would be an error.
      requestContext.complete(new ApiResult(ErrorCodeConstants.Failure, APIName, null, "Invalid BODY in a POST request. Expecting either an array of nodeIds or an empty array for all").toString)
    } else if (!MetadataAPIImpl.checkAuth(userid,password,cert, MetadataAPIImpl.getPrivilegeName("get","heartbeat"))) {
requestContext.complete(new ApiResult(ErrorCodeConstants.Failure, APIName, null, "Error:Checking Heartbeat is not allowed for this user").toString )
} else {
val apiResult = MetadataAPIImpl.getHealthCheck(nodeIds)
requestContext.complete(apiResult)
}
}
} | traytonwhite/Kamanja | trunk/MetadataAPIService/src/main/scala/com/ligadata/metadataapiservice/GetHeartbeatService.scala | Scala | apache-2.0 | 2,219 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC415(value: Option[Int]) extends CtBoxIdentifier(name = "Staff costs (current PoA)")
with CtOptionalInteger
with Input
with ValidatableBox[AccountsBoxRetriever]
with Debit {
override def validate(boxRetriever: AccountsBoxRetriever): Set[CtValidation] = {
collectErrors(
validateMoney(value)
)
}
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC415.scala | Scala | apache-2.0 | 1,078 |
package slick.test.jdbc
import org.junit.Test
import org.junit.Assert._
import slick.testutil._
import com.typesafe.slick.testkit.util.{DBTest, DBTestObject, JdbcTestDB}
import com.typesafe.slick.testkit.util.StandardTestDBs._
object EmbeddingTest extends DBTestObject(H2Mem)
@deprecated("Using deprecated API", "3.0")
class EmbeddingTest(val tdb: JdbcTestDB) extends DBTest {
import tdb.profile.backend.Database.dynamicSession
@Test def testRaw(): Unit = db withDynSession {
import slick.jdbc.{StaticQuery => Q, GetResult}
(Q.u + "create table USERS(ID int not null primary key, NAME varchar(255))").execute
(Q.u + "create table POSTS(ID int not null primary key, NAME varchar(255), UID int not null)").execute
List(
(1, "u1"),
(2, "u2"),
(3, "u3")
).map(Q.u1[(Int, String)] + "insert into USERS values (?, ?)").foreach(_.execute)
List(
(1, "p1u1", 1),
(2, "p2u1", 1),
(3, "p3u1", 1),
(4, "p4u2", 2)
).map(Q.u1[(Int, String, Int)] + "insert into POSTS values (?, ?, ?)").foreach(_.execute)
val l1 = (Q(GetResult { r => (r.nextString, r.nextString) }) + """
select u.NAME, p.NAME
from USERS u left join POSTS p on u.ID = p.UID
order by u.NAME, p.NAME
""").buildColl[List]
l1 foreach println
assertEquals(List(
("u1", "p1u1"),
("u1", "p2u1"),
("u1", "p3u1"),
("u2", "p4u2"),
("u3", null)
), l1)
val l2 = (Q(GetResult { r => (r.nextString, r.view1.to[List](GetResult(_.nextString))) }) + """
select u.NAME, (u.r0 + p.r0), p.NAME
from (select *, rownum as r0 from USERS order by NAME) u
left join (select *, 0 as r0 from POSTS order by NAME) p
on u.ID = p.UID
order by u.r0
""").buildColl[List]
l2 foreach println
assertEquals(List(
("u1", List("p1u1", "p2u1", "p3u1")),
("u2", List("p4u2")),
("u3", List())
), l2)
}
}
| easel/slick | slick-testkit/src/test/scala/slick/test/jdbc/EmbeddingTest.scala | Scala | bsd-2-clause | 1,939 |
/*
* Copyright 2010-2020 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
package record
package field
import java.util.{List => JavaList, UUID}
import java.util.regex.Pattern
import com.mongodb._
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.http.SHtml
import net.liftweb.json._
import net.liftweb.record.{Field, FieldHelpers, MandatoryTypedField}
import net.liftweb.util.Helpers._
import org.bson._
import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, Encoder, EncoderContext}
import org.bson.codecs.configuration.CodecRegistry
import scala.collection.mutable
import scala.collection.JavaConverters._
import scala.xml.NodeSeq
object BsonBinary {
def apply(subtype: BsonBinarySubType, data: Array[Byte]): BsonBinary = new BsonBinary(subtype, data)
}
/**
* List field.
*
* Supported types:
* primitives - String, Int, Long, Double, BigDecimal, Byte, BigInt,
* Boolean (and their Java equivalents)
* date types - java.util.Date, org.joda.time.DateTime
* mongo types - ObjectId, Pattern, UUID
*
* If you need to support other types, you will need to override the
* `readValue` function and either override the `writeValue` function
* or create a custom codec for it and add it to your registry. You'll
* also need to override the `asJValue` and `setFromJValue` functions if you
* will be using them.
*
* Note: setting optional_? = false will result in incorrect equals behavior when using setFromJValue
*/
class MongoListField[OwnerType <: BsonRecord[OwnerType], ListType: Manifest](rec: OwnerType)
extends Field[List[ListType], OwnerType]
with MandatoryTypedField[List[ListType]]
with MongoFieldFlavor[List[ListType]]
with BsonableField[List[ListType]]
{
import mongodb.Meta.Reflection._
lazy val mf = manifest[ListType]
override type MyType = List[ListType]
def owner = rec
def defaultValue = List.empty[ListType]
implicit def formats = owner.meta.formats
def setFromAny(in: Any): Box[MyType] = {
in match {
case dbo: DBObject => setFromDBObject(dbo)
case list@c::xs if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType]))
case Some(list@c::xs) if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType]))
case Full(list@c::xs) if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType]))
case jlist: JavaList[_] => {
if(!jlist.isEmpty) {
val elem = jlist.get(0)
if(elem.isInstanceOf[Document]) {
setFromDocumentList(jlist.asInstanceOf[JavaList[Document]])
} else {
setBox(Full(jlist.asScala.toList.asInstanceOf[MyType]))
}
} else {
setBox(Full(Nil))
}
}
case s: String => setFromString(s)
case Some(s: String) => setFromString(s)
case Full(s: String) => setFromString(s)
case null|None|Empty => setBox(defaultValueBox)
case f: Failure => setBox(f)
case o => setFromString(o.toString)
}
}
def setFromJValue(jvalue: JValue): Box[MyType] = jvalue match {
case JNothing|JNull if optional_? => setBox(Empty)
case JArray(array) => setBox(Full((array.map {
case JsonObjectId(objectId) => objectId
case JsonRegex(regex) => regex
case JsonUUID(uuid) => uuid
case JsonDateTime(dt) if (mf.toString == "org.joda.time.DateTime") => dt
case JsonDate(date) => date
case other => other.values
}).asInstanceOf[MyType]))
case other => setBox(FieldHelpers.expectedA("JArray", other))
}
// parse String into a JObject
def setFromString(in: String): Box[List[ListType]] = tryo(JsonParser.parse(in)) match {
case Full(jv: JValue) => setFromJValue(jv)
case f: Failure => setBox(f)
case other => setBox(Failure("Error parsing String into a JValue: "+in))
}
/** Options for select list **/
def options: List[(ListType, String)] = Nil
private def elem = {
def elem0 = SHtml.multiSelectObj[ListType](
options,
value,
set(_)
) % ("tabindex" -> tabIndex.toString)
SHtml.hidden(() => set(Nil)) ++ (uniqueFieldId match {
case Full(id) => (elem0 % ("id" -> id))
case _ => elem0
})
}
def toForm: Box[NodeSeq] =
if (options.length > 0) Full(elem)
else Empty
def asJValue: JValue = JArray(value.map(li => li.asInstanceOf[AnyRef] match {
case x if primitive_?(x.getClass) => primitive2jvalue(x)
case x if mongotype_?(x.getClass) => mongotype2jvalue(x)(owner.meta.formats)
case x if datetype_?(x.getClass) => datetype2jvalue(x)(owner.meta.formats)
case _ => JNothing
}))
/*
* Convert this field's value into a DBObject so it can be stored in Mongo.
*/
@deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3")
def asDBObject: DBObject = {
val dbl = new BasicDBList
value.foreach {
case f => f.asInstanceOf[AnyRef] match {
case x if primitive_?(x.getClass) => dbl.add(x)
case x if mongotype_?(x.getClass) => dbl.add(x)
case x if datetype_?(x.getClass) => dbl.add(datetype2dbovalue(x))
case o => dbl.add(o.toString)
}
}
dbl
}
// set this field's value using a DBObject returned from Mongo.
@deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3")
def setFromDBObject(dbo: DBObject): Box[MyType] =
setBox(Full(dbo.asInstanceOf[BasicDBList].asScala.toList.asInstanceOf[MyType]))
@deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3")
def setFromDocumentList(list: JavaList[Document]): Box[MyType] = {
throw new RuntimeException("Warning, setting Document as field with no conversion, probably not something you want to do")
}
def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[List[ListType]] = {
reader.getCurrentBsonType match {
case BsonType.NULL =>
reader.readNull()
Empty
case BsonType.ARRAY =>
setBox(tryo(readList(reader, context, registry, bsonTypeCodecMap).asInstanceOf[List[ListType]]))
case bsonType =>
Failure(s"Invalid BsonType for field ${name}: ${bsonType}")
}
}
def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = {
writer.writeName(name)
writeIterable(writer, value, context.getChildContext, registry)
}
}
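// ---------------------------------------------------------------------------
// Editor's hedged usage sketch (not part of the original file): a record declaring
// list fields with supported element types. The record, meta object and field names
// are illustrative only, following the usual lift-mongodb-record pattern.
// ---------------------------------------------------------------------------
// import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
//
// class Event private () extends MongoRecord[Event] with ObjectIdPk[Event] {
//   def meta = Event
//   object tags   extends MongoListField[Event, String](this) // e.g. ["scala", "mongo"]
//   object scores extends MongoListField[Event, Int](this)    // e.g. [1, 2, 3]
// }
// object Event extends Event with MongoMetaRecord[Event]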
| lift/framework | persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoListField.scala | Scala | apache-2.0 | 7,100 |
import sbt._
import Keys._
object Build {
val specs2_core = "org.specs2" %% "specs2-core" % "4.8.3"
val specs2_scalacheck = "org.specs2" %% "specs2-scalacheck" % "4.8.3"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.14.3"
implicit class ProjectOps(self: Project) {
def standard: Project = {
self
.settings(organization := "org.picoworks")
.settings(resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases")
.settings(scalacOptions := Seq("-feature", "-deprecation", "-unchecked", "-Xlint", "-Yrangepos", "-encoding", "utf8"))
.settings(scalacOptions in Test ++= Seq("-Yrangepos"))
}
def notPublished: Project = {
self
.settings(publish := {})
.settings(publishArtifact := false)
}
def libs(modules: ModuleID*) = self.settings(libraryDependencies ++= modules)
def testLibs(modules: ModuleID*) = self.libs(modules.map(_ % "test"): _*)
}
}
| pico-works/pico-hashids | project/Build.scala | Scala | mit | 1,021 |
package org.bitcoins.core.serializers.p2p.messages
import org.bitcoins.core.crypto.DoubleSha256Digest
import org.bitcoins.core.serializers.RawBitcoinSerializer
import org.bitcoins.core.p2p.TypeIdentifier
import org.bitcoins.core.p2p.Inventory
import scodec.bits.ByteVector
/**
 * Serializes/deserializes an inventory
* @see https://bitcoin.org/en/developer-reference#term-inventory
*/
trait RawInventorySerializer extends RawBitcoinSerializer[Inventory] {
override def read(bytes: ByteVector): Inventory = {
val typeIdentifier = TypeIdentifier(bytes.take(4))
val hash = DoubleSha256Digest(bytes.slice(4, bytes.size))
Inventory(typeIdentifier, hash)
}
override def write(inventory: Inventory): ByteVector = {
inventory.typeIdentifier.bytes ++ inventory.hash.bytes
}
}
object RawInventorySerializer extends RawInventorySerializer
| bitcoin-s/bitcoin-s-core | core/src/main/scala/org/bitcoins/core/serializers/p2p/messages/RawInventorySerializer.scala | Scala | mit | 861 |
package com.wallace.spark.sparkstreaming.kafkademo
import java.nio.charset.Charset
import java.util
import java.util.{Timer, TimerTask}
import com.wallace.common.LogSupport
import com.wallace.common.timeformat.TimePara
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import scala.io.Source
/**
* Created by Wallace on 2016/5/5.
*/
object KafkaProducerDemo extends LogSupport {
def main(args: Array[String]): Unit = {
// if (args.length < 3) {
// log.error("Usage: KafkaWordCountProducer <metadataBrokerList> <topic> <messagesPerSec>")
// System.exit(1)
// }
val (brokers, topic, messagesPerSec) = ("207.246.109.109:9092", "test_hby", "1000")
val timer = new Timer
    timer.schedule(new SenderTimer(brokers, topic, messagesPerSec.toInt), 1000, 5000)
  }
}
class SenderTimer(brokers: String, topic: String, messagesPerSec: Int) extends TimerTask with LogSupport {
// Zookeeper connection properties
val propsV1 = new util.HashMap[String, Object]()
propsV1.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
propsV1.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
propsV1.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
val producerV1 = new KafkaProducer[String, String](propsV1)
val propsV2 = new util.HashMap[String, Object]()
propsV2.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
propsV2.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.ByteArraySerializer")
propsV2.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
val producerV2 = new KafkaProducer[String, Array[Byte]](propsV2)
// Send some messages
override def run(): Unit = {
val file = Source.fromFile("demo/SparkDemo/data/DateProducer_2016-05-14_Test.csv", "UTF-8")
val lines = file.getLines.toArray
log.info(s"========== Start to send ${messagesPerSec * 5} message to Topic: [$topic] ==========")
(1 to messagesPerSec * 5).foreach {
_ =>
val str: Array[String] = lines(scala.util.Random.nextInt(lines.length)).split(",", -1)
try {
val msg: String = s"""${TimePara.getCurrentDate},${str.drop(1).mkString(",")}"""
val messageV1: ProducerRecord[String, String] = new ProducerRecord[String, String](topic, msg)
val messageV2: ProducerRecord[String, Array[Byte]] = new ProducerRecord[String, Array[Byte]](topic + "_temp", msg.getBytes(Charset.forName("UTF8")))
producerV1.send(messageV1)
producerV2.send(messageV2)
} catch {
case e: Exception =>
log.error(e.getMessage)
throw e
}
}
log.info(s"========== Succeed to Send message to Topic : [$topic] ==========")
}
}
| BiyuHuang/CodePrototypesDemo | demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaProducerDemo.scala | Scala | apache-2.0 | 2,931 |
package dhg.ccg.tagdict
import dhg.util._
import scalaz._
import scalaz.Scalaz._
import dhg.ccg.cat.Cat
import dhg.ccg.cat.StartCat
import dhg.ccg.cat.EndCat
trait TagDictionary[Tag] extends (String => Set[Tag]) {
type Word = String
def allWords: Set[Word]; def allTags: Set[Tag]
def startWord: Word; def startTag: Tag; def endWord: Word; def endTag: Tag
def excludedTags: Set[Tag]
def apply(w: Word): Set[Tag]
final def allWordsSE = allWords + (startWord, endWord)
final def allTagsSE = allTags + (startTag, endTag)
def reversed: Map[Tag, Set[Word]]
def entries: Map[Word, Set[Tag]]
def knownWordsForTag: Map[Tag, Set[Word]]
def withWords(words: Set[Word]): TagDictionary[Tag]
def withTags(tags: Set[Tag]): TagDictionary[Tag]
def withExcludedTags(tags: Set[Tag]): TagDictionary[Tag]
}
/**
* ONLY INSTANTIATE THIS VIA THE COMPANION OBJECT
*
* A Tag Dictionary is a mapping from words to all of their potential
* tags. A word not found in the dictionary (including "unknown" words)
* may take any tag.
*
* This class guarantees that looking up the startWord or endWord will
* return a set containing ony the startTag or endTag, respectively.
*
* The allWords property is the complete set of known words excluding
* the special startWord and endWord. Likewise for allTags. For the
* complete set of known words and tags including these special tags
* use allWordsSE and allTagsSE.
*/
class SimpleTagDictionary[Tag] private (
map: Map[String, Set[Tag]],
val allWords: Set[String], val allTags: Set[Tag],
val startWord: String, val startTag: Tag, val endWord: String, val endTag: Tag,
val excludedTags: Set[Tag] = Set.empty)
extends TagDictionary[Tag] {
def apply(w: Word): Set[Tag] = {
map.get(w).map(_ -- excludedTags).filter(_.nonEmpty).getOrElse(allTags) -- excludedTags
}
def reversed: Map[Tag, Set[Word]] = ???
val entries: Map[Word, Set[Tag]] = map.mapVals(_ -- excludedTags).filter(_._2.nonEmpty)
val knownWordsForTag: Map[Tag, Set[Word]] = allTags.mapToVal(Set.empty[Word]).toMap ++ entries.ungroup.map(_.swap).groupByKey.mapVals(_.toSet)
def withWords(words: Set[Word]) = new SimpleTagDictionary(map, allWords ++ words, allTags -- excludedTags, startWord, startTag, endWord, endTag, excludedTags)
def withTags(tags: Set[Tag]) = new SimpleTagDictionary(map, allWords, (allTags ++ tags) -- excludedTags, startWord, startTag, endWord, endTag, excludedTags)
def withExcludedTags(tags: Set[Tag]) = new SimpleTagDictionary(map, allWords, allTags, startWord, startTag, endWord, endTag, excludedTags ++ tags)
}
object SimpleTagDictionary {
type Word = String
def apply[Tag](
map: Map[Word, Set[Tag]],
startWord: Word, startTag: Tag, endWord: Word, endTag: Tag,
additionalWords: Set[Word] = Set[Word](), additionalTags: Set[Tag] = Set[Tag](),
excludedTags: Set[Tag] = Set[Tag]()) = {
val allAllWords = additionalWords ++ map.keys
val allAllTags = additionalTags ++ map.flatMap(_._2) -- excludedTags
new SimpleTagDictionary(
map.mapVals(_ -- excludedTags) ++ Map(startWord -> Set(startTag), endWord -> Set(endTag)),
allAllWords - (startWord, endWord), allAllTags -- excludedTags - (startTag, endTag),
startWord, startTag, endWord, endTag,
excludedTags)
}
def empty[Tag](startWord: Word, startTag: Tag, endWord: Word, endTag: Tag, excludedTags: Set[Tag] = Set.empty[Tag]) = {
SimpleTagDictionary(Map(), startWord, startTag, endWord, endTag, excludedTags = excludedTags)
}
}
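/**
 * Editor's hedged example (not from the original source): illustrates the lookup
 * contract documented above -- known words return their observed tags, unknown words
 * fall back to the full (non-excluded) tag set, and the start/end words map only to
 * the start/end tags. The String tags and words are arbitrary illustrative values.
 */
private object SimpleTagDictionaryExample {
  def example(): Unit = {
    val td = SimpleTagDictionary(
      Map("the" -> Set("D"), "dog" -> Set("N")),
      startWord = "<S>", startTag = "<S>", endWord = "<E>", endTag = "<E>")
    assert(td("dog") == Set("N"))      // known word: only its observed tags
    assert(td("cat") == Set("D", "N")) // unknown word: all non-excluded tags
    assert(td("<S>") == Set("<S>"))    // start word: only the start tag
  }
}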
object DummyCatTagDictionary extends TagDictionary[Cat] {
type Tag = Cat
def allWords: Set[Word] = Set.empty; def allTags: Set[Tag] = Set.empty
def startWord: Word = "<S>"; def startTag: Tag = StartCat; def endWord: Word = "<E>"; def endTag: Tag = EndCat
def excludedTags: Set[Tag] = Set.empty
def apply(w: Word): Set[Tag] = Set.empty
def reversed: Map[Tag, Set[Word]] = Map.empty
def entries: Map[Word, Set[Tag]] = Map.empty
def knownWordsForTag: Map[Tag, Set[Word]] = Map.empty
def withWords(words: Set[Word]): TagDictionary[Tag] = this
def withTags(tags: Set[Tag]): TagDictionary[Tag] = this
def withExcludedTags(tags: Set[Tag]): TagDictionary[Tag] = this
}
//
trait TagDictionaryFactory[Tag] {
type Word = String
def apply[Tag](
sentences: Vector[Vector[(Word, Tag)]],
startWord: Word, startTag: Tag, endWord: Word, endTag: Tag,
additionalWords: Set[Word] = Set[Word](), additionalTags: Set[Tag] = Set[Tag](),
excludedTags: Set[Tag] = Set[Tag]() //
): TagDictionary[Tag]
}
class SimpleTagDictionaryFactory[Tag](tdCutoff: Option[Double] = None) extends TagDictionaryFactory[Tag] {
override def apply[Tag](
taggedSentences: Vector[Vector[(Word, Tag)]],
startWord: Word, startTag: Tag, endWord: Word, endTag: Tag,
additionalWords: Set[Word], additionalTags: Set[Tag],
excludedTags: Set[Tag] = Set[Tag]()) = {
val tagCounts = taggedSentences.flatten.groupByKey.mapVals(_.counts.normalizeValues)
val cutoff = tdCutoff.getOrElse(0.0)
val pruned = tagCounts.mapVals(_.collect { case (t, p) if p >= cutoff => t }.toSet -- excludedTags).filter(_._2.nonEmpty)
SimpleTagDictionary(pruned, startWord, startTag, endWord, endTag,
additionalWords ++ tagCounts.keys,
additionalTags -- excludedTags,
excludedTags)
}
}
class StartEndSwappedTagDictionary[Tag](wrapped: TagDictionary[Tag]) extends TagDictionary[Tag] {
def allWords: Set[Word] = wrapped.allWords; def allTags: Set[Tag] = wrapped.allTags
def startWord: Word = wrapped.endWord; def startTag: Tag = wrapped.endTag; def endWord: Word = wrapped.startWord; def endTag: Tag = wrapped.startTag
def excludedTags: Set[Tag] = wrapped.excludedTags
def apply(w: Word): Set[Tag] = wrapped(w)
def reversed: Map[Tag, Set[Word]] = wrapped.reversed
def entries: Map[Word, Set[Tag]] = wrapped.entries
def knownWordsForTag: Map[Tag, Set[Word]] = wrapped.knownWordsForTag
def withWords(words: Set[Word]): TagDictionary[Tag] = new StartEndSwappedTagDictionary(wrapped.withWords(words))
def withTags(tags: Set[Tag]): TagDictionary[Tag] = new StartEndSwappedTagDictionary(wrapped.withTags(tags))
def withExcludedTags(tags: Set[Tag]): TagDictionary[Tag] = new StartEndSwappedTagDictionary(wrapped.withExcludedTags(tags))
}
| dhgarrette/2015-ccg-parsing | src/main/scala/dhg/ccg/tagdict/TagDictionary.scala | Scala | apache-2.0 | 6,303 |
package com.indix.petstore.api
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.{ Matchers, FlatSpec }
class RoutesSpec extends FlatSpec with Matchers with ScalatestRouteTest {
"Routes" should "respond to a single user query" in {
}
}
| codingnirvana/scala-microservices-template | src/test/scala/com/indix/petstore/api/RoutesSpec.scala | Scala | apache-2.0 | 269 |
package org.jetbrains.plugins.scala
package codeInsight
package hints
import com.intellij.codeInsight.hints.settings.InlayHintsConfigurable
import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent, ToggleAction}
import com.intellij.openapi.project.ProjectManager
import com.intellij.openapi.util.{Getter, Setter}
import org.jetbrains.annotations.Nls
import org.jetbrains.plugins.scala.codeInsight.ScalaCodeInsightBundle
import org.jetbrains.plugins.scala.codeInsight.ScalaCodeInsightSettings.{getInstance => ScalaCodeInsightSettings}
import org.jetbrains.plugins.scala.codeInsight.implicits.ImplicitHints
import org.jetbrains.plugins.scala.extensions._
object ScalaTypeHintsConfigurable {
import java.lang.{Boolean => JBoolean}
private def forceHintsUpdateOnNextPass(): Unit = {
ImplicitHints.updateInAllEditors()
}
sealed abstract class ToggleTypeAction(@Nls text: String,
@Nls description: String,
getter: Getter[JBoolean],
setter: Setter[JBoolean]) extends ToggleAction(text, description, null) {
override def isSelected(event: AnActionEvent): Boolean = getter.get()
override def setSelected(event: AnActionEvent, value: Boolean): Unit = {
setter.set(value)
forceHintsUpdateOnNextPass()
}
}
class ToggleTypeHintsAction extends ToggleTypeAction(
ScalaCodeInsightBundle.message("type.hints.action.text"),
ScalaCodeInsightBundle.message("type.hints.action.description"),
ScalaCodeInsightSettings.showTypeHintsGetter,
ScalaCodeInsightSettings.showTypeHintsSetter,
)
class ToggleMethodChainInlayHintsAction extends ToggleTypeAction(
ScalaCodeInsightBundle.message("method.chain.hints.action.text"),
ScalaCodeInsightBundle.message("method.chain.hints.action.description"),
ScalaCodeInsightSettings.showMethodChainInlayHintsGetter(),
ScalaCodeInsightSettings.showMethodChainInlayHintsSetter()
)
class ToggleRangeHintsForToAndUntilAction extends ToggleTypeAction(
ScalaCodeInsightBundle.message("range.hints.for.to.and.until"),
ScalaCodeInsightBundle.message("show.range.hints.for.to.and.until"),
ScalaCodeInsightSettings.showRangeHintsForToAndUntilGetter(),
ScalaCodeInsightSettings.showRangeHintsForToAndUntilSetter()
)
class ToggleRangeExclusiveHintAction extends ToggleTypeAction(
ScalaCodeInsightBundle.message("range.exclusive.hint"),
ScalaCodeInsightBundle.message("show.exclusive.range.hint"),
ScalaCodeInsightSettings.showExclusiveRangeHintDefaultGetter(),
ScalaCodeInsightSettings.showExclusiveRangeHintDefaultSetter()
)
/*
class ToggleMethodResultTypeAction extends ToggleTypeAction(
ScalaCodeInsightSettings.showMethodResultTypeGetter,
ScalaCodeInsightSettings.showMethodResultTypeSetter
)
class ToggleMemberVariableTypeAction extends ToggleTypeAction(
ScalaCodeInsightSettings.showMemberVariableTypeGetter,
ScalaCodeInsightSettings.showMemberVariableSetter
)
class ToggleLocalVariableTypeAction extends ToggleTypeAction(
ScalaCodeInsightSettings.showLocalVariableTypeGetter,
ScalaCodeInsightSettings.showLocalVariableTypeSetter
)
class ToggleObviousTypeAction extends ToggleTypeAction(
ScalaCodeInsightSettings.showObviousTypeGetter,
ScalaCodeInsightSettings.showObviousTypeSetter
)
class ToggleAlignMethodChainInlayHintsAction extends ToggleTypeAction(
ScalaCodeInsightSettings.alignMethodChainInlayHintsGetter(),
ScalaCodeInsightSettings.alignMethodChainInlayHintsSetter()
)
*/
class ConfigureTypeHintActions extends AnAction(
ScalaCodeInsightBundle.message("configure.type.hints.text"),
ScalaCodeInsightBundle.message("configure.type.hints.description"),
null
) {
override def actionPerformed(e: AnActionEvent): Unit = {
def defaultProject = ProjectManager.getInstance().getDefaultProject
val project = e.getProject.nullSafe.getOrElse(defaultProject)
InlayHintsConfigurable.showSettingsDialogForLanguage(project, ScalaLanguage.INSTANCE)
}
}
}
| JetBrains/intellij-scala | scala/codeInsight/src/org/jetbrains/plugins/scala/codeInsight/hints/ScalaTypeHintsConfigurable.scala | Scala | apache-2.0 | 4,128 |
/*
lm-tests.scala
Test code for regression modelling
*/
package scalaglm
import breeze.linalg._
import breeze.numerics._
import org.scalatest._
import flatspec._
import matchers._
class LmSpec extends AnyFlatSpec {
import Utils._
"Lm" should "handle 2 points on a horizontal line (manual intercept)" in {
val y = DenseVector(5.0,5.0)
val x = DenseMatrix((1.0,2.0),(1.0,4.0))
val mod = Lm(y,x,List("Intercept","x"),false)
val beta = DenseVector(5.0,0.0)
assert(norm(mod.coefficients - beta) < 0.00001)
}
it should "handle 2 points on a slope (manual intercept)" in {
val y = DenseVector(2.0,3.0)
val x = DenseMatrix((1.0,2.0),(1.0,4.0))
val mod = Lm(y,x,List("Intercept","x"),false)
val beta = DenseVector(1.0,0.5)
assert(norm(mod.coefficients - beta) < 0.00001)
assert(abs(mod.rSquared - 1.0) < 0.00001)
}
it should "handle 3 points on a diagonal (manual intercept)" in {
val y = DenseVector(4.0,5.0,6.0)
val x = DenseMatrix((1.0,2.0),(1.0,3.0),(1.0,4.0))
val mod = Lm(y,x,List("Intercept","x"),false)
val beta = DenseVector(2.0,1.0)
assert(norm(mod.coefficients - beta) < 0.00001)
assert(abs(mod.rSquared - 1.0) < 0.00001)
}
it should "handle 2 points on a horizontal line (auto intercept)" in {
val y = DenseVector(5.0,5.0)
val x = DenseMatrix((2.0),(4.0))
val mod = Lm(y,x,List("x"))
val beta = DenseVector(5.0,0.0)
assert(norm(mod.coefficients - beta) < 0.00001)
}
it should "handle 2 points on a slope (auto intercept)" in {
val y = DenseVector(2.0,3.0)
val x = DenseMatrix((2.0),(4.0))
val mod = Lm(y,x,List("x"))
val beta = DenseVector(1.0,0.5)
assert(norm(mod.coefficients - beta) < 0.00001)
assert(abs(mod.rSquared - 1.0) < 0.00001)
}
it should "handle 3 points on a diagonal (auto intercept)" in {
val y = DenseVector(4.0,5.0,6.0)
val x = DenseMatrix((2.0),(3.0),(4.0))
val mod = Lm(y,x,List("x"))
val beta = DenseVector(2.0,1.0)
assert(norm(mod.coefficients - beta) < 0.00001)
assert(abs(mod.rSquared - 1.0) < 0.00001)
}
it should "fit a simple linear regression model and get the same as R" in {
val y = DenseVector(1.0,2.5,0.5,3.0)
val x = DenseMatrix((1.0),(2.5),(3.0),(2.0))
val mod = Lm(y,x,List("Covariate"))
//mod.summary
val R = org.ddahl.rscala.RClient()
R.eval("y = %-", y.toArray)
R.eval("x = %-", x(::,0).toDenseVector.toArray)
R.eval("mod = lm(y~x)")
val rCoef = DenseVector[Double](R.evalD1("mod$coefficients"))
assert(norm(mod.coefficients - rCoef) <= 0.00001)
val rSe = DenseVector[Double](R.evalD1("summary(mod)$coefficients[,2]"))
assert(norm(mod.se - rSe) <= 0.00001)
val rT = DenseVector[Double](R.evalD1("summary(mod)$coefficients[,3]"))
assert(norm(mod.t - rT) <= 0.00001)
val rP = DenseVector[Double](R.evalD1("summary(mod)$coefficients[,4]"))
assert(norm(mod.p - rP) <= 0.00001)
val rF = DenseVector[Double](R.evalD1("mod$fitted.values"))
assert(norm(mod.fitted - rF) <= 0.0001)
val rStud = DenseVector[Double](R.evalD1("rstandard(mod)"))
assert(norm(mod.studentised - rStud) <= 0.0001)
val rPred = DenseVector[Double](R.evalD1("predict(mod)"))
assert(norm(mod.predict().fitted - rPred) <= 0.0001)
val rPredSe = DenseVector[Double](R.evalD1("predict(mod,se.fit=TRUE)$se.fit"))
assert(norm(mod.predict().se - rPredSe) <= 0.0001)
}
}
// eof
| darrenjw/scala-glm | src/test/scala/lm-tests.scala | Scala | apache-2.0 | 3,462 |
package spatial.interpreter
import spatial.dsl._
import virtualized._
import spatial.SpatialCompiler
import argon.interpreter.{Interpreter => AInterpreter}
import argon.core.Const
import scala.collection.JavaConverters._
import argon.lang.typeclasses.Bits
trait SpatialStream extends SpatialCompiler {
abstract class IBus extends Bus {
def length: scala.Int = -1
}
case object In1 extends IBus
case object In2 extends IBus
case object In3 extends IBus
case object In4 extends IBus
case object In5 extends IBus
case object Out1 extends IBus
case object Out2 extends IBus
}
trait SpatialStreamInterpreter {
self: SpatialStream =>
def inputs: Map[Bus, List[MetaAny[_]]]
def outs: List[Bus]
def spatial(): Unit
private var __stagingArgs = scala.Array[java.lang.String]()
override def stagingArgs = scala.Array[java.lang.String]("--interpreter") ++ __stagingArgs
def runI() {
init(stagingArgs)
compileProgram(() => spatial())
}
def exit() = {
val out = outs.map(bus => {
val c = AInterpreter.stringify(Streams.streamsOut(bus))
s"$bus: $c"
    }).mkString("\n \n")
Console.println()
Console.println(s"${Console.GREEN_B}[result]${Console.RESET}\\n$out")
Console.println()
}
def main(args: scala.Array[java.lang.String]) {
__stagingArgs ++= args
config.exit = () => exit()
inputs.foreach { case (bus, content) =>
Streams.addStreamIn(bus)
content.foreach(x => Streams.streamsIn(bus).put(x.s.asInstanceOf[Const[_]].c))
}
outs.foreach(Streams.addStreamOut)
runI()
exit()
}
}
trait SpatialStreamCompiler extends SpatialApp {
self: SpatialStream =>
def spatial(): Unit
def main() =
spatial()
}
| stanford-ppl/spatial-lang | spatial/core/src/spatial/interpreter/StreamInterpreter.scala | Scala | mit | 1,744 |
package gg.uhc.hosts.endpoints
import com.github.blemale.scaffeine.{AsyncLoadingCache, Scaffeine}
import doobie._
import gg.uhc.hosts.CustomJsonCodec
import gg.uhc.hosts.database.Database
import io.circe.syntax._
import io.circe.{Json, JsonObject}
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import scala.language.postfixOps
class BasicCache(database: Database) {
import CustomJsonCodec._
private[this] sealed trait ListingKey
private[this] object UpcomingMatches extends ListingKey
private[this] object CurrentUbl extends ListingKey
implicit val ec: ExecutionContext = database.ec
private[this] val cache: AsyncLoadingCache[ListingKey, Json] = Scaffeine()
.recordStats()
.expireAfterWrite(5 minutes)
.buildAsyncFuture[ListingKey, Json](loader = {
case UpcomingMatches => database.run(listUpcomingMatchesQuery).map(_.asJson)
case CurrentUbl => database.run(database.getCurrentUbl).map(_.asJson)
case _ => Future.failed(new IllegalArgumentException("Unknown cache key"))
})
private[this] def listUpcomingMatchesQuery: ConnectionIO[List[JsonObject]] =
for {
matches <- database.getUpcomingMatches
perms <- database.getPermissions(matches.map(_.author))
} yield matches.map(row => row.toJsonWithRoles(perms.getOrElse(row.author, List.empty)))
def getUpcomingMatches: Future[Json] = cache.get(UpcomingMatches)
def getCurrentUbl: Future[Json] = cache.get(CurrentUbl)
def invalidateUpcomingMatches(): Unit = cache.synchronous().invalidate(UpcomingMatches)
def invalidateCurrentUbl(): Unit = cache.synchronous().invalidate(CurrentUbl)
}
| Eluinhost/hosts.uhc.gg | src/main/scala/gg/uhc/hosts/endpoints/BasicCache.scala | Scala | mit | 1,693 |
package list
/**
* P15 (**) Duplicate the elements of a list a given number of times.
*/
object P15 {
def duplicateN[A](duplications: Int, list: List[A]): List[A] = list flatMap {
List.fill(duplications)(_)
}
}
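/**
 * Editor's hedged example (not part of the original file): expected behaviour,
 * following the standard S-99 problem statement.
 */
object P15Example {
  val duplicated: List[String] = P15.duplicateN(3, List("a", "b"))
  // duplicated == List("a", "a", "a", "b", "b", "b")
}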
| zjt1114/scala99 | src/main/scala/list/P15.scala | Scala | apache-2.0 | 224 |
package com.meteorcode.pathway.script
import jdk.nashorn.api.scripting._
/**
* Class filter for Pathway embedded JavaScript.
*
* This class filter controls what Java classes may be accessed by JavaScript
* running within Nashorn, primarily to ensure that script execution is secure.
*
* Currently, access to the `java.io` and `java.nio` namespaces is completely
* denied.
*
* Please note that this requires a JDK version >= 8u40 to compile.
*
* Created by hawk on 8/22/15.
*/
object PathwayClassFilter
extends ClassFilter {
private[this] val denied: Array[String]
= Array("java.io", "java.nio", "scala.io")
// TODO: what other class namespaces should be denied?
// potential candidates:
// - java.net
// * will we need this for multiplayer?
// * should we provide our own wrappers like we do for the fs?
// - java.lang.System
// * scripts already have access to print streams on the script engine
// * we probably don't want to give them access to things like System.exit
// TODO: we might want to operate with a whitelist rather than a blacklist
def exposeToScripts(className: String): Boolean
= !denied.exists( className startsWith _ )
}
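/**
 * Editor's hedged sketch (not part of the original file): installing the filter on a
 * Nashorn engine. Assumes the getScriptEngine(ClassFilter) overload available from
 * JDK 8u40 onwards, as noted above.
 */
object PathwayClassFilterExample {
  def filteredEngine(): javax.script.ScriptEngine =
    new NashornScriptEngineFactory().getScriptEngine(PathwayClassFilter)
  // filteredEngine().eval("""new java.io.File("x")""") would fail: java.io is denied.
}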
| MeteorCode/Pathway | src/main/scala/com/meteorcode/pathway/script/PathwayClassFilter.scala | Scala | mit | 1,201 |
package org.lolhens.minechanics.common.block.material
import net.minecraft.block.material.{MapColor, Material}
import org.lolhens.minechanics.common.block.material.MaterialCustom._
/**
* Created by LolHens on 21.09.2014.
*/
class MaterialCustom() extends Material(MapColor.airColor) {
var block: ICustomMaterial = null
//override def getCanBlockGrass: Boolean = if (block == null) super.getCanBlockGrass else !block.getCanBlockGrass
override def getMaterialMobility: Int = if (block == null) super.getMaterialMobility else block.getMobilityFlag
override def isOpaque: Boolean = if (block == null) super.isOpaque else block.isOpaqueCube
override def isLiquid: Boolean = if (block == null) super.isLiquid else block.isLiquid
override def isSolid: Boolean = if (block == null) super.isSolid else block.isSolid
override def blocksMovement: Boolean = if (block == null) super.blocksMovement else block.getBlocksMovement
override def getCanBurn: Boolean = if (block == null) super.getCanBurn else block.isFlammable
override def isReplaceable: Boolean = if (block == null) super.isReplaceable else block.isReplaceable
override def isToolNotRequired: Boolean = if (block == null) super.isToolNotRequired else !block.isToolRequired
//override def isAdventureModeExempt: Boolean = if (block == null) super.isAdventureModeExempt else block.isAdventureModeExempt
override def getMaterialMapColor: MapColor = if (block == null) super.getMaterialMapColor else block.getMapColor
}
object MaterialCustom {
trait ICustomMaterial {
def getCanBlockGrass: Boolean = false
def getMobilityFlag: Int = 0
def isOpaqueCube: Boolean = true
def isLiquid: Boolean = false
def isSolid: Boolean = true
def getBlocksMovement: Boolean = true
def isFlammable: Boolean = false
def isReplaceable: Boolean = false
def isToolRequired = false
def isAdventureModeExempt: Boolean = false
def getMapColor: MapColor = MapColor.airColor
}
} | LolHens/Minechanics | src/main/scala/org/lolhens/minechanics/common/block/material/MaterialCustom.scala | Scala | gpl-2.0 | 1,997 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.io
import java.io.{ByteArrayOutputStream, Closeable, OutputStream}
import java.nio.channels.Channels
import java.util.PriorityQueue
import java.util.concurrent.ThreadLocalRandom
import com.google.common.collect.HashBiMap
import com.typesafe.scalalogging.StrictLogging
import org.apache.arrow.memory.BufferAllocator
import org.apache.arrow.vector.complex.StructVector
import org.apache.arrow.vector.dictionary.DictionaryProvider.MapDictionaryProvider
import org.apache.arrow.vector.ipc.ArrowStreamWriter
import org.apache.arrow.vector.ipc.message.IpcOption
import org.apache.arrow.vector.types.pojo.{ArrowType, DictionaryEncoding}
import org.apache.arrow.vector.util.TransferPair
import org.apache.arrow.vector.{FieldVector, IntVector}
import org.locationtech.geomesa.arrow.ArrowAllocator
import org.locationtech.geomesa.arrow.io.records.{RecordBatchLoader, RecordBatchUnloader}
import org.locationtech.geomesa.arrow.vector.ArrowAttributeReader.ArrowDateReader
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.SimpleFeatureEncoding
import org.locationtech.geomesa.arrow.vector._
import org.locationtech.geomesa.features.serialization.ObjectType
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.geotools.{SimpleFeatureOrdering, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.index.ByteArrays
import org.locationtech.geomesa.utils.io.CloseWithLogging
import org.locationtech.jts.geom.Geometry
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import scala.math.Ordering
import scala.util.control.NonFatal
/**
 * Builds up dictionaries and writes record batches. Dictionaries are encoded as deltas
* to minimize redundant messages.
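 *
 * Typical usage (illustrative sketch; variable names are assumed, not part of the API):
 * {{{
 *   val writer = new DeltaWriter(sft, dictionaryFields, encoding, ipcOpts, sort, initialCapacity = 1024)
 *   val delta  = writer.encode(features, features.length) // one serialized delta per batch of features
 *   // later, merge all deltas into a single Arrow streaming file:
 *   val merged = DeltaWriter.reduce(sft, dictionaryFields, encoding, ipcOpts, sort,
 *     sorted = false, batchSize = 1000, deltas = deltas)
 * }}}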
*
* @param sft simple feature type
* @param dictionaryFields dictionary fields
* @param encoding simple feature encoding
* @param sort sort
* @param initialCapacity initial allocation size, will expand if needed
*/
class DeltaWriter(
val sft: SimpleFeatureType,
dictionaryFields: Seq[String],
encoding: SimpleFeatureEncoding,
ipcOpts: IpcOption,
sort: Option[(String, Boolean)],
initialCapacity: Int
) extends Closeable with StrictLogging {
import DeltaWriter._
import scala.collection.JavaConverters._
private val allocator = ArrowAllocator("delta-writer")
// threading key that we use to group results in the reduce phase
private var threadingKey: Long = math.abs(ThreadLocalRandom.current().nextLong)
logger.trace(s"$threadingKey created")
private val result = new ByteArrayOutputStream
private val vector = StructVector.empty(sft.getTypeName, allocator)
private val ordering = sort.map { case (field, reverse) =>
val o = SimpleFeatureOrdering(sft.indexOf(field))
if (reverse) { o.reverse } else { o }
}
private val idWriter = ArrowAttributeWriter.id(sft, encoding, vector)
private val writers = sft.getAttributeDescriptors.asScala.map { descriptor =>
val name = descriptor.getLocalName
val bindings = ObjectType.selectType(descriptor)
val metadata = Map(SimpleFeatureVector.DescriptorKey -> SimpleFeatureTypes.encodeDescriptor(sft, descriptor))
if (dictionaryFields.contains(name)) {
val dictMetadata = Map(SimpleFeatureVector.DescriptorKey -> s"$name:Integer")
val attribute = ArrowAttributeWriter(name, Seq(ObjectType.INT), None, dictMetadata, encoding, VectorFactory(vector))
val dictionary = {
val attribute = ArrowAttributeWriter(name, bindings, None, metadata, encoding, VectorFactory(allocator))
val writer = new BatchWriter(attribute.vector, ipcOpts)
attribute.vector.setInitialCapacity(initialCapacity)
attribute.vector.allocateNew()
Some(DictionaryWriter(sft.indexOf(name), attribute, writer, scala.collection.mutable.Map.empty))
}
FieldWriter(name, sft.indexOf(name), attribute, dictionary)
} else {
val attribute = ArrowAttributeWriter(name, bindings, None, metadata, encoding, VectorFactory(vector))
FieldWriter(name, sft.indexOf(name), attribute, None)
}
}
// writer per-dictionary
private val dictionaryWriters = dictionaryFields.map(f => writers.find(_.name == f).get.dictionary.get)
// single writer to write out all vectors at once (not including dictionaries)
private val writer = new BatchWriter(vector, ipcOpts)
// set capacity after all child vectors have been created by the writers, then allocate
vector.setInitialCapacity(initialCapacity)
vector.allocateNew()
/**
* Clear any existing dictionary values
*/
def reset(): Unit = {
val last = threadingKey
threadingKey = math.abs(ThreadLocalRandom.current().nextLong)
logger.trace(s"$last resetting to $threadingKey")
writers.foreach(writer => writer.dictionary.foreach(_.values.clear()))
}
/**
* Writes out a record batch. Format is:
*
* 8 bytes long - threading key
* (foreach dictionaryField) -> {
* 4 byte int - length of dictionary batch
* anyref vector batch with dictionary delta values
* }
* 4 byte int - length of record batch
* record batch (may be dictionary encodings)
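   *
   * The framing can be walked like this (illustrative sketch, mirroring the reduce-side readers):
   * {{{
   *   val threadingKey = ByteArrays.readLong(delta)        // bytes 0-7
   *   var offset = 8
   *   dictionaryFields.foreach { _ =>
   *     offset += ByteArrays.readInt(delta, offset) + 4    // skip each dictionary delta batch
   *   }
   *   val batchLength = ByteArrays.readInt(delta, offset)  // the record batch follows
   * }}}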
*
* Note: will sort the feature array in place if sorting is defined
*
* @param features features to write
* @param count number of features to write, starting from array index 0
* @return serialized record batch
*/
def encode(features: Array[SimpleFeature], count: Int): Array[Byte] = {
result.reset()
result.write(ByteArrays.toBytes(threadingKey))
ordering.foreach(java.util.Arrays.sort(features, 0, count, _))
// write out the dictionaries
// come up with the delta of new dictionary values
val delta = new java.util.TreeSet[AnyRef](dictionaryOrdering)
dictionaryWriters.foreach { dictionary =>
var i = 0
while (i < count) {
val value = features(i).getAttribute(dictionary.index)
if (!dictionary.values.contains(value)) {
delta.add(value)
}
i += 1
}
val size = dictionary.values.size
i = 0
// update the dictionary mappings, and write the new values to the vector
delta.asScala.foreach { n =>
dictionary.values.put(n, i + size)
dictionary.attribute.apply(i, n)
i += 1
}
// write out the dictionary batch
dictionary.attribute.setValueCount(i)
logger.trace(s"$threadingKey writing dictionary delta with $i values")
dictionary.writer.writeBatch(i, result)
delta.clear()
}
// set feature ids in the vector
if (encoding.fids.isDefined) {
var i = 0
while (i < count) {
idWriter.apply(i, features(i))
i += 1
}
idWriter.setValueCount(count)
}
// set attributes in the vector
writers.foreach { writer =>
val getAttribute: Int => AnyRef = writer.dictionary match {
case None => i => features(i).getAttribute(writer.index)
case Some(dictionary) => i => dictionary.values(features(i).getAttribute(writer.index)) // dictionary encoded value
}
var i = 0
while (i < count) {
writer.attribute.apply(i, getAttribute(i))
i += 1
}
writer.attribute.setValueCount(count)
}
logger.trace(s"$threadingKey writing batch with $count values")
// write out the vector batch
writer.writeBatch(count, result)
result.toByteArray
}
/**
* Close the writer
*/
override def close(): Unit = {
CloseWithLogging(writer) // also closes `vector`
dictionaryWriters.foreach(w => CloseWithLogging(w.writer)) // also closes dictionary vectors
CloseWithLogging(allocator)
}
}
object DeltaWriter extends StrictLogging {
import scala.collection.JavaConverters._
// empty provider
private val provider = new MapDictionaryProvider()
private val dictionaryOrdering: Ordering[AnyRef] = new Ordering[AnyRef] {
override def compare(x: AnyRef, y: AnyRef): Int =
SimpleFeatureOrdering.nullCompare(x.asInstanceOf[Comparable[Any]], y)
}
/**
* Reduce function for delta records created by DeltaWriter
*
* @param sft simple feature type
* @param dictionaryFields dictionary fields
* @param encoding simple feature encoding
* @param sort sort metadata, if defined each delta is assumed to be sorted
* @param sorted whether features are already globally sorted or not
* @param batchSize batch size
* @param deltas output from `DeltaWriter.encode`
* @return single arrow streaming file, with potentially multiple record batches
*/
def reduce(
sft: SimpleFeatureType,
dictionaryFields: Seq[String],
encoding: SimpleFeatureEncoding,
ipcOpts: IpcOption,
sort: Option[(String, Boolean)],
sorted: Boolean,
batchSize: Int,
deltas: CloseableIterator[Array[Byte]]): CloseableIterator[Array[Byte]] = {
new ReducingIterator(sft, dictionaryFields, encoding, ipcOpts, sort, sorted, batchSize, deltas)
}
/**
* Merge without sorting
*
* @param sft simple feature type
* @param dictionaryFields dictionary fields
* @param encoding simple feature encoding
* @param mergedDictionaries merged dictionaries and batch mappings
* @param sort sort metadata for file headers
* @param batchSize record batch size
* @param threadedBatches record batches, grouped by threading key
* @return
*/
private def reduceNoSort(
sft: SimpleFeatureType,
dictionaryFields: Seq[String],
encoding: SimpleFeatureEncoding,
ipcOpts: IpcOption,
mergedDictionaries: MergedDictionaries,
sort: Option[(String, Boolean)],
batchSize: Int,
threadedBatches: Array[Array[Array[Byte]]]): CloseableIterator[Array[Byte]] = {
val iter: CloseableIterator[Array[Byte]] = new CloseableIterator[Array[Byte]] {
private var writeHeader = true
private val toLoad = SimpleFeatureVector.create(sft, mergedDictionaries.dictionaries, encoding, batchSize)
private val result = SimpleFeatureVector.create(sft, mergedDictionaries.dictionaries, encoding, batchSize)
logger.trace(s"merge unsorted deltas - read schema ${result.underlying.getField}")
private val loader = new RecordBatchLoader(toLoad.underlying)
private val unloader = new RecordBatchUnloader(result, ipcOpts)
private val transfers: Seq[(String, (Int, Int, java.util.Map[Integer, Integer]) => Unit)] = {
toLoad.underlying.getChildrenFromFields.asScala.map { fromVector =>
val name = fromVector.getField.getName
val toVector = result.underlying.getChild(name)
val transfer: (Int, Int, java.util.Map[Integer, Integer]) => Unit =
if (fromVector.getField.getDictionary != null) {
val from = fromVector.asInstanceOf[IntVector]
val to = toVector.asInstanceOf[IntVector]
(fromIndex: Int, toIndex: Int, mapping: java.util.Map[Integer, Integer]) => {
val n = from.getObject(fromIndex)
if (n == null) {
to.setNull(toIndex)
} else {
to.setSafe(toIndex, mapping.get(n))
}
}
} else if (sft.indexOf(name) != -1 &&
classOf[Geometry].isAssignableFrom(sft.getDescriptor(name).getType.getBinding)) {
// geometry vectors use FixedSizeList vectors, for which transfer pairs aren't implemented
val from = GeometryFields.wrap(fromVector).asInstanceOf[GeometryVector[Geometry, FieldVector]]
val to = GeometryFields.wrap(toVector).asInstanceOf[GeometryVector[Geometry, FieldVector]]
(fromIndex: Int, toIndex: Int, _: java.util.Map[Integer, Integer]) => {
from.transfer(fromIndex, toIndex, to)
}
} else {
val pair = fromVector.makeTransferPair(toVector)
(fromIndex: Int, toIndex: Int, _: java.util.Map[Integer, Integer]) => {
pair.copyValueSafe(fromIndex, toIndex)
}
}
(name, transfer)
}
}
private val threadIterator = threadedBatches.iterator
private var threadIndex = -1
private var batches: Iterator[Array[Byte]] = Iterator.empty
private var mappings: Map[String, java.util.Map[Integer, Integer]] = _
private var count = 0 // records read in current batch
override def hasNext: Boolean = count < toLoad.reader.getValueCount || loadNextBatch()
override def next(): Array[Byte] = {
var total = 0
while (total < batchSize && hasNext) {
// read the rest of the current vector, up to the batch size
val toRead = math.min(batchSize - total, toLoad.reader.getValueCount - count)
transfers.foreach { case (name, transfer) =>
val mapping = mappings.get(name).orNull
logger.trace(s"dictionary mappings for $name: $mapping")
var i = 0
while (i < toRead) {
transfer(i + count, i + total, mapping)
i += 1
}
}
count += toRead
total += toRead
}
if (writeHeader) {
// write the header in the first result, which includes the metadata and dictionaries
writeHeader = false
writeHeaderAndFirstBatch(result, mergedDictionaries.dictionaries, ipcOpts, sort, total)
} else {
unloader.unload(total)
}
}
override def close(): Unit = CloseWithLogging.raise(Seq(toLoad, result, mergedDictionaries))
/**
* Read the next batch
*
* @return true if there was a batch to load, false if we've read all batches
*/
@tailrec
private def loadNextBatch(): Boolean = {
if (batches.hasNext) {
val batch = batches.next
// skip the dictionary batches
var offset = 8 // initial threading key offset
dictionaryFields.foreach { _ =>
offset += ByteArrays.readInt(batch, offset) + 4
}
val messageLength = ByteArrays.readInt(batch, offset)
offset += 4 // skip over message length bytes
// load the record batch
loader.load(batch, offset, messageLength)
if (toLoad.reader.getValueCount > 0) {
count = 0 // reset count for this batch
true
} else {
loadNextBatch()
}
} else if (threadIterator.hasNext) {
threadIndex += 1
// set the mappings for this thread
mappings = mergedDictionaries.mappings.map { case (f, m) => (f, m(threadIndex)) }
batches = threadIterator.next.iterator
loadNextBatch()
} else {
false
}
}
}
createFileFromBatches(sft, mergedDictionaries.dictionaries, encoding, ipcOpts, None, iter, firstBatchHasHeader = true)
}
/**
* Merge with sorting. Each batch is assumed to be already sorted
*
* @param sft simple feature type
* @param dictionaryFields dictionary fields
* @param encoding simple feature encoding
* @param mergedDictionaries merged dictionaries and batch mappings
* @param sortBy sort field
* @param reverse reverse sort or not
* @param batchSize record batch size
* @param threadedBatches record batches, grouped by threading key, internally sorted
* @return
*/
private def reduceWithSort(
sft: SimpleFeatureType,
dictionaryFields: Seq[String],
encoding: SimpleFeatureEncoding,
ipcOpts: IpcOption,
mergedDictionaries: MergedDictionaries,
sortBy: String,
reverse: Boolean,
batchSize: Int,
threadedBatches: Array[Array[Array[Byte]]]): CloseableIterator[Array[Byte]] = {
import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichArray
val dictionaries = mergedDictionaries.dictionaries
val result = SimpleFeatureVector.create(sft, dictionaries, encoding)
val unloader = new RecordBatchUnloader(result, ipcOpts)
logger.trace(s"merging sorted deltas - read schema: ${result.underlying.getField}")
// we do a merge sort of each batch
// queue sorted by current value in each batch
val queue = {
val ordering = if (reverse) { BatchMergerOrdering.reverse } else { BatchMergerOrdering }
new PriorityQueue[BatchMerger[Any]](ordering)
}
// track our open vectors to close later
val cleanup = ArrayBuffer.empty[SimpleFeatureVector]
cleanup.sizeHint(threadedBatches.foldLeft(0)((sum, a) => sum + a.length))
threadedBatches.foreachIndex { case (batches, batchIndex) =>
val mappings = mergedDictionaries.mappings.map { case (f, m) => (f, m(batchIndex)) }
logger.trace(s"loading ${batches.length} batch(es) from a single thread")
batches.foreach { batch =>
val toLoad = SimpleFeatureVector.create(sft, dictionaries, encoding)
val loader = new RecordBatchLoader(toLoad.underlying)
cleanup += toLoad
// skip the dictionary batches
var offset = 8
dictionaryFields.foreach { _ =>
offset += ByteArrays.readInt(batch, offset) + 4
}
val messageLength = ByteArrays.readInt(batch, offset)
offset += 4 // skip the length bytes
// load the record batch
loader.load(batch, offset, messageLength)
if (toLoad.reader.getValueCount > 0) {
val transfers: Seq[(Int, Int) => Unit] = toLoad.underlying.getChildrenFromFields.asScala.map { fromVector =>
val name = fromVector.getField.getName
val toVector = result.underlying.getChild(name)
if (fromVector.getField.getDictionary != null) {
val mapping = mappings(name)
val to = toVector.asInstanceOf[IntVector]
(fromIndex: Int, toIndex: Int) => {
val n = fromVector.getObject(fromIndex).asInstanceOf[Integer]
if (n == null) {
to.setNull(toIndex)
} else {
to.setSafe(toIndex, mapping.get(n))
}
}
} else if (sft.indexOf(name) != -1 &&
classOf[Geometry].isAssignableFrom(sft.getDescriptor(name).getType.getBinding)) {
// geometry vectors use FixedSizeList vectors, for which transfer pairs aren't implemented
val from = GeometryFields.wrap(fromVector).asInstanceOf[GeometryVector[Geometry, FieldVector]]
val to = GeometryFields.wrap(toVector).asInstanceOf[GeometryVector[Geometry, FieldVector]]
(fromIndex: Int, toIndex: Int) => from.transfer(fromIndex, toIndex, to)
} else {
val transfer = fromVector.makeTransferPair(toVector)
(fromIndex: Int, toIndex: Int) => transfer.copyValueSafe(fromIndex, toIndex)
}
}
val mapVector = toLoad.underlying
val dict = dictionaries.get(sortBy)
val merger = ArrowAttributeReader(sft.getDescriptor(sortBy), mapVector.getChild(sortBy), dict, encoding) match {
case r: ArrowDictionaryReader => new DictionaryBatchMerger(toLoad, transfers, r, mappings.get(sortBy).orNull)
case r: ArrowDateReader => new DateBatchMerger(toLoad, transfers, r)
case r => new AttributeBatchMerger(toLoad, transfers, r)
}
queue.add(merger.asInstanceOf[BatchMerger[Any]])
}
}
}
var writtenHeader = false
// gets the next record batch to write - returns null if no further records
def nextBatch(): Array[Byte] = {
if (queue.isEmpty) { null } else {
result.clear()
var resultIndex = 0
// copy the next sorted value and then queue and sort the next element out of the batch we copied from
do {
val next = queue.remove()
if (next.transfer(resultIndex)) {
queue.add(next)
}
result.underlying.setIndexDefined(resultIndex)
resultIndex += 1
} while (!queue.isEmpty && resultIndex < batchSize)
if (writtenHeader) {
unloader.unload(resultIndex)
} else {
// write the header in the first result, which includes the metadata and dictionaries
writtenHeader = true
writeHeaderAndFirstBatch(result, dictionaries, ipcOpts, Some(sortBy -> reverse), resultIndex)
}
}
}
val merged: CloseableIterator[Array[Byte]] = new CloseableIterator[Array[Byte]] {
private var batch: Array[Byte] = _
override def hasNext: Boolean = {
if (batch == null) {
batch = nextBatch()
}
batch != null
}
override def next(): Array[Byte] = {
val res = batch
batch = null
res
}
override def close(): Unit = {
CloseWithLogging(result)
CloseWithLogging(cleanup)
CloseWithLogging(mergedDictionaries)
}
}
createFileFromBatches(sft, dictionaries, encoding, ipcOpts, Some(sortBy -> reverse), merged, firstBatchHasHeader = true)
}
/**
* Merge delta dictionary batches
*
* @param sft simple feature type
* @param dictionaryFields dictionary fields
* @param deltas Seq of threaded dictionary deltas
* @return
*/
private def mergeDictionaries(
sft: SimpleFeatureType,
dictionaryFields: Seq[String],
deltas: Array[Array[Array[Byte]]],
encoding: SimpleFeatureEncoding): MergedDictionaries = {
import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichArray
val allocator = ArrowAllocator("merge-dictionaries")
if (dictionaryFields.isEmpty) {
return MergedDictionaries(Map.empty, Map.empty, allocator)
}
// calculate our vector bindings/metadata once up front
val vectorMetadata = dictionaryFields.toArray.map { name =>
val descriptor = sft.getDescriptor(name)
val bindings = ObjectType.selectType(descriptor)
val metadata = Map(SimpleFeatureVector.DescriptorKey -> SimpleFeatureTypes.encodeDescriptor(sft, descriptor))
val factory = VectorFactory(allocator)
(name, bindings, metadata, factory)
}
// create a vector for each dictionary field
def createNewVectors: Array[ArrowAttributeReader] = {
vectorMetadata.map { case (name, bindings, metadata, factory) =>
// use the writer to create the appropriate child vector
val vector = ArrowAttributeWriter(name, bindings, None, metadata, encoding, factory).vector
ArrowAttributeReader(bindings, vector, None, encoding)
}
}
// final results
val results = createNewVectors
// re-used queue, gets emptied after each dictionary field
// batch state is tracked in the DictionaryMerger instances
val queue = new PriorityQueue[DictionaryMerger](Ordering.ordered[DictionaryMerger])
// merge each threaded delta vector into a single dictionary for that thread
var batch = -1
val allMerges: Array[DictionaryMerger] = deltas.map { deltas =>
// deltas are threaded batches containing partial dictionary vectors
batch += 1
// per-dictionary vectors for our final merged results for this threaded batch
val dictionaries = createNewVectors
// tracks the offset for each dictionary, based on the deltas that came before it
val offsets = Array.fill(dictionaries.length)(0)
// the delta vectors, each sorted internally
val toMerge: Array[DictionaryMerger] = deltas.map { bytes =>
val vectors = createNewVectors // per-dictionary vectors from this batch
var i = 0
var offset = 8 // start after threading key
while (i < dictionaries.length) {
val length = ByteArrays.readInt(bytes, offset)
offset += 4 // increment past length
if (length > 0) {
new RecordBatchLoader(vectors(i).vector).load(bytes, offset, length)
offset += length
}
i += 1
}
logger.trace(s"dictionary deltas: ${vectors.map(v => Seq.tabulate(v.getValueCount)(v.apply).mkString(",")).mkString(";")}")
// copy the current dictionary offsets to account for previous batches
val off = Array.ofDim[Int](offsets.length)
System.arraycopy(offsets, 0, off, 0, offsets.length)
val transfers = vectors.mapWithIndex { case (v, j) =>
offsets(j) += v.getValueCount // note: side-effect in map - update our offsets for the next batch
v.vector.makeTransferPair(dictionaries(j).vector)
}
new DictionaryMerger(vectors, transfers, off, null, -1) // we don't care about the batch number here
}
val transfers = Array.ofDim[TransferPair](dictionaries.length)
val mappings = Array.fill(dictionaries.length)(HashBiMap.create[Integer, Integer]())
var i = 0 // dictionary field index
while (i < dictionaries.length) {
// set initial values in the sorting queue
toMerge.foreach { merger =>
if (merger.setCurrent(i)) {
queue.add(merger)
}
}
var count = 0
while (!queue.isEmpty) {
val merger = queue.remove()
merger.transfer(count)
mappings(i).put(merger.offset, count)
if (merger.advance()) {
queue.add(merger)
}
count += 1
}
dictionaries(i).vector.setValueCount(count)
transfers(i) = dictionaries(i).vector.makeTransferPair(results(i).vector)
i += 1
}
new DictionaryMerger(dictionaries, transfers, Array.empty, mappings, batch)
}
// now merge the separate threads together
// final mappings - we build up a new map as otherwise we'd get key/value overlaps
// dictionary(batch(mapping))
val mappings = Array.fill(results.length)(Array.fill(allMerges.length)(new java.util.HashMap[Integer, Integer]()))
results.foreachIndex { case (result, i) =>
allMerges.foreach { merger =>
if (merger.setCurrent(i)) {
queue.add(merger)
}
}
var count = 0
while (!queue.isEmpty) {
val merger = queue.remove()
// check for duplicates
if (count == 0 || result.apply(count - 1) != merger.value) {
merger.transfer(count)
count += 1
}
// update the dictionary mapping from the per-thread to the global dictionary
logger.trace(s"remap ${merger.value} ${merger.batch} ${merger.mappings(i)} ${merger.index} -> ${count - 1}")
val remap = merger.remap
if (remap != null) {
mappings(i)(merger.batch).put(remap, count - 1)
}
if (merger.advance()) {
queue.add(merger)
}
}
result.vector.setValueCount(count)
}
// convert from indexed arrays to dictionary-field-keyed maps
val dictionaryBuilder = Map.newBuilder[String, ArrowDictionary]
dictionaryBuilder.sizeHint(dictionaryFields.length)
val mappingsBuilder = Map.newBuilder[String, Array[java.util.Map[Integer, Integer]]]
mappingsBuilder.sizeHint(dictionaryFields.length)
vectorMetadata.foreachIndex { case ((name, bindings, _, _), i) =>
logger.trace("merged dictionary: " + Seq.tabulate(results(i).getValueCount)(results(i).apply).mkString(","))
val enc = new DictionaryEncoding(i, true, new ArrowType.Int(32, true))
dictionaryBuilder += name -> ArrowDictionary.create(enc, results(i).vector, bindings, encoding)
mappingsBuilder += name -> mappings(i).asInstanceOf[Array[java.util.Map[Integer, Integer]]]
}
val dictionaryMap = dictionaryBuilder.result()
val mappingsMap = mappingsBuilder.result()
logger.trace(s"batch dictionary mappings: ${mappingsMap.mapValues(_.mkString(",")).mkString(";")}")
MergedDictionaries(dictionaryMap, mappingsMap, allocator)
}
// holder for merged dictionaries and mappings from written values to merged values
private case class MergedDictionaries(
dictionaries: Map[String, ArrowDictionary],
mappings: Map[String, Array[java.util.Map[Integer, Integer]]],
allocator: BufferAllocator
) extends Closeable {
override def close(): Unit = {
dictionaries.foreach { case (_, d) => CloseWithLogging(d) }
CloseWithLogging(allocator)
}
}
private case class FieldWriter(
name: String,
index: Int,
attribute: ArrowAttributeWriter,
dictionary: Option[DictionaryWriter] = None
)
private case class DictionaryWriter(
index: Int,
attribute: ArrowAttributeWriter,
writer: BatchWriter,
values: scala.collection.mutable.Map[AnyRef, Integer]
)
private object BatchMergerOrdering extends Ordering[BatchMerger[Any]] {
override def compare(x: BatchMerger[Any], y: BatchMerger[Any]): Int = x.compare(y)
}
/**
* Tracks sorted merging of delta record batches
*
* @param vector vector for this batch
* @param transfers transfer functions to the result batch
* @tparam T type param
*/
private abstract class BatchMerger[T](
vector: SimpleFeatureVector,
transfers: Seq[(Int, Int) => Unit]
) extends Ordered[T] {
protected var index: Int = 0
def transfer(to: Int): Boolean = {
transfers.foreach(_.apply(index, to))
index += 1
if (vector.reader.getValueCount > index) {
load()
true
} else {
false
}
}
protected def load(): Unit
}
/**
* Batch merger for dictionary-encoded values
*
* @param vector vector for this batch
* @param transfers transfer functions to the result batch
* @param sort vector holding the values being sorted on
* @param dictionaryMappings mappings from the batch to the global dictionary
*/
private class DictionaryBatchMerger(
vector: SimpleFeatureVector,
transfers: Seq[(Int, Int) => Unit],
sort: ArrowDictionaryReader,
dictionaryMappings: java.util.Map[Integer, Integer]
) extends BatchMerger[DictionaryBatchMerger](vector, transfers) {
private var value: Int = dictionaryMappings.get(sort.getEncoded(0))
override protected def load(): Unit = {
// since we've sorted the dictionaries, we can just compare the encoded index values
value = dictionaryMappings.get(sort.getEncoded(index))
}
override def compare(that: DictionaryBatchMerger): Int = java.lang.Integer.compare(value, that.value)
}
/**
* Merger for date values. We can avoid allocating a Date object and just compare the millisecond timestamp
*
* @param vector vector for this batch
* @param transfers transfer functions to the result batch
* @param sort vector holding the values being sorted on
*/
private class DateBatchMerger(
vector: SimpleFeatureVector,
transfers: Seq[(Int, Int) => Unit],
sort: ArrowDateReader
) extends BatchMerger[DateBatchMerger](vector, transfers) {
private var value: Long = sort.getTime(0)
override protected def load(): Unit = value = sort.getTime(index)
override def compare(that: DateBatchMerger): Int = java.lang.Long.compare(value, that.value)
}
/**
* Generic batch merger for non-specialized attribute types
*
* @param vector vector for this batch
* @param transfers transfer functions to the result batch
* @param sort vector holding the values being sorted on
*/
private class AttributeBatchMerger(
vector: SimpleFeatureVector,
transfers: Seq[(Int, Int) => Unit],
sort: ArrowAttributeReader
) extends BatchMerger[AttributeBatchMerger](vector, transfers) {
private var value: Comparable[Any] = sort.apply(0).asInstanceOf[Comparable[Any]]
override protected def load(): Unit = value = sort.apply(index).asInstanceOf[Comparable[Any]]
override def compare(that: AttributeBatchMerger): Int = SimpleFeatureOrdering.nullCompare(value, that.value)
}
/**
* Dictionary merger for tracking threaded delta batches. Each member variable is an array, with
* one entry per dictionary field
*
* @param readers attribute readers for the dictionary values
* @param transfers transfers for the dictionary vectors
* @param offsets dictionary offsets based on the number of threaded delta batches
* @param mappings mappings from the local threaded batch dictionary to the global dictionary
* @param batch the batch number
*/
class DictionaryMerger(
readers: Array[ArrowAttributeReader],
transfers: Array[TransferPair],
offsets: Array[Int],
val mappings: Array[HashBiMap[Integer, Integer]],
val batch: Int
) extends Ordered[DictionaryMerger] {
private var current: Int = 0
private var _index: Int = 0
private var _value: Comparable[Any] = _
/**
* The read position of the current dictionary
*
* @return
*/
def index: Int = _index
/**
* The current dictionary value
*
* @return
*/
def value: Comparable[Any] = _value
/**
* The global offset of the current dictionary, based on the batch threading and the current read position
*
* @return
*/
def offset: Int = offsets(current) + _index
/**
     * Set the current dictionary to operate on, and read the first value
*
* @param i dictionary index
* @return true if the dictionary has any values to read
*/
def setCurrent(i: Int): Boolean = {
current = i
_index = -1
advance()
}
/**
* Transfer the current dictionary/value to a new vector
*
* @param to destination index to transfer to
*/
def transfer(to: Int): Unit = transfers(current).copyValueSafe(_index, to)
/**
* Get the reverse global mapping for the current dictionary and value
*
* @return
*/
def remap: Integer = mappings(current).inverse().get(_index)
/**
* Read the next value from the current dictionary. Closes the current dictionary if there are no more values.
*
* @return true if there are more values
*/
def advance(): Boolean = {
_index += 1
if (readers(current).getValueCount > _index) {
_value = readers(current).apply(_index).asInstanceOf[Comparable[Any]]
true
} else {
_value = null
CloseWithLogging(readers(current).vector)
false
}
}
override def compare(that: DictionaryMerger): Int = SimpleFeatureOrdering.nullCompare(_value, that._value)
}
/**
* Writes out a 4-byte int with the batch length, then a single batch
*
* @param vector vector
*/
private class BatchWriter(vector: FieldVector, ipcOpts: IpcOption) extends Closeable {
private val root = createRoot(vector)
private val os = new ByteArrayOutputStream()
private val writer = new ArrowStreamWriter(root, provider, Channels.newChannel(os), ipcOpts)
writer.start() // start the writer - we'll discard the metadata later, as we only care about the record batches
logger.trace(s"write schema: ${vector.getField}")
def writeBatch(count: Int, to: OutputStream): Unit = {
os.reset()
if (count < 1) {
logger.trace("writing 0 bytes")
to.write(ByteArrays.toBytes(0))
} else {
vector.setValueCount(count)
root.setRowCount(count)
writer.writeBatch()
logger.trace(s"writing ${os.size} bytes")
to.write(ByteArrays.toBytes(os.size()))
os.writeTo(to)
}
}
override def close(): Unit = {
CloseWithLogging(writer)
CloseWithLogging(root) // also closes the vector
}
}
private class ReducingIterator(
sft: SimpleFeatureType,
dictionaryFields: Seq[String],
encoding: SimpleFeatureEncoding,
ipcOpts: IpcOption,
sort: Option[(String, Boolean)],
sorted: Boolean,
batchSize: Int,
deltas: CloseableIterator[Array[Byte]]
) extends CloseableIterator[Array[Byte]] {
private lazy val reduced = {
try {
val grouped = scala.collection.mutable.Map.empty[Long, scala.collection.mutable.ArrayBuilder[Array[Byte]]]
while (deltas.hasNext) {
val delta = deltas.next
grouped.getOrElseUpdate(ByteArrays.readLong(delta), Array.newBuilder) += delta
}
val threaded = Array.ofDim[Array[Array[Byte]]](grouped.size)
var i = 0
grouped.foreach { case (_, builder) => threaded(i) = builder.result; i += 1 }
logger.trace(s"merging delta batches from ${threaded.length} thread(s)")
val dictionaries = mergeDictionaries(sft, dictionaryFields, threaded, encoding)
if (sorted || sort.isEmpty) {
reduceNoSort(sft, dictionaryFields, encoding, ipcOpts, dictionaries, sort, batchSize, threaded)
} else {
val Some((s, r)) = sort
reduceWithSort(sft, dictionaryFields, encoding, ipcOpts, dictionaries, s, r, batchSize, threaded)
}
} catch {
case NonFatal(e) =>
// if we get an error, re-throw it on next()
new CloseableIterator[Array[Byte]] {
override def hasNext: Boolean = true
override def next(): Array[Byte] = throw e
override def close(): Unit = {}
}
}
}
override def hasNext: Boolean = reduced.hasNext
override def next(): Array[Byte] = reduced.next()
override def close(): Unit = CloseWithLogging(deltas, reduced)
}
}
| aheyne/geomesa | geomesa-arrow/geomesa-arrow-gt/src/main/scala/org/locationtech/geomesa/arrow/io/DeltaWriter.scala | Scala | apache-2.0 | 38,126 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.controller
import kafka.network.{Receive, BlockingChannel}
import kafka.utils.{Logging, ShutdownableThread}
import collection.mutable.HashMap
import kafka.cluster.Broker
import java.util.concurrent.{LinkedBlockingQueue, BlockingQueue}
import kafka.server.KafkaConfig
import collection.mutable
import kafka.api._
import org.apache.log4j.Logger
class ControllerChannelManager (private val controllerContext: ControllerContext, config: KafkaConfig) extends Logging {
private val brokerStateInfo = new HashMap[Int, ControllerBrokerStateInfo]
private val brokerLock = new Object
this.logIdent = "[Channel manager on controller " + config.brokerId + "]: "
controllerContext.liveBrokers.foreach(addNewBroker(_))
def startup() = {
brokerLock synchronized {
brokerStateInfo.foreach(brokerState => startRequestSendThread(brokerState._1))
}
}
def shutdown() = {
brokerLock synchronized {
brokerStateInfo.foreach(brokerState => removeExistingBroker(brokerState._1))
}
}
def sendRequest(brokerId : Int, request : RequestOrResponse, callback: (RequestOrResponse) => Unit = null) {
brokerLock synchronized {
val stateInfoOpt = brokerStateInfo.get(brokerId)
stateInfoOpt match {
case Some(stateInfo) =>
stateInfo.messageQueue.put((request, callback))
case None =>
warn("Not sending request %s to broker %d, since it is offline.".format(request, brokerId))
}
}
}
def addBroker(broker: Broker) {
// be careful here. Maybe the startup() API has already started the request send thread
brokerLock synchronized {
if(!brokerStateInfo.contains(broker.id)) {
addNewBroker(broker)
startRequestSendThread(broker.id)
}
}
}
def removeBroker(brokerId: Int) {
brokerLock synchronized {
removeExistingBroker(brokerId)
}
}
private def addNewBroker(broker: Broker) {
val messageQueue = new LinkedBlockingQueue[(RequestOrResponse, (RequestOrResponse) => Unit)](config.controllerMessageQueueSize)
val channel = new BlockingChannel(broker.host, broker.port,
BlockingChannel.UseDefaultBufferSize,
BlockingChannel.UseDefaultBufferSize,
config.controllerSocketTimeoutMs)
channel.connect()
val requestThread = new RequestSendThread(config.brokerId, controllerContext, broker.id, messageQueue, channel)
requestThread.setDaemon(false)
brokerStateInfo.put(broker.id, new ControllerBrokerStateInfo(channel, broker, messageQueue, requestThread))
}
private def removeExistingBroker(brokerId: Int) {
try {
brokerStateInfo(brokerId).channel.disconnect()
brokerStateInfo(brokerId).requestSendThread.shutdown()
brokerStateInfo.remove(brokerId)
}catch {
case e => error("Error while removing broker by the controller", e)
}
}
private def startRequestSendThread(brokerId: Int) {
val requestThread = brokerStateInfo(brokerId).requestSendThread
if(requestThread.getState == Thread.State.NEW)
requestThread.start()
}
}
class RequestSendThread(val controllerId: Int,
val controllerContext: ControllerContext,
val toBrokerId: Int,
val queue: BlockingQueue[(RequestOrResponse, (RequestOrResponse) => Unit)],
val channel: BlockingChannel)
extends ShutdownableThread("Controller-%d-to-broker-%d-send-thread".format(controllerId, toBrokerId)) {
private val lock = new Object()
private val stateChangeLogger = Logger.getLogger(KafkaController.stateChangeLogger)
override def doWork(): Unit = {
val queueItem = queue.take()
val request = queueItem._1
val callback = queueItem._2
var receive: Receive = null
try{
lock synchronized {
channel.send(request)
receive = channel.receive()
var response: RequestOrResponse = null
request.requestId.get match {
case RequestKeys.LeaderAndIsrKey =>
response = LeaderAndIsrResponse.readFrom(receive.buffer)
case RequestKeys.StopReplicaKey =>
response = StopReplicaResponse.readFrom(receive.buffer)
}
stateChangeLogger.trace("Controller %d epoch %d received response correlationId %d for a request sent to broker %d"
.format(controllerId, controllerContext.epoch, response.correlationId, toBrokerId))
if(callback != null){
callback(response)
}
}
} catch {
case e =>
// log it and let it go. Let controller shut it down.
debug("Exception occurs", e)
}
}
}
class ControllerBrokerRequestBatch(sendRequest: (Int, RequestOrResponse, (RequestOrResponse) => Unit) => Unit, controllerId: Int)
extends Logging {
val leaderAndIsrRequestMap = new mutable.HashMap[Int, mutable.HashMap[(String, Int), PartitionStateInfo]]
val stopReplicaRequestMap = new mutable.HashMap[Int, Seq[(String, Int)]]
val stopAndDeleteReplicaRequestMap = new mutable.HashMap[Int, Seq[(String, Int)]]
private val stateChangeLogger = Logger.getLogger(KafkaController.stateChangeLogger)
def newBatch() {
// raise error if the previous batch is not empty
if(leaderAndIsrRequestMap.size > 0 || stopReplicaRequestMap.size > 0)
throw new IllegalStateException("Controller to broker state change requests batch is not empty while creating " +
"a new one. Some state changes %s might be lost ".format(leaderAndIsrRequestMap.toString()))
leaderAndIsrRequestMap.clear()
stopReplicaRequestMap.clear()
stopAndDeleteReplicaRequestMap.clear()
}
def addLeaderAndIsrRequestForBrokers(brokerIds: Seq[Int], topic: String, partition: Int,
leaderIsrAndControllerEpoch: LeaderIsrAndControllerEpoch, replicationFactor: Int) {
brokerIds.foreach { brokerId =>
leaderAndIsrRequestMap.getOrElseUpdate(brokerId, new mutable.HashMap[(String, Int), PartitionStateInfo])
leaderAndIsrRequestMap(brokerId).put((topic, partition), PartitionStateInfo(leaderIsrAndControllerEpoch, replicationFactor))
}
}
def addStopReplicaRequestForBrokers(brokerIds: Seq[Int], topic: String, partition: Int, deletePartition: Boolean) {
brokerIds.foreach { brokerId =>
stopReplicaRequestMap.getOrElseUpdate(brokerId, Seq.empty[(String, Int)])
stopAndDeleteReplicaRequestMap.getOrElseUpdate(brokerId, Seq.empty[(String, Int)])
if (deletePartition) {
val v = stopAndDeleteReplicaRequestMap(brokerId)
stopAndDeleteReplicaRequestMap(brokerId) = v :+ (topic, partition)
}
else {
val v = stopReplicaRequestMap(brokerId)
stopReplicaRequestMap(brokerId) = v :+ (topic, partition)
}
}
}
def sendRequestsToBrokers(controllerEpoch: Int, correlationId: Int, liveBrokers: Set[Broker]) {
leaderAndIsrRequestMap.foreach { m =>
val broker = m._1
val partitionStateInfos = m._2.toMap
val leaderIds = partitionStateInfos.map(_._2.leaderIsrAndControllerEpoch.leaderAndIsr.leader).toSet
val leaders = liveBrokers.filter(b => leaderIds.contains(b.id))
val leaderAndIsrRequest = new LeaderAndIsrRequest(partitionStateInfos, leaders, controllerId, controllerEpoch, correlationId)
for (p <- partitionStateInfos) {
val typeOfRequest = if (broker == p._2.leaderIsrAndControllerEpoch.leaderAndIsr.leader) "become-leader" else "become-follower"
stateChangeLogger.trace(("Controller %d epoch %d sending %s LeaderAndIsr request with correlationId %d to broker %d " +
"for partition [%s,%d]").format(controllerId, controllerEpoch, typeOfRequest, correlationId, broker,
p._1._1, p._1._2))
}
sendRequest(broker, leaderAndIsrRequest, null)
}
leaderAndIsrRequestMap.clear()
Seq((stopReplicaRequestMap, false), (stopAndDeleteReplicaRequestMap, true)) foreach {
case(m, deletePartitions) => {
m foreach {
case(broker, replicas) =>
if (replicas.size > 0) {
debug("The stop replica request (delete = %s) sent to broker %d is %s"
.format(deletePartitions, broker, replicas.mkString(",")))
val stopReplicaRequest = new StopReplicaRequest(deletePartitions, Set.empty[(String, Int)] ++ replicas, controllerId,
controllerEpoch, correlationId)
sendRequest(broker, stopReplicaRequest, null)
}
}
m.clear()
}
}
}
}
case class ControllerBrokerStateInfo(channel: BlockingChannel,
broker: Broker,
messageQueue: BlockingQueue[(RequestOrResponse, (RequestOrResponse) => Unit)],
requestSendThread: RequestSendThread)
| akosiaris/kafka | core/src/main/scala/kafka/controller/ControllerChannelManager.scala | Scala | apache-2.0 | 9,761 |
package lambdacart.util.typealigned
import lambdacart.util.~~>
import scalaz.{Compose, Semigroup}
/**
* Binary counter-like accumulator for type-aligned binary type constructors,
* with the most significant bit on the right and addition of new elements (i.e. "increment") from the left.
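 *
 * Illustrative usage (assumed, given an implicit `Compose[F]` in scope):
 * {{{
 *   val acc = f1 +: (f2 +: PreComposeBalancer(f3)) // prepend elements one at a time
 *   val fab = acc.result                           // balanced composition of f1, f2 and f3
 * }}}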
*/
final class PreComposeBalancer[F[_, _], A, B] private(count: Int, stack: AList1[F, A, B]) {
/** Pre-compose an element. */
def +:[Z](f: F[Z, A])(implicit F: Compose[F]): PreComposeBalancer[F, Z, B] =
add(f, stack, 1, count)
def result(implicit F: Compose[F]): F[A, B] =
stack.tail.foldLeft(stack.head)(RightAction.compose(F))
private def add[X, Y](h: F[X, Y], t: AList1[F, Y, B], hcount: Int, tfactor: Int)(implicit F: Compose[F]): PreComposeBalancer[F, X, B] = {
    // hcount: number of elements composed in the head (`h`)
// tfactor: how many times more elements are there in tail (`t`) than in head (tcount = hcount * tfactor)
if(tfactor % 2 == 0) new PreComposeBalancer(hcount * (tfactor + 1), h :: t)
else {
t.uncons match {
case Left(f) => assert(tfactor == 1); new PreComposeBalancer(hcount * 2, AList1(F.compose(f, h)))
case Right(ht) => add(F.compose(ht._1, h), ht._2, hcount * 2, tfactor / 2)
}
}
}
}
object PreComposeBalancer {
def apply[F[_, _], A, B](f: F[A, B]): PreComposeBalancer[F, A, B] =
new PreComposeBalancer(1, AList1(f))
def leftAction[F[_, _], Z](implicit F: Compose[F]): LeftAction[PreComposeBalancer[F, ?, Z], F] =
ν[LeftAction[PreComposeBalancer[F, ?, Z], F]][X, Y]((f, acc) => f +: acc)
def leftAction[G[_, _], F[_, _], Z](φ: F ~~> G)(implicit G: Compose[G]): LeftAction[PreComposeBalancer[G, ?, Z], F] =
ν[LeftAction[PreComposeBalancer[G, ?, Z], F]][X, Y]((f, acc) => φ.apply(f) +: acc)
}
/**
* Binary counter-like accumulator for type-aligned binary type constructors,
* with the most significant bit on the left and addition of new elements (i.e. "increment") from the right.
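 *
 * Illustrative usage (assumed, given an implicit `Compose[F]` in scope):
 * {{{
 *   val acc = (PostComposeBalancer(f1) :+ f2) :+ f3 // append elements one at a time
 *   val fab = acc.result                            // balanced composition of f1, f2 and f3
 * }}}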
*/
final class PostComposeBalancer[F[_, _], A, B](private val repr: PreComposeBalancer[λ[(α, β) => F[β, α]], B, A]) extends AnyVal {
import PostComposeBalancer._
/** Post-compose an element. */
def :+[C](f: F[B, C])(implicit F: Compose[F]): PostComposeBalancer[F, A, C] =
wrap((f +: repr)(flip(F)))
def result(implicit F: Compose[F]): F[A, B] =
repr.result(flip(F))
}
object PostComposeBalancer {
def apply[F[_, _], A, B](f: F[A, B]): PostComposeBalancer[F, A, B] =
wrap(PreComposeBalancer[λ[(α, β) => F[β, α]], B, A](f))
def wrap[F[_, _], A, B](pre: PreComposeBalancer[λ[(α, β) => F[β, α]], B, A]): PostComposeBalancer[F, A, B] =
new PostComposeBalancer[F, A, B](pre)
def rightAction[F[_, _], A](implicit F: Compose[F]): RightAction[PostComposeBalancer[F, A, ?], F] =
ν[RightAction[PostComposeBalancer[F, A, ?], F]][B, C]((acc, f) => acc :+ f)
def rightAction[G[_, _], F[_, _], A](φ: F ~~> G)(implicit G: Compose[G]): RightAction[PostComposeBalancer[G, A, ?], F] =
ν[RightAction[PostComposeBalancer[G, A, ?], F]][B, C]((acc, f) => acc :+ φ.apply(f))
private def flip[F[_, _]](F: Compose[F]): Compose[λ[(α, β) => F[β, α]]] =
new Compose[λ[(α, β) => F[β, α]]] {
def compose[A, B, C](f: F[C, B], g: F[B, A]): F[C, A] =
F.compose(g, f)
}
}
final case class AppendBalancer[A] private(repr: PreComposeBalancer[λ[(α, β) => A], Nothing, Nothing]) extends AnyVal {
def append(a: A)(implicit A: Semigroup[A]): AppendBalancer[A] =
AppendBalancer((a +: repr)(A.compose))
def result(implicit A: Semigroup[A]): A = repr.result(A.compose)
}
object AppendBalancer {
def apply[A](a: A): AppendBalancer[A] =
AppendBalancer(PreComposeBalancer[λ[(α, β) => A], Nothing, Nothing](a))
}
final case class PrependBalancer[A] private(repr: PreComposeBalancer[λ[(α, β) => A], Nothing, Nothing]) extends AnyVal {
import PrependBalancer._
def prepend(a: A)(implicit A: Semigroup[A]): PrependBalancer[A] =
PrependBalancer((a +: repr)(andThen(A)))
def result(implicit A: Semigroup[A]): A = repr.result(andThen(A))
}
object PrependBalancer {
def apply[A](a: A): PrependBalancer[A] =
PrependBalancer(PreComposeBalancer[λ[(α, β) => A], Nothing, Nothing](a))
private def andThen[A](A: Semigroup[A]): Compose[λ[(α, β) => A]] = new Compose[λ[(α, β) => A]] {
def compose[X, Y, Z](a: A, b: A) = A.append(b, a)
}
}
| TomasMikula/LambdaCart | src/main/scala/lambdacart/util/typealigned/balanced.scala | Scala | apache-2.0 | 4,437 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.commons.exception
import io.deepsense.commons.models
object DeepSenseFailure {
type Id = models.Id
val Id = models.Id
}
| deepsense-io/seahorse-workflow-executor | commons/src/main/scala/io/deepsense/commons/exception/DeepSenseFailure.scala | Scala | apache-2.0 | 746 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.skill.Skill
import io.truthencode.ddo.support.requisite._
/**
 * Greater Single Weapon Fighting (Passive): Your Single-Weapon Fighting bonus is increased to a
 * +30% Combat Style bonus to attack speed, 2 additional Melee Power (for a total of 6), and 50%
 * more of your appropriate ability score to your damage (similar to Two-Handed Fighting).
 *
 * Note: Further increases the base number of attacks per second from 1.8 to 2.0. [Unverified]
 *
 * Prerequisites: Improved Single Weapon Fighting, 7 ranks of Balance, Base Attack Bonus +11
*/
trait GreaterSingleWeaponFighting
extends FeatRequisiteImpl with SkillRequisiteImpl with Passive with RequiresAllOfFeat
with RequiresAnyOfSkill with RequiresBaB with FighterBonusFeat {
self: GeneralFeat =>
override def requiresBaB = 11
override def oneOfSkill: Seq[(Skill, Int)] = List((Skill.Balance, 7))
override def allOfFeats: Seq[GeneralFeat] =
List(GeneralFeat.ImprovedSingleWeaponFighting)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/GreaterSingleWeaponFighting.scala | Scala | apache-2.0 | 1,735 |
package usbinstall.settings
import java.nio.file.{Files, Path}
import javafx.beans.property.{ObjectProperty, SimpleObjectProperty}
import suiryc.scala.sys.linux.Device
object InstallSettings {
val profile: ObjectProperty[Option[ProfileSettings]] =
new SimpleObjectProperty(None)
val device: ObjectProperty[Option[Device]] =
new SimpleObjectProperty(None)
protected def tempDirectory(root: Option[Path], prefix: String, deleteOnExit: Boolean): Path = {
val path = root.fold {
Files.createTempDirectory(prefix)
} { root =>
Files.createTempDirectory(root, prefix)
}
if (deleteOnExit) path.toFile.deleteOnExit()
path
}
def tempDirectory(prefix: String): Path =
tempDirectory(Some(pathTemp), prefix, deleteOnExit = false)
lazy val pathTemp: Path = tempDirectory(None, "usbinstall.tmp-", deleteOnExit = true)
lazy val pathMountISO: Path = tempDirectory(None, "usbinstall.iso-", deleteOnExit = true)
lazy val pathMountPartition: Path = tempDirectory(None, "usbinstall.part-", deleteOnExit = true)
}
| suiryc/usbinstall | src/main/scala/usbinstall/settings/InstallSettings.scala | Scala | gpl-3.0 | 1,063 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import java.io.File
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.sql.catalyst.expressions.{NamedExpression, Literal, AttributeReference, EqualTo}
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.util.Utils
/**
* A simple set of tests that call the methods of a hive ClientInterface, loading different version
* of hive from maven central. These tests are simple in that they are mostly just testing to make
 * sure that reflective calls are not throwing NoSuchMethod errors, but the actual functionality
 * is not fully tested.
*/
class VersionsSuite extends SparkFunSuite with Logging {
// Do not use a temp path here to speed up subsequent executions of the unit test during
// development.
private val ivyPath = Some(
new File(sys.props("java.io.tmpdir"), "hive-ivy-cache").getAbsolutePath())
private def buildConf() = {
lazy val warehousePath = Utils.createTempDir()
lazy val metastorePath = Utils.createTempDir()
metastorePath.delete()
Map(
"javax.jdo.option.ConnectionURL" -> s"jdbc:derby:;databaseName=$metastorePath;create=true",
"hive.metastore.warehouse.dir" -> warehousePath.toString)
}
  // sanity check
test("success sanity check") {
val badClient = IsolatedClientLoader.forVersion(HiveContext.hiveExecutionVersion,
buildConf(),
ivyPath).client
val db = new HiveDatabase("default", "")
badClient.createDatabase(db)
}
private def getNestedMessages(e: Throwable): String = {
var causes = ""
var lastException = e
while (lastException != null) {
causes += lastException.toString + "\\n"
lastException = lastException.getCause
}
causes
}
private val emptyDir = Utils.createTempDir().getCanonicalPath
private def partSpec = {
val hashMap = new java.util.LinkedHashMap[String, String]
hashMap.put("key", "1")
hashMap
}
  // It's actually pretty easy to mess things up and have all of your tests "pass" by accidentally
  // connecting to an auto-populated, in-process metastore. Let's make sure we are getting the
  // versions right by forcing a known compatibility failure.
// TODO: currently only works on mysql where we manually create the schema...
  // destructive sanity check
ignore("failure sanity check") {
val e = intercept[Throwable] {
val badClient = quietly {
IsolatedClientLoader.forVersion("13", buildConf(), ivyPath).client
}
}
assert(getNestedMessages(e) contains "Unknown column 'A0.OWNER_NAME' in 'field list'")
}
private val versions = Seq("12", "13", "14", "1.0.0", "1.1.0", "1.2.0")
private var client: ClientInterface = null
versions.foreach { version =>
    // create client
test(s"$version: create client") {
client = null
System.gc() // Hack to avoid SEGV on some JVM versions.
client = IsolatedClientLoader.forVersion(version, buildConf(), ivyPath).client
}
    // create database
test(s"$version: createDatabase") {
val db = HiveDatabase("default", "")
client.createDatabase(db)
}
    // create table
test(s"$version: createTable") {
val table =
HiveTable(
specifiedDatabase = Option("default"),
name = "src",
schema = Seq(HiveColumn("key", "int", "")),
partitionColumns = Seq.empty,
properties = Map.empty,
serdeProperties = Map.empty,
tableType = ManagedTable,
location = None,
inputFormat =
Some(classOf[org.apache.hadoop.mapred.TextInputFormat].getName),
outputFormat =
Some(classOf[org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat[_, _]].getName),
serde =
Some(classOf[org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe].getName()))
client.createTable(table)
}
    // get table
test(s"$version: getTable") {
client.getTable("default", "src")
}
    // list tables
test(s"$version: listTables") {
assert(client.listTables("default") === Seq("src"))
}
    // current database
test(s"$version: currentDatabase") {
assert(client.currentDatabase === "default")
}
    // get database
test(s"$version: getDatabase") {
client.getDatabase("default")
}
    // alter table
test(s"$version: alterTable") {
client.alterTable(client.getTable("default", "src"))
}
    // set command
test(s"$version: set command") {
client.runSqlHive("SET spark.sql.test.key=1")
}
    // create partitioned table DDL
test(s"$version: create partitioned table DDL") {
client.runSqlHive("CREATE TABLE src_part (value INT) PARTITIONED BY (key INT)")
client.runSqlHive("ALTER TABLE src_part ADD PARTITION (key = '1')")
}
    // get partitions
test(s"$version: getPartitions") {
client.getAllPartitions(client.getTable("default", "src_part"))
}
    // get partitions by filter
test(s"$version: getPartitionsByFilter") {
client.getPartitionsByFilter(client.getTable("default", "src_part"), Seq(EqualTo(
AttributeReference("key", IntegerType, false)(NamedExpression.newExprId),
Literal(1))))
}
    // load partition
test(s"$version: loadPartition") {
client.loadPartition(
emptyDir,
"default.src_part",
partSpec,
false,
false,
false,
false)
}
    // load table
test(s"$version: loadTable") {
client.loadTable(
emptyDir,
"src",
false,
false)
}
    // load dynamic partitions
test(s"$version: loadDynamicPartitions") {
client.loadDynamicPartitions(
emptyDir,
"default.src_part",
partSpec,
false,
1,
false,
false)
}
    // create index and reset
test(s"$version: create index and reset") {
client.runSqlHive("CREATE TABLE indexed_table (key INT)")
client.runSqlHive("CREATE INDEX index_1 ON TABLE indexed_table(key) " +
"as 'COMPACT' WITH DEFERRED REBUILD")
client.reset()
}
}
}
| tophua/spark1.52 | sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala | Scala | apache-2.0 | 7,597 |
package org.jetbrains.plugins.scala
package lang.refactoring.rename
import java.util
import com.intellij.psi.{PsiElement, PsiPackage}
import com.intellij.refactoring.rename.RenamePsiPackageProcessor
import org.jetbrains.plugins.scala.caches.ScalaShortNamesCacheManager
/**
* @author Alefas
* @since 06.11.12
*/
class RenameScalaPackageProcessor extends RenamePsiPackageProcessor with ScalaRenameProcessor {
override def prepareRenaming(element: PsiElement, newName: String, allRenames: util.Map[PsiElement, String]) {
element match {
case p: PsiPackage =>
val po = ScalaShortNamesCacheManager.getInstance(element.getProject).getPackageObjectByName(p.getQualifiedName, element.getResolveScope)
if (po != null && po.name != "`package`") {
allRenames.put(po, newName)
}
case _ =>
}
}
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaPackageProcessor.scala | Scala | apache-2.0 | 847 |
/*
* # Trove
*
* This file is part of Trove - A FREE desktop budgeting application that
* helps you track your finances, FREES you from complex budgeting, and
* enables you to build your TROVE of savings!
*
* Copyright © 2016-2019 Eric John Fredericks.
*
* Trove is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Trove is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Trove. If not, see <http://www.gnu.org/licenses/>.
*/
package trove.models
object AccountTypes extends Enumeration {
type AccountType = Value
val Asset: AccountType = Value("Asset")
val Liability: AccountType = Value("Liability")
val Income: AccountType = Value("Income")
val Expense: AccountType = Value("Expense")
val Equity: AccountType = Value("Equity")
}
| emanchgo/budgetfree | src/main/scala/trove/models/AccountTypes.scala | Scala | gpl-3.0 | 1,285 |
/*
* Copyright (c) 2013 David Soergel <[email protected]>
* Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package worldmake
import java.util.UUID
import worldmake.storage.{ExternalPathArtifact, ManagedPathArtifact, Identifier}
import scala.collection.{GenSeq, GenIterable, GenTraversable, GenSet}
import scala.collection.immutable.Queue
import scala.concurrent._
import com.typesafe.scalalogging.slf4j.Logging
import worldmake.cookingstrategy.CookingStrategy
import scala.Some
import org.joda.time.DateTime
import scalax.file.Path
import scala.util.{Failure, Success}
//import java.lang.ProcessBuilder.Redirect
import worldmake.WorldMakeConfig._
//import scala.collection.JavaConversions._
import edu.umass.cs.iesl.scalacommons.StringUtils._
import ExecutionContext.Implicits.global
// even constant artifacts must be stored in the DB, to provide provenance even when the code changes, etc.
/**
* @author <a href="mailto:[email protected]">David Soergel</a>
*/
trait Recipe[+T] extends WorldmakeEntity {
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
// careful using a hash as an ID, per Valerie Aurora
def recipeId: Identifier[Recipe[T]]
lazy val description: String = longDescription.firstLine.limitAtWhitespace(80, "...") // used only for human-readable debug logs and such
def longDescription: String
private var providedSummary: String = ""
def setProvidedSummary(s: String) {
providedSummary = s
}
def summary = providedSummary
lazy val shortId = recipeId.short
lazy val shortDesc = shortId
lazy val queue: Queue[Recipe[_]] = Queue(this)
lazy val isGloballyDeterministic: Boolean = true
override def equals(other: Any): Boolean = other match {
case that: Recipe[T] => (that canEqual this) && recipeId == that.recipeId
case _ => false
}
def canEqual(other: Any): Boolean = other.isInstanceOf[Recipe[T]]
override def hashCode: Int = (41 + recipeId.hashCode)
//def stage(implicit upstreamStrategy: CookingStrategy): Provenance[T]
def deriveFuture(implicit upstreamStrategy: CookingStrategy): Future[Successful[T]]
}
object Recipe {
implicit def toDescribedRecipe(s: String): RecipeSummary = new RecipeSummary(s)
}
class RecipeSummary(s: String) {
def via[T <: Recipe[_]](d: T): T = {
d.setProvidedSummary(s) //copy(description = s)
d
}
}
/*
object DerivationStatuses {
sealed abstract class DerivationStatus(val name: String)
case object Constant extends DerivationStatus("Constant")
case object Cached extends DerivationStatus("Cached")
case object Blocked extends DerivationStatus("Blocked")
case object Pending extends DerivationStatus("Pending")
case object Error extends DerivationStatus("Error")
}*/
object ConstantRecipe {
def apply[T](p: ConstantProvenance[T]) = new ConstantRecipe(p)
implicit def fromString(s: String): ConstantRecipe[String] = ConstantRecipe(ConstantProvenance(StringArtifact(s)))
implicit def fromBoolean(s: Boolean): ConstantRecipe[Boolean] = ConstantRecipe(ConstantProvenance(BooleanArtifact(s)))
implicit def fromDouble(s: Double): ConstantRecipe[Double] = ConstantRecipe(ConstantProvenance(DoubleArtifact(s)))
implicit def fromInt(s: Int): ConstantRecipe[Int] = ConstantRecipe(ConstantProvenance(IntArtifact(s)))
// don't allow this: use RecipeWrapper.pathFromString instead. Otherwise you end up rehashing the file all the time.
//implicit def fromPath(s: Path): ConstantRecipe[ManagedPath] = ConstantRecipe(ConstantProvenance(PathArtifact(s)))
// this would be OK, but it would be better to force use of RecipeWrapper directly for clarity
// implicit def pathFromString(s: String): Recipe[ManagedPath] = RecipeWrapper.pathFromString(s)
}
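// Hypothetical usage sketch (not part of the original source): the implicit
// conversions above let plain literals be used wherever a Recipe is expected.
//
//   import ConstantRecipe._
//   val threads: Recipe[Int]     = 4        // via fromInt
//   val label: Recipe[String]    = "run-01" // via fromString
//   val verbose: Recipe[Boolean] = true     // via fromBoolean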
class ConstantRecipe[T](p: ConstantProvenance[T]) extends Recipe[T] with (() => ConstantProvenance[T]) {
lazy val recipeId = Identifier[Recipe[T]](p.provenanceId.s)
private lazy val outputString: String = p.output.value.toString.replace("\\n", "\\\\n")
lazy val longDescription = outputString
def apply = p
def deriveFuture(implicit upstreamStrategy: CookingStrategy) = Future.successful(p)
/*future {
p
} // (promise[Successful[T]]() success p).future
*/
// def statusString: String = ProvenanceStatus.Constant.toString
override lazy val shortId = " "
override lazy val shortDesc = if (outputString.matches("[\\\\s]")) outputString.limitAtWhitespace(30, "...") else outputString
}
trait DerivableRecipe[T] extends Recipe[T] {
//def dependencies : A
def dependencies: GenSet[Recipe[_]]
override lazy val queue: Queue[Recipe[_]] = {
val deps = dependencies.seq.toSeq.flatMap(_.queue)
Queue[Recipe[_]](deps: _*).distinct.enqueue(this)
}
}
trait LocallyDeterministic[T] extends DerivableRecipe[T] {
override lazy val isGloballyDeterministic: Boolean = !dependencies.exists(!_.isGloballyDeterministic)
}
trait LocallyNondeterministic[T] extends DerivableRecipe[T] {
def resolvePrecomputed: GenSet[Provenance[T]]
//def resolveNew: Option[Provenance[T]]
override lazy val isGloballyDeterministic = false
}
/*
class SystemDerivationJava(val script: Derivation[String], namedDependencies: Map[String, Derivation[_]]) extends ExternalPathDerivation with DerivableDerivation[Path] with Logging {
def dependencies = namedDependencies.values.toSet
// "toEnvironmentString" is not a method of the Derivation trait because the Any->String conversion may differ by
// context (at least, eg., # of sig figs, or filename vs file contents, etc.)
// For that matter, what if it differs for different arguments of the same type?
def toEnvironmentString[T](x:Artifact[T]) : String = x match {
case f:ExternalPathArtifact => f.abspath
case f:TraversableArtifact => f.artifacts.map(toEnvironmentString).mkString(" ")
//case f:GenTraversableArtifact => f.artifacts.map(toEnvironmentString).mkString(" ")
case f => f.value.toString
}
def derive = {
val outputPath: Path = fileStore.newPath
val reifiedDependencies = namedDependencies.mapValues(_.resolve)
// environment is only mutable for the sake of Java conversion
val environment : mutable.Map[String,String] = mutable.Map[String,String]().addAll(reifiedDependencies.mapValues(toEnvironmentString))
val workingDir = Path.createTempDirectory()
val pb = new ProcessBuilder("/bin/sh", "./worldmake.runner")
pb.directory(workingDir.jfile) //fileOption.getOrElse(throw new Error("bad temp dir: " + workingDir)))
val pbenv = pb.environment()
pbenv.putAll(WorldMakeConfig.globalEnvironment)
pbenv.putAll(environment)
pbenv.put("out", workingDir.toAbsolute.path)
pbenv.put("PATH", aoeaoe)
val log: File = (outputPath / "worldmake.log").fileOption.getOrElse(throw new Error("can't create log: " + outputPath / "worldmake.log"))
pb.redirectErrorStream(true)
pb.redirectOutput(Redirect.appendTo(log))
val p: Process = pb.start()
assert(pb.redirectInput() == Redirect.PIPE)
assert(pb.redirectOutput().file() == log)
assert(p.getInputStream.read() == -1)
val exitCode = p.waitFor()
val result = new ExternalPathArtifact(outputPath) with DerivedArtifact[TypedPath] {
def derivedFrom = SystemDerivation.this
}
if (exitCode != 0) {
logger.warn("Retaining working directory: " + workingDir)
throw new FailedDerivationException
}
if (WorldMakeConfig.debugWorkingDirectories) {
logger.warn("Retaining working directory: " + workingDir)
} else {
workingDir.deleteRecursively()
}
result
}
}
*/
//http://mblinn.com/blog/2012/06/30/scala-custom-exceptions/
object FailedRecipeException {
def apply(message: String, recipeId: Identifier[Recipe[_]]): FailedRecipeException = new FailedRecipeException(message, recipeId, None)
def apply(message: String, pr: FailedProvenance[_]): FailedRecipeException = new FailedRecipeException(message, pr.recipeId, Some(pr))
def apply(message: String, pr: FailedProvenance[_], cause: Throwable) = new FailedRecipeException(message, pr.recipeId, Some(pr)).initCause(cause)
}
class FailedRecipeException(message: String, recipeId: Identifier[Recipe[_]], opr: Option[FailedProvenance[_]]) extends Exception(message)
/*
trait ExternalPathDerivation extends Derivation[Path] {
//def children : Seq[ExternalPathArtifact]
def /(s: String): Derivation[Path] = new Derivation1[Path, Path](new IdentifiableFunction1[Path, Path]("/", (p: Path) => p / s), this) //with ExternalPathDerivation
}
*/
/*
class TraversableProvenance[T](val provenances: Traversable[Provenance[T]]) extends Provenance[Traversable[T]] {
def provenanceId = Identifier[Provenance[Traversable[T]]](UUID.randomUUID().toString)
def output = Some(new TraversableArtifact[T](provenances.map))
def status = ???
}
*/
object TraversableRecipe extends Logging {
implicit def wrapTraversable[T](xs: GenTraversable[Recipe[T]]) = new TraversableRecipe(xs)
//implicit def unwrapTraversable[T](t:Recipe[GenTraversable[Artifact[T]]]) : GenTraversable[Recipe[T]] =
// how to unwrap
def seqGet[T] = new IdentifiableFunction2[GenSeq[T], Int, T]("seqGet", {
(a: GenSeq[T], b: Int) => {
logger.debug(s"seqGet getting element $b of a Seq of size ${a.size}")
//logger.debug(a.toString())
a(b)
}
})
import ConstantRecipe._
def seqRecipeUnwrap[T](r: Recipe[GenSeq[T]], maxElems:Int): Traversable[Recipe[T]] = {
val result = for (i <- 0 to (maxElems-1)) yield seqGet[T](r, i)
result
}
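  // Hypothetical usage sketch (illustrative only; `lines` stands for any
  // Recipe[GenSeq[String]] known to yield at least three elements):
  //
  //   val firstThree: Traversable[Recipe[String]] = seqRecipeUnwrap(lines, maxElems = 3)
  //
  // Each derived recipe applies seqGet at its index once the sequence is cooked.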
/*
def seqGetOpt[T] = new IdentifiableFunction2[GenSeq[T], Int, Option[T]]("seqGet", {
(a: GenSeq[T], b: Int) => {
logger.debug(s"seqGet getting element $b of a Seq of size ${a.size}")
//logger.debug(a.toString())
try{Some(a(b))}
catch{ case e: IndexOutOfBoundsException => None }
}
})
import ConstantRecipe._
def seqRecipeUnwrapOpt[T](r: Recipe[GenSeq[T]], maxElems:Int): Traversable[Recipe[Option[T]]] = {
val result = for (i <- 0 to maxElems) yield seqGetOpt[T](r, i)
result
}
*/
/*
def seqRecipeUnwrap[T](r: Recipe[GenTraversable[T]])(implicit upstreamStrategy: CookingStrategy): Iterator[Recipe[T]] = {
val f = r.deriveFuture.map(x => {
// get the completed sequence
val s = x.output.value
// make a provenance for each element
val pp: GenIterable[CompletedProvenance[T]] = s.toSeq.zipWithIndex.map({
case (elem, index) => {
InstantCompletedProvenance(Identifier[Provenance[T]](UUID.randomUUID().toString), Identifier[Recipe[T]](r.recipeId.s + "/" + index), Set.empty, Map.empty, x.createdTime, x.enqueuedTime, x.startTime, x.runningInfo, x.endTime, x.exitCode, None, Map.empty, Artifact(elem))
}})
pp
})
// we don't know how many elements the sequence should have until we resolve the Future!
// so we have to return an Iterator, which just resolves the underlying Seq on the first call.
val result: GenIterable[Recipe[T]] = new Iterator[Recipe[T]]{}
f onComplete {
case Success(x) => {
val w = x.zipWithIndex.map({case (p: CompletedProvenance[T], index) => new Recipe[T]{
def deriveFuture(implicit upstreamStrategy: CookingStrategy) = Future.successful(p)
def longDescription = r.longDescription + "/" + index
def recipeId = p.recipeId
}})
w
}
case Failure(t) => {
logger.error("Failed to unwrap traversable recipes.", t)
throw t
}
}
result
}*/
}
class TraversableRecipe[T](val xs: GenTraversable[Recipe[T]]) extends DerivableRecipe[GenTraversable[T]] with Logging {
/*def derive = {
val upstream = xs.par.map(_.resolveOne)
SuccessfulProvenance[GenTraversable[T]](Identifier[Provenance[GenTraversable[T]]](UUID.randomUUID().toString),
derivationId, ProvenanceStatus.Success,
derivedFromUnnamed = upstream.toSet.seq,
output = Some(new GenTraversableArtifact(upstream.map(_.artifact))))
}
def deriveFuture = {
val upstreamFF = xs.map(_.resolveOneFuture)
val upstreamF = Future.sequence(upstreamFF.seq)
upstreamF.map(upstream => {
SuccessfulProvenance[GenTraversable[T]](Identifier[Provenance[GenTraversable[T]]](UUID.randomUUID().toString),
derivationId, ProvenanceStatus.Success,
derivedFromUnnamed = upstream.toSet.seq,
output = Some(new GenTraversableArtifact(upstream.map(_.artifact))))
})
}*/
/*
def deriveWithArgs(derivedFromUnnamed:GenTraversable[Successful[_]]) = {
SuccessfulProvenance[GenTraversable[T]](Identifier[Provenance[GenTraversable[T]]](UUID.randomUUID().toString),
derivationId, ProvenanceStatus.Success,
derivedFromUnnamed = derivedFromUnnamed.toSet.seq,
output = Some(new GenTraversableArtifact(derivedFromUnnamed.map(_.artifact))))
}*/
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
lazy val recipeId = Identifier[TraversableRecipe[T]](WMHashHex("traversable" + xs.toSeq.par.map(_.recipeId).mkString))
lazy val longDescription = ("Traversable(" + xs.map(_.description) + ")") //.limitAtWhitespace(80, "...")
lazy val dependencies: GenSet[Recipe[_]] = xs.toSet
/* def apply(args: ArgumentsSet[AA]) = {
val argValues = args.resolved.values
SuccessfulProvenance[GenTraversable[AA]](Identifier[Provenance[GenTraversable[AA]]](UUID.randomUUID().toString),
derivationId, ProvenanceStatus.Success,
derivedFromUnnamed = argValues, //derivedFromUnnamed.toSet.seq,
output = Some(new GenTraversableArtifact(argValues.map(_.artifact))))
}
*/
/*def stage(implicit upstreamStrategy: CookingStrategy) = {
val pr = BlockedProvenance(Identifier[Provenance[GenTraversable[Artifact[T]]]](UUID.randomUUID().toString), recipeId)
pr
}*/
def deriveFuture(implicit upstreamStrategy: CookingStrategy): Future[CompletedProvenance[GenTraversable[T]]] = {
val pr = BlockedProvenance[GenTraversable[T]](Identifier[Provenance[GenTraversable[T]]](UUID.randomUUID().toString), recipeId)
val upstreamFF: GenTraversable[Future[Successful[T]]] = xs.map(upstreamStrategy.cookOne)
val upstreamF: Future[Traversable[Successful[T]]] = Future.sequence(upstreamFF.seq)
val result: Future[CompletedProvenance[GenTraversable[T]]] = upstreamF.map(upstream => deriveWithArg(pr.pending(upstream.toSet, Map.empty), upstream))
result
}
private def deriveWithArg(pr: PendingProvenance[GenTraversable[T]], a1: Traversable[Successful[T]]): CompletedProvenance[GenTraversable[T]] = {
val prs = pr.running(new MemoryWithinJvmRunningInfo)
try {
val artifact: GenTraversableArtifact[T] = new MemoryGenTraversableArtifact(a1.map(_.output).toSeq) //Artifact[GenTraversable[T]](f.evaluate(a1.output.value))
val result: CompletedProvenance[GenTraversable[T]] = prs.completed(0, None, Map.empty, artifact)
result
}
catch {
case t: Throwable => {
val prf = prs.failed(1, None, Map.empty)
logger.debug("Error in TraversableRecipe: ", t) // todo better log message
throw FailedRecipeException("Failed TraversableRecipe", prf, t)
}
}
}
// todo
// def map[R](f:T=>R) : TraversableRecipe[R]
}
/*
class DerivationSet(xs: Traversable[Derivation[_]]) extends DerivableDerivation[Traversable[_]] {
def derive = {
val upstream = xs.map(_.resolveOne)
SuccessfulProvenance[Traversable[_]](Identifier[Provenance[Traversable[_]]](UUID.randomUUID().toString),
derivationId, ProvenanceStatus.Success,
derivedFromUnnamed = upstream.toSet,
output = Some(new TraversableArtifact(upstream.map(_.artifact))))
}
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
def derivationId = Identifier[Derivation[Traversable[_]]](WMHashHex("set" + xs.toSeq.map(_.derivationId).mkString))
def description = ("Traversable(" + xs.map(_.description) + ")").take(40)
def dependencies = xs.toSet
}
*/
/*
class GenTraversableArtifact[T](val artifacts: GenTraversable[Artifact[T]]) extends Artifact[GenTraversable[T]] {
def contentHashBytes = artifacts.seq.toSeq.sorted.map(_.contentHashBytes).flatten.toArray
lazy val value = artifacts.map(_.value)
}
class GenTraversableDerivation[T](xs: GenTraversable[Derivation[T]]) extends Derivation[GenTraversable[T]]{
def resolve = new GenTraversableArtifact(xs.map(_.resolve))
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
def uniqueId = Hash.toHex(Hash("SHA-256",xs.seq.toSeq.sorted.map(_.uniqueId).mkString))
}
*/
object ManagedPathRecipe {
implicit def toManagedPathRecipe(r: Recipe[ManagedPath]) = new ManagedPathRecipe(r)
}
class ManagedPathRecipe(underlying: Recipe[ManagedPath]) extends Recipe[ManagedPath] {
def deriveFuture(implicit upstreamStrategy: CookingStrategy) = underlying.deriveFuture
def longDescription = underlying.longDescription
def recipeId = underlying.recipeId
def /(s: String): ManagedPathRecipe = new ManagedPathRecipe(new DerivableRecipe[ManagedPath] {
//def deriveFuture(implicit upstreamStrategy: CookingStrategy) = ManagedPathRecipe.this.deriveFuture.map(s=>new MemoryCompletedProvenance[ManagedPath]())
def dependencies = Set(underlying)
def deriveFuture(implicit upstreamStrategy: CookingStrategy): Future[Successful[ManagedPath]] = {
//val pr = BlockedProvenance(Identifier[Provenance[ManagedPath]](UUID.randomUUID().toString), recipeId)
val pf = upstreamStrategy.cookOne(ManagedPathRecipe.this)
val now = new DateTime()
val result: Future[Successful[ManagedPath]] = pf.map((a1: Successful[ManagedPath]) => {
// ** expanded for clarity
val childRelative: Option[Path] = a1.output.value.relative.map(r => r / s).orElse(Some(Path(s)))
val childPath = ManagedPath(a1.output.value.id, childRelative)
val childArtifact = ManagedPathArtifact(childPath)
InstantCompletedProvenance[ManagedPath](
Identifier[Provenance[ManagedPath]](UUID.randomUUID().toString),
recipeId,
Set(a1),
Map.empty,
now,
now,
now,
new MemoryWithinJvmRunningInfo,
now,
0,
None,
Map.empty,
childArtifact)
})
result
}
def longDescription = ManagedPathRecipe.this.longDescription + "/" + s
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
def recipeId = Identifier[Recipe[ManagedPath]](WMHashHex(ManagedPathRecipe.this.recipeId.s + "/" + s))
})
def children(): Recipe[GenTraversable[ManagedPath]] = new DerivableRecipe[GenTraversable[ManagedPath]] {
def dependencies = Set(underlying)
def deriveFuture(implicit upstreamStrategy: CookingStrategy): Future[Successful[GenTraversable[ManagedPath]]] = ManagedPathRecipe.this.deriveFuture.map((r: Successful[ManagedPath]) => {
val m = r.output.value
val cpaths = m.path.children().toSet.map((c: Path) => m.child(c.name)) //ManagedPath(m.id, m.relative / c.path))
val now = new DateTime()
val cpathartifacts: GenTraversable[Artifact[ManagedPath]] = cpaths.map(ManagedPathArtifact(_)).toSeq
InstantCompletedProvenance[GenTraversable[ManagedPath]](
Identifier[Provenance[GenTraversable[ManagedPath]]](UUID.randomUUID().toString),
recipeId,
Set(r),
Map.empty,
now,
now,
now,
new MemoryWithinJvmRunningInfo,
now,
0,
None,
Map.empty,
new MemoryGenTraversableArtifact[ManagedPath](cpathartifacts))
})
def longDescription = ManagedPathRecipe.this.longDescription + "/*"
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
def recipeId = Identifier[Recipe[GenTraversable[ManagedPath]]](WMHashHex(ManagedPathRecipe.this.recipeId.s + "/*"))
}
//}
}
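// Hypothetical usage sketch (names are illustrative only): with the enrichment above,
// a Recipe[ManagedPath] can be navigated like a path.
//
//   import ManagedPathRecipe._
//   val logFile  = resultsDir / "logs" / "run.log"   // derived child-path recipe
//   val children = resultsDir.children()             // Recipe[GenTraversable[ManagedPath]]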
object ExternalPathRecipe {
implicit def toExternalPathRecipe(r: Recipe[ExternalPath]) = new ExternalPathRecipe(r)
}
class ExternalPathRecipe(underlying: Recipe[ExternalPath]) extends Recipe[ExternalPath] with Logging {
def deriveFuture(implicit upstreamStrategy: CookingStrategy) = underlying.deriveFuture
def longDescription = underlying.longDescription
def recipeId = underlying.recipeId
def /(s: String): ExternalPathRecipe = new ExternalPathRecipe(new DerivableRecipe[ExternalPath] {
//def deriveFuture(implicit upstreamStrategy: CookingStrategy) = ExternalPathRecipe.this.deriveFuture.map(s=>new MemoryCompletedProvenance[ExternalPath]())
def dependencies = Set(underlying)
def deriveFuture(implicit upstreamStrategy: CookingStrategy): Future[Successful[ExternalPath]] = ExternalPathRecipe.this.deriveFuture.map((r: Successful[ExternalPath]) => {
val now = new DateTime()
try {
val now = new DateTime()
InstantCompletedProvenance[ExternalPath](
Identifier[Provenance[ExternalPath]](UUID.randomUUID().toString),
recipeId,
Set(r),
Map.empty,
now,
now,
now,
new MemoryWithinJvmRunningInfo,
now,
0,
None,
Map.empty,
ExternalPathArtifact(ExternalPath(r.output.value.path / s)))
} catch {
case t: Throwable => {
val prf = InstantFailedProvenance[ExternalPath](
Identifier[Provenance[ExternalPath]](UUID.randomUUID().toString),
recipeId,
Set(r),
Map.empty,
now,
now,
now,
new MemoryWithinJvmRunningInfo,
now,
0,
None,
Map.empty)
logger.debug("Error in ExternalPathRecipe./: ", t) // todo better log message
throw FailedRecipeException("Failed ExternalPathRecipe./", prf, t)
}
}
})
def longDescription = ExternalPathRecipe.this.longDescription + "/" + s
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
def recipeId = Identifier[Recipe[ExternalPath]](WMHashHex(ExternalPathRecipe.this.recipeId.s + "/" + s))
})
def children(): Recipe[GenTraversable[ExternalPath]] = new DerivableRecipe[GenTraversable[ExternalPath]] {
def dependencies = Set(underlying)
def deriveFuture(implicit upstreamStrategy: CookingStrategy): Future[Successful[GenTraversable[ExternalPath]]] = ExternalPathRecipe.this.deriveFuture.map((r: Successful[ExternalPath]) => {
val now = new DateTime()
try {
val m = r.output.value
val cpaths = {
val ch: Set[Path] = m.path.children().toSet
val exch = ch.map((c: Path) => ExternalPath(c))
exch
}
val cpathartifacts: GenTraversable[Artifact[ExternalPath]] = cpaths.map(ExternalPathArtifact(_))
logger.debug(s"Found ${cpathartifacts.size} children of ${m.path}.")
InstantCompletedProvenance[GenTraversable[ExternalPath]](
Identifier[Provenance[GenTraversable[ExternalPath]]](UUID.randomUUID().toString),
recipeId,
Set(r),
Map.empty,
now,
now,
now,
new MemoryWithinJvmRunningInfo,
now,
0,
None,
Map.empty,
new MemoryGenTraversableArtifact[ExternalPath](cpathartifacts))
}
catch {
case t: Throwable => {
val prf = InstantFailedProvenance[GenTraversable[ExternalPath]](
Identifier[Provenance[GenTraversable[ExternalPath]]](UUID.randomUUID().toString),
recipeId,
Set(r),
Map.empty,
now,
now,
now,
new MemoryWithinJvmRunningInfo,
now,
0,
None,
Map.empty)
logger.debug("Error in ExternalPathRecipe.children: ", t) // todo better log message
throw FailedRecipeException("Failed ExternalPathRecipe.children", prf, t)
}
}
})
def longDescription = ExternalPathRecipe.this.longDescription + "/*"
// could be a complete serialization, or a UUID for an atomic artifact, or a hash of dependency IDs, etc.
def recipeId = Identifier[Recipe[GenTraversable[ExternalPath]]](WMHashHex(ExternalPathRecipe.this.recipeId.s + "/*"))
}
//}
}
| davidsoergel/worldmake | src/main/scala/worldmake/Recipe.scala | Scala | apache-2.0 | 25,280 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong}
import com.codahale.metrics.{Gauge, Timer}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.util.Utils
/**
* An asynchronous queue for events. All events posted to this queue will be delivered to the child
* listeners in a separate thread.
*
* Delivery will only begin when the `start()` method is called. The `stop()` method should be
* called when no more events need to be delivered.
*/
private class AsyncEventQueue(val name: String, conf: SparkConf, metrics: LiveListenerBusMetrics)
extends SparkListenerBus
with Logging {
import AsyncEventQueue._
// Cap the capacity of the queue so we get an explicit error (rather than an OOM exception) if
// it's perpetually being added to more quickly than it's being drained.
private val eventQueue = new LinkedBlockingQueue[SparkListenerEvent](
conf.get(LISTENER_BUS_EVENT_QUEUE_CAPACITY))
// Keep the event count separately, so that waitUntilEmpty() can be implemented properly;
// this allows that method to return only when the events in the queue have been fully
// processed (instead of just dequeued).
private val eventCount = new AtomicLong()
/** A counter for dropped events. It will be reset every time we log it. */
private val droppedEventsCounter = new AtomicLong(0L)
/** When `droppedEventsCounter` was logged last time in milliseconds. */
@volatile private var lastReportTimestamp = 0L
private val logDroppedEvent = new AtomicBoolean(false)
private var sc: SparkContext = null
private val started = new AtomicBoolean(false)
private val stopped = new AtomicBoolean(false)
private val droppedEvents = metrics.metricRegistry.counter(s"queue.$name.numDroppedEvents")
private val processingTime = metrics.metricRegistry.timer(s"queue.$name.listenerProcessingTime")
// Remove the queue size gauge first, in case it was created by a previous incarnation of
// this queue that was removed from the listener bus.
metrics.metricRegistry.remove(s"queue.$name.size")
metrics.metricRegistry.register(s"queue.$name.size", new Gauge[Int] {
override def getValue: Int = eventQueue.size()
})
private val dispatchThread = new Thread(s"spark-listener-group-$name") {
setDaemon(true)
override def run(): Unit = Utils.tryOrStopSparkContext(sc) {
dispatch()
}
}
private def dispatch(): Unit = LiveListenerBus.withinListenerThread.withValue(true) {
try {
var next: SparkListenerEvent = eventQueue.take()
while (next != POISON_PILL) {
val ctx = processingTime.time()
try {
super.postToAll(next)
} finally {
ctx.stop()
}
eventCount.decrementAndGet()
next = eventQueue.take()
}
eventCount.decrementAndGet()
} catch {
case ie: InterruptedException =>
logInfo(s"Stopping listener queue $name.", ie)
}
}
override protected def getTimer(listener: SparkListenerInterface): Option[Timer] = {
metrics.getTimerForListenerClass(listener.getClass.asSubclass(classOf[SparkListenerInterface]))
}
/**
* Start an asynchronous thread to dispatch events to the underlying listeners.
*
* @param sc Used to stop the SparkContext in case the async dispatcher fails.
*/
private[scheduler] def start(sc: SparkContext): Unit = {
if (started.compareAndSet(false, true)) {
this.sc = sc
dispatchThread.start()
} else {
throw new IllegalStateException(s"$name already started!")
}
}
/**
* Stop the listener bus. It will wait until the queued events have been processed, but new
* events will be dropped.
*/
private[scheduler] def stop(): Unit = {
if (!started.get()) {
throw new IllegalStateException(s"Attempted to stop $name that has not yet started!")
}
if (stopped.compareAndSet(false, true)) {
eventCount.incrementAndGet()
eventQueue.put(POISON_PILL)
}
dispatchThread.join()
}
def post(event: SparkListenerEvent): Unit = {
if (stopped.get()) {
return
}
eventCount.incrementAndGet()
if (eventQueue.offer(event)) {
return
}
eventCount.decrementAndGet()
droppedEvents.inc()
droppedEventsCounter.incrementAndGet()
if (logDroppedEvent.compareAndSet(false, true)) {
// Only log the following message once to avoid duplicated annoying logs.
logError(s"Dropping event from queue $name. " +
"This likely means one of the listeners is too slow and cannot keep up with " +
"the rate at which tasks are being started by the scheduler.")
}
logTrace(s"Dropping event $event")
val droppedCount = droppedEventsCounter.get
if (droppedCount > 0) {
// Don't log too frequently
if (System.currentTimeMillis() - lastReportTimestamp >= 60 * 1000) {
// There may be multiple threads trying to decrease droppedEventsCounter.
// Use "compareAndSet" to make sure only one thread can win.
// And if another thread is increasing droppedEventsCounter, "compareAndSet" will fail and
// then that thread will update it.
if (droppedEventsCounter.compareAndSet(droppedCount, 0)) {
val prevLastReportTimestamp = lastReportTimestamp
lastReportTimestamp = System.currentTimeMillis()
val previous = new java.util.Date(prevLastReportTimestamp)
logWarning(s"Dropped $droppedCount events from $name since $previous.")
}
}
}
}
/**
* For testing only. Wait until there are no more events in the queue.
*
* @return true if the queue is empty.
*/
def waitUntilEmpty(deadline: Long): Boolean = {
while (eventCount.get() != 0) {
if (System.currentTimeMillis > deadline) {
return false
}
Thread.sleep(10)
}
true
}
}
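// Hypothetical usage sketch (illustrative only; the class is package-private and is
// normally managed by LiveListenerBus rather than used directly):
//
//   val queue = new AsyncEventQueue("appStatus", conf, metrics)
//   queue.start(sc)     // spawns the daemon dispatch thread
//   queue.post(event)   // non-blocking; the event is dropped and counted if the queue is full
//   queue.stop()        // enqueues the poison pill, lets the queue drain, then joins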
private object AsyncEventQueue {
val POISON_PILL = new SparkListenerEvent() { }
}
| szhem/spark | core/src/main/scala/org/apache/spark/scheduler/AsyncEventQueue.scala | Scala | apache-2.0 | 6,929 |
/*
* Copyright (c) 2015 Andreas Wolf
*
 * See the LICENSE file in the project root for further copyright information.
*/
package info.andreaswolf.roadhopper.server
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import com.google.inject.Inject
import com.graphhopper.GraphHopper
import com.graphhopper.matching.{EdgeMatch, LocationIndexMatch, MapMatching}
import com.graphhopper.storage.NodeAccess
import com.graphhopper.storage.index.LocationIndexTree
import com.graphhopper.util.{CmdArgs, GPXEntry}
import info.andreaswolf.roadhopper.measurements.{DataPoint, Measurement, MeasurementRepository}
import info.andreaswolf.roadhopper.persistence.Database
import org.json.{JSONString, JSONArray, JSONObject, JSONStringer}
import org.slf4j.LoggerFactory
import scala.StringBuilder
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
/**
* Servlet to convert data from the measurements done by HEV to a format usable for display in the map.
*/
class MeasurementsServlet extends BaseServlet {
@Inject
var args: CmdArgs = null
@Inject var database: Database = null
@Inject var measurementRepository: MeasurementRepository = null
@Inject var hopper: GraphHopper = null
val measurements = new mutable.TreeSet[DataPoint]()
val log = LoggerFactory.getLogger("MeasurementsServlet")
def getParameter(req: HttpServletRequest, name: String, default: String = "") = {
val params: Array[String] = req.getParameterMap.get(name)
if (params != null && params.length > 0) {
params.apply(0)
} else {
default
}
}
override def doGet(req: HttpServletRequest, resp: HttpServletResponse): Unit = {
val measurement = req.getParameter("name")
val format = req.getParameter("format") match {
case "csv" => "csv"
case "json" => "json"
			case _ => "json" // TODO use Content-Type header here
}
log.debug(s"Output format: $format")
if (measurement == null) {
val json = new JSONStringer
val groups = measurementRepository.findAllGroups
json.`object`().key("files").`object`()
groups.sortBy(_.name).foreach({ g =>
json.key(g.name).array()
g.measurements.foreach({ m => json.value(m) })
json.endArray()
})
json.endObject().endObject()
resp.getWriter.append(json.toString)
} else {
val foundMeasurements: List[Measurement] = measurementRepository.findByName(measurement)
			if (foundMeasurements.isEmpty) {
				writeError(resp, 404, s"Measurement $measurement not found")
				return
			}
val measurementObject = foundMeasurements.head
val exporter = format match {
case "json" => new JsonExporter()
case "csv" => new CsvExporter()
}
resp.setContentType(format match {
case "json" => "application/json"
case "csv" => "text/csv"
})
resp.getWriter.append(exporter.exportMeasurement(measurementObject))
}
resp.setStatus(200)
resp.setCharacterEncoding("UTF-8")
}
trait MeasurementExporter {
def exportMeasurement(measurementObject: Measurement): String
}
class JsonExporter extends MeasurementExporter {
class JSONMeasurement(val measurement: Measurement) extends JSONString {
/**
* Converts the data points of a given measurement to a JSON object with the time as the key.
*/
def toJSONString: String = {
val json = new JSONStringer().`object`()
for (datum <- measurement.points) { // .seq.sortWith((a, b) => a.date < b.date)
json.key(datum.date.toString).value(datum: JSONObject)
}
json.endObject()
json.toString
}
}
def exportMeasurement(measurementObject: Measurement): String = {
val json = new JSONStringer
json.`object`()
//json.key("duration").value(measurementObject.duration)
json.key("measurements").value(new JSONMeasurement(measurementObject))
try {
implicit val nodes: NodeAccess = hopper.getGraphHopperStorage.getNodeAccess
val matchedCoordinates: JSONArray = matchCoordinates(measurementObject): JSONArray
json.key("matchedRoad").value(matchedCoordinates)
} catch {
case e: RuntimeException =>
json.key("matchedRoad").value("Matching error: " + e.getMessage)
}
json.endObject().toString
}
/**
* Converts a bunch of edges into a continuous list of coordinates fit for GeoJSON usage.
*
* @return A list of [lon,lat] pairs
*/
implicit def convertCoordinatesToJSON(edges: List[EdgeMatch])(implicit nodes: NodeAccess): JSONArray = {
val result = new JSONArray()
if (edges.isEmpty) {
return result
}
val node: Int = edges.head.getEdgeState.getBaseNode
result.put(new JSONArray().put(nodes.getLon(node)).put(nodes.getLat(node)))
edges.foreach(edge => {
				// add the way geometry points of this edge
val wayNodes = edge.getEdgeState.fetchWayGeometry(2)
for (i <- 0 to wayNodes.size() - 1) {
result.put(new JSONArray().put(wayNodes.getLon(i)).put(wayNodes.getLat(i)))
}
})
result
}
def matchCoordinates(measurement: Measurement): List[EdgeMatch] = {
val graph = hopper.getGraphHopperStorage
val locationIndex = new LocationIndexMatch(graph, hopper.getLocationIndex.asInstanceOf[LocationIndexTree])
val mapMatching = new MapMatching(graph, locationIndex, hopper.getEncodingManager.getEncoder("car"))
val gpxPointsBuffer = new ListBuffer[GPXEntry]()
measurement.points.foreach(dp => gpxPointsBuffer.append(new GPXEntry(dp.position, dp.date)))
import scala.collection.JavaConversions._
mapMatching.doWork(gpxPointsBuffer.toList).getEdgeMatches.toList
}
}
class CsvExporter extends MeasurementExporter {
def exportMeasurement(measurementObject: Measurement): String = {
val output = new StringBuilder()
output.append("time,v\\n")
val startTime = measurementObject.points.head.date
measurementObject.points.foreach(point => output.append(s"${point.date - startTime},${point.velocity}\\n"))
output.toString()
}
}
}
| andreaswolf/roadhopper | src/main/scala/info/andreaswolf/roadhopper/server/MeasurementsServlet.scala | Scala | mit | 5,859 |
/* Copyright 2009-2018 EPFL, Lausanne */
package inox
package tip
import org.scalatest.funspec.AnyFunSpec
class TipSerializationSuite extends AnyFunSpec with ResourceUtils {
import inox.trees._
val ctx = TestContext.empty
val filesWithCat = resourceFiles("regression/tip", filter = _ endsWith ".tip", recursive = true).map { f =>
f.getParentFile.getName -> f
}
// We have to be careful what we unregister as not all `classSerializers` are case classes
class ProductSerializer(trees: ast.Trees) extends utils.InoxSerializer(trees, serializeProducts = true) {
override protected def classSerializers =
super.classSerializers.filterNot(p => 30 <= p._2.id && p._2.id <= 40)
}
def checkSerializer(
serializer: utils.Serializer { val trees: inox.trees.type },
program: Program { val trees: inox.trees.type },
expr: Expr
) = {
import serializer._
val out = new java.io.ByteArrayOutputStream
serializer.serialize(program.symbols, out)
serializer.serialize(expr, out)
val outBytes = out.toByteArray
val in = new java.io.ByteArrayInputStream(outBytes)
val newSymbols = serializer.deserialize[Symbols](in)
val newExpr = serializer.deserialize[Expr](in)
assert(program.symbols == newSymbols)
assert(expr == newExpr)
val out2 = new java.io.ByteArrayOutputStream
serializer.serialize(newSymbols, out2)
serializer.serialize(newExpr, out2)
val out2Bytes = out2.toByteArray
assert(java.util.Arrays.equals(outBytes, out2Bytes))
}
for ((cat, file) <- filesWithCat) {
describe(s"Serializing/deserializing file $cat/${file.getName}") {
it("with registered classes") {
val serializer = utils.Serializer(inox.trees)
for ((program, expr) <- Parser(file).parseScript) checkSerializer(serializer, program, expr)
}
it("with unregistered classes") {
val serializer = new ProductSerializer(inox.trees)
.asInstanceOf[utils.Serializer { val trees: inox.trees.type }]
for ((program, expr) <- Parser(file).parseScript) checkSerializer(serializer, program, expr)
}
}
}
}
| epfl-lara/inox | src/it/scala/inox/tip/TipSerializationSuite.scala | Scala | apache-2.0 | 2,135 |
package org.jetbrains.plugins.scala.lang.refactoring.extractMethod
import _root_.org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScValue}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
/**
* User: Alexander Podkhalyuzin
* Date: 30.03.2010
*/
case class ExtractMethodOutput(paramName: String, returnType: ScType, needNewDefinition: Boolean, fromElement: ScTypedDefinition) {
val isVal = ScalaPsiUtil.nameContext(fromElement) match {
case _: ScValue | _: ScFunction => true
case _ => false
}
}
object ExtractMethodOutput {
def from(variableData: ScalaVariableData): ExtractMethodOutput = {
val element = variableData.element
ExtractMethodOutput(element.name, variableData.scType, variableData.isInsideOfElements, element)
}
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ExtractMethodOutput.scala | Scala | apache-2.0 | 915 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package base
package patterns
import com.intellij.openapi.project.ProjectManager
import com.intellij.psi._
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.util.PsiModificationTracker
import com.intellij.psi.util.PsiModificationTracker._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScSimpleTypeElement, ScTypeVariableTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.expr.xml.ScXmlPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScValue, ScVariable}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScClass, ScTemplateDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager.ClassCategory
import org.jetbrains.plugins.scala.lang.psi.impl.base.patterns.ScPatternArgumentListImpl
import org.jetbrains.plugins.scala.lang.psi.impl.{ScalaPsiElementFactory, ScalaPsiManager}
import org.jetbrains.plugins.scala.lang.psi.impl.base.ScStableCodeReferenceElementImpl
import org.jetbrains.plugins.scala.lang.psi.types
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult, TypingContext}
import org.jetbrains.plugins.scala.lang.resolve._
import org.jetbrains.plugins.scala.lang.resolve.processor.{CompletionProcessor, ExpandedExtractorResolveProcessor}
import org.jetbrains.plugins.scala.macroAnnotations.{ModCount, CachedInsidePsiElement}
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel.Scala_2_11
import org.jetbrains.plugins.scala.project._
import scala.annotation.tailrec
import scala.collection.immutable.Set
import scala.collection.mutable.ArrayBuffer
/**
* @author Alexander Podkhalyuzin
*/
trait ScPattern extends ScalaPsiElement {
def isIrrefutableFor(t: Option[ScType]): Boolean = false
def getType(ctx: TypingContext): TypeResult[ScType] = Failure("Cannot type pattern", Some(this))
def bindings: Seq[ScBindingPattern] = {
val b = new ArrayBuffer[ScBindingPattern]
def inner(p: ScPattern) {
p match {
case binding: ScBindingPattern => b += binding
case _ =>
}
for (sub <- p.subpatterns) {
inner(sub)
}
}
inner(this)
b
}
def typeVariables: Seq[ScTypeVariableTypeElement] = {
val b = new ArrayBuffer[ScTypeVariableTypeElement]
def inner(p: ScPattern) {
p match {
case ScTypedPattern(te) =>
te.accept(new ScalaRecursiveElementVisitor {
override def visitTypeVariableTypeElement(tvar: ScTypeVariableTypeElement): Unit = {
b += tvar
}
})
case _ =>
}
for (sub <- p.subpatterns) {
inner(sub)
}
}
inner(this)
b
}
override def accept(visitor: ScalaElementVisitor) {
visitor.visitPattern(this)
}
def subpatterns: Seq[ScPattern] = this match {
case _: ScReferencePattern => Seq.empty
case _ => findChildrenByClassScala[ScPattern](classOf[ScPattern])
}
private def expectedTypeForExtractorArg(ref: ScStableCodeReferenceElement,
argIndex: Int,
expected: Option[ScType],
totalNumberOfPatterns: Int): Option[ScType] = {
val bind: Option[ScalaResolveResult] = ref.bind() match {
case Some(ScalaResolveResult(_: ScBindingPattern | _: ScParameter, _)) =>
val resolve = ref match {
case refImpl: ScStableCodeReferenceElementImpl =>
refImpl.doResolve(refImpl, new ExpandedExtractorResolveProcessor(ref, ref.refName, ref.getKinds(incomplete = false), ref.getContext match {
case inf: ScInfixPattern => inf.expectedType
case constr: ScConstructorPattern => constr.expectedType
case _ => None
}))
}
if (resolve.length != 1) None
else {
resolve(0) match {
case s: ScalaResolveResult => Some(s)
case _ => None
}
}
case m => m
}
def calculateSubstitutor(_tp: ScType, funType: ScType, substitutor: ScSubstitutor): ScSubstitutor = {
val tp = _tp match {
case ex: ScExistentialType => ex.skolem
case _ => _tp
}
def rightWay: ScSubstitutor = {
val t = Conformance.conformsInner(tp, substitutor.subst(funType), Set.empty, new ScUndefinedSubstitutor)
if (t._1) {
val undefSubst = t._2
undefSubst.getSubstitutor match {
case Some(newSubst) => newSubst.followed(substitutor)
case _ => substitutor
}
} else substitutor
}
//todo: looks quite hacky to try another direction first, do you know better? see SCL-6543
val t = Conformance.conformsInner(substitutor.subst(funType), tp, Set.empty, new ScUndefinedSubstitutor)
if (t._1) {
val undefSubst = t._2
undefSubst.getSubstitutor match {
case Some(newSubst) => newSubst.followed(substitutor)
case _ => rightWay
}
} else rightWay
}
bind match {
case Some(ScalaResolveResult(fun: ScFunction, _)) if fun.name == "unapply" && ScPattern.isQuasiquote(fun) =>
val tpe = getContext.getContext match {
case ip: ScInterpolationPattern =>
val parts = getParent.asInstanceOf[ScalaPsiElement]
.findChildrenByType(ScalaTokenTypes.tINTERPOLATED_STRING)
.map(_.getText)
            // "..." must be checked before ".." so the extra splicing cardinality is not lost
            if (argIndex < parts.length && parts(argIndex).endsWith("..."))
              ScalaPsiElementFactory.createTypeElementFromText("Seq[Seq[scala.reflect.api.Trees#Tree]]", PsiManager.getInstance(getProject))
            else if (argIndex < parts.length && parts(argIndex).endsWith(".."))
              ScalaPsiElementFactory.createTypeElementFromText("Seq[scala.reflect.api.Trees#Tree]", PsiManager.getInstance(getProject))
            else
              ScalaPsiElementFactory.createTypeElementFromText("scala.reflect.api.Trees#Tree", PsiManager.getInstance(getProject))
}
tpe.getType().toOption
case Some(ScalaResolveResult(fun: ScFunction, substitutor: ScSubstitutor)) if fun.name == "unapply" &&
fun.parameters.length == 1 =>
val subst = if (fun.typeParameters.isEmpty) substitutor else {
var undefSubst = fun.typeParameters.foldLeft(ScSubstitutor.empty) { (s, p) =>
s.bindT((p.name, ScalaPsiUtil.getPsiElementId(p)), ScUndefinedType(new ScTypeParameterType(p, substitutor)))
}
val clazz = ScalaPsiUtil.getContextOfType(this, true, classOf[ScTemplateDefinition])
clazz match {
case clazz: ScTemplateDefinition =>
undefSubst = undefSubst.followed(new ScSubstitutor(ScThisType(clazz)))
case _ =>
}
val firstParameterType = fun.parameters.head.getType(TypingContext.empty) match {
case Success(tp, _) => tp
case _ => return None
}
val funType = undefSubst.subst(firstParameterType)
expected match {
case Some(tp) => calculateSubstitutor(tp, funType, substitutor)
case _ => substitutor
}
}
fun.returnType match {
case Success(rt, _) =>
def updateRes(tp: ScType): ScType = {
val parameters: Seq[ScTypeParam] = fun.typeParameters
tp.recursiveVarianceUpdate {
case (tp: ScTypeParameterType, variance) if parameters.contains(tp.param) =>
(true, if (variance == -1) substitutor.subst(tp.lower.v)
else substitutor.subst(tp.upper.v))
case (typez, _) => (false, typez)
}
}
val subbedRetTp: ScType = subst.subst(rt)
if (subbedRetTp.equiv(lang.psi.types.Boolean)) None
else {
val args = ScPattern.extractorParameters(subbedRetTp, this, ScPattern.isOneArgCaseClassMethod(fun))
if (totalNumberOfPatterns == 1 && args.length > 1) Some(ScTupleType(args)(getProject, getResolveScope))
else if (argIndex < args.length) Some(updateRes(subst.subst(args(argIndex)).unpackedType))
else None
}
case _ => None
}
case Some(ScalaResolveResult(fun: ScFunction, substitutor: ScSubstitutor)) if fun.name == "unapplySeq" &&
fun.parameters.length == 1 =>
val subst = if (fun.typeParameters.isEmpty) substitutor else {
val undefSubst = substitutor followed fun.typeParameters.foldLeft(ScSubstitutor.empty) { (s, p) =>
s.bindT((p.name, ScalaPsiUtil.getPsiElementId(p)), ScUndefinedType(new ScTypeParameterType(p, substitutor)))
}
val firstParameterRetTp = fun.parameters.head.getType(TypingContext.empty) match {
case Success(tp, _) => tp
case _ => return None
}
val funType = undefSubst.subst(firstParameterRetTp)
expected match {
case Some(tp) => calculateSubstitutor(tp, funType, substitutor)
case _ => substitutor
}
}
fun.returnType match {
case Success(rt, _) =>
val args = ScPattern.extractorParameters(subst.subst(rt), this, ScPattern.isOneArgCaseClassMethod(fun))
if (args.isEmpty) return None
if (argIndex < args.length - 1) return Some(subst.subst(args(argIndex)))
val lastArg = args.last
(lastArg +: BaseTypes.get(lastArg)).find {
case ScParameterizedType(des, seqArgs) => seqArgs.length == 1 && ScType.extractClass(des).exists { clazz =>
clazz.qualifiedName == "scala.collection.Seq"
}
case _ => false
} match {
case Some(seq@ScParameterizedType(des, seqArgs)) =>
this match {
case n: ScNamingPattern if n.getLastChild.isInstanceOf[ScSeqWildcard] => Some(subst.subst(seq))
case _ => Some(subst.subst(seqArgs.head))
}
case _ => None
}
case _ => None
}
case _ => None
}
}
@CachedInsidePsiElement(this, ModCount.getBlockModificationCount)
def expectedType: Option[ScType] = getContext match {
case list : ScPatternList => list.getContext match {
case _var : ScVariable => _var.getType(TypingContext.empty).toOption
case _val : ScValue => _val.getType(TypingContext.empty).toOption
}
case argList : ScPatternArgumentList =>
argList.getContext match {
case constr : ScConstructorPattern =>
val thisIndex: Int = constr.args.patterns.indexWhere(_ == this)
expectedTypeForExtractorArg(constr.ref, thisIndex, constr.expectedType, argList.patterns.length)
case _ => None
}
case composite: ScCompositePattern => composite.expectedType
case infix: ScInfixPattern =>
val i =
if (infix.leftPattern == this) 0
else if (this.isInstanceOf[ScTuplePattern]) return None //this is handled elsewhere in this function
else 1
expectedTypeForExtractorArg(infix.reference, i, infix.expectedType, 2)
case par: ScParenthesisedPattern => par.expectedType
case patternList : ScPatterns => patternList.getContext match {
case tuple : ScTuplePattern =>
tuple.getContext match {
case infix: ScInfixPattern =>
if (infix.leftPattern != tuple) {
//so it's right pattern
val i = tuple.patternList match {
case Some(patterns: ScPatterns) => patterns.patterns.indexWhere(_ == this)
case _ => return None
}
val patternLength: Int = tuple.patternList match {
case Some(pat) => pat.patterns.length
case _ => -1 //is it possible to get here?
}
return expectedTypeForExtractorArg(infix.reference, i + 1, infix.expectedType, patternLength)
}
case _ =>
}
tuple.expectedType.flatMap {
case ScTupleType(comps) =>
for ((t, p) <- comps.iterator.zip(patternList.patterns.iterator)) {
if (p == this) return Some(t)
}
None
case et0 if et0 == types.AnyRef || et0 == types.Any => Some(types.Any)
case _ => None
}
case _: ScXmlPattern =>
val nodeClass: Option[PsiClass] = ScalaPsiManager.instance(getProject).getCachedClass(getResolveScope, "scala.xml.Node")
nodeClass.flatMap { nodeClass =>
this match {
case n: ScNamingPattern if n.getLastChild.isInstanceOf[ScSeqWildcard] =>
val seqClass: Option[PsiClass] =
ScalaPsiManager.instance(getProject).getCachedClass(getResolveScope, "scala.collection.Seq")
seqClass.map { seqClass =>
ScParameterizedType(ScDesignatorType(seqClass), Seq(ScDesignatorType(nodeClass)))
}
case _ => Some(ScDesignatorType(nodeClass))
}
}
case _ => None
}
case clause: ScCaseClause => clause.getContext/*clauses*/.getContext match {
case matchStat : ScMatchStmt => matchStat.expr match {
case Some(e) => Some(e.getType(TypingContext.empty).getOrAny)
case _ => None
}
case b: ScBlockExpr if b.getContext.isInstanceOf[ScCatchBlock] =>
val thr = ScalaPsiManager.instance(getProject).getCachedClass(getResolveScope, "java.lang.Throwable")
thr.map(ScType.designator(_))
case b : ScBlockExpr =>
b.expectedType(fromUnderscore = false) match {
case Some(et) =>
et.removeAbstracts match {
case ScFunctionType(_, Seq()) => Some(types.Unit)
case ScFunctionType(_, Seq(p0)) => Some(p0)
case ScFunctionType(_, params) =>
val tt = ScTupleType(params)(getProject, getResolveScope)
Some(tt)
case ScPartialFunctionType(_, param) => Some(param)
case _ => None
}
case None => None
}
case _ => None
}
case named: ScNamingPattern => named.expectedType
case gen: ScGenerator =>
val analog = getAnalog
if (analog != this) analog.expectedType
else None
case enum: ScEnumerator =>
Option(enum.rvalue).flatMap { rvalue =>
rvalue.getType(TypingContext.empty).toOption
}
case _ => None
}
def getAnalog: ScPattern = {
getContext match {
case gen: ScGenerator =>
val f: ScForStatement = gen.getContext.getContext match {
case fr: ScForStatement => fr
case _ => return this
}
f.getDesugarizedExpr match {
case Some(expr) =>
if (analog != null) return analog
case _ =>
}
this
case _ => this
}
}
var desugarizedPatternIndex = -1
var analog: ScPattern = null
}
object ScPattern {
def isOneArgCaseClassMethod(fun: ScFunction): Boolean = {
fun.syntheticCaseClass match {
case Some(c: ScClass) => c.constructor.exists(_.effectiveFirstParameterSection.length == 1)
case _ => false
}
}
private def findMember(name: String, tp: ScType, place: PsiElement): Option[ScType] = {
val cp = new CompletionProcessor(StdKinds.methodRef, place, forName = Some(name))
cp.processType(tp, place)
cp.candidatesS.flatMap {
case ScalaResolveResult(fun: ScFunction, subst) if fun.parameters.isEmpty && fun.name == name =>
Seq(subst.subst(fun.returnType.getOrAny))
case ScalaResolveResult(b: ScBindingPattern, subst) if b.name == name =>
Seq(subst.subst(b.getType(TypingContext.empty).getOrAny))
case ScalaResolveResult(param: ScClassParameter, subst) if param.name == name =>
Seq(subst.subst(param.getType(TypingContext.empty).getOrAny))
case _ => Seq.empty
}.headOption
}
private def extractPossibleProductParts(receiverType: ScType, place: PsiElement, isOneArgCaseClass: Boolean): Seq[ScType] = {
val res: ArrayBuffer[ScType] = new ArrayBuffer[ScType]()
@tailrec
def collect(i: Int) {
findMember(s"_$i", receiverType, place) match {
case Some(tp) if !isOneArgCaseClass =>
res += tp
collect(i + 1)
case _ =>
if (i == 1) res += receiverType
}
}
collect(1)
res.toSeq
}
def extractProductParts(tp: ScType, place: PsiElement): Seq[ScType] = {
extractPossibleProductParts(tp, place, isOneArgCaseClass = false)
}
def expectedNumberOfExtractorArguments(returnType: ScType, place: PsiElement, isOneArgCaseClass: Boolean): Int =
extractorParameters(returnType, place, isOneArgCaseClass).size
def extractorParameters(returnType: ScType, place: PsiElement, isOneArgCaseClass: Boolean): Seq[ScType] = {
def collectFor2_11: Seq[ScType] = {
findMember("isEmpty", returnType, place) match {
case Some(tp) if types.Boolean.equiv(tp) =>
case _ => return Seq.empty
}
val receiverType = findMember("get", returnType, place).getOrElse(return Seq.empty)
extractPossibleProductParts(receiverType, place, isOneArgCaseClass)
}
val level = place.languageLevel
if (level >= Scala_2_11) collectFor2_11
else {
returnType match {
case ScParameterizedType(des, args) =>
ScType.extractClass(des) match {
case Some(clazz) if clazz.qualifiedName == "scala.Option" ||
clazz.qualifiedName == "scala.Some" =>
if (args.length == 1) {
def checkProduct(tp: ScType): Seq[ScType] = {
val productChance = collectFor2_11
if (productChance.length <= 1) Seq(tp)
else {
val productFqn = "scala.Product" + productChance.length
(for {
productClass <- ScalaPsiManager.instance(place.getProject).getCachedClass(place.getResolveScope, productFqn)
clazz <- ScType.extractClass(tp, Some(place.getProject))
} yield clazz == productClass || clazz.isInheritor(productClass, true)).
filter(identity).fold(Seq(tp))(_ => productChance)
}
}
args.head match {
case tp if isOneArgCaseClass => Seq(tp)
case ScTupleType(comps) => comps
case tp => checkProduct(tp)
}
} else Seq.empty
case _ => Seq.empty
}
case _ => Seq.empty
}
}
}
def isQuasiquote(fun: ScFunction) = {
val fqnO = Option(fun.containingClass).map(_.qualifiedName)
fqnO.exists(fqn => fqn.contains('.') && fqn.substring(0, fqn.lastIndexOf('.')) == "scala.reflect.api.Quasiquotes.Quasiquote")
}
} | advancedxy/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPattern.scala | Scala | apache-2.0 | 19,445 |
/** soar
*
* Copyright (c) 2017 Hugo Firth
* Email: <[email protected]/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ncl.la.soar.db
import java.sql.Date
import cats._
import cats.implicits._
import doobie.imports._
import doobie.postgres.imports._
import monix.eval.Task
import monix.cats._
import uk.ac.ncl.la.soar.{ModuleCode, StudentNumber}
import uk.ac.ncl.la.soar.data.{Module, ModuleRecords}
import Implicits._
class ModuleDb(xa: Transactor[Task]) extends Repository[Module] {
import ModuleDb._
override type PK = ModuleCode
override val init: Task[Unit] = initQ.transact(xa)
override val list: Task[List[Module]] = listQ.transact(xa)
override def find(id: ModuleCode): Task[Option[Module]] = findQ(id).transact(xa)
def findRecord(id: ModuleCode): Task[Option[ModuleRecords[Map, StudentNumber, Double]]] = findRecordQ(id).transact(xa)
override def save(entry: Module): Task[Unit] = saveQ(entry).transact(xa)
override def delete(id: ModuleCode): Task[Boolean] = deleteQ(id).transact(xa)
}
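// Illustrative usage sketch (not part of the original file): given a doobie Transactor[Task]
// named `xa`, a caller might run
//   val modules = new ModuleDb(xa).list.runAsync  // assumes an implicit monix Scheduler in scope
// Nothing touches the database until the returned Task is actually executed.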
private[db] object ModuleDb extends RepositoryCompanion[Module, ModuleDb] {
private case class ModuleRow(num: ModuleCode,
start: Option[Date],
length: Option[String],
title: Option[String],
keywords: List[String],
description: Option[String])
private def fromRow[F[_]: Functor](row: F[ModuleRow]): F[Module] =
row.map(r => Module(r.num, r.title, r.keywords, r.description))
override val initQ: ConnectionIO[Unit] = ().pure[ConnectionIO]
override val listQ: ConnectionIO[List[Module]] = sql"SELECT * FROM module;".query[ModuleRow].list.map(fromRow[List])
override def findQ(id: ModuleCode): ConnectionIO[Option[Module]] =
sql"SELECT * FROM module m WHERE m.num = $id;".query[ModuleRow].option.map(fromRow[Option])
override def saveQ(entry: Module): ConnectionIO[Unit] =
sql"INSERT INTO module (num) VALUES (${entry.code});".update.run.map(_ => ())
override def deleteQ(id: ModuleCode): ConnectionIO[Boolean] =
sql"DELETE FROM module m WHERE m.num = $id;".update.run.map(_ > 0)
def findRecordQ(id: ModuleCode): ConnectionIO[Option[ModuleRecords[Map, StudentNumber, Double]]] = {
val q =
sql"""
SELECT m.student_num, m.score FROM module_score m WHERE m.module_num = $id;
""".query[(StudentNumber, Double)].list
q.map {
case Nil => None
case scores => Some(ModuleRecords[Map, StudentNumber, Double](id, scores.toMap))
}
}
}
| NewcastleComputingScience/student-outcome-accelerator | db/src/main/scala/uk/ac/ncl/la/soar/db/ModuleRepository.scala | Scala | apache-2.0 | 3,136 |
package example
import org.scalatra.test.scalatest.ScalatraFlatSpec
import skinny.micro.{ AsyncSkinnyMicroServlet, ServletConcurrencyException }
import scala.concurrent.Future
import scala.concurrent.duration._
class FutureSpec extends ScalatraFlatSpec {
addServlet(new AsyncSkinnyMicroServlet {
before() { implicit ctx =>
Future {
Thread.sleep(100)
request.setAttribute("before", "done")
}
}
after() { implicit ctx =>
Future {
Thread.sleep(100)
response.setHeader("foo", "bar")
}
}
get("/") { implicit ctx =>
params.keys.mkString(",")
}
get("/future") { implicit ctx =>
Future {
params.keys.mkString(",")
}
}
get("/no-future-error") { implicit ctx =>
awaitFutures(3.seconds) {
Future {
try {
params.keys.mkString(",")
} catch {
case e: ServletConcurrencyException =>
Map("message" -> e.getMessage)
}
}
}
}
get("/before") { implicit ctx =>
Future {
request.getAttribute("before")
}
}
}, "/*")
it should "simply work" in {
get("/?foo=bar") {
status should equal(200)
body should equal("foo")
}
}
it should "fail with simple Future" in {
get("/no-future-error?foo=bar") {
status should equal(200)
var found = false
var count = 0
while (!found && count < 10) {
if (body.contains("Concurrency Issue Detected")) {
found = true
} else {
count += 1
}
}
found should be(false)
}
}
it should "work with futureWithContext" in {
get("/future?foo=bar&baz=zzz") {
status should equal(200)
body should equal("baz,foo")
header("foo") should equal("bar")
}
}
it should "work with async before filters" in {
get("/before") {
status should equal(200)
body should equal("done")
header("foo") should equal("bar")
}
}
}
| xerial/skinny-micro | micro/src/test/scala/example/FutureSpec.scala | Scala | bsd-2-clause | 2,040 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.spark.testsuite.datacompaction
import java.io.File
import scala.collection.JavaConverters._
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.CarbonHiveContext._
import org.apache.spark.sql.common.util.QueryTest
import org.apache.carbondata.core.carbon.{AbsoluteTableIdentifier, CarbonTableIdentifier}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.lcm.status.SegmentStatusManager
import org.scalatest.BeforeAndAfterAll
/**
* FT for data compaction scenario.
*/
class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll {
// return segment details
def getSegments(databaseName : String, tableName : String, tableId : String): List[String] = {
val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(new
AbsoluteTableIdentifier(
CarbonProperties.getInstance.getProperty(CarbonCommonConstants.STORE_LOCATION),
new CarbonTableIdentifier(databaseName, tableName.toLowerCase , tableId)
)
)
val segments = segmentStatusManager.getValidAndInvalidSegments.getValidSegments.asScala.toList
segments
}
val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
.getCanonicalPath
var csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
var csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compaction2.csv"
var csvFilePath3 = currentDirectory + "/src/test/resources/compaction/compaction3.csv"
override def beforeAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/yyyy")
sql(
"CREATE TABLE nodictionaryCompaction (country String, ID Int, date Timestamp, name " +
"String, " +
"phonetype String, serialname String, salary Int) STORED BY 'org.apache.carbondata" +
".format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='country')"
)
sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE nodictionaryCompaction " +
"OPTIONS('DELIMITER' = ',')"
)
sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE nodictionaryCompaction " +
"OPTIONS('DELIMITER' = ',')"
)
sql("LOAD DATA LOCAL INPATH '" + csvFilePath3 + "' INTO TABLE nodictionaryCompaction " +
"OPTIONS('DELIMITER' = ',')"
)
// compaction will happen here.
sql("alter table nodictionaryCompaction compact 'major'"
)
// wait for compaction to finish.
Thread.sleep(1000)
}
  // check for up to ~20 seconds (10 retries x 2 seconds) whether the compacted segment has appeared.
  // if it has not been created by then, the test stops waiting and subsequent tests will fail.
test("check if compaction is completed or not.") {
var status = true
var noOfRetries = 0
while (status && noOfRetries < 10) {
val segments: List[String] = getSegments(
CarbonCommonConstants.DATABASE_DEFAULT_NAME, "nodictionaryCompaction", "uni21")
if (!segments.contains("0.1")) {
// wait for 2 seconds for compaction to complete.
Thread.sleep(2000)
noOfRetries += 1
}
else {
status = false
}
}
}
test("select country from nodictionaryCompaction") {
// check answers after compaction.
checkAnswer(
sql("select country from nodictionaryCompaction"),
Seq(Row("america"),
Row("canada"),
Row("chile"),
Row("china"),
Row("england"),
Row("burma"),
Row("butan"),
Row("mexico"),
Row("newzealand"),
Row("westindies"),
Row("china"),
Row("india"),
Row("iran"),
Row("iraq"),
Row("ireland")
)
)
}
test("delete merged folder and execute query") {
// delete merged segments
sql("clean files for table nodictionaryCompaction")
// merged segment should not be there
val segments =
getSegments(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "nodictionaryCompaction", "uni21")
assert(!segments.contains("0"))
assert(!segments.contains("1"))
assert(!segments.contains("2"))
assert(segments.contains("0.1"))
// now check the answers it should be same.
checkAnswer(
sql("select country from nodictionaryCompaction"),
Seq(Row("america"),
Row("canada"),
Row("chile"),
Row("china"),
Row("england"),
Row("burma"),
Row("butan"),
Row("mexico"),
Row("newzealand"),
Row("westindies"),
Row("china"),
Row("india"),
Row("iran"),
Row("iraq"),
Row("ireland")
)
)
sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE nodictionaryCompaction " +
"OPTIONS('DELIMITER' = ',')"
)
sql("DELETE segment 0.1,3 FROM TABLE nodictionaryCompaction")
checkAnswer(
sql("select country from nodictionaryCompaction"),
Seq()
)
}
override def afterAll {
sql("drop table nodictionaryCompaction")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
CarbonProperties.getInstance().addProperty("carbon.enable.load.merge", "false")
}
}
| foryou2030/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionNoDictionaryTest.scala | Scala | apache-2.0 | 6,108 |
package test.types.inference
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers._
import tap.types._
import tap.types.Type._
import tap.types.Natives._
import tap.types.kinds._
import tap.types.inference.Substitutions._
import tap.ModuleId
import tap.types.classes.{Qual, IsIn}
import language.reflectiveCalls
class SubstitutionsTests extends FlatSpec {
behavior of "applySubst for Type"
it should "do nothing when encountering a type var that is not in the substitutions" in {
val a = TVar("a", Star)
val b = TVar("b", Star)
val x = TVar("x", Star)
applySubst(Map(a -> x), b) should be === b
}
it should "return the new type when encountering a type var is in the substitutions" in {
val a = TVar("a", Star)
val x = TVar("x", Star)
applySubst(Map(a -> x), a) should be === x
}
it should "apply substitutions to type vars in a TAp" in {
val a = TVar("a", Star)
val b = TVar("b", Star)
val x = TVar("x", Star)
val y = TVar("y", Star)
applySubst(Map(a -> x, b -> y), TAp(a, b)) should be === TAp(x, y)
}
it should "apply substitutions to type vars in a Forall" in {
val a = TVar("a", Star)
val x = TVar("x", Star)
applySubst(Map(a -> x), Forall(0, Nil, a)) should be === Forall(0, Nil, x)
}
it should "do nothing when encountering other types" in {
val a = TVar("a", Star)
val x = TVar("x", Star)
val s = Map(a -> x)
applySubst(s, TCon(ModuleId("Prelude", "Number"), Star)) should be === TCon(ModuleId("Prelude", "Number"), Star)
applySubst(s, TGen(0, 0)) should be === TGen(0, 0)
}
//-------------------------------------------------------------------------
"applySubst for IsIn" should "apply substitutions to the predicates list" in {
val a = TVar("a", Star)
val x = TVar("x", Star)
val s = Map(a -> x)
applySubst(s, IsIn(ModuleId("Test", "Test"), List(a))) should be === IsIn(ModuleId("Test", "Test"), List(x))
}
//-------------------------------------------------------------------------
"applySubst for Qual[Type]" should "apply substitutions to the predicates list and type" in {
val a = TVar("a", Star)
val x = TVar("x", Star)
val s = Map(a -> x)
applySubst(s, Qual(List(IsIn(ModuleId("Test", "Test"), List(a))), a)) should be === Qual(List(IsIn(ModuleId("Test", "Test"), List(x))), x)
}
//-------------------------------------------------------------------------
"@@" should "compose substitutions" in {
val a = TVar("a", Star)
val b = TVar("b", Star)
val c = TVar("c", Star)
val x = TVar("x", Star)
val y = TVar("y", Star)
val q = TVar("q", Star)
val s1 = Map(x -> tString, y -> q)
val s2 = Map(a -> x, b -> TAp(y, c))
val s3 = composeSubst(s1, s2)
s3 should be === Map(a -> tString, b -> TAp(q, c), x -> tString, y -> q)
}
//-------------------------------------------------------------------------
behavior of "merge"
it should "return None if the substitutions have conflicting entrie" in {
val a = TVar("a", Star)
val x = TVar("x", Star)
val y = TVar("y", Star)
val s1 = Map(a -> x)
val s2 = Map(a -> y)
merge(s1, s2) should be === None
}
it should "return the combined substitution if the substitutions have no overlap" in {
val a = TVar("a", Star)
val b = TVar("b", Star)
val x = TVar("x", Star)
val y = TVar("y", Star)
val s1 = Map(a -> x)
val s2 = Map(b -> y)
merge(s1, s2) should be === Some(Map(a -> x, b -> y))
}
it should "return the combined substitution if the substitutions equivalent entries" in {
val a = TVar("a", Star)
val b = TVar("b", Star)
val c = TVar("c", Star)
val x = TVar("x", Star)
val y = TVar("y", Star)
val z = TVar("z", Star)
val s1 = Map(a -> x, c -> z)
val s2 = Map(b -> y, c -> z)
merge(s1, s2) should be === Some(Map(a -> x, b -> y, c -> z))
}
}
| garyb/tap | src/test/scala/test/types/inference/SubstitutionsTests.scala | Scala | mit | 4,226 |
package twitter4s.request
import http.client.method.HttpMethod
import http.client.request.{CompletionEvaluation, OrElseCompletionEvaluation, HttpRequest, TrueCompletionEvaluation}
import http.client.response.{HttpHeader, HttpResponse}
import twitter4s.response.TwitterEmptyNextCursorCompletionEvaluation
case class TwitterCursoredRequest(
baseUrl: String,
relativeUrl: String,
headers: Seq[HttpHeader],
queryString: Map[String, Seq[String]],
body: Option[Array[Byte]],
  method: HttpMethod,
  paginated: Boolean,
authHeaderGen: (TwitterRequest) ⇒ TwitterAuthorizationHeader,
customCompletionEvaluator: Option[CompletionEvaluation] = None)
extends TwitterRequest {
override val completionEvaluator = if (paginated) {
customCompletionEvaluator match {
      case Some(c) ⇒ OrElseCompletionEvaluation(c, TrueCompletionEvaluation)
case _ ⇒ TwitterEmptyNextCursorCompletionEvaluation
}
} else TrueCompletionEvaluation
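  /** Builds the follow-up request for cursor-based pagination: it reads `next_cursor` from the
    * previous response body, swaps it into the `cursor` query-string parameter and regenerates
    * the Twitter authorization header for the updated request. */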
override def nextRequest(response: HttpResponse): TwitterRequest = {
val next = (response.json \\ "next_cursor").validate[Long].get
val newQS = queryString + ("cursor" → Seq(next.toString))
val requestWithnewQS = copy(queryString = newQS)
requestWithnewQS.copy(
headers = TwitterRequest.newAuthHeaderForRequest(requestWithnewQS))
}
override protected def withoutHeader(httpHeaderName: String): TwitterRequest = {
copy(headers = headers.filterNot(_.name == httpHeaderName))
}
}
| SocialOrra/social4s | twitter4s/src/main/scala/twitter4s/request/TwitterCursoredRequest.scala | Scala | apache-2.0 | 1,553 |
/*
* Copyright 2013 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.util
import java.util.{Collection => JCollection}
import org.apache.accumulo.core.data.{Key, PartialKey, Range => AccRange}
import org.apache.hadoop.io.Text
import org.locationtech.geomesa.utils.geohash.BoundingBox
import scala.collection.JavaConversions._
/**
* This object holds any Accumulo functions related to Geohashes.
*
*/
object BoundingBoxUtil {
/**
   * Returns a list of AccRange objects which may be used in an Accumulo query.
*
* @param bbox
* @return
*/
def getRanges(bbox: BoundingBox): JCollection[AccRange] =
getRanges(BoundingBox.getGeoHashesFromBoundingBox(bbox, 32), 0, Long.MaxValue / 2)
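  // Illustrative note: the box is first decomposed into geohashes (32 is the bound passed to
  // getGeoHashesFromBoundingBox) and each hash becomes an Accumulo Range over the default
  // timestamp window [0, Long.MaxValue / 2); the overloads below accept explicit time bounds.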
/**
* as above but includes time.
*
* @param geohashes
* @param startTime
* @param endTime
* @return
*/
def getRanges(geohashes: List[String], startTime: Long, endTime: Long): Iterable[AccRange] = {
def getRange(startHash: String, endHash: String) = {
val List(b, e) = getKeys(startHash, endHash)
b.setTimestamp(startTime)
e.setTimestamp(endTime)
// it expects ranges, but the precision is maxed out, so fake it
if (b.equals(e, PartialKey.ROW_COLFAM_COLQUAL)) {
new AccRange(b, new Key(e.getRow, e.getColumnFamily, new Text(e.getColumnQualifier + "~")))
} else {
new AccRange(b, e)
}
}
def getKeys(startHash: String, endHash: String) = {
List(startHash, endHash) map (h => new Key(h.substring(0, 4),
h.substring(4, 6),
h.substring(6, 8)))
}
val geoBegin = (geohashes map (geohash => geohash.padTo(8, "!").mkString))
val geoEnd = (geohashes map (geohash => geohash.padTo(8, "~").mkString))
(geoBegin, geoEnd).zipped.map((b, e) => getRange(b, e)).toIterable
}
def getRangesByRow(geohashes: List[String], startTime: Long = 0 , endTime: Long = Long.MaxValue/2): Iterable[AccRange] = {
def getRange(startHash: String, endHash: String) = {
val List(b, e) = getKeys(startHash, endHash)
b.setTimestamp(startTime)
e.setTimestamp(endTime)
// it expects ranges, but the precision is maxed out, so fake it
if (b.equals(e, PartialKey.ROW)) {
new AccRange(b, new Key(e.getRow + "~", e.getTimestamp))
} else {
new AccRange(b, e)
}
}
def getKeys(startHash: String, endHash: String) = {
List(startHash, endHash) map (h => new Key(h.substring(0, 8)))
}
val geoBegin = (geohashes map (geohash => geohash.padTo(8, "!").mkString))
val geoEnd = (geohashes map (geohash => geohash.padTo(8, "~").mkString))
(geoBegin, geoEnd).zipped.map((b, e) => getRange(b, e)).toIterable
}
implicit def string2Text(s: String): Text = new Text(s)
}
| jwkessi/geomesa | geomesa-core/src/main/scala/org/locationtech/geomesa/core/util/BoundingBoxUtil.scala | Scala | apache-2.0 | 3,340 |
package com.github.j5ik2o.dddbase.skinny
import cats.data.ReaderT
import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
import monix.eval.Task
import scalikejdbc.DBSession
trait AggregateMultiSoftDeleteFeature
extends AggregateMultiSoftDeletable[ReaderT[Task, DBSession, ?]]
with AggregateBaseReadFeature {
this: AggregateMultiWriter[ReaderT[Task, DBSession, ?]] with AggregateSingleSoftDeleteFeature =>
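  // Note: the "soft" delete below is an UPDATE rather than a physical DELETE: rows matched by
  // `byConditions(ids)` simply get their 'status attribute set to the DELETE marker.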
  override def softDeleteMulti(ids: Seq[IdType]): ReaderT[Task, DBSession, Long] = ReaderT { implicit dbSession =>
Task {
dao.updateBy(byConditions(ids)).withAttributes('status -> DELETE).toLong
}
}
}
| j5ik2o/scala-ddd-base-functional | jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateMultiSoftDeleteFeature.scala | Scala | mit | 667 |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.xml.combinators
import com.google.gdata.data.util.DateTime
import scala.xml.{Node, Elem, NamespaceBinding, NodeSeq, Null, Text, TopScope}
import java.text.ParseException
/**
* A class for XML Pickling combinators.
* <p>
* A pickler for some type A is a class that can save objects of type A to XML (pickle)
* and read XML back to objects of type A (unpickle). This class provides some basic
* building blocks (like text), and several combinators (like elem, attr,
* seq) to build more complex picklers.
* <p>
* Example:
* <xmp>
* def picklePair: Pickler[String ~ String] =
* elem("p", URI, "pair",
* elem("p", URI, "a", text) ~ elem("p", URI, "b", text))
*
* val input =
* <p:pair xmlns:p="testing-uri">
* <p:a>alfa</p:a>
* <p:b>omega</p:b>
* </p:pair>
* </xmp>
* picklePair will be able to pickle and unpickle pairs of Strings that look like the
* input.
*
* @author Iulian Dragos ([email protected])
* @see <a href="http://www.fh-wedel.de/~si/HXmlToolbox/">Haskell XML Toolbox</a>,
* @see Andrew Kennedy's
* <a href="http://research.microsoft.com/%7Eakenn/fun/">Pickler Combinators</a>,
* @see <a
* href="http://www.scala-lang.org/docu/files/api/scala/util/parsing/combinator/Parsers.html">
* Scala combinator parsers</a>
*/
object Picklers extends AnyRef with TupleToPairFunctions {
/**
* The state of the pickler is a collection of attributes, a list of
* nodes (which might be Text nodes), and namespace bindings.
*/
type St = XmlInputStore
def emptyStore: XmlOutputStore = PlainOutputStore.empty
/**
   * A class representing pickling results. It encapsulates the result and the state of
* the pickler. It can be either @see Success or @see Failure.
*/
sealed abstract class PicklerResult[+A] {
/** Apply 'f' when this result is successful. */
def andThen[B](f: (A, St) => PicklerResult[B]): PicklerResult[B]
/** Apply 'f' when this result is failure. */
def orElse[B >: A](f: => PicklerResult[B]): PicklerResult[B]
/** Is this result a successful parse result? */
def isSuccessful: Boolean
/** Retrieve the result in case of success. */
def get: A
}
/** A successful parse. */
case class Success[+A](v: A, in: St) extends PicklerResult[A] {
def andThen[B](f: (A, St) => PicklerResult[B]): PicklerResult[B] = f(v, in)
def orElse[B >: A](f: => PicklerResult[B]): PicklerResult[B] = this
def isSuccessful = true
def get = v
}
/**
* Parsing failed. There are two possible subclasses, Failure and Error. Failure is
* recoverable, while Error makes the whole parsing fail. To make a pickler return
* errors, apply 'commit' to it. All 'elem' picklers will commit on their contents,
* so that an error parsing the contents of an element will stop everything. This
* is almost always the best thing to do (and yields the best error messages).
*/
abstract class NoSuccess(val msg: String, val in: St) extends PicklerResult[Nothing] {
def andThen[B](f: (Nothing, St) => PicklerResult[B]) = this
def orElse[B >: Nothing](f: => PicklerResult[B]): PicklerResult[B] = f
def isSuccessful = false
def get = throw new NoSuchElementException("Unpickling failed.")
val prefix: String
override def toString = prefix + msg + " with input: " + in
}
/** A Failure means the parsing has failed, but alternatives can still be tried. */
case class Failure(m: String, i: St) extends NoSuccess(m, i) {
override val prefix = "Failure: "
}
/** An Error is a failure which causes the entire parsing to fail (no alternatives are tried). */
case class Error(m: String, i: St) extends NoSuccess(m, i) {
override val prefix = "Error: "
}
/** Pickler for type A */
abstract class Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore
def unpickle(in: St): PicklerResult[A]
/** Sequential composition. This pickler will accept an A and then a B. */
def ~[B](pb: => Pickler[B]): Pickler[~[A, B]] =
seq(this, pb)
}
/** A basic pickler that serializes a value to a string and back. */
def text: Pickler[String] = new Pickler[String] {
def pickle(v: String, in: XmlOutputStore): XmlOutputStore =
in.addText(v)
def unpickle(in: St): PicklerResult[String] = {
in.acceptText match {
case (Some(Text(content)), in1) => Success(content, in1)
case (None, in1) => Failure("Text node expected", in1)
}
}
}
/** A basic pickler that serializes an integer value to a string and back. */
def intVal: Pickler[Int] = {
def parseInt(literal: String, in: St): PicklerResult[Int] = try {
Success(literal.toInt, in)
} catch {
case e: NumberFormatException => Failure("Integer literal expected", in)
}
filter(text, parseInt, String.valueOf(_))
}
/**
* A basic pickler for boolean values. Everything equal to the string 'true' is
* unpickled to the boolean value <code>true</code>, everything else to <code>false</code>.
* It is not case sensitive.
*/
def boolVal: Pickler[Boolean] =
wrap (text) (java.lang.Boolean.valueOf(_).booleanValue) (String.valueOf(_))
/**
* A basic pickler for floating point values. It accepts double values as specified by the
* Scala and Java language.
*
* @see java.lang.Double.valueOf for the exact grammar.
*/
def doubleVal: Pickler[Double] = {
def parseDouble(literal: String, in: St): PicklerResult[Double] = try {
Success(literal.toDouble, in)
} catch {
case e: NumberFormatException => Failure("Floating point literal expected", in)
}
filter(text, parseDouble, String.valueOf(_))
}
/**
* Pickler for a list of elements. It unpickles a list of elements separated by 'sep'.
* It makes little sense to use this pickler on elem or attr picklers (use '~' and 'rep'
   * instead.) For an example of how this is used, see MediaRss, which defines a comma-separated
* list of categories as the contents of an element.
* <p/>
*/
def list[A](sep: Char, pa: => Pickler[A]): Pickler[List[A]] = {
def parseList(str: String, unused: St): PicklerResult[List[A]] = {
val elems = str.split(sep).toList.map(_.trim).reverse
elems.foldLeft(Success(Nil, LinearStore.empty): PicklerResult[List[A]]) { (result, e) =>
result andThen { (es, in) => pa.unpickle(LinearStore(Null, List(Text(e)), TopScope)) match {
case Success(v, in1) => Success(v :: es, in1)
case f: NoSuccess => f
}}
}
}
def pickleList(es: List[A]): String = {
val store =
es.foldLeft(PlainOutputStore.empty: XmlOutputStore) { (in, e) => pa.pickle(e, in) }
store.nodes.mkString("", sep.toString, "")
}
filter(text, parseList, pickleList)
}
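  // Hedged example (not taken from the original sources): a comma-separated list of integers in
  // an element body, e.g. <p:scores>1, 2, 3</p:scores>, could be pickled/unpickled with
  //   elem("p", "testing-uri", "scores", list(',', intVal))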
/**
* A pickler for date/time in RFC 3339 format. It handles dates that look like
* <code>2008-02-15T16:16:02+01:00</code>. The time offset can be replaced by Z
* (zulu time) when it is zero (UTC time).
*
* @param allowDateOnly When true, accepts date component alone.
* @see http://atomenabled.org/developers/syndication/atom-format-spec.php#date.constructs
*/
def dateTime(allowDateOnly: Boolean): Pickler[DateTime] = new Pickler[DateTime] {
def pickle(v: DateTime, in: XmlOutputStore): XmlOutputStore =
in.addText(v.toString)
def unpickle(in:St): PicklerResult[DateTime] =
in.acceptText match {
case (Some(Text(str)), in1) =>
try {
if (allowDateOnly)
Success(DateTime.parseDateOrDateTime(str), in1)
else
Success(DateTime.parseDateTime(str), in1)
} catch {
case e: ParseException => Failure("Invalid date: " + e.getMessage, in1)
}
case (None, in1) =>
Failure("Expected date in textual format", in1)
}
}
/**
* A pickler for date/time in RFC 3339 format. It handles dates that look like
* <code>2008-02-15T16:16:02+01:00</code>. The time offset can be replaced by Z
* (zulu time) when it is zero (UTC time). The time component is required.
*
* @see http://atomenabled.org/developers/syndication/atom-format-spec.php#date.constructs
*/
def dateTime: Pickler[DateTime] = dateTime(false)
/**
* Apply a pair of functions on the result of pa. Unlike 'wrap', 'f' may cause the
* pickler to fail.
*
* For an example, see the implementation of intVal.
*/
def filter[A, B](pa: => Pickler[A], f: (A, St) => PicklerResult[B], g: B => A): Pickler[B] =
new Pickler[B] {
def pickle(v: B, in: XmlOutputStore): XmlOutputStore =
pa.pickle(g(v), in)
def unpickle(in: St): PicklerResult[B] =
pa.unpickle(in) andThen { (v, in1) => f(v, in) }
}
/**
* A constant pickler: it always pickles 'v'. Unpickle fails when the value that is found
* is not equal to 'v'.
*/
def const[A](pa: => Pickler[A], v: A): Pickler[A] = new Pickler[A] {
def pickle(ignored: A, in: XmlOutputStore) = pa.pickle(v, in)
def unpickle(in: St): PicklerResult[A] = {
pa.unpickle(in) andThen {(v1, in1) =>
if (v == v1)
Success(v1, in1)
else
Failure("Expected '" + v + "', but " + v1 + " found.", in1)
}
}
}
/** A pickler for default values. If 'pa' fails, returns 'v' instead. */
def default[A](pa: => Pickler[A], v: A): Pickler[A] =
wrap (opt(pa)) ({
case Some(v1) => v1
case None => v
}) (v => Some(v))
/** A marker pickler: 'true' when the unpickler succeeds, false otherwise. */
def marker(pa: => Pickler[String]): Pickler[Boolean] =
wrap (opt(pa)) {
case Some(_) => true
case None => false
} (b => if (b) Some("") else None)
/** Convenience method for creating an attribute within a namepace. */
def attr[A](label: String, pa: => Pickler[A], ns: (String, String)): Pickler[A] =
attr(ns._1, ns._2, label, pa)
/**
* Wrap a parser into a prefixed attribute. The attribute will contain all the
* content produced by 'pa' in the 'nodes' field.
*/
def attr[A](pre: String, uri: String, key: String, pa: => Pickler[A]) = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore) = {
in.addNamespace(pre, uri).addAttribute(pre, key, pa.pickle(v, emptyStore).nodes.text)
}
def unpickle(in: St): PicklerResult[A] = {
in.acceptAttr(key, uri) match {
case (Some(nodes), in1) =>
pa.unpickle(LinearStore(Null, nodes.toList, in.ns)) andThen { (v, in2) => Success(v, in1) }
case (None, in1) =>
Failure("Expected attribute " + pre + ":" + key + " in " + uri, in)
}
}
}
/**
* A pickler for unprefixed attributes. Such attributes have no namespace.
*/
def attr[A](label: String, pa: => Pickler[A]): Pickler[A] = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore) =
in.addAttribute(label, pa.pickle(v, emptyStore).nodes.text)
def unpickle(in: St): PicklerResult[A] = {
in.acceptAttr(label) match {
case (Some(nodes), in1) =>
pa.unpickle(LinearStore(Null, nodes.toList, in.ns)) andThen { (v, in2) => Success(v, in1) }
case (None, in1) =>
Failure("Expected unprefixed attribute " + label, in)
}
}
}
/**
   * Convenience method for creating an element with an implicit namespace. Contents of
   * this element are committed (this parser is not allowed to recover from failures in
   * parsing its content).
*/
def elem[A](label: String, pa: => Pickler[A])(implicit ns: (String, String)): Pickler[A] =
elem(ns._1, ns._2, label, commit(pa))
/** Wrap a pickler into an element. */
def elem[A](pre: String, uri: String, label: String, pa: => Pickler[A]) = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore = {
val ns1 = if (in.ns.getURI(pre) == uri) in.ns else new NamespaceBinding(pre, uri, in.ns)
val in1 = pa.pickle(v, PlainOutputStore(ns1))
in.addNode(Elem(pre, label, in1.attrs, in1.ns, in1.nodes:_*))
}
def unpickle(in: St): PicklerResult[A] = {
in.acceptElem(label, uri) match {
case (Some(e: Elem), in1) =>
pa.unpickle(LinearStore.enterElem(e)) andThen { (v, in2) =>
Success(v, in1)
}
case _ =>
Failure("Expected a <" + pre + ":" + label + "> in " + uri, in)
}
}
}
/** Sequential composition of two picklers */
def seq[A, B](pa: => Pickler[A], pb: => Pickler[B]): Pickler[~[A, B]] = new Pickler[~[A, B]] {
def pickle(v: A ~ B, in: XmlOutputStore): XmlOutputStore =
pb.pickle(v._2, pa.pickle(v._1, in))
def unpickle(in: St): PicklerResult[~[A, B]] = {
pa.unpickle(in) match {
case Success(va, in1) =>
pb.unpickle(in1) match {
case Success(vb, in2) => Success(new ~(va, vb), in2)
case f: NoSuccess => f
}
case f: NoSuccess => f
}
}
}
/**
* Convenience method for creating an element with interleaved elements. Elements enclosed
* by the given element label can be parsed in any order. Any unknown elements are ignored.
* <p/>
* Example:
   * <code>interleaved("entry", elem("link", text) ~ elem("author", text))</code> will
   * parse an element entry with two subelements, link and author, in any order, with
* possibly other elements between them.
*/
def interleaved[A](label: String, pa: => Pickler[A])(implicit ns: (String, String)): Pickler[A] =
elem(label, interleaved(pa))(ns)
/**
* Transform the given parser into a parser that accepts permutations of its containing
* sequences. That is, interleaved(a ~ b ~ c) will parse a, b, c in any order (with possibly
   * other elements in between). It should not be called directly; instead use the
   * interleaved overload which wraps an element around the interleaved elements.
*/
def interleaved[A](pa: => Pickler[A]): Pickler[A] = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore = pa.pickle(v, in)
def unpickle(in: St): PicklerResult[A] =
pa.unpickle(in.randomAccessMode) andThen { (v, in1) =>
Success(v, in1.linearAccessMode)
}
}
/**
* A commit parser. Failures are transformed to errors, so alternatives (when combined with
* other parsers) are not tried.
*/
def commit[A](pa: => Pickler[A]): Pickler[A] = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore = pa.pickle(v, in)
def unpickle(in: St): PicklerResult[A] = pa.unpickle(in) match {
case s: Success[_] => s
case Failure(msg, in1) => Error(msg, in1)
case e: Error => e
}
}
/**
* Return a pickler that always pickles the first value, but unpickles using the second when the
* first one fails.
*/
def or[A](pa: => Pickler[A], qa: => Pickler[A]): Pickler[A] = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore =
pa.pickle(v, in)
def unpickle(in: St): PicklerResult[A] =
pa.unpickle(in) match {
case s: Success[_] => s
case f: Failure => qa.unpickle(in)
case e: Error => e
}
}
/**
* An optional pickler. It pickles v when it is there, and leaves the input unchanged when empty.
* It unpickles the value when the underlying parser succeeds, and returns None otherwise.
*/
def opt[A](pa: => Pickler[A]) = new Pickler[Option[A]] {
def pickle(v: Option[A], in: XmlOutputStore) = v match {
case Some(v) => pa.pickle(v, in)
case None => in
}
def unpickle(in: St): PicklerResult[Option[A]] =
pa.unpickle(in) andThen {(v, in1) => Success(Some(v), in1) } orElse Success(None, in)
}
  /** A repetition pickler. It applies 'pa' repeatedly until it fails. */
def rep[A](pa: => Pickler[A]): Pickler[List[A]] = new Pickler[List[A]] {
def pickle(vs: List[A], in: XmlOutputStore): XmlOutputStore = vs match {
case v :: vs => pickle(vs, pa.pickle(v, in))
case Nil => in
}
def unpickle(in: St): PicklerResult[List[A]] = {
val res1 = pa.unpickle(in).andThen { (v: A, in1: St) =>
val Success(vs, in2) = unpickle(in1)
Success(v :: vs, in2)
}
res1 match {
case s: Success[_] => s
case f: Failure => Success(Nil, in)
case e: Error => e
}
}
}
/**
* Runs 'pb' unpickler on the first element that 'pa' successfully parses. It
* is more general than 'interleaved', which uses only the element name to decide
* the input on which to run a pickler. 'pa' can be arbitrarily complex.
*
* Example:
* when(elem("feedLink", const(attr("rel", "#kinds"), rel)), kindsPickler)
*
* will look for the first 'feedLink' element with an attribute equal to '#kinds'
* and then run 'kindsPickler' on that element.
*/
def when[A, B](pa: => Pickler[A], pb: => Pickler[B]): Pickler[B] = new Pickler[B] {
def pickle(v: B, in: XmlOutputStore) = pb.pickle(v, in)
def unpickle(in: St) = {
var lastFailed: Option[NoSuccess] = None
val target = in.nodes find {
case e: Elem =>
pa.unpickle(LinearStore.fromElem(e)) match {
case _: Success[_] => true
case f: NoSuccess => lastFailed = Some(f); false
}
case _ => false
}
target match {
case Some(e: Elem) =>
pb.unpickle(LinearStore.fromElem(e)) match {
case Success(v1, in1) =>
Success(v1, in.mkState(in.attrs, in.nodes.toList.filterNot(_ == e), in.ns))
case f: NoSuccess =>
Failure(f.msg, in)
}
case None =>
if (lastFailed.isDefined)
lastFailed.get
else
Failure("Expected at least one element", in)
}
}
}
/** Wrap a pair of functions around a given pickler */
def wrap[A, B](pb: => Pickler[B])(g: B => A)(f: A => B): Pickler[A] = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore =
pb.pickle(f(v), in)
def unpickle(in: St): PicklerResult[A] =
pb.unpickle(in) match {
case Success(vb, in1) => Success(g(vb), in1)
case f: NoSuccess => f
}
}
/* // Waiting for bug fix in 2.7.0
def wrapCaseClass[A, B](pa: => Pickler[A])(f: A => B)(g: B => Some[A]): Pickler[B] =
wrap(pa) (f) { x => g(x).get }
*/
/** Collect all unconsumed input into a XmlStore. */
def collect: Pickler[XmlStore] = new Pickler[XmlStore] {
def pickle(v: XmlStore, in: XmlOutputStore) = in.addStore(v)
def unpickle(in: St) = Success(in, LinearStore.empty)
}
/** An xml pickler that collects all remaining XML nodes. */
def xml: Pickler[NodeSeq] = new Pickler[NodeSeq] {
def pickle(v: NodeSeq, in: XmlOutputStore) = v.foldLeft(in) (_.addNode(_))
def unpickle(in: St) = Success(in.nodes, LinearStore.empty)
}
/**
* Apply 'pb' on the state stored in the value unpickled by 'pa'.
* It is used for after-the-fact extension. The type 'A' has to be an instance of HasStore.
* The pickler will apply 'pb' on HasStore.store. The assumption is that 'pa' stores in there
* the unconsumed input.
*
* @see makeExtensible
*/
def extend[A <: HasStore, B](pa: => Pickler[A], pb: => Pickler[B]) = new Pickler[A ~ B] {
def pickle(v: A ~ B, in: XmlOutputStore): XmlOutputStore = {
val in1 = pb.pickle(v._2, PlainOutputStore.empty)
v._1.store = in1
pa.pickle(v._1, in)
}
def unpickle(in: St): PicklerResult[A ~ B] = {
pa.unpickle(in) andThen { (a, in1) =>
pb.unpickle(LinearStore(a.store)) andThen { (b, in2) =>
Success(new ~(a, b), in1)
}
}
}
}
/**
* Make a given pickler store unconsumed input for later use. The given type should
* mix in HasStore. This pickler will store unconsumed input in the HasStore instance. Use
* 'extend' to apply another pickler on the stored input.
*
* <code>makeExtensible(Person.pickler)</code> will make a Person pickler ready for future
   * extensions by keeping around all leftover input.
   *
   * @see 'extend'.
*/
def makeExtensible[A <: HasStore](pa: => Pickler[A]): Pickler[A] =
wrap (pa ~ collect) { case a ~ ext => a.store = ext; a } { a => new ~ (a, a.store) }
/** A logging combinator */
def logged[A](name: String, pa: => Pickler[A]): Pickler[A] = new Pickler[A] {
def pickle(v: A, in: XmlOutputStore): XmlOutputStore = {
println("pickling [" + name + "] " + v + " at: " + in)
val res = pa.pickle(v, in)
println("got back: " + res)
res
}
def unpickle(in: St) = {
println("unpickling " + name + " at: " + in)
val res = pa.unpickle(in)
println("got back: " + res)
res
}
}
}
/** Convenience class to hold two values (it has lighter syntax than pairs). */
final case class ~[+A, +B](_1: A, _2: B) {
override def toString = "~(" + _1 + ", " + _2 + ")"
/** Append another value to this pair. */
def ~[C](c: C) = new ~(this, c)
}
| mjanson/gdata-scala-client | src/com/google/xml/combinators/Picklers.scala | Scala | apache-2.0 | 21,920 |
import sbt._
import sbt.Keys._
object UnfilteredMMailSettings extends WithResolvers {
val nameM = "unfilteredM-mail"
val versionM = "0.2-SNAPSHOT"
val scalaVersionM = "2.10.2"
val organizationM = "com.logikujo"
val dependenciesBuild = Seq(
"org.clapper" % "avsl_2.10" % "1.0.1",
"com.github.kxbmap" %% "configs" % "0.2.1",
"org.apache.commons" % "commons-email" % "1.3.2",
"org.scalaz" %% "scalaz-core" % "7.0.6"
)
val settings = Defaults.defaultSettings ++ Seq(
name := nameM,
version := versionM,
scalaVersion := scalaVersionM,
organization := organizationM,
resolvers ++= resolversBuild,
scalacOptions <<= scalaVersion map { sv: String =>
if (sv.startsWith("2.10."))
Seq("-deprecation", "-unchecked", "-feature", "-language:postfixOps", "-language:implicitConversions")
else
Seq("-deprecation", "-unchecked")
}
)
}
| AitorATuin/UnfilteredM | project/unfilteredMMailBuild.scala | Scala | mit | 908 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.{MockFrs102AccountsRetriever, AccountsPreviousPeriodValidationFixture, AccountsMoneyValidationFixture}
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
class AC31Spec extends AccountsMoneyValidationFixture[Frs102AccountsBoxRetriever] with AccountsPreviousPeriodValidationFixture[Frs102AccountsBoxRetriever] with MockFrs102AccountsRetriever {
testAccountsMoneyValidation("AC31", AC31.apply)
testAccountsPreviousPoAValidation("AC31", AC31.apply)
}
| liquidarmour/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC31Spec.scala | Scala | apache-2.0 | 1,161 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.comm
import java.util.UUID
import com.ibm.spark.kernel.protocol.v5._
import com.ibm.spark.kernel.protocol.v5.content._
import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
import play.api.libs.json.Json
import scala.concurrent.duration._
import akka.actor.{ActorSelection, ActorSystem}
import akka.testkit.{TestProbe, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import org.mockito.Mockito._
import org.mockito.Matchers._
object KernelCommWriterSpec {
val config ="""
akka {
loglevel = "WARNING"
}"""
}
class KernelCommWriterSpec extends TestKit(
ActorSystem("KernelCommWriterSpec",
ConfigFactory.parseString(KernelCommWriterSpec.config))
) with FunSpecLike with Matchers with BeforeAndAfter with MockitoSugar
{
private val commId = UUID.randomUUID().toString
private var kernelCommWriter: KernelCommWriter = _
private var kernelMessageBuilder: KMBuilder = _
private var actorLoader: ActorLoader = _
private var kernelMessageRelayProbe: TestProbe = _
/**
* Retrieves the next message available.
*
* @return The KernelMessage instance (or an error if timed out)
*/
private def getNextMessage =
kernelMessageRelayProbe.receiveOne(200.milliseconds)
.asInstanceOf[KernelMessage]
/**
* Retrieves the next message available and returns its type.
*
* @return The type of the message (pulled from message header)
*/
private def getNextMessageType = getNextMessage.header.msg_type
/**
* Retrieves the next message available and parses the content string.
*
* @tparam T The type to coerce the content string into
*
* @return The resulting KernelMessageContent instance
*/
private def getNextMessageContents[T <: KernelMessageContent]
(implicit fjs: play.api.libs.json.Reads[T], mf: Manifest[T]) =
{
val receivedMessage = getNextMessage
Json.parse(receivedMessage.contentString).as[T]
}
before {
kernelMessageBuilder = spy(KMBuilder())
// Construct path for kernel message relay
actorLoader = mock[ActorLoader]
kernelMessageRelayProbe = TestProbe()
val kernelMessageRelaySelection: ActorSelection =
system.actorSelection(kernelMessageRelayProbe.ref.path.toString)
doReturn(kernelMessageRelaySelection)
.when(actorLoader).load(SystemActorType.KernelMessageRelay)
// Create a new writer to use for testing
kernelCommWriter = new KernelCommWriter(actorLoader, kernelMessageBuilder, commId)
}
describe("KernelCommWriter") {
describe("#writeOpen") {
it("should send a comm_open message to the relay") {
kernelCommWriter.writeOpen(anyString())
getNextMessageType should be (CommOpen.toTypeString)
}
it("should include the comm_id in the message") {
val expected = commId
kernelCommWriter.writeOpen(anyString())
val actual = getNextMessageContents[CommOpen].comm_id
actual should be (expected)
}
it("should include the target name in the message") {
val expected = "<TARGET_NAME>"
kernelCommWriter.writeOpen(expected)
val actual = getNextMessageContents[CommOpen].target_name
actual should be (expected)
}
it("should provide empty data in the message if no data is provided") {
val expected = Data()
kernelCommWriter.writeOpen(anyString())
val actual = getNextMessageContents[CommOpen].data
actual should be (expected)
}
it("should include the data in the message") {
val expected = Data("some key" -> "some value")
kernelCommWriter.writeOpen(anyString(), expected)
val actual = getNextMessageContents[CommOpen].data
actual should be (expected)
}
}
describe("#writeMsg") {
it("should send a comm_msg message to the relay") {
kernelCommWriter.writeMsg(Data())
getNextMessageType should be (CommMsg.toTypeString)
}
it("should include the comm_id in the message") {
val expected = commId
kernelCommWriter.writeMsg(Data())
val actual = getNextMessageContents[CommMsg].comm_id
actual should be (expected)
}
it("should fail a require if the data is null") {
intercept[IllegalArgumentException] {
kernelCommWriter.writeMsg(null)
}
}
it("should include the data in the message") {
val expected = Data("some key" -> "some value")
kernelCommWriter.writeMsg(expected)
val actual = getNextMessageContents[CommMsg].data
actual should be (expected)
}
}
describe("#writeClose") {
it("should send a comm_close message to the relay") {
kernelCommWriter.writeClose()
getNextMessageType should be (CommClose.toTypeString)
}
it("should include the comm_id in the message") {
val expected = commId
kernelCommWriter.writeClose()
val actual = getNextMessageContents[CommClose].comm_id
actual should be (expected)
}
it("should provide empty data in the message if no data is provided") {
val expected = Data()
kernelCommWriter.writeClose()
val actual = getNextMessageContents[CommClose].data
actual should be (expected)
}
it("should include the data in the message") {
val expected = Data("some key" -> "some value")
kernelCommWriter.writeClose(expected)
val actual = getNextMessageContents[CommClose].data
actual should be (expected)
}
}
describe("#write") {
it("should send a comm_msg message to the relay") {
kernelCommWriter.write(Array('a'), 0, 1)
getNextMessageType should be (CommMsg.toTypeString)
}
it("should include the comm_id in the message") {
val expected = commId
kernelCommWriter.write(Array('a'), 0, 1)
val actual = getNextMessageContents[CommMsg].comm_id
actual should be (expected)
}
it("should package the string as part of the data with a 'message' key") {
val expected = Data("message" -> "a")
kernelCommWriter.write(Array('a'), 0, 1)
val actual = getNextMessageContents[CommMsg].data
actual should be (expected)
}
}
describe("#flush") {
it("should do nothing") {
// TODO: Is this test necessary? It does nothing.
kernelCommWriter.flush()
}
}
describe("#close") {
it("should send a comm_close message to the relay") {
kernelCommWriter.close()
getNextMessageType should be (CommClose.toTypeString)
}
it("should include the comm_id in the message") {
val expected = commId
kernelCommWriter.close()
val actual = getNextMessageContents[CommClose].comm_id
actual should be (expected)
}
it("should provide empty data in the message") {
val expected = Data()
kernelCommWriter.close()
val actual = getNextMessageContents[CommClose].data
actual should be (expected)
}
}
}
}
| malcolmgreaves/spark-kernel | kernel/src/test/scala/com/ibm/spark/comm/KernelCommWriterSpec.scala | Scala | apache-2.0 | 7,825 |
package com.cloudray.scalapress.security
import org.scalatest.{FlatSpec, OneInstancePerTest}
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito
import javax.servlet.http.HttpServletRequest
import com.cloudray.scalapress.theme.ThemeService
import com.cloudray.scalapress.account.{AccountPluginDao, AccountPlugin}
import com.cloudray.scalapress.framework.ScalapressContext
/** @author Stephen Samuel */
class LoginControllerTest extends FlatSpec with MockitoSugar with OneInstancePerTest {
val themeService = mock[ThemeService]
val context = new ScalapressContext
val accountPluginDao = mock[AccountPluginDao]
val plugin = new AccountPlugin
Mockito.when(accountPluginDao.get).thenReturn(plugin)
val req = mock[HttpServletRequest]
val controller = new LoginController(themeService, context, accountPluginDao)
"a login controller" should "show error message when credentials are incorrect" in {
Mockito.when(req.getParameter("login_error")).thenReturn("1")
val page = controller.weblogin(req)
assert(page.render.contains(LoginController.CREDENTIALS_ERROR_MSG))
}
it should "show no error message when error parameter is not set" in {
Mockito.when(req.getParameter("login_error")).thenReturn("0")
val page = controller.weblogin(req)
assert(!page.render.contains(LoginController.CREDENTIALS_ERROR_MSG))
}
it should "display login header when set" in {
plugin.loginPageHeader = "some super header"
val page = controller.weblogin(req)
assert(page.render.contains("some super header"))
}
it should "display login footer when set" in {
plugin.loginPageFooter = "some super footer"
val page = controller.weblogin(req)
assert(page.render.contains("some super footer"))
}
}
| vidyacraghav/scalapress | src/test/scala/com/cloudray/scalapress/security/LoginControllerTest.scala | Scala | apache-2.0 | 1,763 |
package microtools.patch
import play.api.libs.json._
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.input.CharSequenceReader
/**
* RFC 6901 json pointer to JsPath
*/
object JsonPointer {
private object PathParser extends Parsers {
type Elem = Char
private val digit: Parser[Char] = elem("digit", _.isDigit)
private val escapedSlash: Parser[Char] = '~' ~ '1' ^^ (_ => '/')
private val escapedTilde: Parser[Char] = '~' ~ '0' ^^ (_ => '~')
private val notSeparator: Parser[Char] = elem("notSeparator", _ != '/')
private val number: Parser[PathNode] = digit.+ ^^ (chs => IdxPathNode(chs.mkString.toInt))
private val string: Parser[PathNode] = (escapedSlash | escapedTilde | notSeparator).* ^^ (
chs => KeyPathNode(chs.mkString)
)
private val part: Parser[PathNode] = '/' ~> (string ||| number)
private val pointer: Parser[JsPath] = part.* ^^ { parts =>
JsPath(parts)
}
def parser: Parser[JsPath] = phrase(pointer)
def apply(pointer: String): Either[String, JsPath] =
parser(new CharSequenceReader(pointer)) match {
case Success(path, _) => Right(path)
case NoSuccess(msg, _) => Left(msg)
case Failure(_, _) => Right(JsPath)
case Error(_, _) => Right(JsPath)
}
}
private val jsPathReads: Reads[JsPath] = implicitly[Reads[String]].flatMap { pointer =>
PathParser(pointer) match {
case Right(path) => Reads.pure(path)
case Left(msg) => Reads.apply(_ => JsError(s"Invalid json pointer: $msg"))
}
}
private val jsPathWrites: Writes[JsPath] = {
def escape(node: String) =
node.replace("~", "~0").replace("/", "~1")
Writes[JsPath](
path =>
JsString(path.path.map {
case KeyPathNode(key) => "/" + escape(key)
case IdxPathNode(idx) => "/" + idx.toString
case RecursiveSearch(key) => "/" + escape(key)
}.mkString)
)
}
implicit val jsPathFormat: Format[JsPath] = Format(jsPathReads, jsPathWrites)
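  // Illustrative round trip (using play-json's __ path syntax): writing __ \ "a/b" with
  // jsPathFormat yields JsString("/a~1b"), and reading JsString("/a~1b") back recovers the same
  // path, since '/' and '~' are escaped as "~1" and "~0" respectively, per RFC 6901.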
}
| 21re/play-micro-tools | src/main/scala/microtools/patch/JsonPointer.scala | Scala | mit | 2,065 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.rest.play.controllers
import org.apache.s2graph.core.rest.RestHandler
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
object ExperimentController extends Controller {
private val rest: RestHandler = org.apache.s2graph.rest.play.Global.s2rest
import ApplicationController._
def experiments() = experiment("", "", "")
def experiment(accessToken: String, experimentName: String, uuid: String) = withHeaderAsync(jsonText) { request =>
val body = request.body
val res = rest.doPost(request.uri, body, request.headers)
res.body.map { case js =>
val headers = res.headers :+ ("result_size" -> rest.calcSize(js).toString)
jsonResponse(js, headers: _*)
} recoverWith ApplicationController.requestFallback(body)
}
}
| daewon/incubator-s2graph | s2rest_play/app/org/apache/s2graph/rest/play/controllers/ExperimentController.scala | Scala | apache-2.0 | 1,619 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.config
import net.lshift.diffa.kernel.util.SessionHelper._
import net.lshift.diffa.kernel.frontend.FrontendConversions._
import org.hibernate.{Session, SessionFactory}
import scala.collection.JavaConversions._
import net.lshift.diffa.kernel.frontend._
import net.lshift.diffa.kernel.hooks.HookManager
import net.sf.ehcache.CacheManager
import net.lshift.diffa.kernel.util.{CacheWrapper, HibernateQueryUtils}
class HibernateDomainConfigStore(val sessionFactory: SessionFactory,
pairCache:PairCache,
hookManager:HookManager,
cacheManager:CacheManager)
extends DomainConfigStore
with HibernateQueryUtils {
val hook = hookManager.createDifferencePartitioningHook(sessionFactory)
private val cachedConfigVersions = new CacheWrapper[String,Int]("configVersions", cacheManager)
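  // Descriptive note: the mutating operations below share a pattern: they run inside
  // withVersionUpgrade (which, judging by its name, bumps the domain's config version),
  // invalidate the pair cache for the affected domain, and reconcile child view definitions by
  // deleting views missing from the incoming definition and upserting the remainder.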
def createOrUpdateEndpoint(domainName:String, e: EndpointDef) : Endpoint = withVersionUpgrade(domainName, s => {
pairCache.invalidate(domainName)
val domain = getDomain(domainName)
val endpoint = fromEndpointDef(domain, e)
s.saveOrUpdate(endpoint)
// Update the view definitions
val existingViews = listEndpointViews(s, domainName, e.name)
val viewsToRemove = existingViews.filter(existing => e.views.find(v => v.name == existing.name).isEmpty)
viewsToRemove.foreach(r => s.delete(r))
e.views.foreach(v => s.saveOrUpdate(fromEndpointViewDef(endpoint, v)))
endpoint
})
def deleteEndpoint(domain:String, name: String): Unit = withVersionUpgrade(domain, s => {
pairCache.invalidate(domain)
val endpoint = getEndpoint(s, domain, name)
// Remove all pairs that reference the endpoint
s.createQuery("FROM DiffaPair WHERE upstream = :endpoint OR downstream = :endpoint").
setString("endpoint", name).list.foreach(p => deletePairInSession(s, domain, p.asInstanceOf[DiffaPair]))
endpoint.views.foreach(s.delete(_))
s.delete(endpoint)
})
def listEndpoints(domain:String): Seq[EndpointDef] = sessionFactory.withSession(s => {
listQuery[Endpoint](s, "endpointsByDomain", Map("domain_name" -> domain)).map(toEndpointDef(_))
})
def createOrUpdateRepairAction(domain:String, a: RepairActionDef) = sessionFactory.withSession(s => {
val pair = getPair(s, domain, a.pair)
s.saveOrUpdate(fromRepairActionDef(pair, a))
})
def deleteRepairAction(domain:String, name: String, pairKey: String) {
sessionFactory.withSession(s => {
val action = getRepairAction(s, domain, name, pairKey)
s.delete(action)
})
}
def createOrUpdatePair(domain:String, p: PairDef): Unit = {
withVersionUpgrade(domain, s => {
p.validate()
pairCache.invalidate(domain)
val dom = getDomain(domain)
val toUpdate = DiffaPair(p.key, dom, p.upstreamName, p.downstreamName, p.versionPolicyName, p.matchingTimeout,
p.scanCronSpec, p.allowManualScans, eventsToLog = p.eventsToLog, maxExplainFiles = p.maxExplainFiles)
s.saveOrUpdate(toUpdate)
// Update the view definitions
val existingViews = listPairViews(s, domain, p.key)
val viewsToRemove = existingViews.filter(existing => p.views.find(v => v.name == existing.name).isEmpty)
viewsToRemove.foreach(r => s.delete(r))
p.views.foreach(v => s.saveOrUpdate(fromPairViewDef(toUpdate, v)))
})
hook.pairCreated(domain, p.key)
}
def deletePair(domain:String, key: String) {
withVersionUpgrade(domain, s => {
pairCache.invalidate(domain)
val pair = getPair(s, domain, key)
deletePairInSession(s, domain, pair)
})
hook.pairRemoved(domain, key)
}
// TODO This read through cache should not be necessary when the 2L cache miss issue is resolved
def listPairs(domain:String) = pairCache.readThrough(domain, () => listPairsFromPersistence(domain))
def listPairsFromPersistence(domain:String) = sessionFactory.withSession(s => listQuery[DiffaPair](s, "pairsByDomain", Map("domain_name" -> domain)).map(toPairDef(_)))
def listPairsForEndpoint(domain:String, endpoint:String) = sessionFactory.withSession(s =>
listQuery[DiffaPair](s, "pairsByEndpoint", Map("domain_name" -> domain, "endpoint_name" -> endpoint)))
def listRepairActionsForPair(domain:String, pairKey: String) : Seq[RepairActionDef] =
sessionFactory.withSession(s => {
getRepairActionsInPair(s, domain, pairKey).map(toRepairActionDef(_))
})
def listEscalations(domain:String) = sessionFactory.withSession(s => {
listQuery[Escalation](s, "escalationsByDomain", Map("domain_name" -> domain)).map(toEscalationDef(_))
})
def deleteEscalation(domain:String, name: String, pairKey: String) = {
sessionFactory.withSession(s => {
val escalation = getEscalation(s, domain, name, pairKey)
s.delete(escalation)
})
}
def createOrUpdateEscalation(domain:String, e: EscalationDef) = sessionFactory.withSession( s => {
val pair = getPair(s, domain, e.pair)
val escalation = fromEscalationDef(pair,e)
s.saveOrUpdate(escalation)
})
def listEscalationsForPair(domain:String, pairKey: String) : Seq[EscalationDef] =
sessionFactory.withSession(s => getEscalationsForPair(s, domain, pairKey).map(toEscalationDef(_)))
def listReports(domain:String) = sessionFactory.withSession(s => {
listQuery[PairReport](s, "reportsByDomain", Map("domain_name" -> domain)).map(toPairReportDef(_))
})
def deleteReport(domain:String, name: String, pairKey: String) = {
sessionFactory.withSession(s => {
val report = getReport(s, domain, name, pairKey)
s.delete(report)
})
}
def createOrUpdateReport(domain:String, r: PairReportDef) = sessionFactory.withSession( s => {
val pair = getPair(s, domain, r.pair)
val report = fromPairReportDef(pair, r)
s.saveOrUpdate(report)
})
def listReportsForPair(domain:String, pairKey: String) : Seq[PairReportDef] =
sessionFactory.withSession(s => getReportsForPair(s, domain, pairKey).map(toPairReportDef(_)))
private def getRepairActionsInPair(s: Session, domain:String, pairKey: String): Seq[RepairAction] =
listQuery[RepairAction](s, "repairActionsByPair", Map("pair_key" -> pairKey,
"domain_name" -> domain))
private def getEscalationsForPair(s: Session, domain:String, pairKey:String): Seq[Escalation] =
listQuery[Escalation](s, "escalationsByPair", Map("pair_key" -> pairKey,
"domain_name" -> domain))
private def getReportsForPair(s: Session, domain:String, pairKey:String): Seq[PairReport] =
listQuery[PairReport](s, "reportsByPair", Map("pair_key" -> pairKey,
"domain_name" -> domain))
def listRepairActions(domain:String) : Seq[RepairActionDef] = sessionFactory.withSession(s =>
listQuery[RepairAction](s, "repairActionsByDomain", Map("domain_name" -> domain)).map(toRepairActionDef(_)))
def getEndpointDef(domain:String, name: String) = sessionFactory.withSession(s => toEndpointDef(getEndpoint(s, domain, name)))
def getEndpoint(domain:String, name: String) = sessionFactory.withSession(s => getEndpoint(s, domain, name))
def getPairDef(domain:String, key: String) = sessionFactory.withSession(s => toPairDef(getPair(s, domain, key)))
def getRepairActionDef(domain:String, name: String, pairKey: String) = sessionFactory.withSession(s => toRepairActionDef(getRepairAction(s, domain, name, pairKey)))
def getPairReportDef(domain:String, name: String, pairKey: String) = sessionFactory.withSession(s => toPairReportDef(getReport(s, domain, name, pairKey)))
def getConfigVersion(domain:String) = cachedConfigVersions.readThrough(domain, () => sessionFactory.withSession(s => {
s.getNamedQuery("configVersionByDomain").setString("domain", domain).uniqueResult().asInstanceOf[Int]
}))
/**
* Force the DB to uprev the config version column for this particular domain
*/
private def upgradeConfigVersion(domain:String)(s:Session) = {
s.getNamedQuery("upgradeConfigVersionByDomain").setString("domain", domain).executeUpdate()
}
/**
* Force an upgrade of the domain config version in the db and the cache after the DB work has executed successfully.
*/
private def withVersionUpgrade[T](domain:String, dbCommands:Function1[Session, T]) : T = {
def beforeCommit(session:Session) = upgradeConfigVersion(domain)(session)
def commandsToExecute(session:Session) = dbCommands(session)
def afterCommit() = cachedConfigVersions.remove(domain)
sessionFactory.withSession(
beforeCommit _,
commandsToExecute,
afterCommit _
)
}
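  // Illustrative note (an assumption based on the names above, not stated in the original source):
  // the config version row is bumped before the transaction commits, the supplied dbCommands run
  // inside the same session, and the cached version for the domain is evicted only once the commit
  // has succeeded, so a subsequent getConfigVersion re-reads the upgraded value from the database.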
def allConfigOptions(domain:String) = {
sessionFactory.withSession(s => {
listQuery[ConfigOption](s, "configOptionsByDomain", Map("domain_name" -> domain)).map(opt => opt.key -> opt.value).toMap
})
}
def maybeConfigOption(domain:String, key:String) =
sessionFactory.withSession(s => singleQueryOpt[String](s, "configOptionByNameAndKey", Map("key" -> key, "domain_name" -> domain)))
def configOptionOrDefault(domain:String, key: String, defaultVal: String) =
maybeConfigOption(domain, key) match {
case Some(str) => str
case None => defaultVal
}
def setConfigOption(domain:String, key:String, value:String) = writeConfigOption(domain, key, value)
def clearConfigOption(domain:String, key:String) = deleteConfigOption(domain, key)
private def deletePairInSession(s:Session, domain:String, pair:DiffaPair) = {
getRepairActionsInPair(s, domain, pair.key).foreach(s.delete)
getEscalationsForPair(s, domain, pair.key).foreach(s.delete)
getReportsForPair(s, domain, pair.key).foreach(s.delete)
pair.views.foreach(s.delete(_))
deleteStoreCheckpoint(pair.asRef)
s.delete(pair)
}
def makeDomainMember(domain:String, userName:String) = sessionFactory.withSession(s => {
val member = Member(User(name = userName), Domain(name = domain))
s.saveOrUpdate(member)
member
})
def removeDomainMembership(domain:String, userName:String) = sessionFactory.withSession(s => {
s.delete(Member(User(name = userName), Domain(name = domain)))
})
def listDomainMembers(domain:String) = sessionFactory.withSession(s => {
listQuery[Member](s, "membersByDomain", Map("domain_name" -> domain))
})
def listEndpointViews(s:Session, domain:String, endpointName:String) =
listQuery[EndpointView](s, "endpointViewsByEndpoint", Map("domain_name" -> domain, "endpoint_name" -> endpointName))
def listPairViews(s:Session, domain:String, pairKey:String) =
listQuery[PairView](s, "pairViewsByPair", Map("domain_name" -> domain, "pair_key" -> pairKey))
}
| aprescott/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/config/HibernateDomainConfigStore.scala | Scala | apache-2.0 | 11,435 |
package com.kakashi.simpleservices
import akka.actor.ActorDSL._
import akka.actor.{ActorLogging, ActorSystem, Props}
import akka.io.IO
import akka.io.Tcp._
import spray.can.Http
object SampleApp extends App {
implicit val system = ActorSystem("simpleServices")
/* Spray Service */
  val service = system.actorOf(Props[SampleServiceActor], "spray-swagger-sample-service")
val ioListener = actor("ioListener")(new Act with ActorLogging {
become {
case b @ Bound(connection) =>
println(b.toString)
// log.info(b.toString)
}
})
IO(Http).tell(Http.Bind(service, SampleConfig.HttpConfig.interface, SampleConfig.HttpConfig.port), ioListener)
}
| freeservices/simpleservices | src/main/scala/com/kakashi/simpleservices/SampleApp.scala | Scala | mit | 681 |
/**
* Copyright (C) 2014 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.oxf.util.ScalaUtils._
import org.orbeon.oxf.xml.TransformerUtils
import org.orbeon.saxon.om.{DocumentInfo, NodeInfo}
import org.orbeon.scaxon.XML
import org.orbeon.scaxon.XML._
object DataMigration {
// NOTE: We could build this with JSON objects instead
def encodeMigrationsToJSON(migrations: Seq[(String, String)]) =
migrations map { case (path, iterationName) ⇒
s"""{ "path": "$path", "iteration-name": "$iterationName" }"""
} mkString ("[", ",", "]")
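  // Illustrative example (not in the original source); the path syntax follows the note in
  // partitionNodes below, i.e. parenthesised element names separated by slashes:
  //   encodeMigrationsToJSON(Seq("(section-3)/(section-3-iteration)/(grid-4)" -> "grid-4-iteration")) ==
  //     """[{ "path": "(section-3)/(section-3-iteration)/(grid-4)", "iteration-name": "grid-4-iteration" }]"""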
// Ouch, a lot of boilerplate! (Rapture JSON might provide nicer syntax)
def decodeMigrationsFromJSON(jsonMigrationMap: String): List[(String, String)] = {
import spray.json._
val json = jsonMigrationMap.parseJson
val names =
Iterator(json) collect {
case JsArray(migrations) ⇒
migrations.iterator collect {
case JsObject(fields) ⇒
(
fields("path").asInstanceOf[JsString].value,
fields("iteration-name").asInstanceOf[JsString].value
)
}
}
names.flatten.toList
}
  private val TrimPathElementRE = """\s*\(?([^)]+)\)?\s*""".r
private def partitionNodes(
mutableData : NodeInfo,
migration : List[(String, String)]
): List[(NodeInfo, List[NodeInfo], String, String)] =
migration flatMap {
case (path, iterationName) ⇒
val (pathToParentNodes, pathToChildNodes) = {
// NOTE: The format of the path is like `(section-3)/(section-3-iteration)/(grid-4)`. Form Builder
// puts parentheses for the abandoned case of a custom XML format, and we kept that when producing
// the migration data. As long as we know that there are no `/` within the parentheses we are fine.
val parts = split[List](path, "/")
val TrimPathElementRE(trimmedLast) = parts.last
(parts.init mkString "/", trimmedLast)
}
// NOTE: Use collect, but we know they are nodes if the JSON is correct and contains paths
val parentNodes = XML.eval(mutableData.rootElement, pathToParentNodes) collect {
case node: NodeInfo ⇒ node
}
parentNodes map { parentNode ⇒
val nodes = XML.eval(parentNode, pathToChildNodes) collect {
case node: NodeInfo ⇒ node
}
        // NOTE: Should ideally test on uriQualifiedName instead. The data in practice has elements which
        // are in no namespace, and if they were in a namespace, the prefixes would likely be unique.
(parentNode, nodes.to[List], pathToChildNodes, iterationName)
}
}
import org.orbeon.oxf.xforms.action.XFormsAPI._
//@XPathFunction
def dataMaybeMigratedFrom(data: DocumentInfo, metadata: Option[DocumentInfo]) =
dataMaybeMigratedFromTo(data, metadata, migrateDataFrom)
//@XPathFunction
def dataMaybeMigratedTo(data: DocumentInfo, metadata: Option[DocumentInfo]) =
dataMaybeMigratedFromTo(data, metadata, migrateDataTo)
private def dataMaybeMigratedFromTo(
data : DocumentInfo,
metadata : Option[DocumentInfo],
migrate : (DocumentInfo, String) ⇒ DocumentInfo
) =
for {
metadata ← metadata
migration ← migrationMapFromMetadata(metadata.rootElement)
} yield
migrate(data, migration)
def migrationMapFromMetadata(metadataRootElement: NodeInfo) =
metadataRootElement firstChild "migration" filter (_.attValue("version") == "4.8.0") map (_.stringValue)
def migrateDataTo(data: DocumentInfo, jsonMigrationMap: String): DocumentInfo = {
val mutableData = TransformerUtils.extractAsMutableDocument(data)
partitionNodes(mutableData, decodeMigrationsFromJSON(jsonMigrationMap)) foreach {
case (parentNode, iterations, repeatName, iterationName) ⇒
iterations match {
case Nil ⇒
// Issue: we don't know, based just on the migration map, where to insert container elements to
// follow bind order. This is not a new problem as we don't enforce order, see:
//
// https://github.com/orbeon/orbeon-forms/issues/443.
//
// For now we choose to add after the last element.
//
// BTW at runtime `fr:grid[@repeat = 'true']` inserts iterations before the first element.
insert(
into = parentNode,
after = parentNode / *,
origin = elementInfo(repeatName, Nil),
doDispatch = false
)
case iterations ⇒
val contentForEachIteration =
iterations map (iteration ⇒ (iteration /@ @*) ++ (iteration / Node) toList) // force
delete(iterations.head /@ @*, doDispatch = false)
delete(iterations.head / Node, doDispatch = false)
delete(iterations.tail, doDispatch = false)
insert(
into = iterations.head,
origin = contentForEachIteration map (elementInfo(iterationName, _)),
doDispatch = false
)
}
}
mutableData
}
def migrateDataFrom(data: DocumentInfo, jsonMigrationMap: String): DocumentInfo = {
val mutableData = TransformerUtils.extractAsMutableDocument(data)
partitionNodes(mutableData, decodeMigrationsFromJSON(jsonMigrationMap)) foreach {
case (_, iterations, _, _) ⇒
assert(iterations.tail.isEmpty)
val container = iterations.head
val contentForEachIteration =
(container / * toList) map (iteration ⇒ (iteration /@ @*) ++ (iteration / Node) toList) // force
insert(
after = container,
origin = contentForEachIteration map (elementInfo(container.name, _)),
doDispatch = false
)
delete(container, doDispatch = false)
}
mutableData
}
}
| joansmith/orbeon-forms | src/main/scala/org/orbeon/oxf/fr/DataMigration.scala | Scala | lgpl-2.1 | 6,603 |
package org.vitrivr.adampro.data.index.structures.va.signature
import org.vitrivr.adampro.data.datatypes.bitstring.BitString
/**
* adamtwo
*
* Ivan Giangreco
* August 2015
*/
@SerialVersionUID(100L)
private[va] trait SignatureGenerator extends Serializable {
/**
*
* @param cells cell ids to translate to signature
* @return
*/
def toSignature(cells: Seq[Int]): BitString[_]
/**
*
* @param signature signature to translate to cell ids
* @return
*/
def toCells(signature: BitString[_]): IndexedSeq[Int]
}
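// Illustrative expectation (an assumption, not stated in the trait itself): for a concrete
// generator `gen`, `gen.toCells(gen.toSignature(cells))` is presumably intended to reproduce the
// original cell ids, i.e. encoding to a signature and decoding back should round-trip.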
| dbisUnibas/ADAMpro | src/main/scala/org/vitrivr/adampro/data/index/structures/va/signature/SignatureGenerator.scala | Scala | mit | 553 |
// #Sireum
/*
Copyright (c) 2017-2021, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum.extension
import org.sireum._
@ext object Time {
def currentMillis: Z = $
}
| sireum/v3-logika-runtime | library/shared/src/main/scala/org/sireum/extension/Time.scala | Scala | bsd-2-clause | 1,457 |
package test
import io.circe._
import io.circe.parser._
import io.circe.syntax._
import scoutagent._
import scoutagent.State._
import scoutagent.controller._
import environment._
import environment.anomaly._
import environment.element._
import environment.element.seed._
import environment.terrainmodification._
import environment.EnvironmentBuilder._
import operation._
import scoututil.Util._
import jsonhandler.Encoder._
import jsonhandler.Decoder._
import filemanager.FileManager._
import scala.collection.mutable.{Map => MutableMap}
import scala.collection.mutable.{ArrayBuffer => AB}
class Test(
val testEnvironments: Map[String,Int], // environmentFileName -> test iterations
val testTemplates: Map[String,(Int,Int)], // templateFileName -> (build iterations, test iteration per build)
val controllers: Map[String,Controller], // agentName -> controller
val sensors: List[Sensor],
val goalTemplate: GoalTemplate,
val maxActions: Option[Int],
val verbose: Boolean = false,
val sensorLists: Map[String,List[Sensor]] = Map()
) {
// Test Metrics to gather
val testMetrics: Map[String,TestMetric] = for ((name,controller) <- controllers) yield name -> new TestMetric(name, AB())
// Validation Agent used to assure a valid start point is selected
val validationAgent: Agent = new Agent("VALIDATOR", new RandomController())
def run: Unit = {
// Setup environments
val environments = generateEnvironments
var runNumber = 0
for ((environment, iterations) <- environments) {
for (i <- 0 until iterations) {
runNumber += 1
if (verbose) {
println()
println()
println(s"Running Test $runNumber")
}
val startPosition = getValidStartPosition(environment)
val startX = startPosition._1
val startY = startPosition._2
// Setup agent and run operation
for ((name, controller) <- controllers) {
val agentSensors = sensorLists.get(name) match {
case Some(sl) => sl
case None => sensors
}
val agent = new Agent(
name = name,
controller = controller.copy,
sensors = agentSensors,
mapHeight = environment.height,
mapWidth = environment.width,
mapScale = environment.scale,
xPosition = startX,
yPosition = startY)
// OPERATION
val goal = goalTemplate.generateGoal(environment)
val operation = new Operation(agent, environment, goal, maxActions)
// println(s"Running ${agent.name}")
operation.run
testMetrics(name).addRun(operation.runData)
// operation.printActions
if (verbose) operation.printOutcome
// println(s"Start Position ($startX, $startY)")
// println(s"End Position (${operation.eventLog.last.state.xPosition}, ${operation.eventLog.last.state.yPosition})")
// println()
}
}
}
if (verbose) for ((name,data) <- testMetrics) data.printRunResults
}
// Generate Environments: environment -> iterations to run
def generateEnvironments: Map[Environment,Int] = {
if (verbose) println("GENERATING ENVIRONMENTS...")
var environments: MutableMap[Environment,Int] = MutableMap()
// Load environment files
if (verbose) println("LOADING FROM FILES...")
for ((fName, iterations) <- testEnvironments) {
if (verbose) println(s"Loading from: $fName")
val envString = readJsonFile(fName, environmentPath)
parse(envString) match {
case Left(_) => // Load or parse failure
case Right(envJson) => {
val env = extractEnvironment(envJson)
environments += (env -> iterations)
}
}
}
// Load template files
if (verbose) println("LOADING FROM TEMPLATES...")
for ((fName, iterations) <- testTemplates) {
if (verbose) println(s"Loading from: $fName")
val templateString = readJsonFile(fName, environmentTemplatePath)
val buildIterations = iterations._1
val testIterations = iterations._2
parse(templateString) match {
case Left(_) => // Load or parse failure
case Right(templateJson) => {
val template = extractEnvironmentTemplate(templateJson)
for (i <- 0 until buildIterations) {
if (verbose) println(s" loading ${i+1} / $buildIterations")
val env = buildEnvironment(template)
environments += (env -> testIterations)
}
}
}
}
return environments.toMap
}
def getValidStartPosition(env: Environment): (Int,Int) = {
// Choose a random point
val startX = randomInt(0, env.height - 1)
val startY = randomInt(0, env.width - 1)
// Check if start position is clear of hazards and doesn't start on an anomaly
validationAgent.calculateHazardDamage(env, startX, startY, 10000) match {
case d if (d > 0.0) => return getValidStartPosition(env) // Try different start position
case d => env.getAnomaliesCluster(startX, startY, 3) match {
case as if (as.size > 0) => return getValidStartPosition(env)
case _ => return (startX, startY)
}
}
}
}
class TestMetric(val controllerName: String, val runs: AB[RunData]) {
def addRun(runData: RunData) = runs += runData
def avgGoalCompletion: Double = runs.map(_.goalCompletion).foldLeft(0.0)(_ + _) / runs.size
def avgActions: Int = runs.map(_.steps).foldLeft(0)(_ + _) / runs.size
def avgRemainingHealth: Double = runs.map(_.remainingHealth).foldLeft(0.0)(_ + _) / runs.size
def avgRemainingEnergy: Double = runs.map(_.remainingEnergy).foldLeft(0.0)(_ + _) / runs.size
def printRunResults = {
println()
println(s"Controller: $controllerName")
println(s"Runs: ${runs.size}")
println(s"Successes: ${runs.filter(_.successful == true).size}")
println(s"Avg Steps: ${avgActions}")
println(s"Avg Remaining Health: ${roundDouble2(avgRemainingHealth)}")
println(s"Avg Remaining Energy: ${roundDouble2(avgRemainingEnergy)}")
println(s"AVG GOAL COMPLETION: ${roundDouble2(avgGoalCompletion)}")
}
}
class RunData(val goalCompletion: Double, val steps: Int, val remainingHealth: Double, val remainingEnergy: Double) {
def successful: Boolean = goalCompletion >= 100.0
}
| KeithCissell/SCOUt | app/src/main/scala/testing/Test.scala | Scala | mit | 6,357 |
package intellij.haskell.editor
import java.util
import java.util.UUID
import com.intellij.compiler.ProblemsView
import com.intellij.compiler.impl.ProblemsViewPanel
import com.intellij.compiler.progress.CompilerTask
import com.intellij.icons.AllIcons
import com.intellij.ide.errorTreeView.{ErrorTreeElement, ErrorTreeElementKind, GroupingElement}
import com.intellij.openapi.compiler.{CompileScope, CompilerMessage}
import com.intellij.openapi.fileEditor.OpenFileDescriptor
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.{Disposer, IconLoader}
import com.intellij.openapi.vfs.VirtualFile
import com.intellij.openapi.wm.{ToolWindowAnchor, ToolWindowManager}
import com.intellij.pom.Navigatable
import com.intellij.ui.content.ContentFactory
import com.intellij.util.concurrency.SequentialTaskExecutor
import com.intellij.util.ui.UIUtil
import intellij.haskell.util.HaskellProjectUtil
class HaskellProblemsView(project: Project) extends ProblemsView(project) {
private final val ProblemsToolWindowId = "Haskell Problems"
private final val ActiveIcon = AllIcons.Toolwindows.Problems
private final val PassiveIcon = IconLoader.getDisabledIcon(ActiveIcon)
private val viewUpdater = SequentialTaskExecutor.createSequentialApplicationPoolExecutor("ProblemsView Pool")
private lazy val problemsPanel = new ProblemsViewPanel(project)
private val toolWindowManager = ToolWindowManager.getInstance(project)
Disposer.register(project, () => {
Disposer.dispose(problemsPanel)
})
if (HaskellProjectUtil.isHaskellProject(project)) {
UIUtil.invokeLaterIfNeeded(() => {
if (!project.isDisposed) {
val toolWindow = toolWindowManager.registerToolWindow(ProblemsToolWindowId, false, ToolWindowAnchor.LEFT, project, true)
val content = ContentFactory.SERVICE.getInstance.createContent(problemsPanel, "", false)
content.setHelpId("reference.problems.tool.window")
toolWindow.getContentManager.addContent(content)
Disposer.register(project, () => {
toolWindow.getContentManager.removeAllContents(true)
})
updateIcon()
}
})
}
def clearOldMessages(currentFile: VirtualFile): Unit = {
viewUpdater.execute(() => {
cleanupChildrenRecursively(problemsPanel.getErrorViewStructure.getRootElement.asInstanceOf[ErrorTreeElement], currentFile)
updateIcon()
problemsPanel.reload()
})
}
def clearOldMessages(scope: CompileScope, currentSessionId: UUID): Unit = {
    // This method can be called, but the reason is not known. See issue #419.
    // For now do nothing because we do not know why this method is called.
}
override def addMessage(messageCategoryIndex: Int, text: Array[String], groupName: String, navigatable: Navigatable, exportTextPrefix: String, rendererTextPrefix: String, sessionId: UUID): Unit = {
viewUpdater.execute(() => {
if (navigatable != null) {
problemsPanel.addMessage(messageCategoryIndex, text, groupName, navigatable, exportTextPrefix, rendererTextPrefix, sessionId)
}
else {
problemsPanel.addMessage(messageCategoryIndex, text, null, -1, -1, sessionId)
}
updateIcon()
})
}
def addMessage(message: CompilerMessage): Unit = {
val file = message.getVirtualFile
val navigatable = if (message.getNavigatable == null && file != null && !file.getFileType.isBinary) {
new OpenFileDescriptor(myProject, file, -1, -1)
} else {
message.getNavigatable
}
val category = message.getCategory
val categoryIndex = CompilerTask.translateCategory(category)
val messageText = splitMessage(message)
val groupName = if (file != null) file.getPresentableUrl else category.getPresentableText
addMessage(categoryIndex, messageText, groupName, navigatable, message.getExportTextPrefix, message.getRenderTextPrefix, null)
}
def clear(): Unit = {
val view = problemsPanel.getErrorViewStructure
view.clear()
problemsPanel.reload()
}
override def setProgress(text: String, fraction: Float): Unit = {
problemsPanel.setProgress(text, fraction)
}
override def setProgress(text: String): Unit = {
problemsPanel.setProgressText(text)
}
override def clearProgress(): Unit = {
problemsPanel.clearProgressData()
}
private def splitMessage(message: CompilerMessage): Array[String] = {
val messageText = message.getMessage
    if (messageText.contains("\n")) {
      messageText.split("\n")
} else {
Array[String](messageText)
}
}
private def cleanupChildrenRecursively(errorTreeElement: ErrorTreeElement, currentFile: VirtualFile): Unit = {
val errorViewStructure = problemsPanel.getErrorViewStructure
for (element <- errorViewStructure.getChildElements(errorTreeElement)) {
element match {
case groupElement: GroupingElement =>
if (groupElement.getFile == currentFile) {
cleanupChildrenRecursively(element, currentFile)
}
case _ => errorViewStructure.removeElement(element)
}
}
}
private def updateIcon(): Unit = {
UIUtil.invokeLaterIfNeeded(() => {
if (!myProject.isDisposed) {
val toolWindow = ToolWindowManager.getInstance(myProject).getToolWindow(ProblemsToolWindowId)
if (toolWindow != null) {
val active = problemsPanel.getErrorViewStructure.hasMessages(util.EnumSet.of(ErrorTreeElementKind.ERROR, ErrorTreeElementKind.WARNING, ErrorTreeElementKind.NOTE))
toolWindow.setIcon(if (active) ActiveIcon else PassiveIcon)
}
}
})
}
}
object HaskellProblemsView {
def getInstance(project: Project): HaskellProblemsView = {
ProblemsView.SERVICE.getInstance(project).asInstanceOf[HaskellProblemsView]
}
} | rikvdkleij/intellij-haskell | src/main/scala/intellij/haskell/editor/HaskellProblemsView.scala | Scala | apache-2.0 | 5,769 |
/*
* Copyright 2001-2012 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
/**
* Trait used to pass an <code>Informer</code> and a <code>Documenter</code> to
* test methods in trait <code>Suite</code> and <code>fixture.Suite</code>.
*/
private[scalatest] trait Rep {
/**
* An <code>Informer</code> that during test execution will forward strings (and other objects) passed to its
* <code>apply</code> method to the current reporter. This method can be called safely by any thread.
*
* <p>
* This field is implicit to enable it to be easily used with constructs like the <code>given</code>,
* <code>when</code>, and <code>then</code> methods of trait
* <a href="GivenWhenThen.html"><code>GivenWhenThen</code></a>.
* </p>
*/
implicit val info: Informer
/**
* A <code>Documenter</code> that during test execution will forward strings passed to its
* <code>apply</code> method to the current reporter. This method can be called safely by any thread.
*/
implicit val markup: Documenter
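  // Illustrative usage sketch (assumed, not from the original source): a test method that
  // receives a Rep can call `info("constructing fixture")` to forward text to the current
  // reporter and `markup("*note*")` to record documentation alongside the test output.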
}
| hubertp/scalatest | src/main/scala/org/scalatest/Rep.scala | Scala | apache-2.0 | 1,582 |
package V2
/**
* A major difficulty of substitution is the number of times we traverse
* the source program. Substitution traverses everything - e.g unvisited
* branches of conditionals. We come over this problem by introducing the
* environment concept that enables fast identifier lookups. Note, we still
* have only integers as values (First-Order language).
*
* In addition we will use dynamic scoping, where the scope binding for
 * an identifier is determined by the execution context at runtime. So
* a construct like the following will be valid:
*
* Let(n, 5, f(10)) where f(x) = { n }
*
 * We will implement dynamic scoping by extending the current environment for every function
 * application with the function argument bindings.
*/
object F1WAEDynamicInterp extends App {
sealed abstract class F1WAE
case class Num(n: Int) extends F1WAE
case class Sub(lhs: F1WAE, rhs: F1WAE) extends F1WAE
case class Add(lhs: F1WAE, rhs: F1WAE) extends F1WAE
case class Let(boundId: Symbol, namedExpr: F1WAE, boundBody: F1WAE) extends F1WAE
case class Id(name: Symbol) extends F1WAE
case class App(funName: Symbol, arg: F1WAE) extends F1WAE
case class FunDef(argName: Symbol, body: F1WAE)
type FunDefs = Map[Symbol, FunDef]
// Our first-order language just has Int as values
type Env = Map[Symbol, Int]
def interp(expr: F1WAE, env: Env, funDefs: Map[Symbol, FunDef]): Int = expr match {
case Num(n) => n
case Sub(lhs, rhs) => interp(lhs, env, funDefs) - interp(rhs, env, funDefs)
case Add(lhs, rhs) => interp(lhs, env, funDefs) + interp(rhs, env, funDefs)
case Let(boundId, namedExpr, boundBody) =>
val extendedEnv = env + (boundId -> interp(namedExpr, env, funDefs))
interp(boundBody, extendedEnv, funDefs)
case Id(name) =>
// This will cause an exception if a unbound identifier (identifier not in the env) occurs
env(name)
case App(funName, argExpr) => funDefs(funName) match {
case FunDef(argName, bodyExpr) =>
val extendedEnv = env + (argName -> interp(argExpr, env, funDefs))
interp(bodyExpr, extendedEnv, funDefs)
}
}
// some assertions on the interpreter
import scala.language.implicitConversions
implicit def symbolToFWAE(symbol: Symbol) = Id(symbol)
implicit def intToFWAE(n: Int) = Num(n)
val funDefs = Map(
'f -> FunDef('n, App('g, Add('n, 5))),
'g -> FunDef('n, Sub('n, 1)))
assert(interp(App('f, 5), Map(), funDefs) == 9)
val funDefs2 = Map(
'f -> FunDef('y, Sub('y, 1)),
'g -> FunDef('y, Sub('y, 1)),
'f -> FunDef('x, App('g, Add('x, 3))))
assert(interp(App('f, 10), Map(), funDefs2) == 12)
// Due to dynamic scoping the interpreter will "walk" up the "symbol-table"
// and retrieve the binding for n
assert(interp(Let('n, 5, App('f, 10)),
Map(),
Map('f -> FunDef('x, 'n))) == 5)
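  // Additional illustrative check (not part of the original file): a plain Let binding is
  // resolved through the same environment mechanism, without any function application involved.
  assert(interp(Let('x, 3, Add('x, 'x)), Map(), Map()) == 6)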
}
| Tooa/interpreters | src/V2/F1WAEDynamicInterp.scala | Scala | apache-2.0 | 2,865 |
package sbt
import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Show, Types, Util }
import sbt.librarymanagement.Configuration
import java.io.File
import java.net.URI
import Project._
import Def.{ ScopedKey, Setting }
import Scope.{ GlobalScope, ThisScope }
import Types.{ const, idFun, Id }
import complete._
import DefaultParsers._
/**
* The resulting `session` and verbose and quiet summaries of the result of a set operation.
* The verbose summary will typically use more vertical space and show full details,
* while the quiet summary will be a couple of lines and truncate information.
*/
private[sbt] class SetResult(val session: SessionSettings, val verboseSummary: String, val quietSummary: String)
/** Defines methods for implementing the `set` command.*/
private[sbt] object SettingCompletions {
/**
* Implementation of the `set every` command. Each setting in the provided `settings` sequence will be applied in all scopes,
* overriding all previous definitions of the underlying AttributeKey.
* The settings injected by this method cannot be later persisted by the `session save` command.
*/
def setAll(extracted: Extracted, settings: Seq[Setting[_]]): SetResult =
{
import extracted._
val r = relation(extracted.structure, true)
val allDefs = Def.flattenLocals(Def.compiled(extracted.structure.settings, true)(structure.delegates, structure.scopeLocal, implicitly[Show[ScopedKey[_]]])).keys
val projectScope = Load.projectScope(currentRef)
def resolve(s: Setting[_]): Seq[Setting[_]] = Load.transformSettings(projectScope, currentRef.build, rootProject, s :: Nil)
def rescope[T](setting: Setting[T]): Seq[Setting[_]] =
{
val akey = setting.key.key
val global = ScopedKey(Global, akey)
val globalSetting = resolve(Def.setting(global, setting.init, setting.pos))
globalSetting ++ allDefs.flatMap { d =>
if (d.key == akey)
Seq(SettingKey(akey) in d.scope <<= global)
else
Nil
}
}
val redefined = settings.flatMap(x => rescope(x))
val session = extracted.session.appendRaw(redefined)
setResult(session, r, redefined)
}
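  // For context (illustrative, not from the original source): this is the implementation behind
  // the sbt shell form `set every <setting expression>`, e.g.
  //   set every scalacOptions += "-deprecation"
  // which defines the key in the Global scope and rewires every existing scoped definition of the
  // key to that new Global definition.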
/** Implementation of the `set` command that will reload the current project with `settings` appended to the current settings. */
def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String): SetResult =
{
import extracted._
val append = Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
      val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList)))
val struct = extracted.structure
val r = relation(newSession.mergeSettings, true)(structure.delegates, structure.scopeLocal, implicitly)
setResult(newSession, r, append)
}
private[this] def setResult(session: SessionSettings, r: Relation[ScopedKey[_], ScopedKey[_]], redefined: Seq[Setting[_]])(implicit show: Show[ScopedKey[_]]): SetResult =
{
val redefinedKeys = redefined.map(_.key).toSet
val affectedKeys = redefinedKeys.flatMap(r.reverse)
def summary(verbose: Boolean): String = setSummary(redefinedKeys, affectedKeys, verbose)
new SetResult(session, summary(true), summary(false))
}
private[this] def setSummary(redefined: Set[ScopedKey[_]], affected: Set[ScopedKey[_]], verbose: Boolean)(implicit display: Show[ScopedKey[_]]): String =
{
val QuietLimit = 3
def strings(in: Set[ScopedKey[_]]): Seq[String] = in.toSeq.map(sk => display(sk)).sorted
def lines(in: Seq[String]): (String, Boolean) =
if (in.isEmpty)
("no settings or tasks.", false)
else if (verbose)
        (in.mkString("\n\t", "\n\t", "\n"), false)
else
quietList(in)
def quietList(in: Seq[String]): (String, Boolean) =
{
val (first, last) = in.splitAt(QuietLimit)
if (last.isEmpty)
(first.mkString(", "), false)
else {
val s = first.take(QuietLimit - 1).mkString("", ", ", " and " + last.size + " others.")
(s, true)
}
}
if (redefined.isEmpty)
"No settings or tasks were redefined."
else {
val (redef, trimR) = lines(strings(redefined))
val (used, trimU) = lines(strings(affected))
      val details = if (trimR || trimU) "\n\tRun `last` for details." else ""
val valuesString = if (redefined.size == 1) "value" else "values"
"Defining %s\\nThe new %s will be used by %s%s".format(redef, valuesString, used, details)
}
}
/**
* Parser that provides tab completion for the main argument to the `set` command.
   * `settings` are the evaluated settings for the build, `rawKeyMap` maps the hyphenated key identifier to the key object,
* and `context` is the current project.
* The tab completion will try to present the most relevant information first, with additional descriptions or keys available
* when there are fewer choices or tab is pressed multiple times.
* The last part of the completion will generate a template for the value or function literal that will initialize the setting or task.
*/
def settingParser(settings: Settings[Scope], rawKeyMap: Map[String, AttributeKey[_]], context: ResolvedProject): Parser[String] =
{
val cutoff = KeyRanks.MainCutoff
val keyMap: Map[String, AttributeKey[_]] = rawKeyMap.map { case (k, v) => (keyScalaID(k), v) } toMap;
def inputScopedKey(pred: AttributeKey[_] => Boolean): Parser[ScopedKey[_]] =
scopedKeyParser(keyMap.filter { case (_, k) => pred(k) }, settings, context)
val full = for {
defineKey <- scopedKeyParser(keyMap, settings, context)
a <- assign(defineKey)
deps <- valueParser(defineKey, a, inputScopedKey(keyFilter(defineKey.key)))
} yield () // parser is currently only for completion and the parsed data structures are not used
matched(full) | any.+.string
}
/** Parser for a Scope+AttributeKey (ScopedKey). */
def scopedKeyParser(keyMap: Map[String, AttributeKey[_]], settings: Settings[Scope], context: ResolvedProject): Parser[ScopedKey[_]] =
{
val cutoff = KeyRanks.MainCutoff
val keyCompletions = fixedCompletions { (seen, level) => completeKey(seen, keyMap, level, cutoff, 10).toSet }
val keyID: Parser[AttributeKey[_]] = scalaID(keyMap, "key")
val keyParser = token(keyID, keyCompletions)
for (key <- keyParser; scope <- scopeParser(key, settings, context)) yield ScopedKey(scope, key)
}
/** Parser for the `in` method name that slightly augments the naive completion to give a hint of the purpose of `in`.*/
val inParser = tokenDisplay(Space ~> InMethod, "%s <scope>".format(InMethod))
/**
* Parser for the initialization expression for the assignment method `assign` on the key `sk`.
* `scopedKeyP` is used to parse and complete the input keys for an initialization that depends on other keys.
*/
def valueParser(sk: ScopedKey[_], assign: Assign.Value, scopedKeyP: Parser[ScopedKey[_]]): Parser[Seq[ScopedKey[_]]] =
{
val fullTypeString = keyTypeString(sk.key)
val typeString = if (assignNoAppend(assign)) fullTypeString else "..."
if (assign == Assign.Update) {
val function = "{(prev: " + typeString + ") => /*" + typeString + "*/ }"
token(OptSpace ~ function) ^^^ Nil
} else {
val value = "/* value of type " + typeString + " */"
token(Space ~ value) ^^^ Nil
}
}
/**
* For a setting definition `definingKey <<= (..., in, ...) { ... }`,
* `keyFilter(definingKey)(in)` returns true when `in` is an allowed input for `definingKey` based on whether they are settings or not.
* For example, if `definingKey` is for a setting, `in` may only be a setting itself.
*/
def keyFilter(definingKey: AttributeKey[_]): AttributeKey[_] => Boolean =
if (isSetting(definingKey)) isSetting _ else isTaskOrSetting _
/**
* Parser for a Scope for a `key` given the current project `context` and evaluated `settings`.
* The completions are restricted to be more useful. Currently, this parser will suggest
* only known axis values for configurations and tasks and only in that order.
*/
def scopeParser(key: AttributeKey[_], settings: Settings[Scope], context: ResolvedProject): Parser[Scope] = {
val data = settings.data
val allScopes = data.keys.toSeq
val definedScopes = data.toSeq flatMap { case (scope, attrs) => if (attrs contains key) scope :: Nil else Nil }
scope(key, allScopes, definedScopes, context)
}
private[this] def scope(key: AttributeKey[_], allScopes: Seq[Scope], definedScopes: Seq[Scope], context: ResolvedProject): Parser[Scope] =
{
def axisParser[T](axis: Scope => ScopeAxis[T], name: T => String, description: T => Option[String], label: String): Parser[ScopeAxis[T]] =
{
def getChoice(s: Scope): Seq[(String, T)] = axis(s) match {
case Select(t) => (name(t), t) :: Nil
case _ => Nil
}
def getChoices(scopes: Seq[Scope]): Map[String, T] = scopes.flatMap(getChoice).toMap
val definedChoices: Set[String] = definedScopes.flatMap(s => axis(s).toOption.map(name)).toSet
val fullChoices: Map[String, T] = getChoices(allScopes.toSeq)
val completions = fixedCompletions { (seen, level) => completeScope(seen, level, definedChoices, fullChoices)(description).toSet }
Act.optionalAxis(inParser ~> token(Space) ~> token(scalaID(fullChoices, label), completions), This)
}
val configurations: Map[String, Configuration] = context.configurations.map(c => (configScalaID(c.name), c)).toMap
val configParser = axisParser[ConfigKey](_.config, c => configScalaID(c.name), ck => configurations.get(ck.name).map(_.description), "configuration")
val taskParser = axisParser[AttributeKey[_]](_.task, k => keyScalaID(k.label), _.description, "task")
val nonGlobal = (configParser ~ taskParser) map { case (c, t) => Scope(This, c, t, Global) }
val global = inParser ~> token((Space ~ GlobalID) ^^^ GlobalScope)
global | nonGlobal
}
/** Parser for the assignment method (such as `:=`) for defining `key`. */
def assign(key: ScopedKey[_]): Parser[Assign.Value] =
{
val completions = fixedCompletions { (seen, level) => completeAssign(seen, level, key).toSet }
val identifier = Act.filterStrings(Op, Assign.values.map(_.toString), "assignment method") map Assign.withName
token(Space) ~> token(optionallyQuoted(identifier), completions)
}
private[this] def fixedCompletions(f: (String, Int) => Set[Completion]): TokenCompletions =
TokenCompletions.fixed((s, l) => Completions(f(s, l)))
private[this] def scalaID[T](keyMap: Map[String, T], label: String): Parser[T] =
{
val identifier = Act.filterStrings(ScalaID, keyMap.keySet, label) map keyMap
optionallyQuoted(identifier)
}
/** Produce a new parser that allows the input accepted by `p` to be quoted in backticks. */
def optionallyQuoted[T](p: Parser[T]): Parser[T] =
(Backtick.? ~ p) flatMap { case (quote, id) => if (quote.isDefined) Backtick.? ^^^ id else success(id) }
/**
* Completions for an assignment method for `key` given the tab completion `level` and existing partial string `seen`.
* This will filter possible assignment methods based on the underlying type of `key`, so that only `<<=` is shown for input tasks, for example.
*/
def completeAssign(seen: String, level: Int, key: ScopedKey[_]): Seq[Completion] =
{
val allowed: Iterable[Assign.Value] =
if (appendable(key.key)) Assign.values
else assignNoAppend
val applicable = allowed.toSeq.flatMap { a =>
val s = a.toString
if (s startsWith seen) (s, a) :: Nil else Nil
}
completeDescribed(seen, true, applicable)(assignDescription)
}
def completeKey(seen: String, keys: Map[String, AttributeKey[_]], level: Int, prominentCutoff: Int, detailLimit: Int): Seq[Completion] =
completeSelectDescribed(seen, level, keys, detailLimit)(_.description) { case (k, v) => v.rank <= prominentCutoff }
def completeScope[T](seen: String, level: Int, definedChoices: Set[String], allChoices: Map[String, T])(description: T => Option[String]): Seq[Completion] =
completeSelectDescribed(seen, level, allChoices, 10)(description) { case (k, v) => definedChoices(k) }
def completeSelectDescribed[T](seen: String, level: Int, all: Map[String, T], detailLimit: Int)(description: T => Option[String])(prominent: (String, T) => Boolean): Seq[Completion] =
{
val applicable = all.toSeq.filter { case (k, v) => k startsWith seen }
val prominentOnly = applicable filter { case (k, v) => prominent(k, v) }
val showAll = (level >= 3) || (level == 2 && prominentOnly.size <= detailLimit) || prominentOnly.isEmpty
val showKeys = if (showAll) applicable else prominentOnly
val showDescriptions = (level >= 2) || (showKeys.size <= detailLimit)
completeDescribed(seen, showDescriptions, showKeys)(s => description(s).toList.mkString)
}
def completeDescribed[T](seen: String, showDescriptions: Boolean, in: Seq[(String, T)])(description: T => String): Seq[Completion] =
{
def appendString(id: String): String = id.stripPrefix(seen) + " "
if (in.isEmpty)
Nil
else if (showDescriptions) {
val withDescriptions = in map { case (id, key) => (id, description(key)) }
val padded = CommandUtil.aligned("", " ", withDescriptions)
(padded, in).zipped.map {
case (line, (id, key)) =>
          Completion.tokenDisplay(append = appendString(id), display = line + "\n")
}
} else
in map {
case (id, key) =>
Completion.tokenDisplay(display = id, append = appendString(id))
}
}
/**
   * Transforms the hyphenated key label `k` into camel-case and quotes it with backticks if it is a Scala keyword.
* This is intended to be an estimate of the Scala identifier that may be used to reference the keyword in the default sbt context.
*/
def keyScalaID(k: String): String = Util.quoteIfKeyword(Util.hyphenToCamel(k))
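  // Illustrative examples (not in the original source), following the documented behaviour:
  //   keyScalaID("source-directories") == "sourceDirectories"
  //   keyScalaID("package")            == "`package`"   // backtick-quoted because it is a Scala keyword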
/**
* Transforms the configuration name `c` so that the first letter is capitalized and the name is quoted with backticks if it is a Scala keyword.
* This is intended to be an estimate of the Scala identifier that may be used to reference the keyword in the default sbt context.
*/
def configScalaID(c: String): String = Util.quoteIfKeyword(c.capitalize)
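  // Illustrative examples (not in the original source): configScalaID("compile") == "Compile" and
  // configScalaID("test") == "Test"; a name clashing with a Scala keyword would be backtick-quoted.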
/** Applies a function on the underlying manifest for T for `key` depending if it is for a `Setting[T]`, `Task[T]`, or `InputTask[T]`.*/
def keyType[S](key: AttributeKey[_])(onSetting: Manifest[_] => S, onTask: Manifest[_] => S, onInput: Manifest[_] => S)(implicit tm: Manifest[Task[_]], im: Manifest[InputTask[_]]): S =
{
def argTpe = key.manifest.typeArguments.head
val e = key.manifest.runtimeClass
if (e == tm.runtimeClass) onTask(argTpe)
else if (e == im.runtimeClass) onInput(argTpe)
else onSetting(key.manifest)
}
/** For a Task[T], InputTask[T], or Setting[T], this returns the manifest for T. */
def keyUnderlyingType(key: AttributeKey[_]): Manifest[_] = keyType(key)(idFun, idFun, idFun)
/**
* Returns a string representation of the underlying type T for a `key` representing a `Setting[T]`, `Task[T]`, or `InputTask[T]`.
* This string representation is currently a cleaned up toString of the underlying Manifest.
*/
def keyTypeString[T](key: AttributeKey[_]): String =
{
val mfToString = (mf: Manifest[_]) => complete.TypeString.cleanup(mf.toString)
keyType(key)(mfToString, mfToString, mfToString)
}
/** True if the `key` represents an input task, false if it represents a task or setting. */
def isInputTask(key: AttributeKey[_]): Boolean = keyType(key)(const(false), const(false), const(true))
/** True if the `key` represents a setting, false if it represents a task or an input task.*/
def isSetting(key: AttributeKey[_]): Boolean = keyType(key)(const(true), const(false), const(false))
/** True if the `key` represents a setting or task, false if it is for an input task. */
def isTaskOrSetting(key: AttributeKey[_]): Boolean = keyType(key)(const(true), const(true), const(false))
/** True if the `key` represents a setting or task that may be appended using an assignment method such as `+=`. */
def appendable(key: AttributeKey[_]): Boolean =
{
val underlying = keyUnderlyingType(key).runtimeClass
appendableClasses.exists(_ isAssignableFrom underlying)
}
/** The simple name of the global scope axis, which can be used to reference it in the default setting context. */
final val GlobalID = Global.getClass.getSimpleName.stripSuffix("$")
/** Character used to quote a Scala identifier that would otherwise be interpreted as a keyword.*/
final val Backtick = '`'
/** Name of the method that modifies the scope of a key. */
final val InMethod = "in"
/** Assignment methods that may be called on a setting or task. */
object Assign extends Enumeration {
val AppendValue = Value("+=")
val AppendValues = Value("++=")
val Define = Value(":=")
val Update = Value("~=")
}
import Assign._
/** Returns the description associated with the provided assignment method. */
def assignDescription(a: Assign.Value): String = a match {
case AppendValue => "append value"
case AppendValues => "append values"
case Define => "define value, overwriting any existing value"
case Update => "transform existing value"
}
/** The assignment methods except for the ones that append. */
val assignNoAppend: Set[Assign.Value] = Set(Define, Update)
/** Class values to approximate which types can be appended*/
val appendableClasses = Seq(
classOf[Seq[_]],
classOf[Map[_, _]],
classOf[Set[_]],
classOf[Int],
classOf[Double],
classOf[Long],
classOf[String]
)
}
| dansanduleac/sbt | main/src/main/scala/sbt/SettingCompletions.scala | Scala | bsd-3-clause | 18,277 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.state
import java.sql.Timestamp
import java.util.UUID
import org.apache.hadoop.conf.Configuration
import org.scalatest.BeforeAndAfter
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.expressions.{Attribute, BoundReference, Expression, GenericInternalRow, LessThanOrEqual, Literal, UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.catalyst.expressions.codegen.GeneratePredicate
import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark
import org.apache.spark.sql.execution.streaming.StatefulOperatorStateInfo
import org.apache.spark.sql.execution.streaming.StreamingSymmetricHashJoinHelper.LeftSide
import org.apache.spark.sql.streaming.StreamTest
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
class SymmetricHashJoinStateManagerSuite extends StreamTest with BeforeAndAfter {
before {
SparkSession.setActiveSession(spark) // set this before force initializing 'joinExec'
spark.streams.stateStoreCoordinator // initialize the lazy coordinator
}
SymmetricHashJoinStateManager.supportedVersions.foreach { version =>
test(s"StreamingJoinStateManager V${version} - all operations") {
testAllOperations(version)
}
}
SymmetricHashJoinStateManager.supportedVersions.foreach { version =>
test(s"SPARK-35689: StreamingJoinStateManager V${version} - " +
"printable key of keyWithIndexToValue") {
val keyExprs = Seq[Expression](
Literal(false),
Literal(10.0),
Literal("string"),
Literal(Timestamp.valueOf("2021-6-8 10:25:50")))
val keyGen = UnsafeProjection.create(keyExprs.map(_.dataType).toArray)
withJoinStateManager(inputValueAttribs, keyExprs, version) { manager =>
val currentKey = keyGen.apply(new GenericInternalRow(Array[Any](
false, 10.0, UTF8String.fromString("string"),
Timestamp.valueOf("2021-6-8 10:25:50").getTime)))
val projectedRow = manager.getInternalRowOfKeyWithIndex(currentKey)
assert(s"$projectedRow" == "[false,10.0,string,1623173150000]")
}
}
}
private def testAllOperations(stateFormatVersion: Int): Unit = {
withJoinStateManager(inputValueAttribs, joinKeyExprs, stateFormatVersion) { manager =>
implicit val mgr = manager
assert(get(20) === Seq.empty) // initially empty
append(20, 2)
      assert(get(20) === Seq(2)) // should return the first value correctly
assert(numRows === 1)
append(20, 3)
assert(get(20) === Seq(2, 3)) // should append new values
append(20, 3)
assert(get(20) === Seq(2, 3, 3)) // should append another copy if same value added again
assert(numRows === 3)
assert(get(30) === Seq.empty)
append(30, 1)
assert(get(30) === Seq(1))
      assert(get(20) === Seq(2, 3, 3)) // adding another key-value should not affect existing ones
assert(numRows === 4)
removeByKey(25)
assert(get(20) === Seq.empty)
assert(get(30) === Seq(1)) // should remove 20, not 30
assert(numRows === 1)
removeByKey(30)
assert(get(30) === Seq.empty) // should remove 30
assert(numRows === 0)
def appendAndTest(key: Int, values: Int*): Unit = {
values.foreach { value => append(key, value)}
require(get(key) === values)
}
appendAndTest(40, 100, 200, 300)
appendAndTest(50, 125)
appendAndTest(60, 275) // prepare for testing removeByValue
assert(numRows === 5)
removeByValue(125)
assert(get(40) === Seq(200, 300))
assert(get(50) === Seq.empty)
assert(get(60) === Seq(275)) // should remove only some values, not all
assert(numRows === 3)
append(40, 50)
assert(get(40) === Seq(50, 200, 300))
assert(numRows === 4)
removeByValue(200)
assert(get(40) === Seq(300))
assert(get(60) === Seq(275)) // should remove only some values, not all
assert(numRows === 2)
removeByValue(300)
assert(get(40) === Seq.empty)
assert(get(60) === Seq.empty) // should remove all values now
assert(numRows === 0)
}
}
val watermarkMetadata = new MetadataBuilder().putLong(EventTimeWatermark.delayKey, 10).build()
val inputValueSchema = new StructType()
.add(StructField("time", IntegerType, metadata = watermarkMetadata))
.add(StructField("value", BooleanType))
val inputValueAttribs = inputValueSchema.toAttributes
val inputValueAttribWithWatermark = inputValueAttribs(0)
val joinKeyExprs = Seq[Expression](Literal(false), inputValueAttribWithWatermark, Literal(10.0))
val inputValueGen = UnsafeProjection.create(inputValueAttribs.map(_.dataType).toArray)
val joinKeyGen = UnsafeProjection.create(joinKeyExprs.map(_.dataType).toArray)
def toInputValue(i: Int): UnsafeRow = {
inputValueGen.apply(new GenericInternalRow(Array[Any](i, false)))
}
def toJoinKeyRow(i: Int): UnsafeRow = {
joinKeyGen.apply(new GenericInternalRow(Array[Any](false, i, 10.0)))
}
def toValueInt(inputValueRow: UnsafeRow): Int = inputValueRow.getInt(0)
def append(key: Int, value: Int)(implicit manager: SymmetricHashJoinStateManager): Unit = {
// we only put matched = false for simplicity - StreamingJoinSuite will test the functionality
manager.append(toJoinKeyRow(key), toInputValue(value), matched = false)
}
def get(key: Int)(implicit manager: SymmetricHashJoinStateManager): Seq[Int] = {
manager.get(toJoinKeyRow(key)).map(toValueInt).toSeq.sorted
}
/** Remove keys (and corresponding values) where `time <= threshold` */
def removeByKey(threshold: Long)(implicit manager: SymmetricHashJoinStateManager): Unit = {
val expr =
LessThanOrEqual(
BoundReference(
1, inputValueAttribWithWatermark.dataType, inputValueAttribWithWatermark.nullable),
Literal(threshold))
val iter = manager.removeByKeyCondition(GeneratePredicate.generate(expr).eval _)
while (iter.hasNext) iter.next()
}
/** Remove values where `time <= threshold` */
def removeByValue(watermark: Long)(implicit manager: SymmetricHashJoinStateManager): Unit = {
val expr = LessThanOrEqual(inputValueAttribWithWatermark, Literal(watermark))
val iter = manager.removeByValueCondition(
GeneratePredicate.generate(expr, inputValueAttribs).eval _)
while (iter.hasNext) iter.next()
}
def numRows(implicit manager: SymmetricHashJoinStateManager): Long = {
manager.metrics.numKeys
}
def withJoinStateManager(
inputValueAttribs: Seq[Attribute],
joinKeyExprs: Seq[Expression],
stateFormatVersion: Int)(f: SymmetricHashJoinStateManager => Unit): Unit = {
withTempDir { file =>
val storeConf = new StateStoreConf()
val stateInfo = StatefulOperatorStateInfo(file.getAbsolutePath, UUID.randomUUID, 0, 0, 5)
val manager = new SymmetricHashJoinStateManager(
LeftSide, inputValueAttribs, joinKeyExprs, Some(stateInfo), storeConf, new Configuration,
partitionId = 0, stateFormatVersion)
try {
f(manager)
} finally {
manager.abortIfNeeded()
}
}
StateStore.stop()
}
}
| mahak/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/SymmetricHashJoinStateManagerSuite.scala | Scala | apache-2.0 | 8,053 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.management
import java.util
import scala.collection.JavaConverters._
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.execution.command.{AlterTableAddPartitionCommand, MetadataCommand}
import org.apache.spark.sql.execution.command.table.CarbonCreateTableCommand
import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.indexstore.PartitionSpec
import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, SegmentFileStore}
import org.apache.carbondata.core.metadata.schema.SchemaReader
import org.apache.carbondata.core.metadata.schema.partition.PartitionType
import org.apache.carbondata.core.metadata.schema.table.{DataMapSchema, TableInfo}
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema
import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusManager}
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.events.{OperationContext, OperationListenerBus, RefreshTablePostExecutionEvent, RefreshTablePreExecutionEvent}
/**
* Command to register carbon table from existing carbon table data
*/
case class RefreshCarbonTableCommand(
databaseNameOp: Option[String],
tableName: String)
extends MetadataCommand {
val LOGGER: LogService =
LogServiceFactory.getLogService(this.getClass.getName)
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
val databaseName = CarbonEnv.getDatabaseName(databaseNameOp)(sparkSession)
// Steps
// 1. get table path
// 2. perform the below steps
    // 2.1 check if the table is already registered with hive; if so, ignore it and continue
    // with the next schema
    // 2.2 register the table with hive; if the table being registered has aggregate tables,
    // then do the below steps
// 2.2.1 validate that all the aggregate tables are copied at the store location.
// 2.2.2 Register the aggregate tables
val tablePath = CarbonEnv.getTablePath(databaseNameOp, tableName)(sparkSession)
val identifier = AbsoluteTableIdentifier.from(tablePath, databaseName, tableName)
    // 2.1 check if the table is already registered with hive; if so, ignore it and continue
    // with the next schema
if (!sparkSession.sessionState.catalog.listTables(databaseName)
.exists(_.table.equalsIgnoreCase(tableName))) {
      // check the existence of the schema file to know it's a carbon table
val schemaFilePath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath)
      // if the schema file does not exist then the table is either a non-carbon table or a stale
      // carbon table
if (FileFactory.isFileExist(schemaFilePath, FileFactory.getFileType(schemaFilePath))) {
// read TableInfo
val tableInfo = SchemaReader.getTableInfo(identifier)
        // 2.2 register the table with hive; if the table being registered has
        // aggregate tables, then do the below steps
// 2.2.1 validate that all the aggregate tables are copied at the store location.
val dataMapSchemaList = tableInfo.getDataMapSchemaList
if (null != dataMapSchemaList && dataMapSchemaList.size() != 0) {
// validate all the aggregate tables are copied at the storeLocation
val allExists = validateAllAggregateTablePresent(databaseName,
dataMapSchemaList, sparkSession)
if (!allExists) {
// fail the register operation
val msg = s"Table registration with Database name [$databaseName] and Table name " +
s"[$tableName] failed. All the aggregate Tables for table [$tableName] is" +
s" not copied under database [$databaseName]"
LOGGER.audit(msg)
throwMetadataException(databaseName, tableName, msg)
}
// 2.2.1 Register the aggregate tables to hive
registerAggregates(databaseName, dataMapSchemaList)(sparkSession)
}
registerTableWithHive(databaseName, tableName, tableInfo, tablePath)(sparkSession)
// Register partitions to hive metastore in case of hive partitioning carbon table
if (tableInfo.getFactTable.getPartitionInfo != null &&
tableInfo.getFactTable.getPartitionInfo.getPartitionType == PartitionType.NATIVE_HIVE) {
registerAllPartitionsToHive(identifier, sparkSession)
}
} else {
LOGGER.audit(
s"Table registration with Database name [$databaseName] and Table name [$tableName] " +
s"failed." +
s"Table [$tableName] either non carbon table or stale carbon table under database " +
s"[$databaseName]")
}
} else {
LOGGER.audit(
s"Table registration with Database name [$databaseName] and Table name [$tableName] " +
s"failed." +
s"Table [$tableName] either already exists or registered under database [$databaseName]")
}
// update the schema modified time
metaStore.updateAndTouchSchemasUpdatedTime()
Seq.empty
}
/**
   * The method prepares the data type for a raw column
*
* @param column
* @return
*/
def prepareDataType(column: ColumnSchema): String = {
column.getDataType.getName.toLowerCase() match {
case "decimal" =>
"decimal(" + column.getPrecision + "," + column.getScale + ")"
case others =>
others
}
}
/**
   * The method registers the carbon table with hive
*
* @param dbName
* @param tableName
* @param tableInfo
* @param sparkSession
* @return
*/
def registerTableWithHive(dbName: String,
tableName: String,
tableInfo: TableInfo,
tablePath: String)(sparkSession: SparkSession): Any = {
val operationContext = new OperationContext
try {
val refreshTablePreExecutionEvent: RefreshTablePreExecutionEvent =
new RefreshTablePreExecutionEvent(sparkSession,
tableInfo.getOrCreateAbsoluteTableIdentifier())
OperationListenerBus.getInstance.fireEvent(refreshTablePreExecutionEvent, operationContext)
CarbonCreateTableCommand(tableInfo, ifNotExistsSet = false, tableLocation = Some(tablePath))
.run(sparkSession)
LOGGER.audit(s"Table registration with Database name [$dbName] and Table name " +
s"[$tableName] is successful.")
} catch {
case e: AnalysisException => throw e
case e: Exception =>
throw e
}
val refreshTablePostExecutionEvent: RefreshTablePostExecutionEvent =
new RefreshTablePostExecutionEvent(sparkSession,
tableInfo.getOrCreateAbsoluteTableIdentifier())
OperationListenerBus.getInstance.fireEvent(refreshTablePostExecutionEvent, operationContext)
}
/**
   * The method validates that all the aggregate tables are physically present
*
* @param dataMapSchemaList
* @param sparkSession
*/
def validateAllAggregateTablePresent(dbName: String, dataMapSchemaList: util.List[DataMapSchema],
sparkSession: SparkSession): Boolean = {
var fileExist = false
dataMapSchemaList.asScala.foreach(dataMap => {
val tableName = dataMap.getChildSchema.getTableName
val tablePath = CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession)
val schemaFilePath = CarbonTablePath.getSchemaFilePath(tablePath)
try {
fileExist = FileFactory.isFileExist(schemaFilePath, FileFactory.getFileType(schemaFilePath))
} catch {
case e: Exception =>
fileExist = false
}
if (!fileExist) {
        return fileExist
}
})
true
}
/**
   * The method iterates over all the aggregate tables and registers them to hive
*
* @param dataMapSchemaList
* @return
*/
def registerAggregates(dbName: String,
dataMapSchemaList: util.List[DataMapSchema])(sparkSession: SparkSession): Any = {
dataMapSchemaList.asScala.foreach(dataMap => {
val tableName = dataMap.getChildSchema.getTableName
if (!sparkSession.sessionState.catalog.listTables(dbName)
.exists(_.table.equalsIgnoreCase(tableName))) {
val tablePath = CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession)
val absoluteTableIdentifier = AbsoluteTableIdentifier
.from(tablePath, dbName, tableName)
val tableInfo = SchemaReader.getTableInfo(absoluteTableIdentifier)
registerTableWithHive(dbName, tableName, tableInfo, tablePath)(sparkSession)
}
})
}
/**
* Read all the partition information which is stored in each segment and add to
* the hive metastore
*/
private def registerAllPartitionsToHive(
absIdentifier: AbsoluteTableIdentifier,
sparkSession: SparkSession): Unit = {
val metadataDetails =
SegmentStatusManager.readLoadMetadata(
CarbonTablePath.getMetadataPath(absIdentifier.getTablePath))
// First read all partition information from each segment.
val allpartitions = metadataDetails.map{ metadata =>
if (metadata.getSegmentStatus == SegmentStatus.SUCCESS ||
metadata.getSegmentStatus == SegmentStatus.LOAD_PARTIAL_SUCCESS) {
val mapper = new SegmentFileStore(absIdentifier.getTablePath, metadata.getSegmentFile)
val specs = mapper.getLocationMap.asScala.map { case(location, fd) =>
          val updatedLoc =
if (fd.isRelative) {
absIdentifier.getTablePath + CarbonCommonConstants.FILE_SEPARATOR + location
} else {
location
}
new PartitionSpec(fd.getPartitions, updatedLoc)
}
Some(specs)
} else {
None
}
}.filter(_.isDefined).map(_.get)
val identifier =
TableIdentifier(absIdentifier.getTableName, Some(absIdentifier.getDatabaseName))
// Register the partition information to the hive metastore
allpartitions.foreach { segPartitions =>
val specs: Seq[(TablePartitionSpec, Option[String])] = segPartitions.map { indexPartitions =>
(indexPartitions.getPartitions.asScala.map{ p =>
val spec = p.split("=")
(spec(0), spec(1))
}.toMap, Some(indexPartitions.getLocation.toString))
}.toSeq
// Add partition information
AlterTableAddPartitionCommand(identifier, specs, true).run(sparkSession)
}
}
}
| jatin9896/incubator-carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala | Scala | apache-2.0 | 11,469 |
/**
* Copyright (C) 2015, Jaguar Land Rover
*/
import java.io.File
import com.twitter.bijection.Injection
import com.twitter.bijection.avro.{SpecificAvroCodecs, GenericAvroCodecs}
import kafka.TraceEntryRecord
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import org.joda.time.DateTime
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
* add your integration spec here.
* An integration test will fire up a whole play application in a real (or headless) browser
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {
"Application" should {
"serialize and deserialize trace entry using bijection and avro" in {
// new_askmecle.txt,2015-03-11T12:16:45.000+01:00,37.79991872746409906432235929024477,-122.4351238522933425846584727780817,false),31.7044277691886160671905518109585)
val entry = TraceEntryRecord.newBuilder()
.setId( "new_askmecle.txt" )
.setTimestamp( DateTime.now().getMillis )
.setLat(37.79991872746409906432235929024477)
.setLng(-122.4351238522933425846584727780817)
.setIsOccupied( false )
.setSpeed( 31.70442776918861606719055181095 ).build()
implicit val entryInjection = SpecificAvroCodecs[TraceEntryRecord]
val bytes = Injection[TraceEntryRecord, Array[Byte]]( entry )
val attempt = Injection.invert[TraceEntryRecord, Array[Byte]]( bytes )
attempt.get == entry
}
}
}
| PDXostc/rvi_big-data_api | test/IntegrationSpec.scala | Scala | mpl-2.0 | 1,535 |
/*
* Copyright (C) 2015 morinb
* https://github.com/morinb
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.bm.scalacompute.exception
/**
*
* @author morinb.
*/
class IllegalTokenException(val message: String) extends RuntimeException(message)
| morinb/scala-compute | src/main/scala/org/bm/scalacompute/exception/IllegalTokenException.scala | Scala | lgpl-3.0 | 1,015 |
package com.owlike.genson.ext.scala
import java.lang.reflect.{Type => JType, Modifier, ParameterizedType}
import java.io.{OutputStream, Writer, InputStream, StringReader, Reader => JReader}
import java.net.URL
import java.util.{List => JList, Map => JMap}
import scala.collection.JavaConversions._
import com.owlike.genson.{Context, GenericType, Factory, Converter}
import com.owlike.genson.reflect._
import com.owlike.genson.reflect.TypeUtil._
import com.owlike.genson.stream.{ObjectWriter, ObjectReader}
import com.owlike.genson.annotation.{JsonProperty, JsonCreator}
import com.owlike.genson.ext.GensonBundle
import com.owlike.genson.reflect.AbstractBeanDescriptorProvider.ContextualConverterFactory
import com.owlike.genson.reflect.BeanMutatorAccessorResolver.{StandardMutaAccessorResolver, CompositeResolver}
import java.util
class ScalaBundle extends GensonBundle {
def configure(builder: GensonBuilder) {
builder.useConstructorWithArguments(true)
.withConverterFactory(new TraversableConverterFactory())
.withConverterFactory(new MapConverterFactory())
.withConverterFactory(ScalaUntypedConverterFactory)
.withConverterFactory(new TupleConverterFactory())
.withConverterFactory(new OptionConverterFactory())
}
override def createBeanDescriptorProvider(contextualConverterFactory: ContextualConverterFactory,
beanPropertyFactory: BeanPropertyFactory,
propertyResolver: BeanMutatorAccessorResolver,
propertyNameResolver: PropertyNameResolver,
builder: GensonBuilder): BeanDescriptorProvider = {
val caseClassPropertyResolver = new CompositeResolver(util.Arrays.asList(
new StandardMutaAccessorResolver(VisibilityFilter.PRIVATE, VisibilityFilter.NONE, VisibilityFilter.PRIVATE),
propertyResolver)
)
new CaseClassDescriptorProvider(contextualConverterFactory, beanPropertyFactory, caseClassPropertyResolver, propertyNameResolver, true)
}
}
object ScalaBundle {
def apply() = new ScalaBundle()
protected[scala] def getTraversableType(genType: JType): JType = {
if (genType.isInstanceOf[Class[_]]) {
val clazz: Class[_] = genType.asInstanceOf[Class[_]]
if (clazz.isArray) return clazz.getComponentType
else if (classOf[Traversable[_]].isAssignableFrom(clazz)) {
val expandedType = expandType(lookupGenericType(classOf[Traversable[_]], clazz), clazz)
return typeOf(0, expandedType)
}
} else if (genType.isInstanceOf[ParameterizedType] && classOf[Traversable[_]].isAssignableFrom(getRawClass(genType))) {
return typeOf(0, genType)
}
throw new IllegalArgumentException("Could not extract parametrized type, are you sure it is a Traversable or an Array?")
}
}
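/**
 * Adds Manifest-driven `toJson`/`fromJson` helpers on top of a [[Genson]] instance, so that
 * parameterized Scala types keep their type arguments when converted to Java types.
 *
 * A minimal usage sketch (the builder wiring below is an assumption, not taken from this file):
 *
 * {{{
 *   val genson = new ScalaGenson(new GensonBuilder().withBundle(ScalaBundle()).create())
 *   val json   = genson.toJson(Map("a" -> 1))
 *   val back   = genson.fromJson[Map[String, Int]](json)
 * }}}
 */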
class ScalaGenson(val genson: Genson) extends AnyVal {
def toJson[T: Manifest](value: T): String = genson.serialize(value, GenericType.of(toJavaType))
def toJsonBytes[T: Manifest](value: T): Array[Byte] = genson.serializeBytes(value, GenericType.of(toJavaType))
def toJson[T: Manifest](value: T, writer: Writer): Unit = toJson(value, genson.createWriter(writer))
def toJson[T: Manifest](value: T, os: OutputStream): Unit = toJson(value, genson.createWriter(os))
def toJson[T: Manifest](value: T, writer: ObjectWriter): Unit = genson.serialize(value, toJavaType, writer, new Context(genson))
def fromJson[T: Manifest](json: String): T = fromJson(genson.createReader(new StringReader(json)))
def fromJson[T: Manifest](jsonUrl: URL): T = fromJson(genson.createReader(jsonUrl.openStream()))
def fromJson[T: Manifest](json: JReader): T = fromJson(genson.createReader(json))
def fromJson[T: Manifest](json: InputStream): T = fromJson(genson.createReader(json))
def fromJson[T: Manifest](json: Array[Byte]): T = fromJson(genson.createReader(json))
def fromJson[T: Manifest](reader: ObjectReader): T = {
genson.deserialize(GenericType.of(toJavaType), reader, new Context(genson)).asInstanceOf[T]
}
private def toJavaType(implicit m: Manifest[_]): JType = {
if (m.typeArguments.nonEmpty) {
new ScalaParameterizedType(None, m.runtimeClass, m.typeArguments.map(m => toJavaType(m)).toArray)
} else {
if (m.runtimeClass.isPrimitive) wrap(m.runtimeClass)
else m.runtimeClass
}
}
}
class CaseClassDescriptorProvider(ctxConverterFactory: AbstractBeanDescriptorProvider.ContextualConverterFactory,
propertyFactory: BeanPropertyFactory,
mutatorAccessorResolver: BeanMutatorAccessorResolver,
nameResolver: PropertyNameResolver,
useOnlyConstructorFields: Boolean)
extends BaseBeanDescriptorProvider(ctxConverterFactory, propertyFactory, mutatorAccessorResolver, nameResolver, false, true, true) {
override def provide[T](ofClass: Class[T], ofType: JType, genson: Genson): BeanDescriptor[T] = {
if (classOf[Product].isAssignableFrom(ofClass)) super.provide(ofClass, ofType, genson)
else null
}
protected override def checkAndMerge(ofType: JType, creators: JList[BeanCreator]): BeanCreator = {
val ctr = super.checkAndMerge(ofType, creators)
if (creators.size() > 1 && !ctr.isAnnotationPresent(classOf[JsonCreator]))
throw new JsonBindingException("Case classes with multiple constructor must indicate what constructor to use with @JsonCreator annotation.")
ctr
}
protected override def mergeAccessorsWithCreatorProperties(ofType: JType, accessors: JList[PropertyAccessor], creator: BeanCreator) {
super.mergeAccessorsWithCreatorProperties(ofType, accessors, creator)
if (useOnlyConstructorFields) {
val ctrProps = creator.getProperties
      // don't serialize properties that are not used in a constructor and are final and not annotated with JsonProperty
val it = accessors.iterator()
while (it.hasNext) {
val prop = it.next()
if (!ctrProps.containsKey(prop.getName)
&& isFinal(prop)
&& prop.getAnnotation(classOf[JsonProperty]) == null) it.remove()
}
}
}
private def isFinal(prop: PropertyAccessor) = (prop.getModifiers & Modifier.FINAL) != 0
}
private class ScalaParameterizedType(val ownerType: Option[JType], val rawType: JType, val typeArgs: Array[JType])
extends ParameterizedType {
def getOwnerType: JType = ownerType.getOrElse(null)
def getRawType: JType = rawType
def getActualTypeArguments: Array[JType] = typeArgs
def canEqual(other: Any): Boolean = other.isInstanceOf[ScalaParameterizedType]
override def equals(other: Any): Boolean = other match {
case that: ScalaParameterizedType =>
(that canEqual this) &&
ownerType == that.ownerType &&
rawType == that.rawType &&
        typeArgs.sameElements(that.typeArgs)
case _ => false
}
override def hashCode(): Int = {
    val state = Seq(ownerType, rawType, typeArgs.toSeq)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
} | keithdmoore/genson | src/main/scala/com/owlike/genson/ext/scala/ScalaBundle.scala | Scala | apache-2.0 | 7,349 |
package p1 {
object InlineHolder {
@inline def inlinable = (p1.PackageProtectedJava_1.protectedMethod(): @noinline) + 1
}
}
object O {
@noinline
def x = p1.InlineHolder.inlinable
}
object Test {
def main(args: Array[String]): Unit = {
println(O.x)
}
}
| martijnhoekstra/scala | test/files/run/t7582b/InlineHolder_2.scala | Scala | apache-2.0 | 274 |
package models.organization
import com.artclod.slick.JodaUTC
import models.support._
import org.joda.time.DateTime
object TestCourse {
def apply(name: String = "course",
organizationId: OrganizationId,
owner: UserId,
editCode: String = "editCode",
viewCode: Option[String] = Some("viewCode"),
date: DateTime = JodaUTC.zero) = Course(null, name, organizationId, owner, editCode, viewCode, date, date)
} | kristiankime/web-education-games | test/models/organization/TestCourse.scala | Scala | mit | 435 |
//
// Copyright 2014-2020 Paytronix Systems, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.paytronix.utils.interchange.format.json
import java.io.StringWriter
import com.fasterxml.jackson.core.{JsonFactory, JsonGenerator, JsonParser, JsonToken}
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.SpecificationWithJUnit
import org.specs2.execute.{Result => SpecsResult}
import org.specs2.matcher.Matcher
import com.paytronix.utils.interchange.base.{CoderFailure, CoderResult, Receiver, formatFailedPath}
import com.paytronix.utils.scala.result.{FailedG, Result, ResultG, unless}
import Arbitrary.arbitrary
object arbitraries {
val nonnumericStr = Gen.frequency (
(1, ""),
(5, Gen.alphaStr),
(5, Arbitrary.arbString.arbitrary.filter(s => !s.forall(Character.isDigit)))
)
val safeJavaBigDecimals = arbitrary[BigDecimal].map(_.bigDecimal).filter { bd =>
try { new java.math.BigDecimal(bd.toString); true }
catch { case nfe: NumberFormatException => false }
}
val safeScalaBigDecimals = arbitrary[BigDecimal].filter { bd =>
try { new java.math.BigDecimal(bd.bigDecimal.toString); true }
catch { case nfe: NumberFormatException => false }
}
implicit val arbJavaMathBigDecimals = Arbitrary(arbitrary[BigDecimal].map(_.bigDecimal))
implicit val arbJavaMathBigInteger = Arbitrary(arbitrary[BigInt].map(_.bigInteger))
}
trait JsonMatchers { self: SpecificationWithJUnit =>
def withParser[A](in: String)(f: InterchangeJsonParser => A): A = {
val jp = new JsonFactory().createParser(in)
val ijp = new InterchangeJacksonJsonParser(jp)
f(ijp)
}
def withGenerator[A](f: InterchangeJsonGenerator => A): (A, String) = {
val sw = new StringWriter
val jg = new JsonFactory().createGenerator(sw)
val ijg = new InterchangeJacksonJsonGenerator(jg)
val res = f(ijg)
jg.close()
(res, sw.toString)
}
def encodeField[A](encoder: JsonEncoder[A], value: A): CoderResult[String] = {
val (result, string) = withGenerator { ijg =>
ijg.writeFieldName("field")
encoder.run(value, ijg)
}
result.map { _ => string }
}
def decodeMissing[A](decoder: JsonDecoder[A]): CoderResult[A] =
withParser("") { ijp =>
ijp.currentValueIsMissing()
val rec = new Receiver[A]
decoder.run(ijp, rec) map { _ => rec.value }
}
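  // Decodes `s` after sandwiching it between two sentinel numbers inside a JSON array. The
  // assertions around the decoder call verify that both sentinels and the closing bracket can
  // still be read back, i.e. the decoder consumed exactly the tokens of its own value and left
  // the parser positioned where the surrounding code expects it.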
def decode[A](decoder: JsonDecoder[A])(s: String): CoderResult[A] =
withParser(s"[11111,$s,22222]") { ijp =>
val rec = new Receiver[A]
for {
_ <- ijp.advanceTokenUnguarded()
_ <- unless(ijp.currentToken == JsonToken.START_ARRAY)(FailedG("expected START_ARRAY not " + ijp.currentToken, CoderFailure.terminal) )
_ <- ijp.advanceTokenUnguarded()
_ <- unless(ijp.currentToken == JsonToken.VALUE_NUMBER_INT)(FailedG("expected VALUE_NUMBER_INT not " + ijp.currentToken, CoderFailure.terminal) )
_ <- unless(ijp.intValue ==== 11111)(FailedG("expected number 11111 not " + ijp.intValue, CoderFailure.terminal) )
_ <- ijp.advanceToken()
_ <- decoder.run(ijp, rec)
_ <- ijp.advanceTokenUnguarded()
_ <- unless(ijp.currentToken == JsonToken.VALUE_NUMBER_INT)(FailedG("expected VALUE_NUMBER_INT not " + ijp.currentToken, CoderFailure.terminal) )
_ <- unless(ijp.intValue ==== 22222)(FailedG("expected number 22222 not " + ijp.intValue, CoderFailure.terminal) )
_ <- ijp.advanceTokenUnguarded()
_ <- unless(ijp.currentToken == JsonToken.END_ARRAY)(FailedG("expected END_ARRAY not " + ijp.currentToken, CoderFailure.terminal) )
} yield rec.value
}
def checkMissing[A](decoder: JsonDecoder[A]): SpecsResult =
(decoder.fromString("null") must beMissingValue).updateMessage("explicit null: " + _) and
(formatFailedPath(decodeMissing(decoder)) must beMissingValue).updateMessage("missing value: " + _) and
(withParser("") { ijp =>
val rec = new Receiver[A]
ijp.advanceToken()
decoder.run(ijp, rec) must beLike { case f@FailedG(_, _) =>
f.message must beMatching("expected .*? but instead got EOF")
}
}).updateMessage("exhausted input: " + _)
def beMissingValue[E, A]: Matcher[ResultG[E, A]] =
beLike { case f@FailedG(_, _) =>
f.message must beMatching("At source location \\\\d+:\\\\d+: required but missing")
}
def encodeString(s: String): String = {
val sw = new StringWriter
val jg = new JsonFactory().createGenerator(sw)
jg.writeString(s)
jg.close()
sw.toString
}
}
| paytronix/utils-open | interchange/json/src/test/scala/com/paytronix/utils/interchange/format/json/utils.scala | Scala | apache-2.0 | 5,395 |
/* Copyright 2009-2016 EPFL, Lausanne */
object IfExpr1 {
def foo(): Int = {
var a = 1
var b = 2
if({a = a + 1; a != b})
a = a + 3
else
b = a + b
a
} ensuring(_ == 3)
}
| epfl-lara/leon | src/test/resources/regression/verification/xlang/invalid/IfExpr1.scala | Scala | gpl-3.0 | 208 |
package com.twitter.util.tunable
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.{JsonDeserializer, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.twitter.util.{Return, Throw, Try}
import java.net.URL
import scala.jdk.CollectionConverters._
object JsonTunableMapper {
import com.twitter.util.tunable.json._
private case class JsonTunable(
@JsonProperty(required = true) id: String,
@JsonProperty(value = "type", required = true) valueType: Class[Any],
@JsonProperty(required = true) value: Any,
@JsonProperty(required = false) comment: String)
private case class JsonTunables(@JsonProperty(required = true) tunables: Seq[JsonTunable])
/**
* The Deserializers that [[JsonTunableMapper]] uses by default, in addition to Scala data type
* deserializers afforded by `com.fasterxml.jackson.module.scala.DefaultScalaModule`.
*
* These deserializers are:
*
* - `com.twitter.util.tunable.json.DurationFromString`
* - `com.twitter.util.tunable.json.StorageUnitFromString`
*/
val DefaultDeserializers: Seq[JsonDeserializer[_]] =
Seq(DurationFromString, StorageUnitFromString)
/**
* Create a new [[JsonTunableMapper]], using the provided deserializers `deserializers`.
*/
def apply(deserializers: Seq[JsonDeserializer[_ <: Any]]): JsonTunableMapper =
new JsonTunableMapper(deserializers)
/**
* Create a new [[JsonTunableMapper]], using the default deserializers, [[DefaultDeserializers]]
*/
def apply(): JsonTunableMapper =
apply(JsonTunableMapper.DefaultDeserializers)
/**
* Construct String paths for JSON files starting with `root` using environmentOpt and instanceOpt
* in priority order. Where environementOpt and instanceOpt are available (env, instance),
* paths are ordered:
*
* i. \\$root/\\$env/instance-\\$id.json
* i. \\$root/\\$env/instances.json
* i. \\$root/instance-\\$id.json
* i. \\$root/instances.json
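   *
   * For example, an illustrative call `pathsByPriority("tunables/", Some("staging"), Some(3))`
   * yields, in priority order:
   *
   * i. tunables/staging/instance-3.json
   * i. tunables/staging/instances.json
   * i. tunables/instance-3.json
   * i. tunables/instances.json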
*/
def pathsByPriority(
root: String,
environmentOpt: Option[String],
instanceIdOpt: Option[Long]
): Seq[String] = {
val template = s"${root}%sinstance%s.json"
val envPathParams = (environmentOpt, instanceIdOpt) match {
case (Some(env), Some(id)) => Seq(Seq(s"$env/", s"-$id"), Seq(s"$env/", "s"))
case (Some(env), None) => Seq(Seq(s"$env/", "s"))
case (None, _) => Seq.empty[Seq[String]]
}
val instancePathParams = instanceIdOpt match {
case Some(instanceId) => Seq(Seq("", s"-$instanceId"), Seq("", "s"))
case None => Seq(Seq("", "s"))
}
val pathParams = envPathParams ++ instancePathParams
pathParams.map(params => template.format(params: _*))
}
}
/**
* Parses a given JSON string into a [[TunableMap]]. The expected format is:
*
* {{{
* "tunables":
* [
* {
* "id" : "\\$id1",
* "value" : \\$value,
* "type" : "\\$class"
* },
* {
* "id" : "\\$id2",
* "value" : \\$value,
* "type" : "\\$class",
* "comment": "optional comment"
* }
* ]
* }}}
*
* Where \\$id1 and \\$id2 are unique identifiers used to access the [[Tunable]], \\$value is the value,
* and \\$class is the fully-qualified class name (e.g. com.twitter.util.Duration)
*
* If the JSON is invalid, or contains duplicate ids for [[Tunable]]s, `parse` will
* return a [[Throw]]. Otherwise, `parse` returns [[Return Return[TunableMap]]
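 *
 * A minimal parsing sketch (the id, value and type below are illustrative only):
 *
 * {{{
 * val json =
 *   """{"tunables": [{"id": "example.timeoutMs", "value": 200, "type": "java.lang.Integer"}]}"""
 * val tunableMap: Try[TunableMap] = JsonTunableMapper().parse(json)
 * }}}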
*/
final class JsonTunableMapper(deserializers: Seq[JsonDeserializer[_ <: Any]]) {
import JsonTunableMapper._
private[this] object DeserializationModule extends SimpleModule {
deserializers.foreach { jd => addDeserializer(jd.handledType().asInstanceOf[Class[Any]], jd) }
}
private[this] val mapper: ObjectMapper =
new ObjectMapper().registerModules(DefaultScalaModule, DeserializationModule)
private[this] def jsonTunablesToTunableMap(
jsonTunables: JsonTunables,
source: String
): TunableMap = {
val ids = jsonTunables.tunables.map(_.id)
val uniqueIds = ids.distinct
if (ids.size != uniqueIds.size)
throw new IllegalArgumentException(s"Duplicate Tunable ids found: ${ids.mkString(",")}")
if (jsonTunables.tunables.isEmpty) {
NullTunableMap
} else {
val tunableMap = TunableMap.newMutable(source)
jsonTunables.tunables.map { jsonTunable =>
val valueAsValueType = mapper.convertValue(jsonTunable.value, jsonTunable.valueType)
tunableMap.put(jsonTunable.id, jsonTunable.valueType, valueAsValueType)
}
tunableMap
}
}
/**
* Parse the contents of the given file URL `url` into a [[TunableMap]]
*/
private[tunable] def parse(url: URL): Try[TunableMap] = Try {
jsonTunablesToTunableMap(mapper.readValue(url, classOf[JsonTunables]), url.toString)
}
// Exposed for testing
private[tunable] def tunableMapForResources(id: String, paths: List[URL]): TunableMap =
paths match {
case Nil =>
NullTunableMap
case path :: Nil =>
parse(path) match {
case Throw(t) =>
throw new IllegalArgumentException(
s"Failed to parse Tunable configuration file for $id, from $path",
t
)
case Return(tunableMap) =>
tunableMap
}
case _ =>
throw new IllegalArgumentException(
s"Found multiple Tunable configuration files for $id: ${paths.mkString(", ")}"
)
}
/**
* Load and parse the JSON file located at `path` in the application's resources.
*
* If no configuration files exist, return [[NullTunableMap]].
* If multiple configuration files exists, return `IllegalArgumentException`
* If the configuration file cannot be parsed, return `IllegalArgumentException`
*/
def loadJsonTunables(id: String, path: String): TunableMap = {
val classLoader = getClass.getClassLoader
val files = classLoader.getResources(path).asScala.toList
tunableMapForResources(id, files)
}
/**
* Parse the given JSON string `json` into a [[TunableMap]]
*/
def parse(json: String): Try[TunableMap] = Try {
jsonTunablesToTunableMap(mapper.readValue(json, classOf[JsonTunables]), "JSON String")
}
}
| twitter/util | util-tunable/src/main/scala/com/twitter/util/tunable/JsonTunableMapper.scala | Scala | apache-2.0 | 6,355 |
package com.arcusys.valamis.persistence.impl.scorm.storage
import java.sql.Connection
import com.arcusys.valamis.lesson.scorm.model.manifest.{ExitConditionRule, PostConditionRule, PreConditionRule, Sequencing}
import com.arcusys.valamis.lesson.scorm.storage.sequencing.{ChildrenSelectionStorage, SequencingPermissionsStorage, _}
import com.arcusys.valamis.persistence.common.SlickProfile
import com.arcusys.valamis.persistence.impl.scorm.schema._
import org.scalatest.{BeforeAndAfter, FunSuite}
import scala.slick.driver.H2Driver
import scala.slick.driver.H2Driver.simple._
/**
* Created by eboystova on 10.05.16.
*/
class SequencingPermissionsStorageTest extends FunSuite
with ChildrenSelectionTableComponent
with ConditionRuleTableComponent
with SequencingTableComponent
with SeqPermissionsTableComponent
with SequencingTrackingTableComponent
with ObjectiveTableComponent
with ObjectiveMapTableComponent
with RollupContributionTableComponent
with RollupRuleTableComponent
with SlickProfile
with BeforeAndAfter {
val db = Database.forURL("jdbc:h2:mem:SequencingPermissionsTest", driver = "org.h2.Driver")
override val driver = H2Driver
val storages = new StorageFactory(db, driver)
val sequencingPermissionsStorage = storages.getSequencingPermissionsStorage
val sequencingStorage = storages.getSequencingStorage
var connection: Connection = _
// db data will be released after connection close
before {
connection = db.source.createConnection()
createSchema()
}
after {
connection.close()
}
def createSchema() {
import driver.simple._
db.withSession { implicit session =>
sequencingTQ.ddl.create
seqPermissionsTQ.ddl.create
rollupContributionTQ.ddl.create
objectiveTQ.ddl.create
objectiveMapTQ.ddl.create
childrenSelectionTQ.ddl.create
sequencingTrackingTQ.ddl.create
conditionRuleTQ.ddl.create
rollupRuleTQ.ddl.create
}
}
test("execute 'get' without errors") {
sequencingStorage.create(123, "456", Sequencing.Default)
import driver.simple._
db.withSession { implicit session =>
val seq = sequencingTQ.filter(a => a.activityId === "456" && a.packageId === 123L).firstOption
assert(seq.isDefined)
      val seqPermissions = sequencingPermissionsStorage.get(seq.get.id.get)
      assert(seqPermissions.isDefined)
}
}
test("execute 'delete' without errors") {
sequencingStorage.create(123, "456", Sequencing.Default)
import driver.simple._
db.withSession { implicit session =>
val seq = sequencingTQ.filter(a => a.activityId === "456" && a.packageId === 123L).firstOption
assert(seq.isDefined)
sequencingPermissionsStorage.delete(seq.get.id.get)
      val isSeqPermissions = seqPermissionsTQ.filter(_.sequencingId === seq.get.id.get).exists.run
      assert(!isSeqPermissions)
}
}
}
| igor-borisov/valamis | valamis-slick-persistence/src/test/scala/com/arcusys/valamis/persistence/impl/scorm/storage/SequencingPermissionsStorageTest.scala | Scala | gpl-3.0 | 2,885 |
package com.excilys.computerdatabase.gatling.process
import com.typesafe.config.ConfigFactory
import io.gatling.core.Predef._
import io.gatling.http.Predef._
/**
* Created by Cédric Cousseran on 29/03/16.
* Delete the computer which was edited before.
*/
object Delete {
val config = ConfigFactory.load()
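  // Chain: search the dashboard for the computer renamed by the edit scenario, capture its id
  // from the results table, then submit the delete form with that id.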
val delete = exec(http("Delete: Search for delete")
.get(config.getString("application.urls.dashboardPage"))
    .queryParam(config.getString("application.urls.param.search"), "${addComputerName}_edited")
.check(
status.is(200),
css("#results input", "value").saveAs("computerId")
))
.pause(3, 10)
.exec(http("Delete: Delete post")
    .post(config.getString("application.urls.deletePost"))
    .formParam(config.getString("application.urls.form.delete.selection"), "${computerId}"))
.pause(3, 10)
}
| ublanquet/training-java | gatling-test/src/test/scala/com/excilys/computerdatabase/gatling/process/Delete.scala | Scala | apache-2.0 | 878 |
package org.scalatest.tools
import scala.collection.mutable.ListBuffer
import org.apache.tools.ant.BuildException
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.Path
import org.apache.tools.ant.AntClassLoader
import org.apache.tools.ant.taskdefs.Java
/**
* <p>
* An ant task to run ScalaTest. Instructions on how to specify various
* options are below. See the scaladocs for the <code>Runner</code> class for a description
* of what each of the options does.
* </p>
*
* <p>
* To use the ScalaTest ant task, you must first define it in your ant file using <code>taskdef</code>.
* Here's an example:
* </p>
*
* <pre class="stExamples">
* <path id="scalatest.classpath">
* <pathelement location="${lib}/scalatest.jar"/>
* <pathelement location="${lib}/scala-library.jar"/>
* </path>
*
* <target name="main" depends="dist">
* <taskdef name="scalatest" classname="org.scalatest.tools.ScalaTestAntTask">
* <classpath refid="scalatest.classpath"/>
* </taskdef>
*
* <scalatest ...
* </target>
* </pre>
*
* <p>
* Once defined, you use the task by specifying information in a <code>scalatest</code> element:
* </p>
*
* <pre class="stExamples">
* <scalatest ...>
* ...
* </scalatest>
* </pre>
*
* <p>
* You can place key value pairs into the <code>configMap</code> using nested <code><config></code> elements,
* like this:
* </p>
*
* <pre class="stExamples">
* <scalatest>
* <config name="dbname" value="testdb"/>
* <config name="server" value="192.168.1.188"/>
* </pre>
*
* <p>
* You can specify a runpath using either a <code>runpath</code> attribute and/or nested
* <code><runpath></code> elements, using standard ant path notation:
* </p>
*
* <pre class="stExamples">
* <scalatest runpath="serviceuitest-1.1beta4.jar:myjini">
* </pre>
*
* or
*
* <pre class="stExamples">
* <scalatest>
* <runpath>
* <pathelement location="serviceuitest-1.1beta4.jar"/>
* <pathelement location="myjini"/>
* </runpath>
* </pre>
*
* <p>
* To add a URL to your runpath, use a <code><runpathurl></code> element
* (since ant paths don't support URLs):
* </p>
*
* <pre class="stExamples">
* <scalatest>
* <runpathurl url="http://foo.com/bar.jar"/>
* </pre>
*
* <p>
* You can specify reporters using nested <code><reporter></code> elements, where the <code>type</code>
* attribute must be one of the following:
* </p>
*
* <ul>
* <li> <code>graphic</code> </li>
* <li> <code>file</code> </li>
* <li> <code>junitxml</code> </li>
* <li> <code>dashboard</code> </li>
* <li> <code>stdout</code> </li>
* <li> <code>stderr</code> </li>
* <li> <code>reporterclass</code> </li>
* </ul>
*
* <p>
* Each may include a <code>config</code> attribute to specify the reporter configuration.
* Types <code>file</code>, <code>junitxml</code>, <code>dashboard</code>, and <code>reporterclass</code> require additional attributes
* <code>filename</code>, <code>directory</code>, and <code>classname</code>, respectively:
* </p>
*
* <pre class="stExamples">
* <scalatest>
* <reporter type="stdout" config="FAB"/>
* <reporter type="file" filename="test.out"/>
* <reporter type="junitxml" directory="target"/>
* <reporter type="dashboard" directory="target"/>
* <reporter type="reporterclass" classname="my.ReporterClass"/>
* </pre>
*
* <p>
* For reporter type 'dashboard', an optional <code>numfiles</code> attribute may be
* included to specify the number of old summary and duration files to be archived.
* Default is 2.
* </p>
*
* <p>
* Specify tags to include and/or exclude using <code><tagsToInclude></code> and
* <code><tagsToExclude></code> elements, like this:
* </p>
*
* <pre class="stExamples">
* <scalatest>
* <tagsToInclude>
* CheckinTests
* FunctionalTests
* </tagsToInclude>
*
* <tagsToExclude>
* SlowTests
* NetworkTests
* </tagsToExclude>
* </pre>
*
* <p>
* To specify suites to run, use either a <code>suite</code> attribute or nested
* <code><suite></code> elements:
* </p>
*
* <pre class="stExamples">
* <scalatest suite="com.artima.serviceuitest.ServiceUITestkit">
* </pre>
*
* <p>
* or
* </p>
*
* <pre class="stExamples">
* <scalatest>
* <suite classname="com.artima.serviceuitest.ServiceUITestkit"/>
* </pre>
*
* <p>
* To specify suites using members-only or wildcard package names, use
* either the <code>membersonly</code> or <code>wildcard</code> attributes, or nested
* <code><membersonly></code> or <code><wildcard></code> elements:
* </p>
*
* <pre class="stExamples">
* <scalatest membersonly="com.artima.serviceuitest">
* </pre>
*
* <p>
* or
* </p>
*
* <pre class="stExamples">
* <scalatest wildcard="com.artima.joker">
* </pre>
*
* <p>
* or
* </p>
*
* <pre class="stExamples">
* <scalatest>
* <membersonly package="com.artima.serviceuitest"/>
* <wildcard package="com.artima.joker"/>
* </pre>
*
* <p>
* Use attribute <code>suffixes="[pipe-delimited list of suffixes]"</code>
* to specify that only classes whose names end in one of the specified suffixes
* should be included in discovery searches for Suites to test. This can
* be used to improve discovery time or to limit the scope of a test. E.g.:
* </p>
*
* <pre class="stExamples">
* <scalatest suffixes="Spec|Suite">
* </pre>
*
* <p>
* Use attribute <code>parallel="true"</code> to specify parallel execution of suites.
* (If the <code>parallel</code> attribute is left out or set to false, suites will be executed sequentially by one thread.)
* When <code>parallel</code> is true, you can include an optional <code>numthreads</code> attribute to specify the number
* of threads to be created in thread pool (<em>e.g.</em>, <code>numthreads="10"</code>).
* </p>
*
* <p>
* Use attribute <code>haltonfailure="true"</code> to cause ant to fail the
* build if there's a test failure.
* </p>
*
* <p>
* Use attribute <code>fork="true"</code> to cause ant to run the tests in
* a separate process.
* </p>
*
* <p>
* When <code>fork</code> is <code>true</code>, attribute <code>maxmemory</code> may be used to specify
* the maximum memory size that will be passed to the forked jvm. For example, the following setting
* will cause <code>"-Xmx1280M"</code> to be passed to the java command used to
* run the tests.
* </p>
*
* <pre class="stExamples">
* <scalatest maxmemory="1280M">
* </pre>
*
* <p>
* When <code>fork</code> is true, nested <code><jvmarg></code> elements may be used
* to pass additional arguments to the forked jvm.
* For example, if you are running into 'PermGen space' memory errors,
* you could add the following <code>jvmarg</code> to bump up the JVM's <code>MaxPermSize</code> value:
* </p>
*
* <pre class="stExamples">
* <jvmarg value="-XX:MaxPermSize=128m"/>
* </pre>
*
* @author George Berger
*/
class ScalaTestAntTask extends Task {
private var includes: String = null
private var excludes: String = null
private var maxMemory: String = null
private var suffixes: String = null
private var parallel = false
private var haltonfailure = false
private var fork = false
private var numthreads = 0
private val runpath = new ListBuffer[String]
private val jvmArgs = new ListBuffer[String]
private val suites = new ListBuffer[String]
private val membersonlys = new ListBuffer[String]
private val wildcards = new ListBuffer[String]
private val testNGSuites = new ListBuffer[String]
private val reporters = new ListBuffer[ReporterElement]
private val properties = new ListBuffer[NameValuePair]
/**
* Executes the task.
*/
override def execute {
val args = new ListBuffer[String]
addSuiteArgs(args)
addReporterArgs(args)
addPropertyArgs(args)
addIncludesArgs(args)
addExcludesArgs(args)
addRunpathArgs(args)
addTestNGSuiteArgs(args)
addParallelArg(args)
addSuffixesArg(args)
val argsArray = args.toArray
val success = if (fork) javaTaskRunner(args.toList)
else Runner.run(argsArray)
if (!success && haltonfailure)
throw new BuildException("ScalaTest run failed.")
}
private def javaTaskRunner(args: List[String]): Boolean = {
val java = new Java
java.bindToOwner(this)
java.init()
java.setFork(true)
java.setClassname("org.scalatest.tools.Runner")
val classLoader = getClass.getClassLoader.asInstanceOf[AntClassLoader]
java.setClasspath(new Path(getProject, classLoader.getClasspath))
if (maxMemory != null) java.createJvmarg.setValue("-Xmx" + maxMemory)
for (jvmArg <- jvmArgs)
java.createJvmarg.setValue(jvmArg)
for (arg <- args)
java.createArg.setValue(arg)
val result = java.executeJava
return (result == 0)
}
//
// Adds '-p runpath' arg pair to args list if a runpath
// element or attribute was specified for task.
//
private def addRunpathArgs(args: ListBuffer[String]) {
if (runpath.size > 0) {
args += "-p"
args += getSpacedOutPathStr(runpath.toList)
}
}
private def addTestNGSuiteArgs(args: ListBuffer[String]) {
if (testNGSuites.size > 0) {
args += "-t"
args += getSpacedOutPathStr(testNGSuites.toList)
}
}
//
// Adds '-c' arg to args list if 'parallel' attribute was
// specified true for task.
//
private def addParallelArg(args: ListBuffer[String]) {
if (parallel) {
args += "-c" + (if (numthreads > 0) ("" + numthreads) else "")
}
}
//
// Adds '-q' arg to args list if 'suffixes' attribute was
// specified for task.
//
private def addSuffixesArg(args: ListBuffer[String]) {
if (suffixes != null) {
args += "-q"
args += suffixes
}
}
//
// Adds '-n includes-list' arg pair to args list if a <tagsToInclude>
// element was supplied for task.
//
private def addIncludesArgs(args: ListBuffer[String]) {
if (includes != null) {
args += "-n"
args += singleSpace(includes)
}
}
//
// Adds '-l excludes-list' arg pair to args list if an <excludes>
// element was supplied for task.
//
private def addExcludesArgs(args: ListBuffer[String]) {
if (excludes != null) {
args += "-l"
args += singleSpace(excludes)
}
}
//
// Adds '-Dname=value' argument to args list for each nested
// <property> element supplied for task.
//
private def addPropertyArgs(args: ListBuffer[String]) {
for (pair <- properties)
args += "-D" + pair.getName + "=" + pair.getValue
}
//
// Adds '-s classname' argument to args list for each suite
// specified for task. Adds '-m packagename' for each
// membersonly element specified, and '-w packagename' for
// each wildcard element specified.
//
private def addSuiteArgs(args: ListBuffer[String]) {
for (suite <- suites) {
if (suite == null)
throw new BuildException(
"missing classname attribute for <suite> element")
args += "-s"
args += suite
}
for (packageName <- membersonlys) {
if (packageName == null)
throw new BuildException(
"missing package attribute for <membersonly> element")
args += "-m"
args += packageName
}
for (packageName <- wildcards) {
if (packageName == null)
throw new BuildException(
"missing package attribute for <wildcard> element")
args += "-w"
args += packageName
}
}
//
// Adds appropriate reporter options to args list for each
// nested reporter element specified for task. Defaults to
// stdout if no reporter specified.
//
private def addReporterArgs(args: ListBuffer[String]) {
if (reporters.size == 0)
args += "-o"
for (reporter <- reporters) {
reporter.getType match {
case "stdout" => addReporterOption(args, reporter, "-o")
case "stderr" => addReporterOption(args, reporter, "-e")
case "graphic" => addReporterOption(args, reporter, "-g")
case "file" => addFileReporter(args, reporter)
case "xml" => addXmlReporter(args, reporter)
case "junitxml" => addJunitXmlReporter(args, reporter)
case "dashboard" => addDashboardReporter(args, reporter)
case "html" => addHtmlReporter(args, reporter)
case "reporterclass" => addReporterClass(args, reporter)
case t =>
throw new BuildException("unexpected reporter type [" + t + "]")
}
}
}
//
// Adds specified option to args for reporter. Appends reporter
// config string to option if specified, e.g. "-eFAB".
//
private def addReporterOption(args: ListBuffer[String],
reporter: ReporterElement,
option: String)
{
val config = reporter.getConfig
if (config == null) args += option
else args += option + config
}
//
// Adds '-f' file reporter option to args. Appends reporter
// config string to option if specified. Adds reporter's
// filename as additional argument, e.g. "-fFAB", "filename".
//
private def addFileReporter(args: ListBuffer[String],
reporter: ReporterElement)
{
addReporterOption(args, reporter, "-f")
if (reporter.getFilename == null)
throw new BuildException(
"reporter type 'file' requires 'filename' attribute")
args += reporter.getFilename
}
//
// Adds '-x' xml reporter option to args. Adds reporter's
// directory as additional argument, e.g. "-x", "directory".
// [disabled for now]
//
private def addXmlReporter(args: ListBuffer[String],
reporter: ReporterElement)
{
addReporterOption(args, reporter, "-x")
if (reporter.getDirectory == null)
throw new BuildException(
"reporter type 'xml' requires 'directory' attribute")
args += reporter.getDirectory
}
//
// Adds '-u' junit xml reporter option to args. Adds reporter's
// directory as additional argument, e.g. "-u", "directory".
//
private def addJunitXmlReporter(args: ListBuffer[String],
reporter: ReporterElement)
{
addReporterOption(args, reporter, "-u")
if (reporter.getDirectory == null)
throw new BuildException(
"reporter type 'junitxml' requires 'directory' attribute")
args += reporter.getDirectory
}
//
// Adds '-d' Dashboard reporter option to args. Adds reporter's
// directory as additional argument, e.g. "-d", "directory".
//
private def addDashboardReporter(args: ListBuffer[String],
reporter: ReporterElement)
{
addReporterOption(args, reporter, "-d")
if (reporter.getDirectory == null)
throw new BuildException(
"reporter type 'dashboard' requires 'directory' attribute")
args += reporter.getDirectory
if (reporter.getNumfiles >= 0) {
args += "-a"
args += reporter.getNumfiles.toString
}
}
//
// Adds '-h' html reporter option to args. Appends reporter
// config string to option if specified. Adds reporter's
// filename as additional argument, e.g. "-hFAB", "filename".
//
private def addHtmlReporter(args: ListBuffer[String],
reporter: ReporterElement)
{
addReporterOption(args, reporter, "-h")
if (reporter.getFilename == null)
throw new BuildException(
"reporter type 'html' requires 'filename' attribute")
args += reporter.getFilename
}
//
// Adds '-r' reporter class option to args. Appends
// reporter config string to option if specified. Adds
// reporter's classname as additional argument, e.g. "-rFAB",
// "my.ReporterClass".
//
private def addReporterClass(args: ListBuffer[String],
reporter: ReporterElement)
{
addReporterOption(args, reporter, "-r")
if (reporter.getClassName == null)
throw new BuildException(
"reporter type 'reporterclass' requires 'classname' attribute")
args += reporter.getClassName
}
/**
* Sets value of the <code>runpath</code> attribute.
*/
def setRunpath(runpath: Path) {
for (element <- runpath.list) {
this.runpath += element
}
}
/**
* Sets value of the <code>haltonfailure</code> attribute.
*/
def setHaltonfailure(haltonfailure: Boolean) {
this.haltonfailure = haltonfailure
}
/**
* Sets value of the <code>fork</code> attribute.
*/
def setFork(fork: Boolean) {
this.fork = fork
}
/**
* Sets value of the <code>suffixes</code> attribute.
*/
def setSuffixes(suffixes: String) {
this.suffixes = suffixes
}
/**
* Sets value of the <code>maxmemory</code> attribute.
*/
def setMaxmemory(max: String) {
this.maxMemory = max
}
/**
* Sets value of the <code>testngsuites</code> attribute.
*/
def setTestNGSuites(testNGSuitePath: Path) {
for (element <- testNGSuitePath.list)
this.testNGSuites += element
}
/**
* Sets value of the <code>concurrent</code> attribute.
* <b>Note: The <code>concurrent</code> attribute has been deprecated and will be removed in a future version of ScalaTest.
* Please use the <code>parallel</code> attribute instead.</b>
*/
@deprecated("Please use parallel instead")
def setConcurrent(concurrent: Boolean) {
Console.err.println("WARNING: 'concurrent' attribute is deprecated " +
"- please use 'parallel' instead")
this.parallel = concurrent
}
/**
* Sets value of the <code>numthreads</code> attribute.
*/
def setNumthreads(numthreads: Int) {
this.numthreads = numthreads
}
/**
* Sets value of the <code>parallel</code> attribute.
*/
def setParallel(parallel: Boolean) {
this.parallel = parallel
}
/**
* Sets value from nested element <code>runpath</code>.
*/
def addConfiguredRunpath(runpath: Path) {
for (element <- runpath.list)
this.runpath += element
}
/**
* Sets value from nested element <code>testngsuites</code>.
*/
def addConfiguredTestNGSuites(testNGSuitePath: Path) {
for (element <- testNGSuitePath.list)
this.testNGSuites += element
}
/**
* Sets value from nested element <code>runpathurl</code>.
*/
def addConfiguredRunpathUrl(runpathurl: RunpathUrl) {
runpath += runpathurl.getUrl
}
/**
* Sets value from nested element <code>jvmarg</code>.
*/
def addConfiguredJvmArg(arg: JvmArg) {
jvmArgs += arg.getValue
}
/**
* Sets values from nested element <code>property</code>.
* <b>The <code>property</code> attribute has been deprecated and will be removed in a future version of ScalaTest.
* Please use the <code>config</code> attribute instead.</b>
*/
@deprecated("Please use config instead")
def addConfiguredProperty(property: NameValuePair) {
Console.err.println("WARNING: <property> is deprecated - " +
"please use <config> instead [name: " +
property.getName + "]")
properties += property
}
/**
* Sets values from nested element <code>config</code>.
*/
def addConfiguredConfig(config: NameValuePair) {
properties += config
}
/**
* Sets value of <code>suite</code> attribute.
*/
def setSuite(suite: String) {
suites += suite
}
/**
* Sets value of <code>membersonly</code> attribute.
*/
def setMembersonly(packageName: String) {
membersonlys += packageName
}
/**
* Sets value of <code>wildcard</code> attribute.
*/
def setWildcard(packageName: String) {
wildcards += packageName
}
/**
* Sets value from nested element <code>suite</code>.
*/
def addConfiguredSuite(suite: SuiteElement) {
suites += suite.getClassName
}
/**
* Sets value from nested element <code>membersonly</code>.
*/
def addConfiguredMembersOnly(membersonly: PackageElement) {
membersonlys += membersonly.getPackage
}
/**
* Sets value from nested element <code>wildcard</code>.
*/
def addConfiguredWildcard(wildcard: PackageElement) {
wildcards += wildcard.getPackage
}
/**
* Sets value from nested element <code>reporter</code>.
*/
def addConfiguredReporter(reporter: ReporterElement) {
reporters += reporter
}
/**
* Sets value from nested element <code>tagsToInclude</code>.
*/
def addConfiguredTagsToInclude(tagsToInclude: TextElement) {
this.includes = tagsToInclude.getText
}
/**
* Sets value from nested element <code>includes</code>.
* <b>The <code>includes</code> attribute has been deprecated and will be removed in a future version of ScalaTest.
* Please use the <code>tagsToInclude</code> attribute instead.</b>
*/
@deprecated("Please use tagsToInclude instead")
def addConfiguredIncludes(includes: TextElement) {
Console.err.println("WARNING: 'includes' is deprecated - " +
"use 'tagsToInclude' instead [includes: " +
includes.getText + "]")
this.includes = includes.getText
}
/**
* Sets value from nested element <code>excludes</code>.
*/
def addConfiguredTagsToExclude(tagsToExclude: TextElement) {
this.excludes = tagsToExclude.getText
}
/**
* Sets value from nested element <code>excludes</code>.
* <b>The <code>excludes</code> attribute has been deprecated and will be removed in a future version of ScalaTest.
* Please use the <code>tagsToExclude</code> attribute instead.</b>
*/
@deprecated("Please use tagsToExclude instead")
def addConfiguredExcludes(excludes: TextElement) {
Console.err.println("WARNING: 'excludes' is deprecated - " +
"use 'tagsToExclude' instead [excludes: " +
excludes.getText + "]")
this.excludes = excludes.getText
}
//
// Translates a list of strings making up a path into a
// single space-delimited string. Uses backslashes to escape
// spaces within individual path elements, since that's what
// Runner's -p option expects.
//
private def getSpacedOutPathStr(path: List[String]): String = {
path.map(_.replaceAll(" ", """\\\\ """)).mkString("", " ", "")
}
//
// Translates a whitespace-delimited string into a
// whitespace-delimited string, but not the same whitespace. Trims
// off leading and trailing whitespace and converts inter-element
// whitespace to a single space.
//
private def singleSpace(str: String): String = {
str.trim.replaceAll("\\\\s+", " ")
}
}
//
// Class to hold data from <membersonly> and <wildcard> elements.
//
private class PackageElement {
private var packageName: String = null
def setPackage(packageName: String) {
this.packageName = packageName
}
def getPackage = packageName
}
//
// Class to hold data from <suite> elements.
//
private class SuiteElement {
private var className: String = null
def setClassName(className: String) {
this.className = className
}
def getClassName = className
}
//
// Class to hold data from <includes> and <excludes> elements.
//
private class TextElement {
private var text: String = null
def addText(text: String) {
this.text = text
}
def getText = text
}
//
// Class to hold data from <property> elements.
//
private class NameValuePair {
private var name : String = null
private var value : String = null
def setName(name : String) { this.name = name }
def setValue(value : String) { this.value = value }
def getName = name
def getValue = value
}
//
// Class to hold data from <runpathurl> elements.
//
private class RunpathUrl {
private var url: String = null
def setUrl(url: String) { this.url = url }
def getUrl = url
}
//
// Class to hold data from <jvmarg> elements.
//
private class JvmArg {
private var value: String = null
def setValue(value: String) { this.value = value }
def getValue = value
}
//
// Class to hold data from <reporter> elements.
//
private class ReporterElement {
private var rtype : String = null
private var config : String = null
private var filename : String = null
private var directory : String = null
private var classname : String = null
private var numfiles : Int = -1
def setType(rtype : String) { this.rtype = rtype }
def setConfig(config : String) { this.config = config }
def setFilename(filename : String) { this.filename = filename }
def setDirectory(directory : String) { this.directory = directory }
def setClassName(classname : String) { this.classname = classname }
def setNumfiles(numfiles : Int) { this.numfiles = numfiles }
def getType = rtype
def getConfig = config
def getFilename = filename
def getDirectory = directory
def getClassName = classname
def getNumfiles = numfiles
}
| epishkin/scalatest-google-code | src/main/scala/org/scalatest/tools/ScalaTestAntTask.scala | Scala | apache-2.0 | 25,695 |