code | repo_name | path | language | license | size
---|---|---|---|---|---|
package com.doanduyhai.elevator
import java.io.File
trait FileContentReader {
def readContentFromFile(filename: String):String = {
val classLoader = getClass().getClassLoader()
val file = new File(classLoader.getResource(filename).getFile())
val source = scala.io.Source.fromFile(file.getAbsolutePath)
val lines = try source.getLines mkString "\n" finally source.close()
lines
}
}
| doanduyhai/elevator-control-system | src/test/scala/com/doanduyhai/elevator/FileContentReader.scala | Scala | apache-2.0 | 409 |
package frameless
package ml
import org.scalacheck.Prop._
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.regression.DecisionTreeRegressor
import Generators._
import scala.util.Random
class TypedEncoderInstancesTests extends FramelessMlSuite {
test("Vector encoding is injective using collect()") {
val prop = forAll { vector: Vector =>
TypedDataset.create(Seq(vector)).collect().run() == Seq(vector)
}
check(prop)
}
test("Matrix encoding is injective using collect()") {
val prop = forAll { matrix: Matrix =>
TypedDataset.create(Seq(matrix)).collect().run() == Seq(matrix)
}
check(prop)
}
test("Vector is encoded as VectorUDT and thus can be run in a Spark ML model") {
case class Input(features: Vector, label: Double)
val prop = forAll { trainingData: Matrix =>
(trainingData.numRows >= 1) ==> {
val inputs = trainingData.rowIter.toVector.map(vector => Input(vector, 0D))
val inputsDS = TypedDataset.create(inputs)
val model = new DecisionTreeRegressor()
// this line would throw a runtime exception if Vector was not encoded as VectorUDT
val trainedModel = model.fit(inputsDS.dataset)
val randomInput = inputs(Random.nextInt(inputs.length))
val randomInputDS = TypedDataset.create(Seq(randomInput))
val prediction = trainedModel.transform(randomInputDS.dataset)
.select("prediction")
.head
.getAs[Double](0)
prediction == 0D
}
}
check(prop, MinSize(1))
}
}
| adelbertc/frameless | ml/src/test/scala/frameless/ml/TypedEncoderInstancesTests.scala | Scala | apache-2.0 | 1,568 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.functions
import org.apache.flink.api.common.functions.InvalidTypesException
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.TypeExtractor
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.expressions.{Expression, ScalarFunctionCall}
/**
* Base class for a user-defined scalar function. A user-defined scalar function maps zero, one,
* or multiple scalar values to a new scalar value.
*
* The behavior of a [[ScalarFunction]] can be defined by implementing a custom evaluation
* method. An evaluation method must be declared publicly and named "eval". Evaluation methods
* can also be overloaded by implementing multiple methods named "eval".
*
* User-defined functions must have a default constructor and must be instantiable during runtime.
*
* By default the result type of an evaluation method is determined by Flink's type extraction
* facilities. This is sufficient for basic types or simple POJOs but might be wrong for more
* complex, custom, or composite types. In these cases [[TypeInformation]] of the result type
* can be manually defined by overriding [[getResultType()]].
*
* Internally, the Table/SQL API code generation works with primitive values as much as possible.
* If a user-defined scalar function should not introduce much overhead during runtime, it is
* recommended to declare parameters and result types as primitive types instead of their boxed
* classes. DATE/TIME is equal to int, TIMESTAMP is equal to long.
*/
abstract class ScalarFunction extends UserDefinedFunction {
/**
* Creates a call to a [[ScalarFunction]] in Scala Table API.
*
* @param params actual parameters of function
* @return [[Expression]] in form of a [[ScalarFunctionCall]]
*/
final def apply(params: Expression*): Expression = {
ScalarFunctionCall(this, params)
}
override def toString: String = getClass.getCanonicalName
// ----------------------------------------------------------------------------------------------
/**
* Returns the result type of the evaluation method with a given signature.
*
* This method needs to be overridden in case Flink's type extraction facilities are not
* sufficient to extract the [[TypeInformation]] based on the return type of the evaluation
* method. Flink's type extraction facilities can handle basic types or
* simple POJOs but might be wrong for more complex, custom, or composite types.
*
* @param signature signature of the method for which the return type needs to be determined
* @return [[TypeInformation]] of result type or null if Flink should determine the type
*/
def getResultType(signature: Array[Class[_]]): TypeInformation[_] = null
/**
* Returns [[TypeInformation]] about the operands of the evaluation method with a given
* signature.
*
* In order to perform operand type inference in SQL (especially when NULL is used) it might be
* necessary to determine the parameter [[TypeInformation]] of an evaluation method.
* By default Flink's type extraction facilities are used for this but might be wrong for
* more complex, custom, or composite types.
*
* @param signature signature of the method for which the operand types need to be determined
* @return [[TypeInformation]] of operand types
*/
def getParameterTypes(signature: Array[Class[_]]): Array[TypeInformation[_]] = {
signature.map { c =>
try {
TypeExtractor.getForClass(c)
} catch {
case ite: InvalidTypesException =>
throw new ValidationException(
s"Parameter types of scalar function '${this.getClass.getCanonicalName}' cannot be " +
s"automatically determined. Please provide type information manually.")
}
}
}
}
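// Illustrative sketch, not part of the original Flink source: a minimal user-defined scalar
// function following the evaluation-method contract described in the class comment above.
// The class name "HashCodeFunction" and its behavior are hypothetical; in Scala, "eval"
// methods are public by default and may be overloaded.
class HashCodeFunction extends ScalarFunction {
  def eval(s: String): Int = if (s == null) 0 else s.hashCode
  def eval(s: String, factor: Int): Int = eval(s) * factor
}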
| WangTaoTheTonic/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala | Scala | apache-2.0 | 4,692 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.commons.datetime
import java.sql.Timestamp
import org.joda.time.format.{DateTimeFormatter, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}
trait DateTimeConverter {
val zone: DateTimeZone = DateTimeZone.getDefault
val dateTimeFormatter: DateTimeFormatter = ISODateTimeFormat.dateTime()
def toString(dateTime: DateTime): String = dateTime.toString(dateTimeFormatter)
def parseDateTime(s: String): DateTime = dateTimeFormatter.parseDateTime(s).withZone(zone)
def parseTimestamp(s: String): Timestamp = new Timestamp(parseDateTime(s).getMillis)
def now: DateTime = new DateTime(zone)
def fromMillis(millis: Long): DateTime = new DateTime(zone).withMillis(millis)
def dateTime(
year: Int,
monthOfyear: Int,
dayOfMonth: Int,
hourOfDay: Int = 0,
minutesOfHour: Int = 0,
secondsOfMinute: Int = 0): DateTime =
new DateTime(year, monthOfyear, dayOfMonth, hourOfDay, minutesOfHour, secondsOfMinute, zone)
def dateTimeFromUTC(
year: Int,
monthOfyear: Int,
dayOfMonth: Int,
hourOfDay: Int = 0,
minutesOfHour: Int = 0,
secondsOfMinute: Int = 0): DateTime =
new DateTime(
year,
monthOfyear,
dayOfMonth,
hourOfDay,
minutesOfHour,
secondsOfMinute,
DateTimeZone.UTC).withZone(DateTimeConverter.zone)
}
object DateTimeConverter extends DateTimeConverter
| deepsense-io/seahorse-workflow-executor | commons/src/main/scala/io/deepsense/commons/datetime/DateTimeConverter.scala | Scala | apache-2.0 | 2,017 |
package com.greencatsoft.d3.event
import scala.scalajs.js
import scala.scalajs.js.Any.jsArrayOps
import scala.scalajs.js.UndefOr
import scala.scalajs.js.UndefOr.undefOr2ops
import org.scalajs.dom.{ Event, Node }
import org.scalajs.dom.ext.Castable
import com.greencatsoft.d3.common.Point
import com.greencatsoft.d3.selection.Selection
@js.native
trait EventChain[A <: Node, B <: Selection[A, B]] extends js.Object {
def event: UndefOr[D3Event[_]] = js.native
def mouse(container: Node): UndefOr[js.Array[Double]] = js.native
def touch(container: Node): UndefOr[js.Array[js.Array[Double]]] = js.native
def touch(container: Node, identifier: String): UndefOr[js.Array[js.Array[Double]]] = js.native
def touches(container: Node): UndefOr[js.Array[js.Array[Double]]] = js.native
def behavior: BehaviorFactory[A, B] = js.native
}
object EventChain {
import com.greencatsoft.d3.GlobalDefinitions.d3
def event[A <: Event]: Option[A] = d3.event.toOption.map(_.cast[A])
def sourceEvent[A <: Event]: Option[A] = event[D3Event[A]].flatMap(_.sourceEvent.toOption)
def mouse(container: Node): Option[Point] =
d3.mouse(container).toOption.map(point => Point(point(0), point(1)))
def touch(container: Node): Seq[Point] =
d3.touch(container).toOption.toSeq.flatten.map(p => Point(p(0), p(1)))
def touch(container: Node, identifier: String): Seq[Point] =
d3.touch(container, identifier).toOption.toSeq.flatten.map(p => Point(p(0), p(1)))
def touches(container: Node): Seq[Point] =
d3.touches(container).toOption.toSeq.flatten.map(p => Point(p(0), p(1)))
}
| greencatsoft/scalajs-d3 | src/main/scala/com/greencatsoft/d3/event/EventChain.scala | Scala | apache-2.0 | 1,598 |
/*
* Copyright 2014 nidkil
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nidkil.downloader.splitter
import java.io.File
import scala.collection.mutable.LinkedHashSet
import com.nidkil.downloader.datatypes.Chunk
import com.nidkil.downloader.datatypes.RemoteFileInfo
/**
* A splitter is provided with a strategy that determines how a file is split into chunks.
*
* The following strategy is provided (an illustrative alternative is sketched after the
* companion object below):
* - defaultStrategy: creates chunks of 5 MB each.
*/
object Splitter {
val CHUNK_FILE_EXT = ".chunk"
def defaultStrategy(fileSize: Long): Int = 1024 * 1024 * 5 // 5 MB
}
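// Illustrative sketch, not part of the original source: an alternative strategy function with
// the same (Long) => Int shape as defaultStrategy, so it can be passed as the `strategy`
// argument of `split`. The object and method names are hypothetical.
object ExampleStrategies {
  // Create 10 MB chunks regardless of the file size.
  def tenMegabyteStrategy(fileSize: Long): Int = 1024 * 1024 * 10
}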
trait Splitter {
import Splitter._
def split(r: RemoteFileInfo, append: Boolean, workDir: File, strategy: (Long) => Int = defaultStrategy): LinkedHashSet[Chunk]
}
| nidkil/scala-downloader | src/main/scala/com/nidkil/downloader/splitter/Splitter.scala | Scala | apache-2.0 | 1,262 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.logical.rel
import java.util
import org.apache.calcite.plan.{Convention, RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.core.{Aggregate, AggregateCall}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.util.ImmutableBitSet
/**
* Logical Node for TableAggregate.
*/
class LogicalTableAggregate(
cluster: RelOptCluster,
traitSet: RelTraitSet,
input: RelNode,
indicator: Boolean,
groupSet: ImmutableBitSet,
groupSets: util.List[ImmutableBitSet],
aggCalls: util.List[AggregateCall])
extends TableAggregate(cluster, traitSet, input, indicator, groupSet, groupSets, aggCalls) {
override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): TableAggregate = {
new LogicalTableAggregate(
cluster,
traitSet,
inputs.get(0),
indicator,
groupSet,
groupSets,
aggCalls
)
}
}
object LogicalTableAggregate {
def create(aggregate: Aggregate): LogicalTableAggregate = {
new LogicalTableAggregate(
aggregate.getCluster,
aggregate.getCluster.traitSetOf(Convention.NONE),
aggregate.getInput,
aggregate.indicator,
aggregate.getGroupSet,
aggregate.getGroupSets,
aggregate.getAggCallList)
}
}
| GJL/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/logical/rel/LogicalTableAggregate.scala | Scala | apache-2.0 | 2,076 |
/*
* Copyright 2014 – 2015 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.redis.pipeline
import rx.redis.resp.RespType
import rx.Observer
import io.netty.channel.ChannelInitializer
import io.netty.channel.socket.SocketChannel
import java.util
private[redis] class RxChannelInitializer(optimizeForThroughput: Boolean) extends ChannelInitializer[SocketChannel] {
def initChannel(ch: SocketChannel): Unit = {
if (optimizeForThroughput) {
ch.config().setPerformancePreferences(0, 1, 3)
ch.config().setTcpNoDelay(false)
} else {
ch.config().setPerformancePreferences(0, 3, 1)
ch.config().setTcpNoDelay(true)
}
val queue = new util.LinkedList[Observer[RespType]]()
ch.pipeline().
addLast("resp-codec", new RespCodec).
addLast("rx-adapter", new RxAdapter(queue)).
addLast("rx-closer", new RxCloser(queue))
}
}
| knutwalker/rx-redis | modules/client/src/main/scala/rx/redis/pipeline/RxChannelInitializer.scala | Scala | apache-2.0 | 1,416 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package extraction
package termination
trait Trees extends extraction.Trees { self =>
case object Induct extends Flag("induct", Seq())
override def extractFlag(name: String, args: Seq[Expr]): Flag = (name, args) match {
case ("induct", Seq()) => Induct
case _ => super.extractFlag(name, args)
}
override def getDeconstructor(
that: inox.ast.Trees
): inox.ast.TreeDeconstructor { val s: self.type; val t: that.type } = that match {
case tree: (Trees & that.type) => // The `& that.type` trick allows to convince scala that `tree` and `that` are actually equal...
class DeconstructorImpl(override val s: self.type, override val t: tree.type & that.type) extends ConcreteTreeDeconstructor(s, t)
new DeconstructorImpl(self, tree)
case _ => super.getDeconstructor(that)
}
}
trait Printer extends extraction.Printer {
protected val trees: Trees
}
trait TreeDeconstructor extends extraction.TreeDeconstructor {
protected val s: Trees
protected val t: Trees
override def deconstruct(f: s.Flag): DeconstructedFlag = f match {
case s.Induct => (Seq(), Seq(), Seq(), (_, _, _) => t.Induct)
case _ => super.deconstruct(f)
}
}
class ConcreteTreeDeconstructor(override val s: Trees, override val t: Trees) extends TreeDeconstructor
| epfl-lara/stainless | core/src/main/scala/stainless/extraction/termination/Trees.scala | Scala | apache-2.0 | 1,350 |
package com.arcusys.valamis.certificate.storage
import com.arcusys.valamis.certificate.model.goal.CourseGoal
import com.arcusys.valamis.model.PeriodTypes
/**
* Created by mminin on 04.03.15.
*/
trait CourseGoalStorage {
def create(certificateId: Long,
courseId: Long,
periodValue: Int,
periodType: PeriodTypes.Value,
arrangementIndex: Int,
isOptional: Boolean = false,
groupId: Option[Long] = None): CourseGoal
def get(certificateId: Long, courseId: Long): Option[CourseGoal]
def getBy(goalId: Long): Option[CourseGoal]
def getByCertificateId(certificateId: Long): Seq[CourseGoal]
}
| igor-borisov/valamis | valamis-certificate/src/main/scala/com/arcusys/valamis/certificate/storage/CourseGoalStorage.scala | Scala | gpl-3.0 | 675 |
package org.jetbrains.plugins.scala
package annotator
import com.intellij.codeInspection.ProblemHighlightType
import com.intellij.lang.annotation.HighlightSeverity
import com.intellij.psi.impl.source.tree.LeafPsiElement
import com.intellij.psi.{PsiElement, PsiErrorElement, PsiFile, PsiNamedElement}
import org.jetbrains.plugins.scala.annotator.template.kindOf
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.highlighter.DefaultHighlighter
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReference
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScCaseClause
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDeclaration
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScModifierListOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.types.api.FunctionType
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.ScMethodType
import org.jetbrains.plugins.scala.lang.psi.types.result.TypeResult
import org.jetbrains.plugins.scala.lang.psi.types.{AmbiguousImplicitParameters, ApplicabilityProblem, DefaultTypeParameterMismatch, DoesNotTakeParameters, DoesNotTakeTypeParameters, ExcessArgument, ExcessTypeArgument, ExpansionForNonRepeatedParameter, ExpectedTypeMismatch, IncompleteCallSyntax, InternalApplicabilityProblem, MalformedDefinition, MissedParametersClause, MissedTypeParameter, MissedValueParameter, NotFoundImplicitParameter, ParameterSpecifiedMultipleTimes, PositionalAfterNamedArgument, ScType, ScTypeExt, TypeMismatch, UnresolvedParameter, WrongTypeParameterInferred}
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
/**
* @author Aleksander Podkhalyuzin
* Date: 25.03.2009
*/
// TODO move to org.jetbrains.plugins.scala.lang.psi.annotator
object AnnotatorUtils {
def checkConformance(expression: ScExpression, typeElement: ScTypeElement)
(implicit holder: ScalaAnnotationHolder): Unit = {
implicit val ctx: ProjectContext = expression
if (ScMethodType.hasMethodType(expression)) {
return
}
expression.getTypeAfterImplicitConversion().tr.foreach {actual =>
val expected = typeElement.calcType
if (!actual.conforms(expected) && !shouldIgnoreTypeMismatchIn(expression)) {
TypeMismatchError.register(expression, expected, actual, blockLevel = 1) { (expected, actual) =>
ScalaBundle.message("type.mismatch.found.required", actual, expected)
}
}
}
}
def shouldIgnoreTypeMismatchIn(e: PsiElement, fromFunctionLiteral: Boolean = false): Boolean = {
// Don't show a type mismatch error when there's a parser error, SCL-16899, SCL-17206
def hasParserErrors = e.elements.exists(_.isInstanceOf[PsiErrorElement]) ||
e.getPrevSibling.isInstanceOf[PsiErrorElement] ||
e.getNextSibling.isInstanceOf[PsiErrorElement] ||
e.getNextSibling.isInstanceOf[LeafPsiElement] && e.getNextSibling.textMatches(".") && e.getNextSibling.getNextSibling.isInstanceOf[PsiErrorElement] ||
e.parent.exists { parent =>
e == parent.getFirstChild && parent.getPrevSibling.isInstanceOf[PsiErrorElement] ||
e == parent.getLastChild && parent.getNextSibling.isInstanceOf[PsiErrorElement]
}
// Most often it's an incomplete if-then-else, SCL-18862
def isIfThen: Boolean = e match {
case it: ScIf => it.elseExpression.isEmpty
case _ => false
}
// Most often it's an incomplete case clause, SCL-19447
def isEmptyCaseClause: Boolean = e match {
case block: ScBlock if block.getParent.is[ScCaseClause] => block.exprs.isEmpty
case _ => false
}
// Don't show type mismatch for a whole function literal when result type doesn't match, SCL-16901
def isFunctionLiteral = e match {
case _: ScFunctionExpr | ScBlock(_: ScFunctionExpr) => true
case _ => false
}
def isResultOfFunctionLiteral = e match {
case Parent(_: ScFunctionExpr) => true
case Parent(Parent((_: ScFunctionExpr) && Parent(_: ScBlockExpr))) => true
case _ => false
}
def hasUnresolvedReferences = e.elements.exists(_.asOptionOf[ScReference].exists(_.multiResolveScala(false).isEmpty))
hasParserErrors ||
isIfThen ||
isEmptyCaseClause ||
hasUnresolvedReferences ||
!fromFunctionLiteral && (isFunctionLiteral || isResultOfFunctionLiteral)
}
//fix for SCL-7176
def checkAbstractMemberPrivateModifier(element: PsiElement, toHighlight: Seq[PsiElement])
(implicit holder: ScalaAnnotationHolder): Unit = {
element match {
case fun: ScFunctionDeclaration if fun.isNative =>
case modOwner: ScModifierListOwner =>
modOwner.getModifierList.accessModifier match {
case Some(am) if am.isUnqualifiedPrivateOrThis =>
for (e <- toHighlight) {
holder.createErrorAnnotation(e, ScalaBundle.message("abstract.member.not.have.private.modifier"), ProblemHighlightType.GENERIC_ERROR)
}
case _ =>
}
case _ =>
}
}
def registerTypeMismatchError(actualType: ScType, expectedType: ScType,
expression: ScExpression)
(implicit holder: ScalaAnnotationHolder): Unit = {
// See comments in ScMethodType.hasMethodType
// The workaround is nice but there is a situation where we want to show the mismatch error with function types:
// => namely if a function type is expected
if (!FunctionType.isFunctionType(expectedType) && ScMethodType.hasMethodType(expression)) {
return
}
//TODO show parameter name
if (!actualType.conforms(expectedType) && !shouldIgnoreTypeMismatchIn(expression)) {
TypeMismatchError.register(expression, expectedType, actualType) { (expected, actual) =>
ScalaBundle.message("type.mismatch.expected.actual", expected, actual)
}
}
}
/**
* This method will return the checked conformance if it's possible to check it.
* Otherwise it will return true to avoid red code.
* Checks conformance in the case l = r.
*/
def smartCheckConformance(l: TypeResult, r: TypeResult): Boolean = {
val leftType = l match {
case Right(res) => res
case _ => return true
}
val rightType = r match {
case Right(res) => res
case _ => return true
}
rightType.conforms(leftType)
}
// TODO encapsulate
def highlightImplicitView(expr: ScExpression, fun: PsiNamedElement, typeTo: ScType,
elementToHighlight: PsiElement)
(implicit holder: ScalaAnnotationHolder): Unit = {
if (ScalaProjectSettings.getInstance(elementToHighlight.getProject).isShowImplicitConversions) {
holder.newSilentAnnotation(HighlightSeverity.INFORMATION)
.range(elementToHighlight.getTextRange)
.textAttributes(DefaultHighlighter.IMPLICIT_CONVERSIONS)
.create()
}
}
// TODO something more reliable
object ErrorAnnotationMessage {
def unapply(definition: ScTypeDefinition): Option[NlsString] =
Option.when(definition.isSealed)(
ScalaBundle.nls(
"illegal.inheritance.from.sealed.kind",
kindOf(definition, toLowerCase = true),
definition.name
)
)
}
def inSameFile(elem: PsiElement, file: PsiFile): Boolean = {
elem != null && elem.getContainingFile.getViewProvider.getVirtualFile == file.getViewProvider.getVirtualFile
}
// Some properties cannot be shown because, for example, they are synthetic.
// Filter these out.
def withoutNonHighlightables(
problems: Seq[ApplicabilityProblem],
currentFile: PsiFile
): Seq[ApplicabilityProblem] = problems.filter {
case PositionalAfterNamedArgument(argument) => inSameFile(argument, currentFile)
case ParameterSpecifiedMultipleTimes(assignment) => inSameFile(assignment, currentFile)
case UnresolvedParameter(assignment) => inSameFile(assignment, currentFile)
case ExpansionForNonRepeatedParameter(argument) => inSameFile(argument, currentFile)
case ExcessArgument(argument) => inSameFile(argument, currentFile)
case MissedParametersClause(clause) => inSameFile(clause, currentFile)
case TypeMismatch(expression, _) => inSameFile(expression, currentFile)
case ExcessTypeArgument(argument) => inSameFile(argument, currentFile)
case MalformedDefinition(_) => true
case DoesNotTakeParameters => true
case MissedValueParameter(_) => true
case DefaultTypeParameterMismatch(_, _) => true
case WrongTypeParameterInferred => true
case DoesNotTakeTypeParameters => true
case MissedTypeParameter(_) => true
case ExpectedTypeMismatch => true
case NotFoundImplicitParameter(_) => true
case AmbiguousImplicitParameters(_) => true
case IncompleteCallSyntax(_) => true
case InternalApplicabilityProblem(_) => true
}
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/AnnotatorUtils.scala | Scala | apache-2.0 | 9,473 |
package fpinscala.exercises.monoids
import fpinscala.exercises.parallelism.Nonblocking.*
import language.higherKinds
trait Monoid[A]:
def combine(a1: A, a2: A): A
def empty: A
object Monoid:
val stringMonoid: Monoid[String] = new:
def combine(a1: String, a2: String) = a1 + a2
val empty = ""
def listMonoid[A]: Monoid[List[A]] = new:
def combine(a1: List[A], a2: List[A]) = a1 ++ a2
val empty = Nil
lazy val intAddition: Monoid[Int] = ???
lazy val intMultiplication: Monoid[Int] = ???
lazy val booleanOr: Monoid[Boolean] = ???
lazy val booleanAnd: Monoid[Boolean] = ???
def optionMonoid[A]: Monoid[Option[A]] = ???
def dual[A](m: Monoid[A]): Monoid[A] = new:
def combine(x: A, y: A): A = m.combine(y, x)
val empty = m.empty
def endoMonoid[A]: Monoid[A => A] = ???
import fpinscala.exercises.testing.{Prop, Gen}
// import Gen.`**`
def monoidLaws[A](m: Monoid[A], gen: Gen[A]): Prop = ???
def combineAll[A](as: List[A], m: Monoid[A]): A =
???
def foldMap[A, B](as: List[A], m: Monoid[B])(f: A => B): B =
???
def foldRight[A, B](as: List[A])(acc: B)(f: (A, B) => B): B =
???
def foldLeft[A, B](as: List[A])(acc: B)(f: (B, A) => B): B =
???
def foldMapV[A, B](as: IndexedSeq[A], m: Monoid[B])(f: A => B): B =
???
def par[A](m: Monoid[A]): Monoid[Par[A]] =
???
def parFoldMap[A,B](v: IndexedSeq[A], m: Monoid[B])(f: A => B): Par[B] =
???
def ordered(ints: IndexedSeq[Int]): Boolean =
???
enum WC:
case Stub(chars: String)
case Part(lStub: String, words: Int, rStub: String)
lazy val wcMonoid: Monoid[WC] = ???
def count(s: String): Int = ???
given productMonoid[A, B](using ma: Monoid[A], mb: Monoid[B]): Monoid[(A, B)] with
def combine(x: (A, B), y: (A, B)) = ???
val empty = ???
given functionMonoid[A, B](using mb: Monoid[B]): Monoid[A => B] with
def combine(f: A => B, g: A => B) = ???
val empty: A => B = a => ???
given mapMergeMonoid[K, V](using mv: Monoid[V]): Monoid[Map[K, V]] with
def combine(a: Map[K, V], b: Map[K, V]) = ???
val empty = ???
def bag[A](as: IndexedSeq[A]): Map[A, Int] =
???
end Monoid
| fpinscala/fpinscala | src/main/scala/fpinscala/exercises/monoids/Monoid.scala | Scala | mit | 2,190 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package operator.user
package join
import org.objectweb.asm.Type
import org.objectweb.asm.signature.SignatureVisitor
import com.asakusafw.lang.compiler.model.graph.UserOperator
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.spark.compiler.spi.{ OperatorCompiler, OperatorType }
import com.asakusafw.spark.runtime.fragment.user.join.BroadcastMasterJoinOperatorFragment
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm.MethodBuilder._
import com.asakusafw.vocabulary.operator.{ MasterJoin => MasterJoinOp }
class BroadcastMasterJoinOperatorCompiler extends UserOperatorCompiler {
override def support(
operator: UserOperator)(
implicit context: OperatorCompiler.Context): Boolean = {
operator.annotationDesc.resolveClass == classOf[MasterJoinOp]
}
override def operatorType: OperatorType = OperatorType.ExtractType
override def compile(
operator: UserOperator)(
implicit context: OperatorCompiler.Context): Type = {
assert(support(operator),
s"The operator type is not supported: ${operator.annotationDesc.resolveClass.getSimpleName}"
+ s" [${operator}]")
assert(operator.inputs.size >= 2,
"The size of inputs should be greater than or equals to 2: " +
s"${operator.inputs.size} [${operator}]")
assert(operator.outputs.size == 2,
s"The size of outputs should be 2: ${operator.outputs.size} [${operator}]")
assert(operator.outputs(MasterJoinOp.ID_OUTPUT_MISSED).dataModelType
== operator.inputs(MasterJoinOp.ID_INPUT_TRANSACTION).dataModelType,
s"The `missed` output type should be the same as the transaction type: ${
operator.outputs(MasterJoinOp.ID_OUTPUT_MISSED).dataModelType
} [${operator}]")
val builder = new BroadcastMasterJoinOperatorFragmentClassBuilder(operator)
context.addClass(builder)
}
}
private class BroadcastMasterJoinOperatorFragmentClassBuilder(
operator: UserOperator)(
implicit context: OperatorCompiler.Context)
extends JoinOperatorFragmentClassBuilder(
operator.inputs(MasterJoinOp.ID_INPUT_TRANSACTION).dataModelType,
operator,
operator.inputs(MasterJoinOp.ID_INPUT_MASTER),
operator.inputs(MasterJoinOp.ID_INPUT_TRANSACTION))(
Option(
new ClassSignatureBuilder()
.newSuperclass {
_.newClassType(classOf[BroadcastMasterJoinOperatorFragment[_, _, _]].asType) {
_.newTypeArgument(
SignatureVisitor.INSTANCEOF,
operator.inputs(MasterJoinOp.ID_INPUT_MASTER).dataModelType)
.newTypeArgument(
SignatureVisitor.INSTANCEOF,
operator.inputs(MasterJoinOp.ID_INPUT_TRANSACTION).dataModelType)
.newTypeArgument(
SignatureVisitor.INSTANCEOF,
operator.outputs(MasterJoinOp.ID_OUTPUT_JOINED).dataModelType)
}
}),
classOf[BroadcastMasterJoinOperatorFragment[_, _, _]].asType)
with BroadcastJoin
with MasterJoin {
override def defCtor()(implicit mb: MethodBuilder): Unit = {
val thisVar :: broadcastsVar :: fragmentVars = mb.argVars
thisVar.push().invokeInit(
superType,
fragmentVars(MasterJoinOp.ID_OUTPUT_MISSED).push(),
fragmentVars(MasterJoinOp.ID_OUTPUT_JOINED).push(),
pushNew0(joinedType).asType(classOf[DataModel[_]].asType))
}
}
| ueshin/asakusafw-spark | compiler/src/main/scala/com/asakusafw/spark/compiler/operator/user/join/BroadcastMasterJoinOperatorCompiler.scala | Scala | apache-2.0 | 4,017 |
/*
* Copyright (C) 2014 AyaIB Developers (http://github.com/fauu/AyaIB)
*
* This software is licensed under the GNU General Public License
* (version 3 or later). See the COPYING file in this distribution.
*
* You should have received a copy of the GNU Library General Public License
* along with this software. If not, see <http://www.gnu.org/licenses/>.
*
* Authored by: Piotr Grabowski <[email protected]>
*/
package context
import repositories._
import services._
object AyaIBContext {
val boardServiceComponent = new BoardServiceComponentImpl with BoardRepositoryComponentImpl
with ThreadRepositoryComponentImpl
with PostIdRepositoryComponentImpl
with QuotationRepositoryComponentImpl
with FileRepositoryComponentImpl
val boardService = boardServiceComponent.boardService
val fileServiceComponent = new FileServiceComponentImpl with FileRepositoryComponentImpl
val fileService = fileServiceComponent.fileService
val staffServiceComponent = new StaffServiceComponentImpl with StaffMemberRepositoryComponentImpl
val staffService = staffServiceComponent.staffService
}
| fauu/AyaIB | app/context/AyaIBContext.scala | Scala | gpl-3.0 | 1,342 |
package framian.csv
case class Input(offset: Long, data: String, isLast: Boolean, mark: Long) {
private def check(i: Long): Int = if ((i < offset) || (i > (offset + data.length))) {
throw new IndexOutOfBoundsException()
} else {
val j = i - offset
if (j <= Int.MaxValue) {
j.toInt
} else {
throw new IndexOutOfBoundsException()
}
}
def charAt(i: Long): Char = data.charAt(check(i))
def length: Long = offset + data.length
def substring(from: Long, until: Long): String =
data.substring(check(from), check(until))
def marked(pos: Long): Input =
Input(offset, data, isLast, pos)
private def trim: Input = if (mark > offset) {
val next = spire.math.min(mark - offset, data.length.toLong).toInt
val tail = data.substring(next)
val offset0 = offset + next
Input(offset0, tail, isLast, offset0)
} else this
def append(chunk: String, last: Boolean = false): Input =
if (mark > offset) trim.append(chunk, last)
else if (chunk.isEmpty) Input(offset, data, last, mark)
else Input(offset, data + chunk, last, mark)
def finished: Input = Input(offset, data, true, mark)
}
object Input {
def fromString(str: String): Input =
Input(0, str, true, 0)
def init(str: String): Input =
Input(0, str, false, 0)
}
| codeaudit/framian | framian/src/main/scala/framian/csv/Input.scala | Scala | apache-2.0 | 1,304 |
package Problems
object p32 extends Problem {
val numbers = for {
a <- 2 to 100
b <- 2 to (10000 / a)
p = a * b
s = f"$a$b$p"
if s.length == 9
if (1 to 9).mkString.forall(s.contains(_))
} yield p
override def solve(): Long = numbers.distinct.sum
}
| catap/scala-euler | src/main/scala/Problems/p32.scala | Scala | unlicense | 281 |
package templemore.onx.version5
/**
* @author Chris Turner
*/
trait Lines {
this: TokensWithPositions =>
def lines = rows.toList ::: columns.toList ::: List(leftToRightDiagonal) ::: List(rightToLeftDiagonal)
private[this] def rows = buildLines((row: Int, position: Position) => position match {
case Position(r, _) if r == row => true
case _ => false
})
private[this] def columns = buildLines((column: Int, position: Position) => position match {
case Position(_, c) if c == column => true
case _ => false
})
private[this] def leftToRightDiagonal = tokensWithPositions.filter(_._2 match {
case Position(0, 0) => true
case Position(1, 1) => true
case Position(2, 2) => true
case _ => false
})
private[this] def rightToLeftDiagonal = tokensWithPositions.filter(_._2 match {
case Position(0, 2) => true
case Position(1, 1) => true
case Position(2, 0) => true
case _ => false
})
private[this] def buildLines(func: (Int, Position) => Boolean) =
for(index <- 0 to 2) yield tokensWithPositions.filter(item => func(index, item._2))
}
| skipoleschris/OandX | src/main/scala/templemore/onx/version5/Lines.scala | Scala | apache-2.0 | 1,108 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
object Dependencies {
lazy val args4j = "args4j" % "args4j" % "2.0.29" // CLI
lazy val banana = "com.lucidworks" % "banana" % "1.5.1" artifacts(Artifact("banana", "war", "war"))
lazy val commonsValidator = "commons-validator" % "commons-validator" % "1.5.1"
lazy val httpClient = "org.apache.httpcomponents" % "httpclient" % "4.5.2"
lazy val gson = "com.google.code.gson" % "gson" % "2.8.7"
object Jackson {
private val group = "com.fasterxml.jackson.core"
private val version = "2.10.0"
lazy val core = group % "jackson-core" % version
lazy val databind = group % "jackson-databind" % version
}
lazy val jBrowserDriver = "com.machinepublishers" % "jbrowserdriver" % "0.16.4"
object Jetty {
private val group = "org.eclipse.jetty"
private val version = "9.4.0.v20161208"
lazy val server = group % "jetty-server" % version
lazy val servlet = group % "jetty-servlet" % version
}
lazy val jsonSimple = "com.googlecode.json-simple" % "json-simple" % "1.1.1"
lazy val jUnit = "junit" % "junit" % "4.12"
lazy val jUnitInterface = "com.novocode" % "junit-interface" % "0.11"
lazy val kafkaClients = "org.apache.kafka" % "kafka-clients" % "3.0.0"
lazy val nutch = "org.apache.nutch" % "nutch" % "1.16"
lazy val pf4j = "org.pf4j" % "pf4j" % "3.6.0"
lazy val scalaMacrosParadise = "org.scalamacros" %% "paradise" % "2.1.1"
lazy val lz4 = "net.jpountz.lz4" % "lz4" % "1.3.0"
object Slf4j {
private val group = "org.slf4j"
private val version = "1.7.30"
lazy val logback = "ch.qos.logback" % "logback-classic" % "1.2.6"
lazy val api = group % "slf4j-api" % version
lazy val log4j12 = group % "slf4j-log4j12" % version
}
lazy val snakeYaml = "org.yaml" % "snakeyaml" % "1.26"
object Solr {
private val group = "org.apache.solr"
private val version = "8.5.0"
lazy val core = group % "solr-core" % version
lazy val solrj = group % "solr-solrj" % version
}
object Spark {
private val group = "org.apache.spark"
private val version = "3.0.1" // pre-built version available @ https://spark.apache.org/downloads.html
lazy val core = group %% "spark-core" % version //% "provided"
lazy val sql = group %% "spark-sql" % version //% "provided"
}
object SparkProvided {
private val group = "org.apache.spark"
private val version = "3.0.1" // pre-built version available @ https://spark.apache.org/downloads.html
lazy val core = group %% "spark-core" % version % "provided"
lazy val sql = group %% "spark-sql" % version % "provided"
}
object Tika2 {
private val group = "org.apache.tika"
private val version = "2.2.1"
lazy val parsers = group %% "tika-parsers" % version
lazy val core = group %% "tika-core" % version
}
lazy val tikaParsers = "org.apache.tika" % "tika-parsers" % "2.2.1"
lazy val tikaParsersStandard = "org.apache.tika" % "tika-parsers-standard-package" % "2.2.1"
lazy val tikaCore = "org.apache.tika" % "tika-core" % "2.2.1"
lazy val elasticsearch = "org.elasticsearch.client" % "elasticsearch-rest-high-level-client" % "7.16.3"
}
| USCDataScience/sparkler | project/Dependencies.scala | Scala | apache-2.0 | 3,926 |
package org.pfcoperez.cci.treesandgraphs
object FirstCommonAncestor extends App {
trait BinaryTree[+T]
case class Node[T](value: T, var leftFatherRight: (BinaryTree[T], BinaryTree[T], BinaryTree[T])) extends BinaryTree[T] {
def setFather(node: Node[T]): Unit = {
leftFatherRight = leftFatherRight.copy(_2 = node)
}
override def toString: String = value.toString
}
case object Empty extends BinaryTree[Nothing]
def findCommonAncestor[T](a: BinaryTree[T], b: BinaryTree[T]): Option[Node[T]] = {
def recFindCommon(
a: BinaryTree[T],
b: BinaryTree[T]
)(visited: Set[Node[T]]): Option[Node[T]] = (a, b) match {
case (Empty, Empty) => None
case (Empty, _: Node[T]) | (_: Node[T], Empty) =>
val Seq(node @ Node(_, (_, father, _))) = Seq(a,b) collect {
case node: Node[T] => node
}
if(visited contains node) Some(node)
else recFindCommon(father, Empty)(visited)
case (a: Node[T], b: Node[T]) =>
if(visited contains a) Some(a)
else if(visited contains b) Some(b)
else if(a == b) Some(a)
else {
val Node(_, (_, aFather, _)) = a
val Node(_, (_, bFather, _)) = b
recFindCommon(aFather, bFather)(visited + a + b)
}
}
recFindCommon(a, b)(Set.empty)
}
val D = Node[Int](4, (Empty, Empty, Empty))
val E = Node[Int](5, (Empty, Empty, Empty))
val C = Node[Int](3, (D,Empty,E))
val B = Node[Int](2, (Empty, Empty, Empty))
val A = Node[Int](1, (B, Empty, C))
B.setFather(A)
C.setFather(A)
D.setFather(C)
E.setFather(C)
println(findCommonAncestor(D, E))
}
| pfcoperez/cci | src/main/scala/org/pfcoperez/cci/treesandgraphs/FirstCommonAncestor.scala | Scala | gpl-3.0 | 1,705 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.sources
import java.util
import scala.collection.JavaConverters._
import org.apache.spark.SparkException
import org.apache.spark.sql.{ForeachWriter, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.connector.catalog.{SupportsWrite, Table, TableCapability}
import org.apache.spark.sql.connector.write.{DataWriter, SupportsTruncate, WriteBuilder, WriterCommitMessage}
import org.apache.spark.sql.connector.write.streaming.{StreamingDataWriterFactory, StreamingWrite}
import org.apache.spark.sql.execution.python.PythonForeachWriter
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
/**
* A write-only table for forwarding data into the specified [[ForeachWriter]].
*
* @param writer The [[ForeachWriter]] to process all data.
* @param converter An object to convert internal rows to the target type T. It can be either
* an [[ExpressionEncoder]] or a direct converter function.
* @tparam T The expected type of the sink.
*/
case class ForeachWriterTable[T](
writer: ForeachWriter[T],
converter: Either[ExpressionEncoder[T], InternalRow => T])
extends Table with SupportsWrite {
override def name(): String = "ForeachSink"
override def schema(): StructType = StructType(Nil)
override def capabilities(): util.Set[TableCapability] = {
Set(TableCapability.STREAMING_WRITE).asJava
}
override def newWriteBuilder(options: CaseInsensitiveStringMap): WriteBuilder = {
new WriteBuilder with SupportsTruncate {
private var inputSchema: StructType = _
override def withInputDataSchema(schema: StructType): WriteBuilder = {
this.inputSchema = schema
this
}
// Do nothing for truncate. Foreach sink is special in that it just forwards all the records to
// ForeachWriter.
override def truncate(): WriteBuilder = this
override def buildForStreaming(): StreamingWrite = {
new StreamingWrite {
override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
override def abort(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
override def createStreamingWriterFactory(): StreamingDataWriterFactory = {
val rowConverter: InternalRow => T = converter match {
case Left(enc) =>
val boundEnc = enc.resolveAndBind(
inputSchema.toAttributes,
SparkSession.getActiveSession.get.sessionState.analyzer)
boundEnc.fromRow
case Right(func) =>
func
}
ForeachWriterFactory(writer, rowConverter)
}
}
}
}
}
}
object ForeachWriterTable {
def apply[T](
writer: ForeachWriter[T],
encoder: ExpressionEncoder[T]): ForeachWriterTable[_] = {
writer match {
case pythonWriter: PythonForeachWriter =>
new ForeachWriterTable[UnsafeRow](
pythonWriter, Right((x: InternalRow) => x.asInstanceOf[UnsafeRow]))
case _ =>
new ForeachWriterTable[T](writer, Left(encoder))
}
}
}
case class ForeachWriterFactory[T](
writer: ForeachWriter[T],
rowConverter: InternalRow => T)
extends StreamingDataWriterFactory {
override def createWriter(
partitionId: Int,
taskId: Long,
epochId: Long): ForeachDataWriter[T] = {
new ForeachDataWriter(writer, rowConverter, partitionId, epochId)
}
}
/**
* A [[DataWriter]] which writes data in this partition to a [[ForeachWriter]].
*
* @param writer The [[ForeachWriter]] to process all data.
* @param rowConverter A function which can convert [[InternalRow]] to the required type [[T]]
* @param partitionId
* @param epochId
* @tparam T The type expected by the writer.
*/
class ForeachDataWriter[T](
writer: ForeachWriter[T],
rowConverter: InternalRow => T,
partitionId: Int,
epochId: Long)
extends DataWriter[InternalRow] {
// If open returns false, we should skip writing rows.
private val opened = writer.open(partitionId, epochId)
private var closeCalled: Boolean = false
override def write(record: InternalRow): Unit = {
if (!opened) return
try {
writer.process(rowConverter(record))
} catch {
case t: Throwable =>
closeWriter(t)
throw t
}
}
override def commit(): WriterCommitMessage = {
closeWriter(null)
ForeachWriterCommitMessage
}
override def abort(): Unit = {
closeWriter(new SparkException("Foreach writer has been aborted due to a task failure"))
}
private def closeWriter(errorOrNull: Throwable): Unit = {
if (!closeCalled) {
closeCalled = true
writer.close(errorOrNull)
}
}
}
/**
* An empty [[WriterCommitMessage]]. [[ForeachWriter]] implementations have no global coordination.
*/
case object ForeachWriterCommitMessage extends WriterCommitMessage
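// Illustrative sketch, not part of the original Spark source: a minimal ForeachWriter that a
// ForeachWriterTable could forward rows to. The class name "PrintlnForeachWriter" is
// hypothetical; open/process/close follow the standard ForeachWriter lifecycle.
class PrintlnForeachWriter extends ForeachWriter[String] {
  // Return true to accept writing for every partition/epoch.
  override def open(partitionId: Long, epochId: Long): Boolean = true
  override def process(value: String): Unit = println(value)
  override def close(errorOrNull: Throwable): Unit = ()
}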
| bdrillard/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/ForeachWriterTable.scala | Scala | apache-2.0 | 5,912 |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ksmpartners.ernie.server
import net.liftweb.common.{ Full, Box }
import net.liftweb.http.{ PlainTextResponse, LiftResponse }
import com.ksmpartners.ernie.model.ModelObject
import com.ksmpartners.ernie.util.MapperUtility._
/**
* Trait containing methods for serializing/deserializing JSONs
*/
trait JsonTranslator {
/**
* Serializes an object into a JSON String
*/
def serialize[A <: ModelObject](obj: A): String = {
mapper.writeValueAsString(obj)
}
/**
* Deserializes the given JSON String into an object of the type clazz represents
*/
def deserialize[A <: ModelObject](json: String, clazz: Class[A]): A = {
mapper.readValue(json, clazz)
}
/**
* Deserializes the given JSON Array[Byte] into an object of the type clazz represents
*/
def deserialize[A <: ModelObject](json: Array[Byte], clazz: Class[A]): A = {
mapper.readValue(json, clazz)
}
/**
* Serializes the given response object into a Full[PlainTextResponse] with a content-type of application/json and
* an HTTP code of 200
*/
def getJsonResponse[A <: ModelObject](response: A): Box[LiftResponse] = {
getJsonResponse(response, 200)
}
/**
* Serializes the given response object into a Full[PlainTextResponse] with a content-type of application/json and
* the given HTTP status code
*/
def getJsonResponse[A <: ModelObject](response: A, statusCode: Int): Box[LiftResponse] = {
Full(PlainTextResponse(serialize(response), List(("Content-Type", response.cType())), statusCode))
}
/**
* Serializes the given response object into a Full[PlainTextResponse] with a content-type of application/json,
* the given HTTP status code and the given additional headers
*/
def getJsonResponse[A <: ModelObject](response: A, statusCode: Int, headers: List[(String, String)]): Box[LiftResponse] = {
Full(PlainTextResponse(serialize(response), List(("Content-Type", response.cType())) ++ headers, statusCode))
}
}
| ksmpartners/ernie | ernie-server/src/main/scala/com/ksmpartners/ernie/server/JsonTranslator.scala | Scala | apache-2.0 | 2,496 |
package bad.robot.temperature.task
import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture}
import bad.robot.logging._
import scala.concurrent.duration.Duration
object Scheduler {
implicit class ScheduledExecutorServiceOps(executor: ScheduledExecutorService) {
def schedule(frequency: Duration, tasks: Runnable*): List[ScheduledFuture[_]] = {
this.schedule(frequency, printError(_), tasks:_*)
}
def schedule(frequency: Duration, errorHandler: Throwable => Runnable => Unit, tasks: Runnable*): List[ScheduledFuture[_]] = {
tasks.map(task => {
executor.scheduleAtFixedRate(wrapWithErrorHandler(task, errorHandler), 0, frequency.length, frequency.unit)
}).toList
}
}
def wrapWithErrorHandler(task: Runnable, errorHandler: Throwable => Runnable => Unit): Runnable = {
() => try {
task.run()
} catch {
case e: Throwable => errorHandler(e)(task)
}
}
private def printError(e: Throwable): Runnable => Unit = {
task => Log.error(s"An error occurred executed a scheduled task ($task) ${e.getMessage}")
}
}
| tobyweston/temperature-machine | src/main/scala/bad/robot/temperature/task/Scheduler.scala | Scala | apache-2.0 | 1,101 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import scala.collection.mutable
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.sql.execution.SQLExecution
import org.apache.spark.sql.execution.metric.{SQLMetricParam, SQLMetricValue}
import org.apache.spark.{JobExecutionStatus, Logging, SparkConf}
private[sql] class SQLListener(conf: SparkConf) extends SparkListener with Logging {
private val retainedExecutions = conf.getInt("spark.sql.ui.retainedExecutions", 1000)
private val activeExecutions = mutable.HashMap[Long, SQLExecutionUIData]()
// Old data in the following fields must be removed in "trimExecutionsIfNecessary".
// If adding new fields, make sure "trimExecutionsIfNecessary" can clean up old data
private val _executionIdToData = mutable.HashMap[Long, SQLExecutionUIData]()
/**
* Maintain the relation between job id and execution id so that we can get the execution id in
* the "onJobEnd" method.
*/
private val _jobIdToExecutionId = mutable.HashMap[Long, Long]()
private val _stageIdToStageMetrics = mutable.HashMap[Long, SQLStageMetrics]()
private val failedExecutions = mutable.ListBuffer[SQLExecutionUIData]()
private val completedExecutions = mutable.ListBuffer[SQLExecutionUIData]()
def executionIdToData: Map[Long, SQLExecutionUIData] = synchronized {
_executionIdToData.toMap
}
def jobIdToExecutionId: Map[Long, Long] = synchronized {
_jobIdToExecutionId.toMap
}
def stageIdToStageMetrics: Map[Long, SQLStageMetrics] = synchronized {
_stageIdToStageMetrics.toMap
}
private def trimExecutionsIfNecessary(
executions: mutable.ListBuffer[SQLExecutionUIData]): Unit = {
if (executions.size > retainedExecutions) {
val toRemove = math.max(retainedExecutions / 10, 1)
executions.take(toRemove).foreach { execution =>
for (executionUIData <- _executionIdToData.remove(execution.executionId)) {
for (jobId <- executionUIData.jobs.keys) {
_jobIdToExecutionId.remove(jobId)
}
for (stageId <- executionUIData.stages) {
_stageIdToStageMetrics.remove(stageId)
}
}
}
executions.trimStart(toRemove)
}
}
override def onJobStart(jobStart: SparkListenerJobStart): Unit = {
val executionIdString = jobStart.properties.getProperty(SQLExecution.EXECUTION_ID_KEY)
if (executionIdString == null) {
// This is not a job created by SQL
return
}
val executionId = executionIdString.toLong
val jobId = jobStart.jobId
val stageIds = jobStart.stageIds
synchronized {
activeExecutions.get(executionId).foreach { executionUIData =>
executionUIData.jobs(jobId) = JobExecutionStatus.RUNNING
executionUIData.stages ++= stageIds
stageIds.foreach(stageId =>
_stageIdToStageMetrics(stageId) = new SQLStageMetrics(stageAttemptId = 0))
_jobIdToExecutionId(jobId) = executionId
}
}
}
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = synchronized {
val jobId = jobEnd.jobId
for (executionId <- _jobIdToExecutionId.get(jobId);
executionUIData <- _executionIdToData.get(executionId)) {
jobEnd.jobResult match {
case JobSucceeded => executionUIData.jobs(jobId) = JobExecutionStatus.SUCCEEDED
case JobFailed(_) => executionUIData.jobs(jobId) = JobExecutionStatus.FAILED
}
if (executionUIData.completionTime.nonEmpty && !executionUIData.hasRunningJobs) {
// We are the last job of this execution, so mark the execution as finished. Note that
// `onExecutionEnd` also does this, but currently that can be called before `onJobEnd`
// since these are called on different threads.
markExecutionFinished(executionId)
}
}
}
override def onExecutorMetricsUpdate(
executorMetricsUpdate: SparkListenerExecutorMetricsUpdate): Unit = synchronized {
for ((taskId, stageId, stageAttemptID, metrics) <- executorMetricsUpdate.taskMetrics) {
updateTaskAccumulatorValues(taskId, stageId, stageAttemptID, metrics, finishTask = false)
}
}
override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted): Unit = synchronized {
val stageId = stageSubmitted.stageInfo.stageId
val stageAttemptId = stageSubmitted.stageInfo.attemptId
// Always override metrics for old stage attempt
if (_stageIdToStageMetrics.contains(stageId)) {
_stageIdToStageMetrics(stageId) = new SQLStageMetrics(stageAttemptId)
} else {
// If a stage belongs to some SQL execution, its stageId will be put in "onJobStart".
// Since "_stageIdToStageMetrics" doesn't contain it, it must not belong to any SQL execution.
// So we can ignore it. Otherwise, this may lead to memory leaks (SPARK-11126).
}
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
updateTaskAccumulatorValues(
taskEnd.taskInfo.taskId,
taskEnd.stageId,
taskEnd.stageAttemptId,
taskEnd.taskMetrics,
finishTask = true)
}
/**
* Update the accumulator values of a task with the latest metrics for this task. This is called
* every time we receive an executor heartbeat or when a task finishes.
*/
private def updateTaskAccumulatorValues(
taskId: Long,
stageId: Int,
stageAttemptID: Int,
metrics: TaskMetrics,
finishTask: Boolean): Unit = {
if (metrics == null) {
return
}
_stageIdToStageMetrics.get(stageId) match {
case Some(stageMetrics) =>
if (stageAttemptID < stageMetrics.stageAttemptId) {
// A task of an old stage attempt. Because a new stage is submitted, we can ignore it.
} else if (stageAttemptID > stageMetrics.stageAttemptId) {
logWarning(s"A task should not have a higher stageAttemptID ($stageAttemptID) then " +
s"what we have seen (${stageMetrics.stageAttemptId})")
} else {
// TODO We don't know the attemptId. Currently, what we can do is override the
// accumulator updates. However, if two attempts of the same task are running, such as
// with speculation, the accumulator updates will be overridden by different task attempts,
// and the results will be weird.
stageMetrics.taskIdToMetricUpdates.get(taskId) match {
case Some(taskMetrics) =>
if (finishTask) {
taskMetrics.finished = true
taskMetrics.accumulatorUpdates = metrics.accumulatorUpdates()
} else if (!taskMetrics.finished) {
taskMetrics.accumulatorUpdates = metrics.accumulatorUpdates()
} else {
                // If a task is finished, we should not override its accumulator updates with
                // values from later heartbeat reports
}
case None =>
// TODO Now just set attemptId to 0. Should fix here when we can get the attempt
// id from SparkListenerExecutorMetricsUpdate
stageMetrics.taskIdToMetricUpdates(taskId) = new SQLTaskMetrics(
attemptId = 0, finished = finishTask, metrics.accumulatorUpdates())
}
}
case None =>
// This execution and its stage have been dropped
}
}
def onExecutionStart(
executionId: Long,
description: String,
details: String,
physicalPlanDescription: String,
physicalPlanGraph: SparkPlanGraph,
time: Long): Unit = {
val sqlPlanMetrics = physicalPlanGraph.nodes.flatMap { node =>
node.metrics.map(metric => metric.accumulatorId -> metric)
}
val executionUIData = new SQLExecutionUIData(executionId, description, details,
physicalPlanDescription, physicalPlanGraph, sqlPlanMetrics.toMap, time)
synchronized {
activeExecutions(executionId) = executionUIData
_executionIdToData(executionId) = executionUIData
}
}
def onExecutionEnd(executionId: Long, time: Long): Unit = synchronized {
_executionIdToData.get(executionId).foreach { executionUIData =>
executionUIData.completionTime = Some(time)
if (!executionUIData.hasRunningJobs) {
// onExecutionEnd happens after all "onJobEnd"s
// So we should update the execution lists.
markExecutionFinished(executionId)
} else {
        // There are still running jobs, so onExecutionEnd happened before some "onJobEnd"s.
        // We don't yet know whether the execution succeeded, so let the last onJobEnd update the
        // execution lists.
}
}
}
private def markExecutionFinished(executionId: Long): Unit = {
activeExecutions.remove(executionId).foreach { executionUIData =>
if (executionUIData.isFailed) {
failedExecutions += executionUIData
trimExecutionsIfNecessary(failedExecutions)
} else {
completedExecutions += executionUIData
trimExecutionsIfNecessary(completedExecutions)
}
}
}
def getRunningExecutions: Seq[SQLExecutionUIData] = synchronized {
activeExecutions.values.toSeq
}
def getFailedExecutions: Seq[SQLExecutionUIData] = synchronized {
failedExecutions
}
def getCompletedExecutions: Seq[SQLExecutionUIData] = synchronized {
completedExecutions
}
def getExecution(executionId: Long): Option[SQLExecutionUIData] = synchronized {
_executionIdToData.get(executionId)
}
/**
* Get all accumulator updates from all tasks which belong to this execution and merge them.
*/
def getExecutionMetrics(executionId: Long): Map[Long, String] = synchronized {
_executionIdToData.get(executionId) match {
case Some(executionUIData) =>
val accumulatorUpdates = {
for (stageId <- executionUIData.stages;
stageMetrics <- _stageIdToStageMetrics.get(stageId).toIterable;
taskMetrics <- stageMetrics.taskIdToMetricUpdates.values;
accumulatorUpdate <- taskMetrics.accumulatorUpdates.toSeq) yield {
accumulatorUpdate
}
}.filter { case (id, _) => executionUIData.accumulatorMetrics.contains(id) }
mergeAccumulatorUpdates(accumulatorUpdates, accumulatorId =>
executionUIData.accumulatorMetrics(accumulatorId).metricParam)
case None =>
// This execution has been dropped
Map.empty
}
}
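  /**
   * Group raw (accumulatorId, value) updates by accumulator id and render each group as a string
   * using the metric param looked up via `paramFunc`.
   */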
private def mergeAccumulatorUpdates(
accumulatorUpdates: Seq[(Long, Any)],
paramFunc: Long => SQLMetricParam[SQLMetricValue[Any], Any]): Map[Long, String] = {
accumulatorUpdates.groupBy(_._1).map { case (accumulatorId, values) =>
val param = paramFunc(accumulatorId)
(accumulatorId,
param.stringValue(values.map(_._2.asInstanceOf[SQLMetricValue[Any]].value)))
}
}
}
/**
* Represent all necessary data for an execution that will be used in Web UI.
*/
private[ui] class SQLExecutionUIData(
val executionId: Long,
val description: String,
val details: String,
val physicalPlanDescription: String,
val physicalPlanGraph: SparkPlanGraph,
val accumulatorMetrics: Map[Long, SQLPlanMetric],
val submissionTime: Long,
var completionTime: Option[Long] = None,
val jobs: mutable.HashMap[Long, JobExecutionStatus] = mutable.HashMap.empty,
val stages: mutable.ArrayBuffer[Int] = mutable.ArrayBuffer()) {
/**
* Return whether there are running jobs in this execution.
*/
def hasRunningJobs: Boolean = jobs.values.exists(_ == JobExecutionStatus.RUNNING)
/**
* Return whether there are any failed jobs in this execution.
*/
def isFailed: Boolean = jobs.values.exists(_ == JobExecutionStatus.FAILED)
def runningJobs: Seq[Long] =
jobs.filter { case (_, status) => status == JobExecutionStatus.RUNNING }.keys.toSeq
def succeededJobs: Seq[Long] =
jobs.filter { case (_, status) => status == JobExecutionStatus.SUCCEEDED }.keys.toSeq
def failedJobs: Seq[Long] =
jobs.filter { case (_, status) => status == JobExecutionStatus.FAILED }.keys.toSeq
}
/**
* Represent a metric in a SQLPlan.
*
 * Because we cannot revert changes made to an "Accumulator", we maintain the accumulator
 * updates for each task, so that if a task is retried we can simply override the old updates with
 * the updates of the new attempt. Since we cannot add them to the accumulator directly, we use
 * "AccumulatorParam" to compute the aggregated value.
*/
private[ui] case class SQLPlanMetric(
name: String,
accumulatorId: Long,
metricParam: SQLMetricParam[SQLMetricValue[Any], Any])
/**
* Store all accumulatorUpdates for all tasks in a Spark stage.
*/
private[ui] class SQLStageMetrics(
val stageAttemptId: Long,
val taskIdToMetricUpdates: mutable.HashMap[Long, SQLTaskMetrics] = mutable.HashMap.empty)
/**
* Store all accumulatorUpdates for a Spark task.
*/
private[ui] class SQLTaskMetrics(
val attemptId: Long, // TODO not used yet
var finished: Boolean,
var accumulatorUpdates: Map[Long, Any])
| pronix/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala | Scala | apache-2.0 | 13,849 |
package main.java.piratebot.pirates
import main.java.piratebot._
class Carpenter(game: Game, player: Player) extends Pirate(game, player) {
val rank = 9
val name = "Carpenter"
override def dayAction(round : Round): RetriableMethodResponse.Value = {
player.doubloons = player.doubloons / 2 + player.doubloons % 2
game.printer.print(Channel.Debug, tag + ": -50% Doubloons")
return RetriableMethodResponse.Complete
}
override def endOfVoyageAction(): Unit = {
player.doubloons += 10
game.printer.print(Channel.Debug, tag + ": +10 Doubloons")
}
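    // Sub-rank used to break ties between same-rank pirates, looked up by the owning player's id.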
def getSubRank(player : Player) : Int = {
Array(3, 2, 5, 4, 6, 1)(player.playerId)
}
} | ItCouldHaveBeenGreat/Eyepatch | src/main/java/piratebot/pirates/carpenter.scala | Scala | gpl-3.0 | 716 |
package scalaxy.react
import org.scalajs.dom
import scala.scalajs.js
import js.annotation._
trait ReactClass extends js.Object
trait ReactElement extends js.Object
trait ReactComponent extends js.Object
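/** Facade for the `this` reference available inside a React component's spec functions. */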
trait ReactComponentThis extends js.Object {
def instance: Any = js.native
def instance_=(c: Any): Unit = js.native
def replaceState(newState: js.Object): Unit = js.native
def setState(newState: js.Any): Unit = js.native
def state: js.Any = js.native
}
object React extends js.Object with ElementFactory[ReactElement, ReactClass] {
def render(e: ReactElement, n: dom.Node): ReactComponent = js.native
def createElement(tpe: ReactClass, props: js.Object, children: js.Any*): ReactElement = js.native
def createElement(tpe: String, props: js.Object, children: js.Any*): ReactElement = js.native
def createClass(spec: js.Object): ReactClass = js.native
}
| nativelibs4java/Scalaxy | Experiments/React/src/main/scala/scalaxy/react/ReactExterns.scala | Scala | bsd-3-clause | 887 |
package com.marmoush.scalasamples.underscore_advancedscala
object Dummy {
def main(args: Array[String]): Unit = {
val l = List(1, 3, 3, 5)
val l2 = List(2, 3, 4, 5)
l.map((i: Int) => println(i))(List.canBuildFrom)
}
}
| IsmailMarmoush/scala-samples | scalalang/src/main/scala/com/marmoush/scalasamples/underscore_advancedscala/Dummy.scala | Scala | agpl-3.0 | 237 |
package org.jetbrains.plugins.scala.lang.formatter.tests.scalafmt
trait UseConfig_2_7 extends ScalaFmtTestBase {
override def setUp(): Unit = {
super.setUp()
setScalafmtConfig("empty_config_2_7_5.conf")
scalaSettings.SCALAFMT_FALLBACK_TO_DEFAULT_SETTINGS = false
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/formatter/tests/scalafmt/UseConfig_2_7.scala | Scala | apache-2.0 | 286 |
package me.sgrouples.rogue
import org.bson.{BsonArray, BsonValue}
import scala.annotation.{StaticAnnotation, implicitNotFound}
@implicitNotFound("implicit BsonFormat not found for ${T}")
trait BsonFormat[T] {
def read(b: BsonValue): T
def readArray(b: BsonArray): Seq[T]
def write(t: T): BsonValue
def flds: Map[String, BsonFormat[_]]
def defaultValue: T
}
trait BsonArrayReader[T] {
this: BsonFormat[T] =>
override def readArray(b: BsonArray): Seq[T] = {
val sb = Seq.newBuilder[T]
val it = b.iterator()
while (it.hasNext) {
sb += read(it.next())
}
sb.result()
}
}
trait BaseBsonFormat[T] extends BsonFormat[T] with BsonArrayReader[T]
trait BasicBsonFormat[T] extends BaseBsonFormat[T] {
override val flds: Map[String, BsonFormat[_]] = Map.empty
}
class EnumSerializeValue extends StaticAnnotation
| sgrouples/rogue-fsqio | bsonformats/src/main/scala/me/sgrouples/rogue/BsonFormats.scala | Scala | apache-2.0 | 850 |
package org.dsa.test
import org.apache.commons.lang.RandomStringUtils
import scala.util.Random
/**
* Created by xubo on 2016/10/31.
*/
object Test1 {
def main(args: Array[String]) {
println(1.compareTo(2))
println(produceRandomString(10))
}
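  /** Builds the alphabet 'a'..'z' and draws `length` random characters from it via commons-lang. */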
def produceRandomString(length: Int): String = {
var array: Array[Char] = new Array[Char](26)
var arr = Array('a'.to('z').toArray)
arr.foreach { each =>
if (each != null && each.length > 25) {
for (i <- 0 until each.length) {
array(i) = each(i)
}
}
}
val random = RandomStringUtils.random(length, array)
random
}
def produceRandom(): Unit = {
val random = new Random()
println(random.nextString(2))
println(random.nextString(5))
println(random.nextPrintableChar())
}
def getOsName(): Unit = {
val pro = System.getProperties
println(pro.getProperty(("os.name")))
println(pro.getProperty("os.name").contains("Windows"))
println(System.getProperties.getProperty("os.name").contains("Windows"))
}
def maxValue(): Unit = {
var sum: Long = 1
for (i <- 1 to 18) {
sum = sum * 10
println(sum)
}
}
def getMaxScore(topK: Int): Unit = {
var arrMaxScore = new Array[Int](topK)
var arrStr = new Array[String](topK)
arrMaxScore.foreach(println)
arrStr.foreach(println)
for (i <- 0 until topK) {
println(i)
}
println()
for (i <- 0 until topK reverse) {
println(i)
}
}
}
| xubo245/CloudSW | src/test/scala/org/dsa/test/Test1.scala | Scala | gpl-2.0 | 1,508 |
package com.twitter.inject.thrift.integration.modules
import com.twitter.finagle.thrift.MethodMetadata
import com.twitter.inject.Injector
import com.twitter.inject.thrift.modules.ReqRepDarkTrafficFilterModule
import com.twitter.test.thriftscala.EchoService
object DoEverythingThriftServerDarkTrafficFilterModule
extends ReqRepDarkTrafficFilterModule[EchoService.ReqRepServicePerEndpoint] {
  /**
   * Function to determine if the request should be "sampled", i.e.
   * sent to the dark service. Here every method except `setTimesToEcho`
   * is forwarded to the dark service.
   */
override def enableSampling(injector: Injector): Any => Boolean = { _ =>
MethodMetadata.current match {
case Some(m) => !(m.methodName.equals("setTimesToEcho"))
case _ => true
}
}
}
| twitter/finatra | inject/inject-thrift-client/src/test/scala/com/twitter/inject/thrift/integration/modules/DoEverythingThriftServerDarkTrafficFilterModule.scala | Scala | apache-2.0 | 714 |
package scorex.api.http.assets
import com.google.common.base.Charsets
import io.swagger.annotations.{ApiModel, ApiModelProperty}
import play.api.libs.functional.syntax._
import play.api.libs.json.{JsPath, Writes}
import scorex.crypto.encode.Base58
import scorex.transaction.assets._
/**
  * Response models returned by the asset broadcast API endpoints (issue, reissue, burn, transfer).
  */
object BroadcastResponses {
@ApiModel(value = "Asset issue transaction")
case class AssetIssueResponse(@ApiModelProperty(value = "Transaction ID", required = true)
id: String,
@ApiModelProperty(value = "Base58 encoded Asset ID", required = true)
assetId: String,
@ApiModelProperty(value = "Base58 encoded Issuer public key", required = true)
senderPublicKey: String,
@ApiModelProperty(value = "Base58 encoded name of Asset", required = true)
name: String,
@ApiModelProperty(value = "Base58 encoded description of Asset", required = true)
description: String,
@ApiModelProperty(required = true)
quantity: Long,
@ApiModelProperty(dataType = "integer", required = true)
decimals: Byte,
@ApiModelProperty(required = true)
reissuable: Boolean,
@ApiModelProperty(required = true)
fee: Long,
@ApiModelProperty(required = true)
timestamp: Long,
@ApiModelProperty(required = true)
signature: String) {
}
object AssetIssueResponse {
def apply(tx: IssueTransaction): AssetIssueResponse = new AssetIssueResponse(
Base58.encode(tx.id),
Base58.encode(tx.assetId),
Base58.encode(tx.sender.publicKey),
new String(tx.name, Charsets.UTF_8),
new String(tx.description, Charsets.UTF_8),
tx.quantity, tx.decimals, tx.reissuable, tx.fee, tx.timestamp,
Base58.encode(tx.signature)
)
implicit val issueResponseWrites: Writes[AssetIssueResponse] = (
(JsPath \\ "id").write[String] and
(JsPath \\ "assetId").write[String] and
(JsPath \\ "senderPublicKey").write[String] and
(JsPath \\ "name").write[String] and
(JsPath \\ "description").write[String] and
(JsPath \\ "quantity").write[Long] and
(JsPath \\ "decimals").write[Byte] and
(JsPath \\ "reissuable").write[Boolean] and
(JsPath \\ "fee").write[Long] and
(JsPath \\ "timestamp").write[Long] and
(JsPath \\ "signature").write[String]
) (unlift(AssetIssueResponse.unapply))
}
@ApiModel(value = "Asset reissue transaction")
case class AssetReissueResponse(@ApiModelProperty(value = "Transaction ID", required = true)
id: String,
@ApiModelProperty(value = "Base58 encoded Asset ID", required = true)
assetId: String,
@ApiModelProperty(value = "Base58 encoded Issuer public key", required = true)
senderPublicKey: String,
@ApiModelProperty(required = true)
quantity: Long,
@ApiModelProperty(required = true)
reissuable: Boolean,
@ApiModelProperty(required = true)
fee: Long,
@ApiModelProperty(required = true)
timestamp: Long,
@ApiModelProperty(required = true)
signature: String) {
}
object AssetReissueResponse {
def apply(tx: ReissueTransaction): AssetReissueResponse = new AssetReissueResponse(
Base58.encode(tx.id),
Base58.encode(tx.assetId),
Base58.encode(tx.sender.publicKey),
tx.quantity,
tx.reissuable,
tx.fee,
tx.timestamp,
Base58.encode(tx.signature)
)
implicit val reissueResponseWrites: Writes[AssetReissueResponse] = (
(JsPath \\ "id").write[String] and
(JsPath \\ "assetId").write[String] and
(JsPath \\ "senderPublicKey").write[String] and
(JsPath \\ "quantity").write[Long] and
(JsPath \\ "reissuable").write[Boolean] and
(JsPath \\ "fee").write[Long] and
(JsPath \\ "timestamp").write[Long] and
(JsPath \\ "signature").write[String]
) (unlift(AssetReissueResponse.unapply))
}
@ApiModel(value = "Asset burn transaction")
case class AssetBurnResponse(@ApiModelProperty(value = "Transaction ID", required = true)
id: String,
@ApiModelProperty(value = "Base58 encoded Asset ID", required = true)
assetId: String,
@ApiModelProperty(value = "Base58 encoded Issuer public key", required = true)
senderPublicKey: String,
@ApiModelProperty(required = true)
quantity: Long,
@ApiModelProperty(required = true)
fee: Long,
@ApiModelProperty(required = true)
timestamp: Long,
@ApiModelProperty(required = true)
signature: String) {
}
object AssetBurnResponse {
def apply(tx: BurnTransaction): AssetBurnResponse = new AssetBurnResponse(
Base58.encode(tx.id),
Base58.encode(tx.assetId),
Base58.encode(tx.sender.publicKey),
tx.amount,
tx.fee,
tx.timestamp,
Base58.encode(tx.signature)
)
implicit val reissueResponseWrites: Writes[AssetBurnResponse] = (
(JsPath \\ "id").write[String] and
(JsPath \\ "assetId").write[String] and
(JsPath \\ "senderPublicKey").write[String] and
(JsPath \\ "quantity").write[Long] and
(JsPath \\ "fee").write[Long] and
(JsPath \\ "timestamp").write[Long] and
(JsPath \\ "signature").write[String]
) (unlift(AssetBurnResponse.unapply))
}
@ApiModel(value = "Asset transfer transaction")
case class AssetTransferResponse(@ApiModelProperty(value = "Transaction ID", required = true)
id: String,
@ApiModelProperty(value = "Base58 encoded Asset ID")
assetId: Option[String],
@ApiModelProperty(value = "Base58 encoded Issuer public key", required = true)
senderPublicKey: String,
@ApiModelProperty(value = "Recipient address", required = true)
recipient: String,
@ApiModelProperty(required = true)
amount: Long,
@ApiModelProperty(required = true)
fee: Long,
@ApiModelProperty(required = true)
timestamp: Long,
@ApiModelProperty
attachment: Option[String],
@ApiModelProperty(required = true)
signature: String) {
}
object AssetTransferResponse {
def apply(tx: TransferTransaction): AssetTransferResponse = new AssetTransferResponse(
Base58.encode(tx.id),
tx.assetId.map(Base58.encode),
Base58.encode(tx.sender.publicKey),
tx.recipient.address,
tx.amount,
tx.fee,
tx.timestamp,
if (tx.attachment.length > 0) Some(Base58.encode(tx.attachment)) else None,
Base58.encode(tx.signature)
)
implicit val transferResponseWrites: Writes[AssetTransferResponse] = (
(JsPath \\ "id").write[String] and
(JsPath \\ "assetId").writeNullable[String] and
(JsPath \\ "senderPublicKey").write[String] and
(JsPath \\ "recipient").write[String] and
(JsPath \\ "amount").write[Long] and
(JsPath \\ "fee").write[Long] and
(JsPath \\ "timestamp").write[Long] and
(JsPath \\ "attachment").writeNullable[String] and
(JsPath \\ "signature").write[String]
) (unlift(AssetTransferResponse.unapply))
}
}
| B83YPoj/Waves | src/main/scala/scorex/api/http/assets/BroadcastResponses.scala | Scala | apache-2.0 | 8,844 |
package text.search
/**
* @author ynupc
* Created on 2016/08/21
*/
object ForwardDawgMatching extends Search {
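  // Placeholder implementation: indexOf always reports "not found" and indicesOf yields no matches.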
override def indexOf[T](source: Array[T], target: Array[T]): Int = {
-1
}
override def indicesOf[T](source: Array[T], target: Array[T]): Array[Int] = {
Array()
}
}
| ynupc/scalastringcourseday6 | src/main/scala/text/search/ForwardDawgMatching.scala | Scala | apache-2.0 | 307 |
package com.boldradius.astrolabe.client.components.graph
import com.boldradius.astrolabe.client.d3.Layout._
import com.boldradius.astrolabe.client.d3._
import com.boldradius.astrolabe.client.domain._
import com.boldradius.astrolabe.client.modules._
import com.boldradius.astrolabe.client.services.ClusterService
import com.boldradius.astrolabe.client.services.Logger._
import com.boldradius.astrolabe.http.{ ClusterMember, DiscoveredCluster, RoleDependency }
import japgolly.scalajs.react.extra.OnUnmount
import japgolly.scalajs.react.vdom.SvgAttrs
import japgolly.scalajs.react.vdom.all.svg._
import japgolly.scalajs.react.vdom.prefix_<^._
import japgolly.scalajs.react.{ ReactComponentB, ReactNode, _ }
import scala.scalajs.js
import scala.scalajs.js.JSConverters._
object Graph {
import com.boldradius.astrolabe.client.style.CustomTags._
case class Props(system: String, mode: Mode, width: Double, height: Double,
store: ClusterService, fixedMap: Boolean)
case class State(nodes: Seq[ClusterGraphNode],
links: Seq[ClusterGraphLink],
force: ForceLayout)
def drawLinks(links: Seq[ClusterGraphLink], mode: Mode): ReactNode =
g(links.zipWithIndex.map { case (eachLink, i) => GraphLink(eachLink, i, mode) })
def drawNodes(nodes: Seq[ClusterGraphNode], force: ForceLayout, mode: Mode): Seq[ReactNode] =
nodes.zipWithIndex.map {
case (node, i) =>
GraphNode(node, force, mode)
}
def drawDeps(roles: Seq[(RoleDependency, Boolean)], select: (RoleDependency, Boolean) => Unit): Seq[ReactNode] =
roles.zipWithIndex.map {
case ((dep, selected), i) => ClusterDependencyLegend(dep, i, selected, select)
}
class Backend(t: BackendScope[Props, State]) extends RxObserver(t) {
def mounted(): Unit = {
react(t.props.store.getSelectedCluster, updateGraph)
react(t.props.store.getSelectedDeps, updateLinkDeps)
}
def selectDep(rd: RoleDependency, selected: Boolean) = {
log.debug("selectDep " + rd.tpe.name + " " + selected)
t.props.store.getSelectedCluster().foreach(cluster =>
ClusterService.selectRoleDependency(cluster.system, rd, selected)
)
}
def updateLinkDeps(c: Map[String, List[RoleDependency]]) = {
t.props.store.getSelectedCluster().foreach(cluster =>
c.get(cluster.system).foreach(deps =>
t.modState { s =>
val links: Seq[ClusterGraphLink] = getLinks(t.state.nodes, t.props.mode, cluster, deps)
val nodeUpdateState = s.copy(nodes = t.state.nodes, force = s.force.nodes(t.state.nodes.toJsArray).start())
nodeUpdateState.copy(links = links)
s.copy(links = links)
}
)
)
}
def getFixedList(system: String): List[ClusterGraphNode] =
t.props.store.getFixedNodePositions()
.getOrElse(system, Map.empty[Mode, List[ClusterGraphNode]])
.getOrElse(t.props.mode, Nil)
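    // Marks a host node as fixed (pinned) if the user previously dragged it; otherwise leaves it free to move.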
def fixedHostPosition(system: String, host: String, cn: ClusterGraphNode) =
getFixedList(system).find(e => e.host == host && e.port == 0).fold(
ClusterGraphNode.host(host, cn.index, cn.x, cn.y, cn.px, cn.py, cn.fixed, cn.weight)
)(fixedNode => {
js.Dynamic.literal(
"host" -> cn.host,
"port" -> cn.port,
"roles" -> cn.roles,
"status" -> "Up",
"name" -> cn.name,
"index" -> cn.index,
"x" -> cn.x,
"y" -> cn.y,
"px" -> cn.px,
"py" -> cn.py,
"fixed" -> true,
"weight" -> cn.weight
).asInstanceOf[ClusterGraphNode]
})
/**
* ClusterMember => ClusterGraphNode, checking if this has a fixed position from dragging
*/
def fixNodePosition(system: String, member: ClusterMember, cn: ClusterGraphNode)(implicit ev: MemberLike[ClusterGraphNode, ClusterMember]) =
getFixedList(system).find(e => ev.nodeEq(e, member)).fold(
ClusterGraphNode(member, cn.index, cn.x, cn.y, cn.px, cn.py, cn.fixed, cn.weight)
)(fixedNode => {
js.Dynamic.literal(
"host" -> cn.host,
"port" -> cn.port,
"roles" -> cn.roles,
"status" -> member.state.toString,
"name" -> cn.name,
"index" -> cn.index,
"x" -> cn.x,
"y" -> cn.y,
"px" -> cn.px,
"py" -> cn.py,
"fixed" -> true,
"weight" -> cn.weight
).asInstanceOf[ClusterGraphNode]
})
def updateGraph(c: Option[DiscoveredCluster]) = {
c.fold[Unit]({})(cluster => {
log.debug("updateGraph")
val existingIndexes = t.state.nodes.map(_.index).toSet
val incomingNodes: Seq[ClusterGraphNode] =
t.props.mode match {
case Nodes =>
// get a node map of what is currently on screen
val currentNodesMap = t.state.nodes.map(e => (ClusterGraphNode.label(e), e)).toMap
log.debug("currentNodesMap " + currentNodesMap.toList.map(e => (e._1, e._2.host + ":" + e._2.port)))
// add host nodes in
val hostMap = cluster.members.toSeq.groupBy(m => m.address.host)
            // This is the actual cluster state from the server; nodes may have been added there
            // and must be added here. Check the current map: if a node is missing, create it,
            // otherwise preserve its fixed position.
val ports = cluster.members.toSeq.map { node =>
currentNodesMap.get(node.address.label).fold(
ClusterGraphNode(node, getNewIndex(existingIndexes, 1), 450, 450, 450, 450, false, 0)
)(cn => fixNodePosition(t.props.system, node, cn))
}
val hosts = hostMap.keys.toSeq.map(hostName =>
currentNodesMap.get(hostName + ":0").fold(
ClusterGraphNode.host(hostName, getNewIndex(existingIndexes, 1), 450, 450, 450, 450, false, 0)
)(cn => fixedHostPosition(t.props.system, hostName, cn))
)
hosts ++: ports
case _ =>
// get a node map of what is currently on screen
val currentNodesMap = t.state.nodes.map(e => (ClusterGraphNode.label(e), e)).toMap
            // This is the actual cluster state from the server; nodes may have been added there
            // and must be added here. Check the current map: if a node is missing, create it,
            // otherwise preserve its fixed position.
val res = cluster.members.toSeq.map { node =>
currentNodesMap.get(node.address.label).fold(
ClusterGraphNode(node, getNewIndex(existingIndexes, 1), 450, 450, 450, 450, false, 0)
)(cn => fixNodePosition(t.props.system, node, cn))
}
res
}
log.debug("********** incomingNodes = " + incomingNodes.map(e => e.port + " " + e.index))
log.debug("********** cluster deps = " + cluster.dependencies)
t.modState { s =>
val links: Seq[ClusterGraphLink] = getLinks(incomingNodes, t.props.mode, cluster, t.props.store.getSelectedDeps().getOrElse(cluster.system, Nil))
s.copy(nodes = incomingNodes, links = links, force = s.force.nodes(incomingNodes.toJsArray).start())
}
})
initDrag()
}
def renderTick() = {
val newNodes: List[ClusterGraphNode] = t.state.force.nodes().toList
// val notFixed = newNodes.filter(_.fixed == false)
// val fixed = t.state.nodes.filter(_.fixed == true)
t.modState(s => s.copy(nodes = newNodes))
}
def startfixed() = {
t.modState { s =>
val firstState = s.copy(force = s.force.nodes(t.state.nodes.toJsArray).start())
(1 until 150).foreach(i => t.state.force.tick())
firstState.copy(force = s.force.on("tick", () => renderTick))
}
}
def initDrag(): Unit = {
val drag = t.state.force.drag().on("dragend", (a: js.Any, b: Double) => dragEnd[ClusterGraphNode](a, b))
d3.select("svg").
selectAll(".node").
data(t.state.nodes.toJSArray).
call(drag)
}
def dragEnd[T: NodeLike](d: js.Any, x: Double) = {
val node = d.asInstanceOf[ClusterGraphNode]
t.modState { s =>
val newNodes =
s.nodes.map { e =>
if (implicitly[NodeLike[ClusterGraphNode]].nodeEq(e, node)) {
js.Dynamic.literal(
"virtualHost" -> e.name,
"host" -> e.host,
"port" -> e.port,
"roles" -> e.roles,
"status" -> e.status,
"name" -> e.name,
"index" -> e.index,
"x" -> e.x,
"y" -> e.y,
"px" -> e.px,
"py" -> e.py,
"fixed" -> true,
"weight" -> e.weight
).asInstanceOf[ClusterGraphNode]
} else {
e
}
}
s.copy(nodes = newNodes, force = s.force.nodes(newNodes.toJSArray).start())
}
t.state.nodes.find(e => implicitly[NodeLike[ClusterGraphNode]].nodeEq(e, node)).foreach { node =>
log.debug("ClusterService.updateNodePosition node: " + node.host + ":" + node.port)
ClusterService.updateNodePosition(t.props.system, t.props.mode, node)
}
updateGraph(t.props.store.getSelectedCluster())
// initDrag()
}
}
val component = ReactComponentB[Props]("Graph")
.initialStateP { P =>
val force = d3.layout.force()
.size(List[Double](P.width, P.height).toJsArray)
.charge(-1500)
.linkDistance(1000)
.friction(0.9)
val (nodes, links) = P.store.getSelectedCluster().map(cluster => {
getNodesAndLink(cluster,
P.mode,
P.store.getFixedNodePositions().getOrElse(cluster.system, Map.empty[Mode, List[ClusterGraphNode]]).getOrElse(P.mode, Nil),
P.store.getSelectedDeps().getOrElse(cluster.system, Nil))
}).getOrElse((Nil, Nil))
State(nodes, links, force)
}.backend(new Backend(_))
.render((P, S, B) => {
val selectedDeps = P.store.getSelectedDeps().getOrElse(P.system, Nil)
val roles: Seq[(RoleDependency, Boolean)] =
if (P.mode == Roles) {
P.store.getSelectedCluster().map(_.dependencies).getOrElse(Nil)
.map(eachDep => (eachDep, selectedDeps.exists(_.tpe.name == eachDep.tpe.name)))
} else {
Nil
}
svgtag(SvgAttrs.width := P.width, SvgAttrs.height := P.height)(
drawDeps(roles, B.selectDep),
drawLinks(S.links, P.mode),
drawNodes(S.nodes, S.force, P.mode)
)
}).componentWillReceiveProps { (scope, P) =>
log.debug("componentWillReceiveProps")
val (nodes, links) = P.store.getSelectedCluster().map(cluster => {
getNodesAndLink(cluster, P.mode,
P.store.getFixedNodePositions().getOrElse(cluster.system, Map.empty[Mode, List[ClusterGraphNode]]).getOrElse(P.mode, Nil),
P.store.getSelectedDeps().getOrElse(cluster.system, Nil))
}).getOrElse((Nil, Nil))
val newState = State(nodes, links, scope.state.force)
scope.modState { s =>
val firstState = s.copy(nodes = nodes, links = links, force = s.force.nodes(nodes.toJsArray).start())
(1 until 150).foreach(i => scope.state.force.tick())
firstState.copy(force = s.force.on("tick", () => scope.backend.renderTick))
}
}.componentWillMount { scope =>
log.debug("componentWillMount")
scope.backend.startfixed()
}.componentDidMount { scope =>
log.debug("componentDidMount")
scope.backend.mounted()
scope.backend.initDrag()
}.componentWillUnmount { scope =>
scope.state.force.stop()
}.configure(OnUnmount.install).build
def apply(system: String, mode: Mode, width: Double, height: Double, store: ClusterService, fixedMap: Boolean) = {
component(Props(system, mode, width, height, store, fixedMap))
}
def getNodesAndLink(cluster: DiscoveredCluster,
mode: Mode,
fixedList: List[ClusterGraphNode],
selectedDeps: List[RoleDependency])(implicit ev: MemberLike[ClusterGraphNode, ClusterMember]): (Seq[ClusterGraphNode], Seq[ClusterGraphLink]) = {
val nodes: Seq[ClusterGraphNode] =
mode match {
case Nodes =>
// group by host
val map = cluster.members.toSeq.groupBy(m => m.address.host)
var newKeyIndex = 1000
map.keys.toSeq.zipWithIndex.flatMap {
case (key, keyIndex) =>
val ports: Seq[ClusterMember] = map.getOrElse[Seq[ClusterMember]](key, Seq.empty[ClusterMember])
val portNodes: Seq[ClusterGraphNode] = ports.zipWithIndex.map {
case (pNode, pIndex) =>
fixedList.find(e => ev.nodeEq(e, pNode)).fold(
ClusterGraphNode.port(pNode, newKeyIndex + keyIndex + 1 + pIndex, 450, 450, 450, 450, false, 1)
)(found =>
ClusterGraphNode.port(pNode, newKeyIndex + keyIndex + 1 + pIndex, found.x, found.y, found.px, found.py, true, 1)
)
}
val hostNode: Option[ClusterGraphNode] =
ports.headOption.map(firstPort =>
fixedList.find(e => e.host == firstPort.address.host && e.port == 0).fold(
ClusterGraphNode.host(firstPort.address.host, newKeyIndex + keyIndex, 450, 450, 450, 450, false, ports.length)
)(found =>
ClusterGraphNode.host(firstPort.address.host, newKeyIndex + keyIndex, found.x, found.y, found.px, found.py, true, ports.length)
))
val res = hostNode.fold(Seq.empty[ClusterGraphNode])(hn => hn +: portNodes)
newKeyIndex = newKeyIndex + ports.length + 1
res
}
case _ =>
cluster.members.toSeq.zipWithIndex.map {
case (node, i) =>
fixedList.find(e => ev.nodeEq(e, node)).fold(
ClusterGraphNode(node, i, 450, 450, 450, 450, false, 0)
)(fixedNode => {
ClusterGraphNode(node,
fixedNode.index, fixedNode.x, fixedNode.y, fixedNode.px, fixedNode.py, fixedNode.fixed, 0)
})
}
}
(nodes.toSeq, getLinks(nodes.toSeq, mode, cluster, selectedDeps))
}
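  /** Returns the next unused node index, probing upward from `v` until a free value is found. */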
def getNewIndex(set: Set[Double], v: Double): Double =
if (set.contains(v)) {
getNewIndex(set, v + 1)
} else {
v
}
def getLinks(nodes: Seq[ClusterGraphNode],
mode: Mode,
cluster: DiscoveredCluster,
roleDependencies: List[RoleDependency] = Nil)(implicit ev: MemberLike[ClusterGraphNode, ClusterMember]) = {
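    // Resolve (sourceIndex, targetIndex) pairs to concrete nodes and build the d3 link literals.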
def makeLinks(conns: Seq[(Double, Double)]) =
conns.flatMap {
case (a, b) =>
for {
source <- nodes.find(_.index == a)
target <- nodes.find(_.index == b)
} yield {
js.Dynamic.literal(
"source" -> source,
"target" -> target,
"sourceHost" -> source.host,
"targetHost" -> target.host).asInstanceOf[ClusterGraphLink]
}
}
mode match {
case Members =>
val indexes = nodes.filter(_.status == "Up").map(_.index)
val res: Seq[(Double, Double)] =
indexes.flatMap(index => indexes.filter(_ > index).map((index, _)))
makeLinks(res)
case Roles =>
val allDeps = cluster.dependencies
roleDependencies.zipWithIndex.flatMap {
case (rd, index) =>
val sourcesIndexes = rd.roles.flatMap { eachRole =>
cluster.getNodesByRole(eachRole).toSeq.flatMap(e =>
nodes.filter(n => ev.nodeEq(n, e)).map(_.index))
}
val targetsIndexes = rd.dependsOn.flatMap { eachRole =>
cluster.getNodesByRole(eachRole).toSeq.flatMap(e =>
nodes.filter(n => ev.nodeEq(n, e)).map(_.index))
}
val indexes = sourcesIndexes.flatMap(eachSource =>
targetsIndexes.map(eachTarget =>
(eachSource, eachTarget)))
// get index of RoleDep
indexes.flatMap {
case (a, b) =>
for {
source <- nodes.find(_.index == a)
target <- nodes.find(_.index == b)
} yield {
js.Dynamic.literal(
"index" -> allDeps.indexOf(rd),
"source" -> source,
"target" -> target,
"sourceHost" -> source.host,
"targetHost" -> target.host).asInstanceOf[ClusterGraphRoleLink]
}
}
}
case Nodes =>
//join ports to hosts
val hostPortMap: Map[String, Seq[ClusterGraphNode]] = nodes.groupBy(n => n.host)
val hostToPortIndexes = hostPortMap.foldLeft[Seq[(Double, Double)]](Seq.empty[(Double, Double)])((a, b) => a ++ {
nodes.find(e => e.host == b._1 && e.port == 0).map(host =>
b._2.flatMap(e => if (e.port != 0) {
Some((host.index, e.index))
} else None)).getOrElse(Nil)
}
)
makeLinks(hostToPortIndexes)
}
}
}
| boldradius/cluster-console | js/src/main/scala/com/boldradius/astrolabe/client/components/graph/Graph.scala | Scala | bsd-3-clause | 17,416 |
package com.seanshubin.learn.spark.core
// add dataframe/dataset example
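// A possible Dataset/DataFrame-based equivalent (sketch only, not wired into this class). It assumes a
// SparkSession named `spark` is available; the histogram semantics mirror calculateWordHistogram below.
//
//   import org.apache.spark.sql.functions.{col, explode, split}
//
//   val words = spark.read.textFile(pathSpecification)          // Dataset[String] with column "value"
//     .select(explode(split(col("value"), " ")).as("word"))     // one row per word
//     .filter(col("word").contains("a"))                        // keep only words containing an 'a'
//   val histogram = words.groupBy("word").count().orderBy("word") // word counts, sorted by word
//
// Kept as a comment because this class is deliberately built around the
// ResilientDistributedDatasetLoader abstraction.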
class WordCounterImpl(pathSpecification: String,
resilientDistributedDatasetLoader: ResilientDistributedDatasetLoader,
notifications: Notifications) extends WordCounter {
override def calculateWordHistogram(): Seq[(String, Int)] = {
val logData = resilientDistributedDatasetLoader.loadFromPathPattern(pathSpecification).cache()
val lineToWords: String => Seq[String] = line => line.split(" ")
val wordHasA: String => Boolean = word => word.contains("a")
val wordToWordOne: String => (String, Int) = word => (word, 1)
val plus: (Int, Int) => Int = (left, right) => left + right
val wordQuantityToWord: ((String, Int)) => String = wordQuantity => wordQuantity._1
    notifications.describeCalculation(s"scan files in $pathSpecification, split each line into words, only consider words containing an 'a', generate a histogram")
val sortedWordAndCountSeq = logData.
flatMap(lineToWords).
filter(wordHasA).
map(wordToWordOne).
reduceByKey(plus).
sortBy(wordQuantityToWord)
val histogram = sortedWordAndCountSeq.toLocalIterator.toSeq
histogram
}
}
| SeanShubin/learn-spark | core/src/main/scala/com/seanshubin/learn/spark/core/WordCounterImpl.scala | Scala | unlicense | 1,233 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.test
import java.util.concurrent.{ CountDownLatch, TimeUnit }
import scala.concurrent.duration.Duration
import io.gatling.core.action.Action
import io.gatling.core.session.Session
private[test] class BlockingExitAction(latchSize: Int = 1) extends Action {
private val latch = new CountDownLatch(latchSize)
override def name: String = "exit"
override def execute(session: Session): Unit = latch.countDown()
def await(duration: Duration): Unit = latch.await(duration.toMillis, TimeUnit.MILLISECONDS)
}
| timve/gatling | gatling-core/src/test/scala/io/gatling/core/test/BlockingExitAction.scala | Scala | apache-2.0 | 1,158 |
package io.udash.web.guide.views.ext
import io.udash._
import io.udash.css.CssView
import io.udash.web.commons.components.{CodeBlock, ForceBootstrap}
import io.udash.web.guide._
import io.udash.web.guide.styles.partials.GuideStyles
import io.udash.web.guide.views.ext.demo.{DynamicRemoteTranslationsDemo, FrontendTranslationsDemo, RemoteTranslationsDemo}
import scalatags.JsDom
case object I18NExtViewFactory extends StaticViewFactory[I18NExtState.type](() => new I18NExtView)
final class I18NExtView extends View with CssView {
import Context._
import JsDom.all._
private val (frontendTranslationsDemo, frontendTranslationsSnippet) = FrontendTranslationsDemo.demoWithSnippet()
private val (remoteTranslationsDemo, remoteTranslationsSnippet) = RemoteTranslationsDemo.demoWithSnippet()
private val (dynamicTranslationsDemo, dynamicTranslationsSnippet) = DynamicRemoteTranslationsDemo.demoWithSnippet()
override def getTemplate: Modifier = div(
h1("Udash i18n"),
p(
"The Udash framework supports internationalization of web applications. ",
"The Udash i18n plugin provides translations loading from the server-side application via RPC ",
"and allows locale changes in frontend application without refreshing. "
),
h2("Translation keys"),
p("If you want to use Udash translations support, you should define ", i("TranslationKeys"), "."),
CodeBlock(
s"""import io.udash.i18n._
|
|object Translations {
| import TranslationKey._
|
| object auth {
| val loginLabel = key("auth.loginLabel")
| val passwordLabel = key("auth.passwordLabel")
|
| object login {
| val buttonLabel = key("auth.login.buttonLabel")
| val retriesLeft = key1[Int]("auth.login.retriesLeft")
| val retriesLeftOne = key("auth.login.retriesLeftOne")
| }
|
| object register {
| val buttonLabel = key("auth.register.buttonLabel")
| }
| }
|}""".stripMargin
)(GuideStyles),
p(
i("TranslationKey"), " knows the count and types of the arguments. In the above example, ",
i("retriesLeft"), " key expects one integer as the argument."
),
p(
"It is possible to transform translation key with arguments to ", i("TranslationKey0"),
" by ", i(".apply"), "ing the arguments on it. You can also pass an ", i("Untranslatable"),
" as ", i("TranslationKey0"), " instance in order to use raw string instead of translated key."
),
h2("TranslationProvider"),
p("When translation keys are defined, we can create ", i("Translated"), " object as follows: "),
CodeBlock(
s"""val translated: Future[Translated] = Translations.auth.login.retriesLeft(3)
|translated onSuccess {
| case Translated(text) => println(text)
|}""".stripMargin
)(GuideStyles),
p(
"This code requires a ", i("TranslationProvider"), " instance to compile. The Udash i18n plugin provides two ",
i("TranslationProviders"), ": ", i("LocalTranslationProvider"), " and ", i("RemoteTranslationProvider"), "."
),
h3("LocalTranslationProvider"),
p(
i("LocalTranslationProvider"), " was prepared for frontend-only applications. It takes a map from ",
i("Lang"), " to ", i("Bundle"), ". Each bundle provides mapping from translation keys to translation templates."
),
frontendTranslationsSnippet,
p(
"Take a look at the example below. As you can see in the code sample, it uses ",
i("translated"), " method to bind translation into DOM hierarchy. "
),
ForceBootstrap(frontendTranslationsDemo),
h3("RemoteTranslationProvider"),
p(
"If your application is using the Udash RPC system, you can provide translations from the server side application. ",
i("RemoteTranslationProvider"), " takes ", i("RemoteTranslationRPC"), " as constructor argument. It allows the frontend application ",
"to ask the server application for the translation templates."
),
h4("RemoteTranslationRPC implementation"),
p("Let's start with ", i("RemoteTranslationRPC"), " implementation in the server application. Add the following method in your server RPC interface: "),
CodeBlock(
s"""import io.udash.i18n._
|
|@RPC
|trait DemosServerRPC {
| def translations(): RemoteTranslationRPC
|}""".stripMargin
)(GuideStyles),
p(
"The Udash i18n plugin makes ", i("RemoteTranslationRPC"), " easier, because it provides ",
i("TranslationRPCEndpoint"), " and ", i("ResourceBundlesTranslationTemplatesProvider"), " classes."
),
CodeBlock(
s"""import io.udash.i18n._
|import java.{util => ju}
|
|class TranslationServer extends TranslationRPCEndpoint(
| new ResourceBundlesTranslationTemplatesProvider(
| TranslationServer.langs
| .map(lang =>
| Lang(lang) -> TranslationServer.bundlesNames.map(name =>
| ju.ResourceBundle.getBundle(name, new ju.Locale(lang))
| )
| ).toMap
| )
|)
|
|object TranslationServer {
| val langs = Seq("en", "pl")
| val bundlesNames = Seq("demo_translations")
|}""".stripMargin
)(GuideStyles),
p(
i("ResourceBundlesTranslationTemplatesProvider"), " expects ", i("Map[Lang, Seq[ju.ResourceBundle]]"), " as a constructor argument, whereas ",
i("TranslationRPCEndpoint"), " takes ", i("ResourceBundlesTranslationTemplatesProvider"), " instance. ",
"The presented implementation will serve translation templates from bundles from server application resources."
),
h4("Frontend usage"),
p(
"Now it is possible to load translations into the frontend application using ",
i("RemoteTranslationProvider"), ". This provider loads required translation templates from server ",
"and caches them in provided storage. In the example below it is a browser local storage which keeps cached values for 6 hours."
),
remoteTranslationsSnippet,
p("Take a look at the example below."),
ForceBootstrap(remoteTranslationsDemo),
h2("Translation bindings"),
p(
"All translations are resolved asynchronously, so they cannot be statically added into DOM hierarchy. The Udash i18n plugin ",
"provides four extension methods for translation bindings on the 0-argument or reduced keys. ",
"These methods are divided into two groups: static and dynamic."
),
h3("Static binding"),
p(
"Static binding puts translated string into DOM hierarchy when available.",
ul(GuideStyles.defaultList)(
li(i("translated"), " - binds translated string in the DOM element."),
li(i("translatedAttr"), " - binds translated string in the DOM element attribute.")
)
),
h3("Dynamic binding"),
p(
"Dynamic binding is able to update translation after the change of ", i("LangProperty"), ".",
ul(GuideStyles.defaultList)(
li(i("translatedDynamic"), " - binds translated string in the DOM element and updates it when the application language changes."),
li(i("translatedAttrDynamic"), " - binds translated string in the DOM element attribute and updates it when the application language changes.")
),
"Take a look at the example below: "
),
dynamicTranslationsSnippet,
p("Now you can change the translation language without redrawing the whole component, as presented in the following live example."),
ForceBootstrap(dynamicTranslationsDemo),
h2("What's next?"),
p(
"Take a look at another extensions like ", a(href := BootstrapExtState.url)("Bootstrap Components"), " or ",
a(href := AuthorizationExtState.url)("Authorization utilities"), "."
)
)
} | UdashFramework/udash-core | guide/guide/.js/src/main/scala/io/udash/web/guide/views/ext/I18NExtView.scala | Scala | apache-2.0 | 7,993 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.coordinator.group
import kafka.common.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import org.junit.Assert._
import org.junit.{Before, Test}
import org.scalatest.junit.JUnitSuite
/**
* Test group state transitions and other GroupMetadata functionality
*/
class GroupMetadataTest extends JUnitSuite {
private val protocolType = "consumer"
private val groupId = "groupId"
private val clientId = "clientId"
private val clientHost = "clientHost"
private val rebalanceTimeoutMs = 60000
private val sessionTimeoutMs = 10000
private var group: GroupMetadata = null
@Before
def setUp() {
group = new GroupMetadata("groupId")
}
@Test
def testCanRebalanceWhenStable() {
assertTrue(group.canRebalance)
}
@Test
def testCanRebalanceWhenCompletingRebalance() {
group.transitionTo(PreparingRebalance)
group.transitionTo(CompletingRebalance)
assertTrue(group.canRebalance)
}
@Test
def testCannotRebalanceWhenPreparingRebalance() {
group.transitionTo(PreparingRebalance)
assertFalse(group.canRebalance)
}
@Test
def testCannotRebalanceWhenDead() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Empty)
group.transitionTo(Dead)
assertFalse(group.canRebalance)
}
@Test
def testStableToPreparingRebalanceTransition() {
group.transitionTo(PreparingRebalance)
assertState(group, PreparingRebalance)
}
@Test
def testStableToDeadTransition() {
group.transitionTo(Dead)
assertState(group, Dead)
}
@Test
def testAwaitingRebalanceToPreparingRebalanceTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(CompletingRebalance)
group.transitionTo(PreparingRebalance)
assertState(group, PreparingRebalance)
}
@Test
def testPreparingRebalanceToDeadTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Dead)
assertState(group, Dead)
}
@Test
def testPreparingRebalanceToEmptyTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Empty)
assertState(group, Empty)
}
@Test
def testEmptyToDeadTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Empty)
group.transitionTo(Dead)
assertState(group, Dead)
}
@Test
def testAwaitingRebalanceToStableTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(CompletingRebalance)
group.transitionTo(Stable)
assertState(group, Stable)
}
@Test(expected = classOf[IllegalStateException])
def testEmptyToStableIllegalTransition() {
group.transitionTo(Stable)
}
@Test
def testStableToStableIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(CompletingRebalance)
group.transitionTo(Stable)
try {
group.transitionTo(Stable)
fail("should have failed due to illegal transition")
} catch {
case e: IllegalStateException => // ok
}
}
@Test(expected = classOf[IllegalStateException])
def testEmptyToAwaitingRebalanceIllegalTransition() {
group.transitionTo(CompletingRebalance)
}
@Test(expected = classOf[IllegalStateException])
def testPreparingRebalanceToPreparingRebalanceIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(PreparingRebalance)
}
@Test(expected = classOf[IllegalStateException])
def testPreparingRebalanceToStableIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Stable)
}
@Test(expected = classOf[IllegalStateException])
def testAwaitingRebalanceToAwaitingRebalanceIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(CompletingRebalance)
group.transitionTo(CompletingRebalance)
}
  @Test
  def testDeadToDeadIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Dead)
group.transitionTo(Dead)
assertState(group, Dead)
}
@Test(expected = classOf[IllegalStateException])
def testDeadToStableIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Dead)
group.transitionTo(Stable)
}
@Test(expected = classOf[IllegalStateException])
def testDeadToPreparingRebalanceIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Dead)
group.transitionTo(PreparingRebalance)
}
@Test(expected = classOf[IllegalStateException])
def testDeadToAwaitingRebalanceIllegalTransition() {
group.transitionTo(PreparingRebalance)
group.transitionTo(Dead)
group.transitionTo(CompletingRebalance)
}
@Test
def testSelectProtocol() {
val memberId = "memberId"
val member = new MemberMetadata(memberId, groupId, clientId, clientHost, rebalanceTimeoutMs, sessionTimeoutMs,
protocolType, List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
group.add(member)
assertEquals("range", group.selectProtocol)
val otherMemberId = "otherMemberId"
val otherMember = new MemberMetadata(otherMemberId, groupId, clientId, clientHost, rebalanceTimeoutMs,
sessionTimeoutMs, protocolType, List(("roundrobin", Array.empty[Byte]), ("range", Array.empty[Byte])))
group.add(otherMember)
// now could be either range or robin since there is no majority preference
assertTrue(Set("range", "roundrobin")(group.selectProtocol))
val lastMemberId = "lastMemberId"
val lastMember = new MemberMetadata(lastMemberId, groupId, clientId, clientHost, rebalanceTimeoutMs,
sessionTimeoutMs, protocolType, List(("roundrobin", Array.empty[Byte]), ("range", Array.empty[Byte])))
group.add(lastMember)
// now we should prefer 'roundrobin'
assertEquals("roundrobin", group.selectProtocol)
}
@Test(expected = classOf[IllegalStateException])
def testSelectProtocolRaisesIfNoMembers() {
group.selectProtocol
fail()
}
@Test
def testSelectProtocolChoosesCompatibleProtocol() {
val memberId = "memberId"
val member = new MemberMetadata(memberId, groupId, clientId, clientHost, rebalanceTimeoutMs, sessionTimeoutMs,
protocolType, List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
val otherMemberId = "otherMemberId"
val otherMember = new MemberMetadata(otherMemberId, groupId, clientId, clientHost, rebalanceTimeoutMs,
sessionTimeoutMs, protocolType, List(("roundrobin", Array.empty[Byte]), ("blah", Array.empty[Byte])))
group.add(member)
group.add(otherMember)
assertEquals("roundrobin", group.selectProtocol)
}
@Test
def testSupportsProtocols() {
// by default, the group supports everything
assertTrue(group.supportsProtocols(Set("roundrobin", "range")))
val memberId = "memberId"
val member = new MemberMetadata(memberId, groupId, clientId, clientHost, rebalanceTimeoutMs,
sessionTimeoutMs, protocolType, List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
group.add(member)
assertTrue(group.supportsProtocols(Set("roundrobin", "foo")))
assertTrue(group.supportsProtocols(Set("range", "foo")))
assertFalse(group.supportsProtocols(Set("foo", "bar")))
val otherMemberId = "otherMemberId"
val otherMember = new MemberMetadata(otherMemberId, groupId, clientId, clientHost, rebalanceTimeoutMs,
sessionTimeoutMs, protocolType, List(("roundrobin", Array.empty[Byte]), ("blah", Array.empty[Byte])))
group.add(otherMember)
assertTrue(group.supportsProtocols(Set("roundrobin", "foo")))
assertFalse(group.supportsProtocols(Set("range", "foo")))
}
@Test
def testInitNextGeneration() {
val memberId = "memberId"
val member = new MemberMetadata(memberId, groupId, clientId, clientHost, rebalanceTimeoutMs, sessionTimeoutMs,
protocolType, List(("roundrobin", Array.empty[Byte])))
group.transitionTo(PreparingRebalance)
member.awaitingJoinCallback = _ => ()
group.add(member)
assertEquals(0, group.generationId)
assertNull(group.protocol)
group.initNextGeneration()
assertEquals(1, group.generationId)
assertEquals("roundrobin", group.protocol)
}
@Test
def testInitNextGenerationEmptyGroup() {
assertEquals(Empty, group.currentState)
assertEquals(0, group.generationId)
assertNull(group.protocol)
group.transitionTo(PreparingRebalance)
group.initNextGeneration()
assertEquals(1, group.generationId)
assertNull(group.protocol)
}
@Test
def testOffsetCommit(): Unit = {
val partition = new TopicPartition("foo", 0)
val offset = OffsetAndMetadata(37)
val commitRecordOffset = 3
group.prepareOffsetCommit(Map(partition -> offset))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(commitRecordOffset), offset))
assertTrue(group.hasOffsets)
assertEquals(Some(offset), group.offset(partition))
}
@Test
def testOffsetCommitFailure(): Unit = {
val partition = new TopicPartition("foo", 0)
val offset = OffsetAndMetadata(37)
group.prepareOffsetCommit(Map(partition -> offset))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.failPendingOffsetWrite(partition, offset)
assertFalse(group.hasOffsets)
assertEquals(None, group.offset(partition))
}
@Test
def testOffsetCommitFailureWithAnotherPending(): Unit = {
val partition = new TopicPartition("foo", 0)
val firstOffset = OffsetAndMetadata(37)
val secondOffset = OffsetAndMetadata(57)
group.prepareOffsetCommit(Map(partition -> firstOffset))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.prepareOffsetCommit(Map(partition -> secondOffset))
assertTrue(group.hasOffsets)
group.failPendingOffsetWrite(partition, firstOffset)
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(3L), secondOffset))
assertTrue(group.hasOffsets)
assertEquals(Some(secondOffset), group.offset(partition))
}
@Test
def testOffsetCommitWithAnotherPending(): Unit = {
val partition = new TopicPartition("foo", 0)
val firstOffset = OffsetAndMetadata(37)
val secondOffset = OffsetAndMetadata(57)
group.prepareOffsetCommit(Map(partition -> firstOffset))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.prepareOffsetCommit(Map(partition -> secondOffset))
assertTrue(group.hasOffsets)
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(4L), firstOffset))
assertTrue(group.hasOffsets)
assertEquals(Some(firstOffset), group.offset(partition))
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(5L), secondOffset))
assertTrue(group.hasOffsets)
assertEquals(Some(secondOffset), group.offset(partition))
}
@Test
def testConsumerBeatsTransactionalOffsetCommit(): Unit = {
val partition = new TopicPartition("foo", 0)
val producerId = 13232L
val txnOffsetCommit = OffsetAndMetadata(37)
val consumerOffsetCommit = OffsetAndMetadata(57)
group.prepareTxnOffsetCommit(producerId, Map(partition -> txnOffsetCommit))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.prepareOffsetCommit(Map(partition -> consumerOffsetCommit))
assertTrue(group.hasOffsets)
group.onTxnOffsetCommitAppend(producerId, partition, CommitRecordMetadataAndOffset(Some(3L), txnOffsetCommit))
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(4L), consumerOffsetCommit))
assertTrue(group.hasOffsets)
assertEquals(Some(consumerOffsetCommit), group.offset(partition))
group.completePendingTxnOffsetCommit(producerId, isCommit = true)
assertTrue(group.hasOffsets)
// This is the crucial assertion which validates that we materialize offsets in offset order, not transactional order.
assertEquals(Some(consumerOffsetCommit), group.offset(partition))
}
@Test
def testTransactionBeatsConsumerOffsetCommit(): Unit = {
val partition = new TopicPartition("foo", 0)
val producerId = 13232L
val txnOffsetCommit = OffsetAndMetadata(37)
val consumerOffsetCommit = OffsetAndMetadata(57)
group.prepareTxnOffsetCommit(producerId, Map(partition -> txnOffsetCommit))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.prepareOffsetCommit(Map(partition -> consumerOffsetCommit))
assertTrue(group.hasOffsets)
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(3L), consumerOffsetCommit))
group.onTxnOffsetCommitAppend(producerId, partition, CommitRecordMetadataAndOffset(Some(4L), txnOffsetCommit))
assertTrue(group.hasOffsets)
// The transactional offset commit hasn't been committed yet, so we should materialize the consumer offset commit.
assertEquals(Some(consumerOffsetCommit), group.offset(partition))
group.completePendingTxnOffsetCommit(producerId, isCommit = true)
assertTrue(group.hasOffsets)
// The transactional offset commit has been materialized and the transactional commit record is later in the log,
// so it should be materialized.
assertEquals(Some(txnOffsetCommit), group.offset(partition))
}
@Test
def testTransactionalCommitIsAbortedAndConsumerCommitWins(): Unit = {
val partition = new TopicPartition("foo", 0)
val producerId = 13232L
val txnOffsetCommit = OffsetAndMetadata(37)
val consumerOffsetCommit = OffsetAndMetadata(57)
group.prepareTxnOffsetCommit(producerId, Map(partition -> txnOffsetCommit))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.prepareOffsetCommit(Map(partition -> consumerOffsetCommit))
assertTrue(group.hasOffsets)
group.onOffsetCommitAppend(partition, CommitRecordMetadataAndOffset(Some(3L), consumerOffsetCommit))
group.onTxnOffsetCommitAppend(producerId, partition, CommitRecordMetadataAndOffset(Some(4L), txnOffsetCommit))
assertTrue(group.hasOffsets)
// The transactional offset commit hasn't been committed yet, so we should materialize the consumer offset commit.
assertEquals(Some(consumerOffsetCommit), group.offset(partition))
group.completePendingTxnOffsetCommit(producerId, isCommit = false)
assertTrue(group.hasOffsets)
// The transactional offset commit should be discarded and the consumer offset commit should continue to be
// materialized.
assertFalse(group.hasPendingOffsetCommitsFromProducer(producerId))
assertEquals(Some(consumerOffsetCommit), group.offset(partition))
}
@Test
def testFailedTxnOffsetCommitLeavesNoPendingState(): Unit = {
val partition = new TopicPartition("foo", 0)
val producerId = 13232L
val txnOffsetCommit = OffsetAndMetadata(37)
group.prepareTxnOffsetCommit(producerId, Map(partition -> txnOffsetCommit))
assertTrue(group.hasPendingOffsetCommitsFromProducer(producerId))
assertTrue(group.hasOffsets)
assertEquals(None, group.offset(partition))
group.failPendingTxnOffsetCommit(producerId, partition)
assertFalse(group.hasOffsets)
assertFalse(group.hasPendingOffsetCommitsFromProducer(producerId))
// The commit marker should now have no effect.
group.completePendingTxnOffsetCommit(producerId, isCommit = true)
assertFalse(group.hasOffsets)
assertFalse(group.hasPendingOffsetCommitsFromProducer(producerId))
}
private def assertState(group: GroupMetadata, targetState: GroupState) {
val states: Set[GroupState] = Set(Stable, PreparingRebalance, CompletingRebalance, Dead)
val otherStates = states - targetState
otherStates.foreach { otherState =>
assertFalse(group.is(otherState))
}
assertTrue(group.is(targetState))
}
}
| themarkypantz/kafka | core/src/test/scala/unit/kafka/coordinator/group/GroupMetadataTest.scala | Scala | apache-2.0 | 16,749 |
/*
* bytefrog: a tracing framework for the JVM. For more information
* see http://code-pulse.com/bytefrog
*
* Copyright (C) 2014 Applied Visions - http://securedecisions.avi.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.secdec.bytefrog.clients.common.data.processing.processors
import scala.collection.mutable
import com.secdec.bytefrog.hq.data.processing.DataProcessor
import com.secdec.bytefrog.hq.protocol.DataMessageContent
import com.secdec.bytefrog.fileapi.tracefile.TraceFile
import com.secdec.bytefrog.fileapi.tracefile.TraceFileBuilder
object MethodIdProcessor {
def apply(output: TraceFileBuilder) = new MethodIdProcessor(output)
}
/** The MethodIdProcessor maintains a mapping of method IDs to their full signatures.
*
* @author robertf
*/
class MethodIdProcessor(output: TraceFileBuilder) extends DataProcessor {
private val methods = mutable.Map[Int, String]()
def processMessage(message: DataMessageContent) = message match {
case DataMessageContent.MapMethodSignature(sig, id) =>
methods.put(id, sig)
case _ =>
}
def processDataBreak {
// we don't care :)
}
def finishProcessing() {
val writer = TraceFile.methodIdEntry.writer(output)
writer write methods
writer.finish
}
def cleanup() {
}
}
| secdec/bytefrog-clients | common/src/main/scala/com/secdec/bytefrog/clients/common/data/processing/MethodIdProcessor.scala | Scala | apache-2.0 | 1783 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.crossdata.session
import java.lang.reflect.Constructor
import java.util.UUID
import com.stratio.crossdata.security.CrossdataSecurityManager
import com.typesafe.config.{Config, ConfigException, ConfigFactory}
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLConf
import org.apache.spark.sql.crossdata.{XDSQLConf, XDSession}
import org.apache.spark.sql.crossdata.catalog.interfaces.{XDPersistentCatalog, XDStreamingCatalog, XDTemporaryCatalog}
import org.apache.spark.sql.crossdata.catalog.temporary.HashmapCatalog
import org.apache.spark.sql.crossdata.catalog.utils.CatalogUtils
import org.apache.spark.sql.crossdata.config.CoreConfig
import org.apache.spark.sql.crossdata.config.CoreConfig._
import scala.collection.mutable
import scala.util.{Failure, Success, Try}
object XDSessionProvider {
type SessionID = UUID
}
// TODO It should share some of the XDContext fields. It will be possible when Spark 2.0 is released
// TODO sessionProvider should be threadSafe
abstract class XDSessionProvider(
@transient val sc: SparkContext,
protected val userCoreConfig: Config
) extends CoreConfig {
import XDSessionProvider._
lazy val logger = Logger.getLogger(getClass)
protected lazy val finalCoreConfig = userCoreConfig.withFallback(config)
protected lazy val catalogConfig = Try(finalCoreConfig.getConfig(CoreConfig.CatalogConfigKey)).recover {
case exception: ConfigException =>
logger.debug(exception.getMessage, exception)
ConfigFactory.empty()
} get
//NOTE: DO NEVER KEEP THE RETURNED REFERENCE FOR SEVERAL USES!
def session(sessionID: SessionID): Try[XDSession]
def newSession(sessionID: SessionID, userId: String): Try[XDSession]
def closeSession(sessionID: SessionID): Try[Unit]
/**
* Close the underlying connections.
*
* It is called when the crossdata server is stopped, so if a session provider needs to open an external connection
* it should be closed here.
*/
def close(): Unit = {
securityManager.foreach { secManager =>
secManager.stop()
}
}
@transient
protected lazy val securityManager: Option[CrossdataSecurityManager] = {
val isSecurityManagerEnabled: Boolean = Try(finalCoreConfig.getBoolean(SecurityEnabledKey)).getOrElse(false)
if (!isSecurityManagerEnabled) {
logger.warn("Authorization is not enabled, configure a security manager if needed")
None
} else {
Some(
Try(finalCoreConfig.getString(SecurityClassConfigKey)).map { securityManagerClassName =>
val securityManagerClass = Class.forName(securityManagerClassName)
val constr: Constructor[_] = securityManagerClass.getConstructor()
val secManager = constr.newInstance().asInstanceOf[CrossdataSecurityManager]
secManager.start()
secManager
} get // Throw an exception if auth enabled and a security manager cannot be loaded
)
}
}
}
/**
* Session provider which store session info locally, so it can't be used when deploying several crossdata server
*/
class BasicSessionProvider(
@transient override val sc: SparkContext,
userCoreConfig: Config
) extends XDSessionProvider(sc, userCoreConfig) {
import XDSessionProvider._
override lazy val logger = Logger.getLogger(classOf[BasicSessionProvider])
// TODO Update DOC => user can set spark sql properties by adding crossdata-core.config.spark.<spark_option>=<option_value>
private lazy val sqlConf: SQLConf = configToSparkSQL(finalCoreConfig, new SQLConf)
@transient
protected lazy val externalCatalog: XDPersistentCatalog = CatalogUtils.externalCatalog(sqlConf, catalogConfig)
@transient
protected lazy val streamingCatalog: Option[XDStreamingCatalog] = CatalogUtils.streamingCatalog(sqlConf, finalCoreConfig)
private val sharedState = new XDSharedState(sc, sqlConf, externalCatalog, streamingCatalog, securityManager)
private val sessionIDToSQLProps: mutable.Map[SessionID, SQLConf] = mutable.Map.empty
private val sessionIDToTempCatalog: mutable.Map[SessionID, XDTemporaryCatalog] = mutable.Map.empty
private val errorMessage =
"A distributed context must be used to manage XDServer sessions. Please, use SparkSessions instead"
override def newSession(sessionID: SessionID, userId: String): Try[XDSession] =
Try {
sharedState.sqlConf.setConfString(XDSQLConf.UserIdPropertyKey, userId)
val tempCatalog = new HashmapCatalog(sqlConf)
sessionIDToTempCatalog.put(sessionID, tempCatalog)
sessionIDToSQLProps.put(sessionID, sharedState.sqlConf)
buildSession(sharedState.sqlConf, tempCatalog, Some(userCoreConfig))
}
override def closeSession(sessionID: SessionID): Try[Unit] = {
for {
_ <- sessionIDToSQLProps.remove(sessionID)
_ <- sessionIDToTempCatalog.remove(sessionID)
} yield ()
} map {
Success(_)
} getOrElse {
Failure(new RuntimeException(s"Cannot close session with sessionId=$sessionID"))
}
override def session(sessionID: SessionID): Try[XDSession] = {
for {
sqlConf <- sessionIDToSQLProps.get(sessionID)
tempCatalog <- sessionIDToTempCatalog.get(sessionID)
} yield buildSession(sqlConf, tempCatalog)
} map {
Success(_)
} getOrElse {
Failure(new RuntimeException(s"Cannot recover session with sessionId=$sessionID"))
}
override def close(): Unit = {
super.close()
sessionIDToSQLProps.clear
sessionIDToTempCatalog.clear
}
private def buildSession(sqlConf: XDSQLConf, xDTemporaryCatalog: XDTemporaryCatalog, userConfig: Option[Config] = None): XDSession = {
val sessionState = new XDSessionState(sqlConf, xDTemporaryCatalog :: Nil)
new XDSession(sharedState, sessionState, userConfig)
}
}
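
// Hedged usage sketch (not part of Crossdata). It assumes a live SparkContext `sc`
// and an empty user config; session ids are plain java.util.UUIDs, as required above.
//
//   val provider  = new BasicSessionProvider(sc, ConfigFactory.empty())
//   val sessionId = java.util.UUID.randomUUID()
//   provider.newSession(sessionId, "someUser")   // Try[XDSession]
//   provider.session(sessionId)                  // Try[XDSession] for an existing session
//   provider.closeSession(sessionId)             // Try[Unit]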
| Stratio/crossdata | core/src/main/scala/org/apache/spark/sql/crossdata/session/XDSessionProvider.scala | Scala | apache-2.0 | 6,594 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.frontend
import scala.beans.BeanProperty
import net.lshift.diffa.kernel.config.ValidationUtil
/**
 * Serializable representation of a domain.
*/
@Deprecated
case class DomainDef(@BeanProperty var name: String = null) {
def this() = this(name = null)
}
| lshift/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/frontend/DomainDef.scala | Scala | apache-2.0 | 930 |
package datacollector
import java.io.{ BufferedWriter, File, FileOutputStream, OutputStreamWriter }
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.zip.GZIPOutputStream
import akka.actor.{ Actor, Props }
import akka.event.Logging
/**
* Saves received strings to gzipped files.
*
* @author Emre Çelikten
*/
class SaverActor(val filePrefix: String, val configuration: ConfigurationModule) extends Actor {
private implicit val loggingContext = Logging(context.system, this)
private val df = new SimpleDateFormat("yyyy.MM.dd-HH.mm.ss")
var (file, writer) = createWriter()
Logger.info(s"SaverActor ready, will save to ${file.toString}.")
override def receive: Receive = {
case str: String =>
try {
writer.write(str)
} catch {
case ex: Exception =>
Logger.error(s"Error while writing to file $file.\\n" + Utils.getStackTraceString(ex))
try {
writer.close()
} catch {
case ex: Exception =>
Logger.error(s"Error while closing the writer after an error while writing to file $file!\\n" + Utils.getStackTraceString(ex))
throw ex
}
throw ex
}
case other =>
Logger.warn(s"Invalid message received from ${sender()}:\\n$other")
}
override def postStop(): Unit = {
Logger.info("Flushing output and closing stream.")
writer.flush()
writer.close()
}
def createWriter(): (File, BufferedWriter) = {
val file = new File(configuration.outputPath, filePrefix + "." + df.format(Calendar.getInstance().getTime) + ".gz")
(file, new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(file)))))
}
}
object SaverActor {
def props(filePrefix: String): Props = Props(new SaverActor(filePrefix, Configuration.configuration))
}
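
// Minimal usage sketch (added for illustration; not part of the original project).
// It assumes Configuration.configuration resolves to a writable output path.
object SaverActorExample extends App {
  import akka.actor.ActorSystem

  val system = ActorSystem("collector-example")
  val saver = system.actorOf(SaverActor.props("tweets"), "tweet-saver")

  // Each string sent to the actor is appended as-is to the gzipped output file.
  saver ! """{"id": 1}"""

  // Shut the actor system down when done (terminate() or shutdown(), depending on the Akka version).
}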
| emrecelikten/foursquare-data-collector | src/main/scala/datacollector/SaverActor.scala | Scala | gpl-3.0 | 1,858 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.monitoring.metrics
import io.prometheus.client.CollectorRegistry
import org.apache.openwhisk.core.connector.{Activation, EventMessage}
import org.apache.openwhisk.core.entity.{ActivationResponse, Subject, UUID}
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
class PrometheusRecorderTests extends KafkaSpecBase with BeforeAndAfterEach with PrometheusMetricNames {
behavior of "PrometheusConsumer"
val initiator = "initiatorTest"
val namespaceDemo = "demo"
val namespaceGuest = "guest"
val actionWithCustomPackage = "apimgmt/createApiOne"
val actionWithDefaultPackage = "createApi"
val kind = "nodejs:10"
val memory = "256"
  it should "push user events to prometheus" in {
createCustomTopic(EventConsumer.userEventTopic)
val consumer = createConsumer(kafkaPort, system.settings.config)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithCustomPackage", kind, memory).serialize)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceDemo/$actionWithDefaultPackage", kind, memory).serialize)
publishStringMessageToKafka(
EventConsumer.userEventTopic,
newActivationEvent(s"$namespaceGuest/$actionWithDefaultPackage", kind, memory).serialize)
// Custom package
sleep(sleepAfterProduce, "sleeping post produce")
consumer.shutdown().futureValue
counterTotal(activationMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
counter(coldStartMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
counterStatus(statusMetric, namespaceDemo, actionWithCustomPackage, ActivationResponse.statusDeveloperError) shouldBe 1
histogramCount(waitTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
histogramSum(waitTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe (0.03 +- 0.001)
histogramCount(initTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
histogramSum(initTimeMetric, namespaceDemo, actionWithCustomPackage) shouldBe (433.433 +- 0.01)
histogramCount(durationMetric, namespaceDemo, actionWithCustomPackage) shouldBe 1
histogramSum(durationMetric, namespaceDemo, actionWithCustomPackage) shouldBe (1.254 +- 0.01)
gauge(memoryMetric, namespaceDemo, actionWithCustomPackage).intValue() shouldBe 256
// Default package
counterTotal(activationMetric, namespaceDemo, actionWithDefaultPackage) shouldBe 1
// Blacklisted namespace should not be tracked
counterTotal(activationMetric, namespaceGuest, actionWithDefaultPackage) shouldBe 0
// Blacklisted should be counted in "openwhisk_namespace_activations_total" metric
namespaceCounterTotal(namespaceMetric, namespaceGuest) shouldBe 1
}
private def newActivationEvent(actionPath: String, kind: String, memory: String) =
EventMessage(
"test",
Activation(actionPath, 2, 1254.millis, 30.millis, 433433.millis, kind, false, memory.toInt, None),
Subject("testuser"),
initiator,
UUID("test"),
Activation.typeName)
private def gauge(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action"),
Array(namespace, initiator, action))
private def counter(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action"),
Array(namespace, initiator, action))
private def counterTotal(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action", "kind", "memory"),
Array(namespace, initiator, action, kind, memory))
private def namespaceCounterTotal(metricName: String, namespace: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator"),
Array(namespace, initiator))
private def counterStatus(metricName: String, namespace: String, action: String, status: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
metricName,
Array("namespace", "initiator", "action", "status"),
Array(namespace, initiator, action, status))
private def histogramCount(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry.getSampleValue(
s"${metricName}_count",
Array("namespace", "initiator", "action"),
Array(namespace, initiator, action))
private def histogramSum(metricName: String, namespace: String, action: String) =
CollectorRegistry.defaultRegistry
.getSampleValue(
s"${metricName}_sum",
Array("namespace", "initiator", "action"),
Array(namespace, initiator, action))
.doubleValue()
}
| markusthoemmes/openwhisk | core/monitoring/user-events/src/test/scala/org/apache/openwhisk/core/monitoring/metrics/PrometheusRecorderTests.scala | Scala | apache-2.0 | 5,878 |
package org.bfn.ninetynineprobs
object P43 {
// TODO
}
| bfontaine/99Scala | src/main/scala/P43.scala | Scala | mit | 60 |
package rocks.muki.graphql.codegen
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.matchers.should.Matchers
import scala.meta._
class ScalametaUtilsSpec extends AnyWordSpec with Matchers {
"The ScalametaUtils" should {
"generate valid imports" when {
"a single member is imported" in {
val imports = ScalametaUtils.imports("foo.bar.MyClass" :: Nil)
imports should have size 1
imports.head.show[Syntax] should be("import foo.bar.MyClass")
}
"a single member from a single package is imported" in {
val imports = ScalametaUtils.imports("foo.MyClass" :: Nil)
imports should have size 1
imports.head.show[Syntax] should be("import foo.MyClass")
}
"a wildcard import is used" in {
val imports = ScalametaUtils.imports("foo.bar._" :: Nil)
imports should have size 1
imports.head.show[Syntax] should be("import foo.bar._")
}
"a wildcard import from a single package is used" in {
val imports = ScalametaUtils.imports("foo._" :: Nil)
imports should have size 1
imports.head.show[Syntax] should be("import foo._")
}
}
}
}
| muuki88/sbt-graphql | src/test/scala/rocks/muki/graphql/codegen/ScalametaUtilsSpec.scala | Scala | apache-2.0 | 1,193 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.spark.spark
import org.apache.spark.Logging
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.scheduler.{StreamingListener, StreamingListenerReceiverError, StreamingListenerReceiverStarted, StreamingListenerReceiverStopped}
class SubscriberListener(ssc: StreamingContext) extends StreamingListener with Logging {
override def onReceiverError(receiverError: StreamingListenerReceiverError): Unit = {
logInfo("onReceiverError")
}
override def onReceiverStarted(receiverStarted: StreamingListenerReceiverStarted): Unit = {
logInfo("onReceiverStarted")
}
override def onReceiverStopped(receiverStopped: StreamingListenerReceiverStopped): Unit = {
logInfo("onReceiverStopped")
ssc.stop()
}
}
| daewon/incubator-s2graph | spark/src/main/scala/org/apache/s2graph/spark/spark/SubscriberListener.scala | Scala | apache-2.0 | 1,592 |
package net.liftweb {
package osgi {
import http.Bootable
/**
* Special Bootable for lift-osgi bundle: Do nothing!
*/
class OsgiBootable extends Bootable {
/** Do nothing! */
override def boot() {}
}
}
}
| lift/lift | framework/lift-modules/lift-osgi/src/main/scala/net/liftweb/osgi/OsgiBootable.scala | Scala | apache-2.0 | 214 |
package mesosphere.marathon.state
import mesosphere.marathon.MarathonSpec
class TimestampedTest extends MarathonSpec {
class A(val version: Timestamp = Timestamp.now) extends Timestamped
test("Ordering") {
val a1 = new A(Timestamp(1393989019980L))
val a2 = new A(Timestamp(1393989019981L))
val a3 = new A(Timestamp(1393989019982L))
implicit val orderingOnA = Timestamped.timestampOrdering[A]
assert(Seq(a2, a3, a1).sorted == Seq(a1, a2, a3))
}
}
| HardikDR/marathon | src/test/scala/mesosphere/marathon/state/TimestampedTest.scala | Scala | apache-2.0 | 478 |
package streams
import common._
/**
* This trait represents the layout and building blocks of the game
*
* @TODO: SHOULD RENAME `x` and `y` in class Pos to `row` and `col`. It's
* confusing to have `x` being the vertical axis.
*/
trait GameDef {
/**
* The case class `Pos` encodes positions in the terrain.
*
* IMPORTANT NOTE
* - The `x` coordinate denotes the position on the vertical axis
* - The `y` coordinate is used for the horizontal axis
* - The coordinates increase when moving down and right
*
* Illustration:
*
* 0 1 2 3 <- y axis
* 0 o o o o
* 1 o o o o
* 2 o # o o # is at position Pos(2, 1)
* 3 o o o o
*
* ^
* |
*
* x axis
*/
case class Pos(x: Int, y: Int) {
/** The position obtained by changing the `x` coordinate by `d` */
def dx(d: Int) = copy(x = x + d)
/** The position obtained by changing the `y` coordinate by `d` */
def dy(d: Int) = copy(y = y + d)
}
/**
* The position where the block is located initially.
*
* This value is left abstract, it will be defined in concrete
* instances of the game.
*/
val startPos: Pos
/**
* The target position where the block has to go.
* This value is left abstract.
*/
val goal: Pos
/**
* The terrain is represented as a function from positions to
* booleans. The function returns `true` for every position that
* is inside the terrain.
*
* As explained in the documentation of class `Pos`, the `x` axis
* is the vertical one and increases from top to bottom.
*/
type Terrain = Pos => Boolean
/**
* The terrain of this game. This value is left abstract.
*/
val terrain: Terrain
/**
* In Bloxorz, we can move left, right, Up or down.
* These moves are encoded as case objects.
*/
sealed abstract class Move
case object Left extends Move
case object Right extends Move
case object Up extends Move
case object Down extends Move
/**
* This function returns the block at the start position of
* the game.
*/
def startBlock: Block = Block(startPos, startPos)
/**
* A block is represented by the position of the two cubes that
* it consists of. We make sure that `b1` is lexicographically
* smaller than `b2`.
*/
case class Block(b1: Pos, b2: Pos) {
// checks the requirement mentioned above
require(b1.x <= b2.x && b1.y <= b2.y, "Invalid block position: b1=" + b1 + ", b2=" + b2)
/**
* Returns a block where the `x` coordinates of `b1` and `b2` are
* changed by `d1` and `d2`, respectively.
*/
def dx(d1: Int, d2: Int) = Block(b1.dx(d1), b2.dx(d2))
/**
* Returns a block where the `y` coordinates of `b1` and `b2` are
* changed by `d1` and `d2`, respectively.
*/
def dy(d1: Int, d2: Int) = Block(b1.dy(d1), b2.dy(d2))
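    // A block is either standing (both cubes on the same cell), lying along the horizontal
    // y axis (b1.x == b2.x), or lying along the vertical x axis; each move below handles
    // these three cases separately.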
/** The block obtained by moving left */
def left = if (isStanding) dy(-2, -1)
else if (b1.x == b2.x) dy(-1, -2)
else dy(-1, -1)
/** The block obtained by moving right */
def right = if (isStanding) dy(1, 2)
else if (b1.x == b2.x) dy(2, 1)
else dy(1, 1)
/** The block obtained by moving up */
def up = if (isStanding) dx(-2, -1)
else if (b1.x == b2.x) dx(-1, -1)
else dx(-1, -2)
/** The block obtained by moving down */
def down = if (isStanding) dx(1, 2)
else if (b1.x == b2.x) dx(1, 1)
else dx(2, 1)
/**
* Returns the list of blocks that can be obtained by moving
* the current block, together with the corresponding move.
*/
def neighbors: List[(Block, Move)] = List((left, Left), (right, Right), (up, Up), (down, Down))
/**
* Returns the list of positions reachable from the current block
* which are inside the terrain.
*/
def legalNeighbors: List[(Block, Move)] = neighbors filter (p => p._1.isLegal)
/**
* Returns `true` if the block is standing.
*/
def isStanding: Boolean = b1.x == b2.x && b1.y == b2.y
/**
* Returns `true` if the block is entirely inside the terrain.
*/
def isLegal: Boolean = terrain(b1) && terrain(b2)
}
}
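
// A small concrete level, added purely as an illustration of how the abstract members of
// GameDef are meant to be supplied; the real course levels are defined elsewhere.
object ExampleLevel extends GameDef {
  // a 3x3 square terrain: rows 0..2 and columns 0..2 are inside, everything else is not
  val terrain: Terrain = pos => pos.x >= 0 && pos.x <= 2 && pos.y >= 0 && pos.y <= 2
  val startPos: Pos = Pos(0, 0)
  val goal: Pos = Pos(2, 2)
}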
| juliengrimault/Scala-Class | streams/src/main/scala/streams/GameDef.scala | Scala | mit | 4,382 |
package biology
import org.apache.commons.math3.distribution.NormalDistribution
import locals.DistributionType.DistributionType
import locals.PelagicLarvalDurationType
import io.config._
import utilities.Time
import locals.Constants
class PelagicLarvalDuration(val config: PelagicLarvalDurationConfig) {
private val distribution: NormalDistribution =
new NormalDistribution(
Time.convertDaysToSeconds(config.mean),
Constants.SecondsInDay * config.stdev
)
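  /** Pelagic larval duration in seconds: the configured mean when the type is "fixed",
    * otherwise a single draw from the normal distribution defined above. */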
def getPld(): Int = config.pldType match {
case "fixed" => Time.convertDaysToSeconds(config.mean)
case _ => distribution.sample().toInt
}
}
| shawes/zissou | src/main/scala/biology/PelagicLarvalDuration.scala | Scala | mit | 639 |
object Main {
def min0[A](less: (A, A) => Boolean, xs: List[A]): Option[A] = xs match {
case List() => None
case List(x) => Some(x)
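    // `ys` is non-empty here, so the recursive call can never return None;
    // the @unchecked annotation silences the warning for the omitted None case.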
case y :: ys => (min0(less, ys): @unchecked) match {
case Some(m) => if (less(y, m)) Some(y) else Some(m)
}
}
def min(xs: List[Int]) = min0((x: Int, y: Int) => x < y, xs);
def main(args: Array[String]) =
Console.println(min(List()));
}
| yusuke2255/dotty | tests/pos/t0082.scala | Scala | bsd-3-clause | 442 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.view.perspective
import scalismo.ui.view.{ScalismoFrame, ViewportPanel, ViewportPanel3D}
import scala.swing.BorderPanel
class ThreeDOnlyPerspective(override val frame: ScalismoFrame, override val factory: PerspectiveFactory)
extends BorderPanel
with Perspective {
val viewport = new ViewportPanel3D(frame)
override val viewports: List[ViewportPanel] = List(viewport)
layout(viewport) = BorderPanel.Position.Center
}
object ThreeDOnlyPerspective extends PerspectiveFactory {
override def instantiate(frame: ScalismoFrame): Perspective = new ThreeDOnlyPerspective(frame, this)
override val perspectiveName: String = "Single 3D viewport"
}
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/view/perspective/ThreeDOnlyPerspective.scala | Scala | gpl-3.0 | 1,418 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.io._
import scala.Serializable
import scala.collection.Map
import scala.collection.immutable.NumericRange
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import org.apache.spark._
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.util.Utils
private[spark] class ParallelCollectionPartition[T: ClassTag](
var rddId: Long,
var slice: Int,
var values: Seq[T])
extends Partition with Serializable {
def iterator: Iterator[T] = values.iterator
override def hashCode(): Int = (41 * (41 + rddId) + slice).toInt
override def equals(other: Any): Boolean = other match {
case that: ParallelCollectionPartition[_] =>
this.rddId == that.rddId && this.slice == that.slice
case _ => false
}
override def index: Int = slice
@throws(classOf[IOException])
private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException {
val sfactory = SparkEnv.get.serializer
// Treat java serializer with default action rather than going thru serialization, to avoid a
// separate serialization header.
sfactory match {
case js: JavaSerializer => out.defaultWriteObject()
case _ =>
out.writeLong(rddId)
out.writeInt(slice)
val ser = sfactory.newInstance()
Utils.serializeViaNestedStream(out, ser)(_.writeObject(values))
}
}
@throws(classOf[IOException])
private def readObject(in: ObjectInputStream): Unit = Utils.tryOrIOException {
val sfactory = SparkEnv.get.serializer
sfactory match {
case js: JavaSerializer => in.defaultReadObject()
case _ =>
rddId = in.readLong()
slice = in.readInt()
val ser = sfactory.newInstance()
Utils.deserializeViaNestedStream(in, ser)(ds => values = ds.readObject[Seq[T]]())
}
}
}
private[spark] class ParallelCollectionRDD[T: ClassTag](
@transient sc: SparkContext,
@transient data: Seq[T],
numSlices: Int,
locationPrefs: Map[Int, Seq[String]])
extends RDD[T](sc, Nil) {
// TODO: Right now, each split sends along its full data, even if later down the RDD chain it gets
// cached. It might be worthwhile to write the data to a file in the DFS and read it in the split
// instead.
// UPDATE: A parallel collection can be checkpointed to HDFS, which achieves this goal.
override def getPartitions: Array[Partition] = {
val slices = ParallelCollectionRDD.slice(data, numSlices).toArray
slices.indices.map(i => new ParallelCollectionPartition(id, i, slices(i))).toArray
}
override def compute(s: Partition, context: TaskContext) = {
new InterruptibleIterator(context, s.asInstanceOf[ParallelCollectionPartition[T]].iterator)
}
override def getPreferredLocations(s: Partition): Seq[String] = {
locationPrefs.getOrElse(s.index, Nil)
}
}
private object ParallelCollectionRDD {
/**
* Slice a collection into numSlices sub-collections. One extra thing we do here is to treat Range
* collections specially, encoding the slices as other Ranges to minimize memory cost. This makes
* it efficient to run Spark over RDDs representing large sets of numbers. And if the collection
* is an inclusive Range, we use inclusive range for the last slice.
*/
def slice[T: ClassTag](seq: Seq[T], numSlices: Int): Seq[Seq[T]] = {
if (numSlices < 1) {
throw new IllegalArgumentException("Positive number of slices required")
}
// Sequences need to be sliced at the same set of index positions for operations
// like RDD.zip() to behave as expected
def positions(length: Long, numSlices: Int): Iterator[(Int, Int)] = {
(0 until numSlices).iterator.map(i => {
val start = ((i * length) / numSlices).toInt
val end = (((i + 1) * length) / numSlices).toInt
(start, end)
})
}
seq match {
case r: Range => {
positions(r.length, numSlices).zipWithIndex.map({ case ((start, end), index) =>
// If the range is inclusive, use inclusive range for the last slice
if (r.isInclusive && index == numSlices - 1) {
new Range.Inclusive(r.start + start * r.step, r.end, r.step)
}
else {
new Range(r.start + start * r.step, r.start + end * r.step, r.step)
}
}).toSeq.asInstanceOf[Seq[Seq[T]]]
}
case nr: NumericRange[_] => {
// For ranges of Long, Double, BigInteger, etc
val slices = new ArrayBuffer[Seq[T]](numSlices)
var r = nr
for ((start, end) <- positions(nr.length, numSlices)) {
val sliceSize = end - start
slices += r.take(sliceSize).asInstanceOf[Seq[T]]
r = r.drop(sliceSize)
}
slices
}
case _ => {
val array = seq.toArray // To prevent O(n^2) operations for List etc
positions(array.length, numSlices).map({
case (start, end) =>
array.slice(start, end).toSeq
}).toSeq
}
}
}
}
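
// Purely illustrative (not part of Spark): demonstrates the Range-preserving behaviour
// documented on slice() above. Each slice is still a Range, and the last slice of an
// inclusive Range stays inclusive: Seq(Range(1, 2, 3), Range(4, 5, 6), Range(7, 8, 9, 10)).
private object ParallelCollectionRDDSliceExample {
  def demo: Seq[Seq[Int]] = ParallelCollectionRDD.slice(1 to 10, 3)
}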
| Dax1n/spark-core | core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala | Scala | apache-2.0 | 5,845 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package std
import Def.Initialize
import sbt.internal.util.Types.{ Id, idFun }
import sbt.internal.util.AList
import sbt.internal.util.appmacro.{
Convert,
Converted,
Instance,
LinterDSL,
MixedBuilder,
MonadInstance
}
object InitializeInstance extends MonadInstance {
type M[x] = Initialize[x]
def app[K[L[x]], Z](in: K[Initialize], f: K[Id] => Z)(implicit a: AList[K]): Initialize[Z] =
Def.app[K, Z](in)(f)(a)
def map[S, T](in: Initialize[S], f: S => T): Initialize[T] = Def.map(in)(f)
def flatten[T](in: Initialize[Initialize[T]]): Initialize[T] = Def.bind(in)(idFun[Initialize[T]])
def pure[T](t: () => T): Initialize[T] = Def.pure(t)
}
import reflect.macros._
object InitializeConvert extends Convert {
def apply[T: c.WeakTypeTag](c: blackbox.Context)(nme: String, in: c.Tree): Converted[c.type] =
nme match {
case InputWrapper.WrapInitName => convert[T](c)(in)
case InputWrapper.WrapTaskName | InputWrapper.WrapInitTaskName => failTask[c.type](c)(in.pos)
case InputWrapper.WrapPreviousName => failPrevious[c.type](c)(in.pos)
case _ => Converted.NotApplicable
}
private def convert[T: c.WeakTypeTag](c: blackbox.Context)(in: c.Tree): Converted[c.type] = {
val i = c.Expr[Initialize[T]](in)
val t = c.universe.reify(i.splice).tree
Converted.Success(t)
}
private def failTask[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task")
private def failPrevious[C <: blackbox.Context with Singleton](
c: C
)(pos: c.Position): Converted[c.type] =
Converted.Failure(pos, "A setting cannot depend on a task's previous value.")
}
object SettingMacro {
import LinterDSL.{ Empty => EmptyLinter }
def settingMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Left(t),
Instance.idTransform[c.type]
)
def settingDynMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(t: c.Expr[Initialize[T]]): c.Expr[Initialize[T]] =
Instance.contImpl[T, Id](c, InitializeInstance, InitializeConvert, MixedBuilder, EmptyLinter)(
Right(t),
Instance.idTransform[c.type]
)
}
| xuwei-k/xsbt | main-settings/src/main/scala/sbt/std/SettingMacro.scala | Scala | apache-2.0 | 2,602 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.view.properties
import java.awt.{Color, Dimension, Graphics}
import javax.swing.JPanel
import javax.swing.border.{LineBorder, TitledBorder}
import scalismo.ui.model.SceneNode
import scalismo.ui.model.properties.{HasColor, HasOpacity, NodeProperty, OpacityProperty}
import scalismo.ui.view.ScalismoFrame
import scalismo.ui.view.util.ColorChooserPanel.event.ColorChanged
import scalismo.ui.view.util.ScalableUI.implicits._
import scalismo.ui.view.util.{ColorChooserPanel, Constants, ScalableUI}
import scala.swing.{BorderPanel, Component}
object ColorPropertyPanel extends PropertyPanel.Factory {
override def create(frame: ScalismoFrame): PropertyPanel = {
new ColorPropertyPanel(frame)
}
}
class ColorPropertyPanel(override val frame: ScalismoFrame) extends BorderPanel with PropertyPanel {
override def description: String = "Color"
private var targets: List[HasColor] = Nil
class ColorDisplayer extends Component {
private val BorderWidth = 1.scaled
override lazy val peer: JPanel = new JPanel {
override def paintComponent(g: Graphics): Unit = {
val dim: Dimension = getSize
val s = BorderWidth
g.setColor(Constants.PerceivedBackgroundColor)
g.fillRect(s, s, dim.width - s, dim.height - s)
// now paint the selected color on the gray background
g.setColor(getBackground)
g.fillRect(s, s, dim.width - s, dim.height - s)
}
}
def setColor(color: Color, opacity: Double): Unit = {
val comp = color.getColorComponents(null)
val c = new Color(comp(0), comp(1), comp(2), opacity.toFloat)
peer.setBackground(c)
peer.setForeground(c)
revalidate()
repaint()
}
peer.setOpaque(false)
peer.setPreferredSize(ScalableUI.scaleDimension(new Dimension(20, 20)))
peer.setBorder(new LineBorder(Color.BLACK, BorderWidth, false))
}
val colorDisplayer = new ColorDisplayer
private val colorChooser = new ColorChooserPanel {
border = new javax.swing.border.EmptyBorder(0, 0, 10.scaled, 0)
}
{
val northedPanel: BorderPanel = new BorderPanel {
val colorPanel: BorderPanel = new BorderPanel {
border = new TitledBorder(null, description, TitledBorder.LEADING, 0, null, null)
layout(colorChooser) = BorderPanel.Position.Center
layout(colorDisplayer) = BorderPanel.Position.South
}
layout(colorPanel) = BorderPanel.Position.Center
}
layout(northedPanel) = BorderPanel.Position.North
}
listenToOwnEvents()
private def listenToOwnEvents(): Unit = {
listenTo(colorChooser)
}
private def deafToOwnEvents(): Unit = {
deafTo(colorChooser)
}
private def updateUi(): Unit = {
if (targets.nonEmpty) {
deafToOwnEvents()
updateColorDisplayer()
listenToOwnEvents()
}
}
def updateColorDisplayer(): Unit = {
targets.headOption.foreach { t =>
val c = t.color.value
colorChooser.color = c
colorDisplayer.setColor(c, targetOpacityOption().map(_.value).getOrElse(1.0))
}
}
// returns the target's opacity property if the (first) target also happens to be a HasOpacity, else None
def targetOpacityOption(): Option[OpacityProperty] = {
targets.headOption.collect { case ok: HasOpacity => ok.opacity }
}
override def setNodes(nodes: List[SceneNode]): Boolean = {
cleanup()
val supported = allMatch[HasColor](nodes)
if (supported.nonEmpty) {
targets = supported
listenTo(targets.head.color)
targetOpacityOption().foreach(o => listenTo(o))
updateUi()
true
} else false
}
def cleanup(): Unit = {
targets.headOption.foreach(t => deafTo(t.color))
targetOpacityOption().foreach(o => deafTo(o))
targets = Nil
}
reactions += {
case NodeProperty.event.PropertyChanged(_) => updateUi()
case ColorChanged(c) =>
targets.foreach { t =>
t.color.value = c.color
updateColorDisplayer()
}
}
}
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/view/properties/ColorPropertyPanel.scala | Scala | gpl-3.0 | 4,723 |
package jp.rotaryo.whitespace
protected[whitespace] object DuplicateOperation extends Operation {
override def getSource(): String = {
return stackManipulation + "\\n "
}
override def getParameter(): Option[Parameter] = {
return None
}
override def preRun(container: Container, index: Int): Unit = {
}
override def run(container: Container, index: Int): Int = {
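    // Duplicate the top of the stack: read the value at offset zero and push a copy of it.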
container.pushValue(container.getValue(Constants.zero))
return index + 1
}
}
| rotary-o/scala2ws | src/main/scala/jp/rotaryo/whitespace/DuplicateOperation.scala | Scala | mit | 476 |
package com.azavea.maml.eval
import com.azavea.maml.ast
import com.azavea.maml.ast._
import com.azavea.maml.dsl._
import com.azavea.maml.error._
import com.azavea.maml.eval.directive.OpDirectives._
import com.azavea.maml.ast.codec.tree.ExpressionTreeCodec
import geotrellis.raster._
import geotrellis.vector._
import geotrellis.proj4.WebMercator
import cats.data._
import cats.effect._
import Validated._
import scala.reflect._
import org.scalatest._
import scala.concurrent.ExecutionContext.Implicits.global
import java.time.Instant
class ConcurrentEvaluationSpec
extends FunSpec
with Matchers
with ExpressionTreeCodec {
implicit val cs = IO.contextShift(global)
val interpreter = ConcurrentInterpreter.DEFAULT[IO].prependDirective(sleep)
implicit def tileIsTileLiteral(
tile: Tile
): RasterLit[ProjectedRaster[MultibandTile]] =
RasterLit(
ProjectedRaster(
MultibandTile(tile),
Extent(0, 0, 0.05, 0.05),
WebMercator
)
)
implicit def tileIsTileLiteral(
tile: MultibandTile
): RasterLit[ProjectedRaster[MultibandTile]] =
RasterLit(
ProjectedRaster(
tile,
Extent(0, 0, 0.05, 0.05),
WebMercator
)
)
implicit class TypeRefinement(self: Interpreted[Result]) {
def as[T: ClassTag]: Interpreted[T] = self match {
case Valid(r) => r.as[T]
case i @ Invalid(_) => i
}
}
it("should take less time than the total duration of its leaves") {
val sleepDuration = 3L
val expr = Addition(List(
Sleep(sleepDuration, List(IntLit(1))),
Sleep(sleepDuration, List(IntLit(1)))))
val now1 = Instant.now.toEpochMilli
interpreter(expr).unsafeRunSync.as[Int] should be(Valid(2))
val now2 = Instant.now.toEpochMilli
val duration = (now2 - now1) / 1000
duration should be < (2 * sleepDuration)
}
it("Should interpret and evaluate to Boolean literals") {
interpreter(BoolLit(true)).unsafeRunSync.as[Boolean] should be(Valid(true))
interpreter(false).unsafeRunSync.as[Boolean] should be(Valid(false))
interpreter(true).unsafeRunSync.as[Boolean] should be(Valid(true))
}
it("Should interpret and evaluate to Int literals") {
interpreter(IntLit(42)).unsafeRunSync.as[Int] should be(Valid(42))
interpreter(IntLit(4200)).unsafeRunSync.as[Int] should be(Valid(4200))
}
it("Should interpret and evaluate to double literals") {
interpreter(DblLit(42.0)).unsafeRunSync.as[Double] should be(Valid(42.0))
interpreter(DblLit(4200.0123)).unsafeRunSync.as[Double] should be(
Valid(4200.0123)
)
}
it("Should interpret and evaluate addition with scalars") {
interpreter(IntLit(42) + DblLit(42)).unsafeRunSync.as[Double] should be(
Valid(84.0)
)
}
it("Should interpret and evaluate multiplication with scalars") {
interpreter(IntLit(2) * DblLit(42)).unsafeRunSync.as[Double] should be(
Valid(84.0)
)
}
it("Should interpret and evaluate division with scalars") {
interpreter(DblLit(20) / DblLit(2) / DblLit(2)).unsafeRunSync
.as[Double] should be(Valid(5.0))
}
it("Should interpret and evaluate comparisions with scalars") {
interpreter(DblLit(20) < DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(19) < DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
interpreter(DblLit(29) < DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(20) <= DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
interpreter(DblLit(19) <= DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
interpreter(DblLit(29) <= DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(20) === DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
interpreter(DblLit(19) === DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(29) === DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(20) >= DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
interpreter(DblLit(19) >= DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(29) >= DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
interpreter(DblLit(20) > DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(19) > DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(false)
)
interpreter(DblLit(29) > DblLit(20)).unsafeRunSync.as[Boolean] should be(
Valid(true)
)
}
it("Should interpret and evaluate ndvi") {
interpreter((DblLit(5) - DblLit(2)) / (DblLit(5) + DblLit(2))).unsafeRunSync
.as[Double] match {
case Valid(x) => x should be(0.42857 +- 0.001)
case i @ Invalid(_) => fail(s"$i")
}
}
it("Should interpret and evaluate tile addition") {
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) + IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(2)
case i @ Invalid(_) => fail(s"$i")
}
}
it("Should interpret and evaluate tile subtraction") {
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) - IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
}
it("Should interpret and evaluate tile multiplication") {
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) * IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(1, 0) should be(4)
case i @ Invalid(_) => fail(s"$i")
}
}
it("Should interpret and evaluate tile division") {
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) / IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(1, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
}
it("Should interpret and evaluate tile assembly") {
interpreter(ast.Assemble(
List(
IntArrayTile(1 to 100 toArray, 10, 10),
IntArrayTile(101 to 200 toArray, 10, 10),
IntArrayTile(201 to 300 toArray, 10, 10)
)
)).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands match {
case Vector(r, g, b) =>
r.get(0, 0) should be(1)
g.get(0, 0) should be(101)
b.get(0, 0) should be(201)
}
case i @ Invalid(_) => fail(s"$i")
}
}
it("should interpret and evaluate tile comparison") {
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(2 to 5 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(0 to 3 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(2 to 5 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(0 to 3 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
Equal(
List(
IntArrayTile(1 to 4 toArray, 2, 2),
IntArrayTile(2 to 5 toArray, 2, 2)
)
)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
Equal(
List(
IntArrayTile(1 to 4 toArray, 2, 2),
IntArrayTile(1 to 4 toArray, 2, 2)
)
)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
Equal(
List(
IntArrayTile(1 to 4 toArray, 2, 2),
IntArrayTile(0 to 3 toArray, 2, 2)
)
)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(2 to 5 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(0 to 3 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(2 to 5 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(1 to 4 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(0)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(
IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(0 to 3 toArray, 2, 2)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(0, 0) should be(1)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(FocalSlope(List(IntArrayTile(1 to 100 toArray, 10, 10)))).unsafeRunSync
.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(5, 5) should be(10)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(FocalAspect(List(IntArrayTile(1 to 100 toArray, 10, 10)))).unsafeRunSync
.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(5, 5) should be(354)
case i @ Invalid(_) => fail(s"$i")
}
interpreter(ast.RGB(
List(
IntArrayTile(1 to 100 toArray, 10, 10),
IntArrayTile(101 to 200 toArray, 10, 10),
IntArrayTile(201 to 300 toArray, 10, 10)
)
)).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands match {
case Vector(r, g, b) =>
r.get(0, 0) should be(1)
g.get(0, 0) should be(101)
b.get(0, 0) should be(201)
}
case i@Invalid(_) => fail(s"$i")
}
val mbt: Expression = MultibandTile(
IntArrayTile(1 to 100 toArray, 10, 10),
IntArrayTile(101 to 200 toArray, 10, 10),
IntArrayTile(201 to 300 toArray, 10, 10)
)
interpreter(ast.RGB(List(mbt, mbt, mbt), "0", "1", "2")).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands match {
case Vector(r, g, b) =>
r.get(0, 0) should be(1)
g.get(0, 0) should be(101)
b.get(0, 0) should be(201)
}
case i @ Invalid(_) => fail(s"$i")
}
interpreter(Rescale(ast.RGB(
List(
IntArrayTile(1 to 100 toArray, 10, 10),
IntArrayTile(101 to 200 toArray, 10, 10),
IntArrayTile(201 to 300 toArray, 10, 10)
)
) :: Nil, 10, 11)).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands match {
case Vector(r, g, b) =>
r.get(0, 0) should be(10)
g.get(0, 0) should be(10)
b.get(0, 0) should be(10)
}
case i @ Invalid(_) => fail(s"$i")
}
interpreter(ast.RGB(
List(
Rescale(IntArrayTile(1 to 100 toArray, 10, 10) :: Nil, 10, 11),
Rescale(IntArrayTile(101 to 200 toArray, 10, 10) :: Nil, 20, 21),
Rescale(IntArrayTile(201 to 300 toArray, 10, 10) :: Nil, 30, 31)
)
)).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands match {
case Vector(r, g, b) =>
r.get(0, 0) should be(10)
g.get(0, 0) should be(20)
b.get(0, 0) should be(30)
}
case i @ Invalid(_) => fail(s"$i")
}
interpreter(ast.RGB(
List(
Clamp(IntArrayTile(1 to 100 toArray, 10, 10) :: Nil, 10, 11),
Clamp(IntArrayTile(101 to 200 toArray, 10, 10) :: Nil, 120, 121),
Clamp(IntArrayTile(201 to 300 toArray, 10, 10) :: Nil, 230, 231)
)
)).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands match {
case Vector(r, g, b) =>
r.get(0, 0) should be(10)
g.get(0, 0) should be(120)
b.get(0, 0) should be(230)
}
case i @ Invalid(_) => fail(s"$i")
}
/** The hillshade test is a bit more involved than some of the above
* See http://bit.ly/Qj0YPg for more information about the proper interpretation
* of hillshade values
**/
val hillshadeTile =
IntArrayTile(
Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0,
2447, 2455, 2477, 0, 0, 0, 0, 0, 0),
5,
5
)
val hillshadeE =
Extent(0, 0, 25, 25)
val hillshadeProjectedRaster =
ProjectedRaster(
Raster(MultibandTile(hillshadeTile), hillshadeE),
WebMercator
)
interpreter(
FocalHillshade(List(RasterLit(hillshadeProjectedRaster)), 315, 45)
).unsafeRunSync.as[MultibandTile] match {
case Valid(t) => t.bands.head.get(2, 2) should be(77)
case i @ Invalid(_) => fail(s"$i")
}
}
}
| geotrellis/maml | jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala | Scala | apache-2.0 | 14,752 |
object Bug {
abstract class FChain {
type T
def chain(constant:String) =
new FConstant[this.type](constant, this) //removing [this.type], everything compiles
}
case class FConstant[E <: FChain](constant:String, tail:E) extends FChain {
type T = tail.T
}
object FNil extends FChain {
type T = Unit
}
}
object Test {
import Bug.*
println("Compiles:")
val a1 = FNil.chain("a").chain("a")
val a2 = a1.chain("a")
println("\\nDoesn't compile:")
val a3 = FNil.chain("a").chain("a").chain("a")
val a4: FConstant[_ <: FConstant[_ <: FConstant[FNil.type]]] = a3
}
| dotty-staging/dotty | tests/pos/t2435.scala | Scala | apache-2.0 | 607 |
package com.eharmony.aloha.models.multilabel.json
import com.eharmony.aloha.id.ModelId
import com.eharmony.aloha.models.multilabel.PluginInfo
import com.eharmony.aloha.models.reg.json.{Spec, SpecJson}
import spray.json.DefaultJsonProtocol._
import spray.json.{JsObject, JsonFormat, RootJsonFormat}
import scala.collection.immutable.ListMap
import com.eharmony.aloha.factory.ScalaJsonFormats
trait MultilabelModelJson extends SpecJson with ScalaJsonFormats {
protected[this] case class Plugin(`type`: String)
/**
* AST for multi-label models.
*
* @param modelType
* @param modelId
* @param features
* @param numMissingThreshold
* @param labelsInTrainingSet The sequence of all labels encountered in training. '''It is
* important''' that this is sequence (''with the same order as the
* labels in the training set''). This is because some algorithms
* may require indices based on the training data.
* @param labelsOfInterest a string representing a function that will be used to extract labels.
   * @param underlying the underlying model (as JSON) that will be produced by the plugin.
* @tparam K
*/
protected[this] case class MultilabelData[K](
modelType: String,
modelId: ModelId,
features: ListMap[String, Spec],
numMissingThreshold: Option[Int],
labelsInTrainingSet: Vector[K],
labelsOfInterest: Option[String],
underlying: JsObject
) extends PluginInfo[K]
protected[this] final implicit def multilabelDataJsonFormat[K: JsonFormat]: RootJsonFormat[MultilabelData[K]] =
jsonFormat7(MultilabelData[K])
protected[this] final implicit val pluginJsonFormat: RootJsonFormat[Plugin] =
jsonFormat1(Plugin)
}
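
// Illustrative only (not part of Aloha): a sketch of the JSON shape implied by
// MultilabelData and jsonFormat7 above. Field values are made up and the bodies of the
// feature specs and of the underlying plugin object are elided.
//
//   {
//     "modelType": "...",
//     "modelId": { ... },
//     "features": { "someFeature": { ... } },
//     "numMissingThreshold": 1,
//     "labelsInTrainingSet": ["label1", "label2"],
//     "labelsOfInterest": "...",
//     "underlying": { "type": "..." }
//   }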
| eHarmony/aloha | aloha-core/src/main/scala/com/eharmony/aloha/models/multilabel/json/MultilabelModelJson.scala | Scala | mit | 1,790 |
package org.jetbrains.plugins.hocon.editor
import com.intellij.codeInsight.editorActions.{JavaLikeQuoteHandler, SimpleTokenSetQuoteHandler}
import com.intellij.openapi.editor.highlighter.HighlighterIterator
import com.intellij.psi.PsiElement
import com.intellij.psi.tree.{IElementType, TokenSet}
import org.jetbrains.plugins.hocon.lexer.HoconTokenType
class HoconQuoteHandler extends SimpleTokenSetQuoteHandler(HoconTokenType.QuotedString) with JavaLikeQuoteHandler {
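  // A quoted-string token is considered unterminated when it consists of just the opening
  // quote or when its last character is not a closing double quote.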
override protected def isNonClosedLiteral(iterator: HighlighterIterator, chars: CharSequence) =
iterator.getStart >= iterator.getEnd - 1 || chars.charAt(iterator.getEnd - 1) != '\\"'
def getConcatenatableStringTokenTypes = TokenSet.EMPTY
def getStringConcatenationOperatorRepresentation = null
def getStringTokenTypes: TokenSet = myLiteralTokenSet
def needParenthesesAroundConcatenation(element: PsiElement) = false
def isAppropriateElementTypeForLiteral(tokenType: IElementType) = true
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/hocon/editor/HoconQuoteHandler.scala | Scala | apache-2.0 | 981 |
package dundertext.editor.cmd
import org.junit.Test
import org.junit.Assert._
class BlurOnEmptyLastTest extends CommandTestBase {
@Test
def should_remove_empty_row_and_blur(): Unit = {
implicit val editor = given("""
Rad 1
Rad 2
╎
""")
// when
def cmd = new BlurOnEmptyLast
editor.execute(cmd)
// then
assertEquals(2, editor.buffer.firstText.rowCount)
assertEquals("Rad 1", editor.buffer.firstText.firstRow.text)
assertEquals("Rad 2", editor.buffer.firstText.lastRow.text)
assertEquals("", editor.cursor.toString)
}
}
| dundertext/dundertext | editor/src/test/scala/dundertext/editor/cmd/BlurOnEmptyLastTest.scala | Scala | gpl-3.0 | 589 |
package com.github.jmcs.domain.unaryexpression
import com.github.jmcs.domain.Expression
import com.github.jmcs.domain.UnaryExpression
/**
 * Unary expression that evaluates the common (base-10) logarithm of its operand.
 *
 * Created by Marcelo on 30/07/13.
 */
class CommonLogarithmExpression(override val expression: Expression) extends UnaryExpression(expression) {
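    // Base-10 logarithm via the change-of-base formula: log10(x) = ln(x) / ln(10).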
def evaluate(): Any = useCache(() => BigDecimal(Math.log(expression.evaluate.asInstanceOf[BigDecimal].toDouble) / Math.log(10)))
def getOperationSymbol = "log10"
}
| MarceloPortilho/jmc-scala | src/main/java/com/github/jmcs/domain/unaryexpression/CommonLogarithmExpression.scala | Scala | apache-2.0 | 585 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models
import play.api.libs.json.Json
case class CalculationType(calcType: Option[String])
object CalculationType{
implicit val format = Json.format[CalculationType]
val DOL = "0"
val REVALUATION = "1"
val PAYABLE_AGE = "2"
val SURVIVOR = "3"
val SPA = "4"
}
| hmrc/gmp-frontend | app/models/CalculationType.scala | Scala | apache-2.0 | 887 |
package org.jetbrains.plugins.scala.codeInsight.intention.controlflow
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.project.Project
import com.intellij.openapi.editor.Editor
import com.intellij.psi.{PsiDocumentManager, PsiElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScExpression, ScInfixExpr, ScIfStmt}
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.extensions._
import com.intellij.openapi.util.TextRange
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.util.IntentionUtils
/**
* @author Ksenia.Sautina
* @since 6/8/12
*/
object SplitIfIntention {
def familyName = "Split If"
}
class SplitIfIntention extends PsiElementBaseIntentionAction {
def getFamilyName = SplitIfIntention.familyName
override def getText: String = "Split into 2 'if's"
def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = {
val ifStmt: ScIfStmt = PsiTreeUtil.getParentOfType(element, classOf[ScIfStmt], false)
if (ifStmt == null) return false
val cond = ifStmt.condition.getOrElse(null)
if (cond == null || !cond.isInstanceOf[ScInfixExpr]) return false
val range: TextRange = cond.asInstanceOf[ScInfixExpr].operation.nameId.getTextRange
val offset = editor.getCaretModel.getOffset
if (!(range.getStartOffset <= offset && offset <= range.getEndOffset)) return false
if (cond.asInstanceOf[ScInfixExpr].operation.nameId.getText == "&&") return true
false
}
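  // Illustrative sketch (not from the original sources) of the rewrite built by invoke() below:
  //   if (a && b) doWork() else fallback()
  // is split into
  //   if (a)
  //     if (b) doWork() else fallback()
  //   else fallback()
  // i.e. the condition is cut at '&&' and the else branch is duplicated for both resulting 'if's.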
override def invoke(project: Project, editor: Editor, element: PsiElement) {
val ifStmt: ScIfStmt = PsiTreeUtil.getParentOfType(element, classOf[ScIfStmt], false)
if (ifStmt == null || !ifStmt.isValid) return
val start = ifStmt.getTextRange.getStartOffset
val expr = new StringBuilder
val cond: ScInfixExpr = ifStmt.condition.get.asInstanceOf[ScInfixExpr]
val firstCond =
if (cond.getBaseExpr.getText.trim.startsWith("(") && cond.getBaseExpr.getText.trim.endsWith(")"))
cond.getBaseExpr.getText.trim
else "(" + cond.getBaseExpr.getText.trim + ")"
val secondCond =
if (cond.getArgExpr.getText.trim.startsWith("(") && cond.getArgExpr.getText.trim.endsWith(")"))
cond.getArgExpr.getText.trim
else "(" + cond.getArgExpr.getText.trim + ")"
expr.append("if ").append(firstCond).append("\\n").append("if ").append(secondCond).append(" ").
append(ifStmt.thenBranch.get.getText)
val elseBranch = ifStmt.elseBranch.getOrElse(null)
if (elseBranch != null) {
if (expr.toString().trim.endsWith("}")) expr.append(" else ")
else expr.append("\\nelse ")
expr.append(elseBranch.getText).append("\\nelse ").append(elseBranch.getText)
}
val newIfStmt: ScExpression = ScalaPsiElementFactory.createExpressionFromText(expr.toString(), element.getManager)
val diff = newIfStmt.asInstanceOf[ScIfStmt].condition.get.getTextRange.getStartOffset -
newIfStmt.asInstanceOf[ScIfStmt].getTextRange.getStartOffset
inWriteAction {
ifStmt.replaceExpression(newIfStmt, true)
editor.getCaretModel.moveToOffset(start + diff)
PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument)
}
}
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitIfIntention.scala | Scala | apache-2.0 | 3,303 |
package net.jcazevedo.moultingyaml
trait DefaultYamlProtocol
extends BasicFormats
with StandardFormats
with CollectionFormats
with ProductFormats
with AdditionalFormats
object DefaultYamlProtocol extends DefaultYamlProtocol
| DDuarte/moultingyaml | src/main/scala/net/jcazevedo/moultingyaml/DefaultYamlProtocol.scala | Scala | mit | 236 |
package play.boilerplate.parser.model
sealed trait ResponseCode
case object DefaultResponse extends ResponseCode
final case class StatusResponse(code: Int) extends ResponseCode
case class Response(code: ResponseCode,
description: Option[String],
schema: Option[Definition],
headers: Map[String, Parameter]
/*examples: Map[String, AnyRef]*/
) extends WithResolve[Response] {
override def containsLazyRef: Boolean = headers.values.exists(_.containsLazyRef)
override def resolve(resolver: DefinitionResolver): Response = {
copy(
schema = schema.map(_.resolve(resolver)),
headers = for ((name, header) <- headers) yield {
name -> header.resolve(resolver)
}
)
}
}
| Romastyi/sbt-play-boilerplate | sbt-plugin/lib/src/main/scala/play/boilerplate/parser/model/Response.scala | Scala | apache-2.0 | 803 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.mysql
import java.util.concurrent.TimeoutException
import com.github.mauricio.async.db.Configuration
import org.specs2.execute.{AsResult, Success, ResultExecution}
import org.specs2.mutable.Specification
import scala.concurrent.Await
import scala.concurrent.duration._
class QueryTimeoutSpec extends Specification with ConnectionHelper {
implicit def unitAsResult: AsResult[Unit] = new AsResult[Unit] {
def asResult(r: =>Unit) =
ResultExecution.execute(r)(_ => Success())
}
"Simple query with 1 nanosec timeout" in {
withConfigurablePool(shortTimeoutConfiguration) {
pool => {
val connection = Await.result(pool.take, Duration(10,SECONDS))
connection.isTimeouted === false
connection.isConnected === true
val queryResultFuture = connection.sendQuery("select sleep(1)")
Await.result(queryResultFuture, Duration(10,SECONDS)) must throwA[TimeoutException]()
connection.isTimeouted === true
Await.ready(pool.giveBack(connection), Duration(10,SECONDS))
pool.availables.count(_ == connection) === 0 // connection removed from pool
// we do not know when the connection will be closed.
}
}
}
"Simple query with 5 sec timeout" in {
withConfigurablePool(longTimeoutConfiguration) {
pool => {
val connection = Await.result(pool.take, Duration(10,SECONDS))
connection.isTimeouted === false
connection.isConnected === true
val queryResultFuture = connection.sendQuery("select sleep(1)")
Await.result(queryResultFuture, Duration(10,SECONDS)).rows.get.size === 1
connection.isTimeouted === false
connection.isConnected === true
Await.ready(pool.giveBack(connection), Duration(10,SECONDS))
pool.availables.count(_ == connection) === 1 // connection returned to pool
}
}
}
def shortTimeoutConfiguration = new Configuration(
"mysql_async",
"localhost",
port = 3306,
password = Some("root"),
database = Some("mysql_async_tests"),
queryTimeout = Some(Duration(1,NANOSECONDS))
)
def longTimeoutConfiguration = new Configuration(
"mysql_async",
"localhost",
port = 3306,
password = Some("root"),
database = Some("mysql_async_tests"),
queryTimeout = Some(Duration(5,SECONDS))
)
}
| anand-singh/postgresql-async | mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala | Scala | apache-2.0 | 3,000 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.flume
import java.nio.charset.StandardCharsets
import java.util.{Collections, List => JList, Map => JMap}
import java.util.concurrent._
import scala.collection.mutable.ArrayBuffer
import org.apache.flume.Context
import org.apache.flume.channel.MemoryChannel
import org.apache.flume.conf.Configurables
import org.apache.flume.event.EventBuilder
import org.apache.spark.streaming.flume.sink.{SparkSink, SparkSinkConfig}
/**
* Share codes for Scala and Python unit tests
*/
private[flume] class PollingFlumeTestUtils {
private val batchCount = 5
val eventsPerBatch = 100
private val totalEventsPerChannel = batchCount * eventsPerBatch
private val channelCapacity = 5000
def getTotalEvents: Int = totalEventsPerChannel * channels.size
private val channels = new ArrayBuffer[MemoryChannel]
private val sinks = new ArrayBuffer[SparkSink]
/**
* Start a sink and return the port of this sink
*/
def startSingleSink(): Int = {
channels.clear()
sinks.clear()
// Start the channel and sink.
val context = new Context()
context.put("capacity", channelCapacity.toString)
context.put("transactionCapacity", "1000")
context.put("keep-alive", "0")
val channel = new MemoryChannel()
Configurables.configure(channel, context)
val sink = new SparkSink()
context.put(SparkSinkConfig.CONF_HOSTNAME, "localhost")
context.put(SparkSinkConfig.CONF_PORT, String.valueOf(0))
Configurables.configure(sink, context)
sink.setChannel(channel)
sink.start()
channels += (channel)
sinks += sink
sink.getPort()
}
/**
* Start 2 sinks and return the ports
*/
def startMultipleSinks(): Seq[Int] = {
channels.clear()
sinks.clear()
// Start the channel and sink.
val context = new Context()
context.put("capacity", channelCapacity.toString)
context.put("transactionCapacity", "1000")
context.put("keep-alive", "0")
val channel = new MemoryChannel()
Configurables.configure(channel, context)
val channel2 = new MemoryChannel()
Configurables.configure(channel2, context)
val sink = new SparkSink()
context.put(SparkSinkConfig.CONF_HOSTNAME, "localhost")
context.put(SparkSinkConfig.CONF_PORT, String.valueOf(0))
Configurables.configure(sink, context)
sink.setChannel(channel)
sink.start()
val sink2 = new SparkSink()
context.put(SparkSinkConfig.CONF_HOSTNAME, "localhost")
context.put(SparkSinkConfig.CONF_PORT, String.valueOf(0))
Configurables.configure(sink2, context)
sink2.setChannel(channel2)
sink2.start()
sinks += sink
sinks += sink2
channels += channel
channels += channel2
sinks.map(_.getPort())
}
/**
* Send data and wait until all data has been received
*/
def sendDataAndEnsureAllDataHasBeenReceived(): Unit = {
val executor = Executors.newCachedThreadPool()
val executorCompletion = new ExecutorCompletionService[Void](executor)
val latch = new CountDownLatch(batchCount * channels.size)
sinks.foreach(_.countdownWhenBatchReceived(latch))
channels.foreach { channel =>
executorCompletion.submit(new TxnSubmitter(channel))
}
for (i <- 0 until channels.size) {
executorCompletion.take()
}
latch.await(15, TimeUnit.SECONDS) // Ensure all data has been received.
}
/**
* A Python-friendly method to assert the output
*/
def assertOutput(
outputHeaders: JList[JMap[String, String]], outputBodies: JList[String]): Unit = {
require(outputHeaders.size == outputBodies.size)
val eventSize = outputHeaders.size
if (eventSize != totalEventsPerChannel * channels.size) {
throw new AssertionError(
s"Expected ${totalEventsPerChannel * channels.size} events, but was $eventSize")
}
var counter = 0
for (k <- 0 until channels.size; i <- 0 until totalEventsPerChannel) {
val eventBodyToVerify = s"${channels(k).getName}-$i"
val eventHeaderToVerify: JMap[String, String] = Collections.singletonMap(s"test-$i", "header")
var found = false
var j = 0
while (j < eventSize && !found) {
if (eventBodyToVerify == outputBodies.get(j) &&
eventHeaderToVerify == outputHeaders.get(j)) {
found = true
counter += 1
}
j += 1
}
}
if (counter != totalEventsPerChannel * channels.size) {
throw new AssertionError(
s"111 Expected ${totalEventsPerChannel * channels.size} events, but was $counter")
}
}
def assertChannelsAreEmpty(): Unit = {
channels.foreach(assertChannelIsEmpty)
}
private def assertChannelIsEmpty(channel: MemoryChannel): Unit = {
val queueRemaining = channel.getClass.getDeclaredField("queueRemaining")
queueRemaining.setAccessible(true)
val m = queueRemaining.get(channel).getClass.getDeclaredMethod("availablePermits")
if (m.invoke(queueRemaining.get(channel)).asInstanceOf[Int] != channelCapacity) {
throw new AssertionError(s"Channel ${channel.getName} is not empty")
}
}
def close(): Unit = {
sinks.foreach(_.stop())
sinks.clear()
channels.foreach(_.stop())
channels.clear()
}
private class TxnSubmitter(channel: MemoryChannel) extends Callable[Void] {
override def call(): Void = {
var t = 0
for (i <- 0 until batchCount) {
val tx = channel.getTransaction
tx.begin()
for (j <- 0 until eventsPerBatch) {
channel.put(EventBuilder.withBody(
s"${channel.getName}-$t".getBytes(StandardCharsets.UTF_8),
Collections.singletonMap(s"test-$t", "header")))
t += 1
}
tx.commit()
tx.close()
Thread.sleep(500) // Allow some time for the events to reach
}
null
}
}
}
| janewangfb/spark | external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala | Scala | apache-2.0 | 6,633 |
/*
* Copyright 2007-2009 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb.http
import _root_.javax.servlet.http._
import _root_.javax.servlet.ServletContext
import _root_.net.liftweb.util.Helpers._
import _root_.net.liftweb.util.{Log, Box, Full, Empty,
EmptyBox,
Failure, ThreadGlobal,
NamedPF, NamedPartialFunction}
import _root_.net.liftweb.sitemap._
import _root_.java.io.InputStream
import _root_.scala.xml._
import _root_.org.apache.commons.fileupload.servlet._
@serializable
sealed trait ParamHolder {
def name: String
}
@serializable
case class NormalParamHolder(name: String, value: String) extends ParamHolder
@serializable
case class FileParamHolder(name: String, mimeType: String,
fileName: String,
file: Array[Byte]) extends ParamHolder
/**
* Helper object for constructing Req instances
*/
object Req {
object NilPath extends ParsePath(Nil, "", true, false)
def apply(request: HttpServletRequest, rewrite: List[LiftRules.RewritePF], nanoStart: Long): Req = {
val reqType = RequestType(request)
val turi = request.getRequestURI.substring(request.getContextPath.length)
val tmpUri = if (turi.length > 0) turi else "/"
val contextPath = LiftRules.calculateContextPath(request) openOr
request.getContextPath
val tmpPath = parsePath(tmpUri)
def processRewrite(path: ParsePath, params: Map[String, String]): RewriteResponse =
NamedPF.applyBox(RewriteRequest(path, reqType, request), rewrite) match {
case Full(resp @ RewriteResponse(_, _, true)) => resp
case _: EmptyBox[_] => RewriteResponse(path, params)
case Full(resp) => processRewrite(resp.path, resp.params)
}
// val (uri, path, localSingleParams) = processRewrite(tmpUri, tmpPath, TreeMap.empty)
val rewritten = processRewrite(tmpPath, Map.empty)
val localParams: Map[String, List[String]] = Map(rewritten.params.toList.map{case (name, value) => name -> List(value)} :_*)
// val session = request.getSession
// val body = ()
val eMap = Map.empty[String, List[String]]
val contentType = request.getContentType match {
case null => ""
case s => s
}
// val (paramNames: List[String], params: Map[String, List[String]], files: List[FileParamHolder], body: Box[Array[Byte]]) =
val paramCalculator = () =>
if ((reqType.post_? ||
reqType.put_?) && contentType.startsWith("text/xml")) {
(Nil,localParams, Nil, tryo(readWholeStream(request.getInputStream)))
} else if (ServletFileUpload.isMultipartContent(request)) {
val allInfo = (new Iterator[ParamHolder] {
val mimeUpload = (new ServletFileUpload)
mimeUpload.setSizeMax(LiftRules.maxMimeSize)
mimeUpload.setFileSizeMax(LiftRules.maxMimeFileSize)
val what = mimeUpload.getItemIterator(request)
def hasNext = what.hasNext
def next = what.next match {
case f if (f.isFormField) => NormalParamHolder(f.getFieldName, new String(readWholeStream(f.openStream), "UTF-8"))
case f => FileParamHolder(f.getFieldName, f.getContentType, f.getName, readWholeStream(f.openStream))
}
}).toList
val normal: List[NormalParamHolder] = allInfo.flatMap{case v: NormalParamHolder => List(v) case _ => Nil}
val files: List[FileParamHolder] = allInfo.flatMap{case v: FileParamHolder => List(v) case _ => Nil}
val params = normal.foldLeft(eMap)((a,b) => a.get(b.name) match {
case None => a + (b.name -> List(b.value))
case Some(v) => a + (b.name -> (v ::: List(b.value)))
})
(normal.map(_.name).removeDuplicates, localParams ++ params, files, Empty)
} else if (reqType.get_?) {
request.getQueryString match {
case null => (Nil, localParams, Nil, Empty)
case s =>
val pairs = s.split("&").toList.map(_.trim).filter(_.length > 0).map(_.split("=").toList match {
case name :: value :: Nil => (true, urlDecode(name), urlDecode(value))
case name :: Nil => (true, urlDecode(name), "")
case _ => (false, "", "")
}).filter(_._1).map{case (_, name, value) => (name, value)}
val names = pairs.map(_._1).removeDuplicates
val params = pairs.foldLeft(eMap) (
(a,b) => a.get(b._1) match {
case None => a + (b._1 -> List(b._2))
case Some(xs) => a + (b._1 -> (xs ::: List(b._2)))
}
)
val hereParams = localParams ++ params
(names, hereParams, Nil, Empty)
}
} else if (contentType.toLowerCase.startsWith("application/x-www-form-urlencoded")) {
val paramNames = enumToStringList(request.getParameterNames).sort{(s1, s2) => s1 < s2}
// val tmp = paramNames.map{n => (n, xlateIfGet(request.getParameterValues(n).toList))}
val params = localParams ++ paramNames.map{n => (n, request.getParameterValues(n).toList)}
(paramNames, params, Nil, Empty)
} else {
(Nil,localParams, Nil, tryo(readWholeStream(request.getInputStream)))
}
new Req(rewritten.path, contextPath, reqType,
contentType, request, nanoStart,
System.nanoTime, paramCalculator)
}
private def fixURI(uri : String) = uri indexOf ";jsessionid" match {
case -1 => uri
case x @ _ => uri substring(0, x)
}
def nil = new Req(NilPath, "", GetRequest, "", null,
System.nanoTime, System.nanoTime,
() => (Nil, Map.empty, Nil, Empty))
def parsePath(in: String): ParsePath = {
val p1 = fixURI((in match {case null => "/"; case s if s.length == 0 => "/"; case s => s}).replaceAll("/+", "/"))
val front = p1.startsWith("/")
val back = p1.length > 1 && p1.endsWith("/")
val orgLst = p1.replaceAll("/$", "/index").split("/").
toList.map(_.trim).filter(_.length > 0)
val last = orgLst.last
val idx = last.indexOf(".")
val (lst, suffix) = if (idx == -1) (orgLst, "")
else (orgLst.dropRight(1) ::: List(last.substring(0, idx)),
last.substring(idx + 1))
ParsePath(lst.map(urlDecode), suffix, front, back)
}
var fixHref = _fixHref _
private def _fixHref(contextPath: String, v : Seq[Node], fixURL: Boolean, rewrite: Box[String => String]): Text = {
val hv = v.text
val updated = if (hv.startsWith("/")) contextPath + hv else hv
Text(if (fixURL && rewrite.isDefined &&
!updated.startsWith("javascript:") &&
!updated.startsWith("http://") &&
!updated.startsWith("https://"))
rewrite.open_!.apply(updated) else updated)
}
/**
   * Corrects the HTML content, such as applying the context path to URIs, adding session information if cookies are disabled, etc.
*/
def fixHtml(contextPath: String, in: NodeSeq): NodeSeq = {
val rewrite = URLRewriter.rewriteFunc
def fixAttrs(toFix : String, attrs : MetaData, fixURL: Boolean) : MetaData = {
if (attrs == Null) Null
else if (attrs.key == toFix) {
new UnprefixedAttribute(toFix, Req.fixHref(contextPath, attrs.value, fixURL, rewrite),fixAttrs(toFix, attrs.next, fixURL))
} else attrs.copy(fixAttrs(toFix, attrs.next, fixURL))
}
def _fixHtml(contextPath: String, in: NodeSeq): NodeSeq = {
in.map{
v =>
v match {
case Group(nodes) => Group(_fixHtml(contextPath, nodes))
case e: Elem if e.label == "form" => Elem(v.prefix, v.label, fixAttrs("action", v.attributes, true), v.scope, _fixHtml(contextPath, v.child) : _* )
case e: Elem if e.label == "script" => Elem(v.prefix, v.label, fixAttrs("src", v.attributes, false), v.scope, _fixHtml(contextPath, v.child) : _* )
case e: Elem if e.label == "a" => Elem(v.prefix, v.label, fixAttrs("href", v.attributes, true), v.scope, _fixHtml(contextPath, v.child) : _* )
case e: Elem if e.label == "link" => Elem(v.prefix, v.label, fixAttrs("href", v.attributes, false), v.scope, _fixHtml(contextPath, v.child) : _* )
case e: Elem => Elem(v.prefix, v.label, fixAttrs("src", v.attributes, true), v.scope, _fixHtml(contextPath, v.child) : _*)
case _ => v
}
}
}
_fixHtml(contextPath, in)
}
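  // Example (a hedged sketch, not part of the original code): with contextPath "/app", fixHtml
  // rewrites <a href="/docs"/> to <a href="/app/docs"/>; when a URLRewriter function is installed
  // (e.g. to append a session id), the adjusted href is additionally passed through that function.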
private[liftweb] def defaultCreateNotFound(in: Req) =
XhtmlResponse((<html><body>The Requested URL {in.contextPath+in.uri} was not found on this server</body></html>),
ResponseInfo.docType(in), List("Content-Type" -> "text/html"), Nil, 404, S.ieMode)
def unapply(in: Req): Option[(List[String], String, RequestType)] = Some((in.path.partPath, in.path.suffix, in.requestType))
}
/**
* Contains request information
*/
@serializable
class Req(val path: ParsePath,
val contextPath: String,
val requestType: RequestType,
val contentType: String,
val request: HttpServletRequest,
val nanoStart: Long,
val nanoEnd: Long,
val paramCalculator: () => (List[String], Map[String, List[String]],List[FileParamHolder],Box[Array[Byte]])) extends HasParams
{
override def toString = "Req("+paramNames+", "+params+", "+path+
", "+contextPath+", "+requestType+", "+contentType+")"
/**
* Returns true if the content-type is text/xml
*/
def xml_? = contentType != null && contentType.toLowerCase.startsWith("text/xml")
val section = path(0) match {case null => "default"; case s => s}
val view = path(1) match {case null => "index"; case s @ _ => s}
val id = pathParam(0)
def pathParam(n: Int) = head(path.wholePath.drop(n + 2), "")
def path(n: Int):String = head(path.wholePath.drop(n), null)
def param(n: String) = params.get(n) match {
case Some(s :: _) => Some(s)
case _ => None
}
lazy val headers: List[(String, String)] =
for (header <- enumToList[String](request.getHeaderNames.asInstanceOf[_root_.java.util.Enumeration[String]]);
item <- enumToList[String](request.getHeaders(header).asInstanceOf[_root_.java.util.Enumeration[String]]))
yield (header, item)
lazy val (paramNames: List[String],
params: Map[String, List[String]],
uploadedFiles: List[FileParamHolder],
body: Box[Array[Byte]]) = paramCalculator()
lazy val cookies = request.getCookies() match {
case null => Nil
case ca => ca.toList
}
lazy val xml: Box[Elem] = if (!xml_?) Empty
else {
try {
body.map(b => XML.load(new _root_.java.io.ByteArrayInputStream(b)))
} catch {
case e => Failure(e.getMessage, Full(e), Empty)
}
}
lazy val location: Box[Loc[_]] = LiftRules.siteMap.flatMap(_.findLoc(this))
def testLocation: Either[Boolean, Box[LiftResponse]] = {
if (LiftRules.siteMap.isEmpty) Left(true)
else location.map(_.testAccess) match {
case Full(Left(true)) => Left(true)
case Full(Right(Full(resp))) => Right(Full(resp))
case _ => Right(Empty)
}
}
lazy val buildMenu: CompleteMenu = location.map(_.buildMenu) openOr
CompleteMenu(Nil)
def createNotFound = {
NamedPF((this, Empty), LiftRules.uriNotFound.toList)
}
def createNotFound(failure: Failure) = {
NamedPF((this, Full(failure)), LiftRules.uriNotFound.toList)
}
def post_? = requestType.post_?
def get_? = requestType.get_?
def put_? = requestType.put_?
def fixHtml(in: NodeSeq): NodeSeq = Req.fixHtml(contextPath, in)
lazy val uri: String = request match {
case null => "Outside HTTP Request (e.g., on Actor)"
case request =>
val ret = for (uri <- Box.legacyNullTest(request.getRequestURI);
val cp = Box.legacyNullTest(request.getContextPath) openOr "") yield
uri.substring(cp.length)
match {
case "" => "/"
case x => Req.fixURI(x)
}
ret openOr "/"
}
/**
* The IP address of the request
*/
def remoteAddr: String = request.getRemoteAddr()
/**
* The user agent of the browser that sent the request
*/
lazy val userAgent: Box[String] =
for (r <- Box.legacyNullTest(request);
uah <- Box.legacyNullTest(request.getHeader("User-Agent")))
yield uah
lazy val isIE6: Boolean = (userAgent.map(_.indexOf("MSIE 6") >= 0)) openOr false
lazy val isIE7: Boolean = (userAgent.map(_.indexOf("MSIE 7") >= 0)) openOr false
lazy val isIE8: Boolean = (userAgent.map(_.indexOf("MSIE 8") >= 0)) openOr false
lazy val isIE = isIE6 || isIE7 || isIE8
lazy val isSafari2: Boolean = (userAgent.map(s => s.indexOf("Safari/") >= 0 &&
s.indexOf("Version/2.") >= 0)) openOr false
lazy val isSafari3: Boolean = (userAgent.map(s => s.indexOf("Safari/") >= 0 &&
s.indexOf("Version/3.") >= 0)) openOr false
lazy val isSafari = isSafari2 || isSafari3
lazy val isIPhone = isSafari && (userAgent.map(s => s.indexOf("(iPhone;") >= 0) openOr false)
lazy val isFirefox2: Boolean = (userAgent.map(_.indexOf("Firefox/2") >= 0)) openOr false
lazy val isFirefox3: Boolean = (userAgent.map(_.indexOf("Firefox/3") >= 0)) openOr false
lazy val isFirefox = isFirefox2 || isFirefox3
lazy val isOpera9: Boolean = (userAgent.map(s => s.indexOf("Opera/9.") >= 0) openOr false)
def isOpera = isOpera9
def updateWithContextPath(uri: String): String = if (uri.startsWith("/")) contextPath + uri else uri
}
case class RewriteRequest(path: ParsePath, requestType: RequestType, httpRequest: HttpServletRequest)
case class RewriteResponse(path: ParsePath, params: Map[String, String], stopRewriting: Boolean)
/**
* The representation of an URI path
*/
@serializable
case class ParsePath(partPath: List[String], suffix: String, absolute: Boolean, endSlash: Boolean) {
def drop(cnt: Int) = ParsePath(partPath.drop(cnt), suffix, absolute, endSlash)
lazy val wholePath = if (suffix.length > 0) partPath.dropRight(1) ::: List(partPath.last + "." + suffix)
else partPath
}
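// Worked example (assumed, not from the original code): Req.parsePath("/foo/bar.html") yields
// ParsePath(List("foo", "bar"), "html", absolute = true, endSlash = false), and its wholePath
// re-attaches the suffix, giving List("foo", "bar.html").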
/**
 * Maintains the context of resolving the URL when cookies are disabled by the container. It maintains
 * low coupling, so that code within request processing is not aware of the servlet response that
 * encodes the URL.
*/
object RewriteResponse {
def apply(path: List[String], params: Map[String, String]) = new RewriteResponse(ParsePath(path, "", true, false), params, false)
def apply(path: List[String]) = new RewriteResponse(ParsePath(path, "", true, false), Map.empty, false)
def apply(path: List[String], suffix: String) = new RewriteResponse(ParsePath(path, suffix, true, false), Map.empty, false)
def apply(path: ParsePath, params: Map[String, String]) = new RewriteResponse(path, params, false)
}
object URLRewriter {
private val funcHolder = new ThreadGlobal[(String) => String]
def doWith[R](f: (String) => String)(block : => R):R = {
funcHolder.doWith(f) {
block
}
}
def rewriteFunc: Box[(String) => String] = Box.legacyNullTest(funcHolder value)
}
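// Usage sketch (hypothetical names, not from the original sources): a container adapter can install
// a rewriting function for the duration of a request so that Req.fixHtml picks it up via rewriteFunc:
//   URLRewriter.doWith(url => url + ";jsessionid=abc123") {
//     Req.fixHtml("/app", page) // anchors and form actions are passed through the function above
//   }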
| andreum/liftweb | lift/src/main/scala/net/liftweb/http/Req.scala | Scala | apache-2.0 | 15,991 |
package de.kaufhof.pillar.cli
import java.io.File
import org.scalatest.FunSpec
import org.scalatest.matchers.ShouldMatchers
class CommandLineConfigurationSpec extends FunSpec with ShouldMatchers {
val sep = File.separator
describe(".buildFromArguments") {
describe("command initialize") {
describe("data-store faker") {
it("sets the command") {
CommandLineConfiguration.buildFromArguments(Array("initialize", "faker")).command should be(Initialize)
}
it("sets the data store name") {
CommandLineConfiguration.buildFromArguments(Array("initialize", "faker")).dataStore should equal("faker")
}
it("sets the environment") {
CommandLineConfiguration.buildFromArguments(Array("initialize", "faker")).environment should equal("development")
}
it("sets the migrations directory") {
CommandLineConfiguration.buildFromArguments(Array("initialize", "faker")).migrationsDirectory
.getPath should equal(s"conf${sep}pillar${sep}migrations")
}
it("sets the time stamp") {
CommandLineConfiguration.buildFromArguments(Array("initialize", "faker")).timeStampOption should be(None)
}
describe("environment test") {
it("sets the environment") {
CommandLineConfiguration.buildFromArguments(Array("-e", "test", "initialize", "faker")).environment should equal("test")
}
}
describe("migrations-directory baz") {
it("sets the migrations directory") {
CommandLineConfiguration.buildFromArguments(Array("-d", "src/test/resources/pillar/migrations",
"initialize", "faker")).migrationsDirectory
.getPath should equal(s"src${sep}test${sep}resources${sep}pillar${sep}migrations")
}
}
describe("time-stamp 1370028262") {
it("sets the time stamp option") {
CommandLineConfiguration.buildFromArguments(Array("-t", "1370028262", "initialize", "faker")).timeStampOption should equal(Some(1370028262))
}
}
}
}
}
} | j-potts/pillar | src/test/scala/de/kaufhof/pillar/cli/CommandLineConfigurationSpec.scala | Scala | mit | 2,130 |
package org.http4s
package parser
import org.http4s.headers._
import org.specs2.mutable.Specification
class ProxyAuthenticateHeaderSpec
extends Specification
with HeaderParserHelper[`Proxy-Authenticate`] {
def hparse(value: String): ParseResult[`Proxy-Authenticate`] =
HttpHeaderParser.PROXY_AUTHENTICATE(value)
override def parse(value: String) =
hparse(value).fold(_ => sys.error(s"Couldn't parse: $value"), identity)
val params = Map("a" -> "b", "c" -> "d")
val c = Challenge("Basic", "foo")
val str = "Basic realm=\\"foo\\""
val wparams = c.copy(params = params)
"Proxy-Authenticate Header parser" should {
"Render challenge correctly" in {
c.renderString must be_==(str)
}
"Parse a basic authentication" in {
parse(str) must be_==(`Proxy-Authenticate`(c))
}
"Parse a basic authentication with params" in {
parse(wparams.renderString) must be_==(`Proxy-Authenticate`(wparams))
}
"Parse multiple concatenated authentications" in {
val twotypes = "Newauth realm=\\"apps\\", Basic realm=\\"simple\\""
val twoparsed = Challenge("Newauth", "apps") :: Challenge("Basic", "simple") :: Nil
parse(twotypes).values.toList must be_==(twoparsed)
}
"parse mulmultiple concatenated authentications with params" in {
val twowparams =
"Newauth realm=\\"apps\\", type=1, title=\\"Login to apps\\", Basic realm=\\"simple\\""
val twp = Challenge("Newauth", "apps", Map("type" -> "1", "title" -> "Login to apps")) ::
Challenge("Basic", "simple") :: Nil
parse(twowparams).values.toList must be_==(twp)
}
}
}
| aeons/http4s | tests/src/test/scala/org/http4s/parser/ProxyAuthenticateHeaderSpec.scala | Scala | apache-2.0 | 1,634 |
package cpup.poland.runtime.userdata
import cpup.poland.runtime.PObject
case class InjectObjectInstruction(obj: PObject) extends Userdata with TInstruction {
override def activate(context: SendContext, seq: InstructionSeq) = obj
} | CoderPuppy/poland-scala | src/main/scala/cpup/poland/runtime/userdata/InjectObjectInstruction.scala | Scala | mit | 233 |
package org.labrad.manager
import java.io.File
import java.net.URI
import org.labrad.crypto.CertConfig
import org.labrad.util.cli.Environment
import org.scalatest.FunSuite
class ManagerConfigTest extends FunSuite {
// If not otherwise specified, use empty environment for these tests.
implicit val defaultEnv = Environment()
test("default manager config works with empty command line and env") {
ManagerConfig.fromCommandLine(Array())
}
test("port can be set from environment variable") {
val env = Environment("LABRADPORT" -> "7777")
val config = ManagerConfig.fromCommandLine(Array())(env)
assert(config.port == 7777)
}
test("port can be set from command line") {
val config1 = ManagerConfig.fromCommandLine(Array("--port", "7777"))
assert(config1.port == 7777)
val config2 = ManagerConfig.fromCommandLine(Array("--port=7878"))
assert(config2.port == 7878)
}
test("port command line arg overrides environment variable") {
val env = Environment("LABRADPORT" -> "2345")
val config = ManagerConfig.fromCommandLine(Array("--port=1234"))(env)
assert(config.port == 1234)
}
test("port env var must be an integer") {
val env = Environment("LABRADPORT" -> "foo")
intercept[Exception] {
ManagerConfig.fromCommandLine(Array())(env)
}
}
test("port command line must be an integer") {
intercept[Exception] {
ManagerConfig.fromCommandLine(Array("--port=foo"))
}
}
private def checkPassword(config: ManagerConfig, expected: String): Unit = {
assert(config.password.toSeq == expected.toCharArray.toSeq)
}
test("password can be set from environment variable") {
val env = Environment("LABRADPASSWORD" -> "foo")
val config = ManagerConfig.fromCommandLine(Array())(env)
checkPassword(config, "foo")
}
test("password can be set from command line") {
val config1 = ManagerConfig.fromCommandLine(Array("--password", "foo"))
checkPassword(config1, "foo")
val config2 = ManagerConfig.fromCommandLine(Array("--password=foo"))
checkPassword(config2, "foo")
}
test("password command line arg overrides environment variable") {
val env = Environment("LABRADPASSWORD" -> "bar")
val config = ManagerConfig.fromCommandLine(Array("--password=foo"))(env)
checkPassword(config, "foo")
}
private def checkRegistry(config: ManagerConfig, expected: String): Unit = {
assert(config.registryUri == new URI(expected))
}
test("registry can be set from environment variable") {
val env = Environment("LABRADREGISTRY" -> "/var/labrad/registry/")
val config = ManagerConfig.fromCommandLine(Array())(env)
checkRegistry(config, "/var/labrad/registry/")
}
test("registry can be set from command line") {
val config1 = ManagerConfig.fromCommandLine(Array("--registry", "/var/labrad/registry/"))
checkRegistry(config1, "/var/labrad/registry/")
val config2 = ManagerConfig.fromCommandLine(Array("--registry=/var/labrad/registry/"))
checkRegistry(config2, "/var/labrad/registry/")
}
test("registry command line arg overrides environment variable") {
val env = Environment("LABRADREGISTRY" -> "/var/labrad/fake-reg")
val config = ManagerConfig.fromCommandLine(Array("--registry=/var/labrad/real-reg/"))(env)
checkRegistry(config, "/var/labrad/real-reg/")
}
test("tls-port can be set from environment variable") {
val env = Environment("LABRAD_TLS_PORT" -> "7777")
val config = ManagerConfig.fromCommandLine(Array())(env)
assert(config.tlsPort == 7777)
}
test("tls-port can be set from command line") {
val config1 = ManagerConfig.fromCommandLine(Array("--tls-port", "7777"))
assert(config1.tlsPort == 7777)
val config2 = ManagerConfig.fromCommandLine(Array("--tls-port=7878"))
assert(config2.tlsPort == 7878)
}
test("tls-port command line arg overrides environment variable") {
val env = Environment("LABRAD_TLS_PORT" -> "2345")
val config = ManagerConfig.fromCommandLine(Array("--tls-port=1234"))(env)
assert(config.tlsPort == 1234)
}
test("tls-port env var must be an integer") {
val env = Environment("LABRAD_TLS_PORT" -> "foo")
intercept[Exception] {
ManagerConfig.fromCommandLine(Array())(env)
}
}
test("tls-port command line must be an integer") {
intercept[Exception] {
ManagerConfig.fromCommandLine(Array("--tls-port=foo"))
}
}
test("tls-required can be set from environment variable") {
val env = Environment("LABRAD_TLS_REQUIRED" -> "no")
val config = ManagerConfig.fromCommandLine(Array())(env)
assert(config.tlsRequired == false)
}
test("tls-required can be set from command line") {
val config1 = ManagerConfig.fromCommandLine(Array("--tls-required", "no"))
assert(config1.tlsRequired == false)
val config2 = ManagerConfig.fromCommandLine(Array("--tls-required=no"))
assert(config2.tlsRequired == false)
}
test("tls-required command line arg overrides environment variable") {
val env = Environment("LABRAD_TLS_REQUIRED" -> "yes")
val config = ManagerConfig.fromCommandLine(Array("--tls-required=no"))(env)
assert(config.tlsRequired == false)
}
test("tls-required env var must be a valid option") {
val env = Environment("LABRAD_TLS_REQUIRED" -> "foo")
intercept[Exception] {
ManagerConfig.fromCommandLine(Array())(env)
}
}
test("tls-required command line must be a valid option") {
intercept[Exception] {
ManagerConfig.fromCommandLine(Array("--tls-required=foo"))
}
}
test("tls-hosts can be set from environment variable") {
val env = Environment("LABRAD_TLS_HOSTS" -> "foo.com")
val config = ManagerConfig.fromCommandLine(Array())(env)
assert(config.tlsHosts == Map("foo.com" -> CertConfig.SelfSigned))
}
test("tls-hosts can be set from command line") {
val config1 = ManagerConfig.fromCommandLine(Array("--tls-hosts", "foo.com"))
assert(config1.tlsHosts == Map("foo.com" -> CertConfig.SelfSigned))
val config2 = ManagerConfig.fromCommandLine(Array("--tls-hosts=foo.com"))
assert(config2.tlsHosts == Map("foo.com" -> CertConfig.SelfSigned))
}
test("tls-hosts command line arg overrides environment variable") {
val env = Environment("LABRAD_TLS_HOSTS" -> "bar.com")
val config = ManagerConfig.fromCommandLine(Array("--tls-hosts=foo.com"))(env)
assert(config.tlsHosts == Map("foo.com" -> CertConfig.SelfSigned))
}
test("tls-hosts may contain cert and key files") {
val config1 = ManagerConfig.fromCommandLine(Array(
"--tls-hosts=foo.com?cert=/my/cert/file.crt&key=/my/key/file.pem"))
assert(config1.tlsHosts == Map(
"foo.com" -> CertConfig.Files(
cert = new File("/my/cert/file.crt"),
key = new File("/my/key/file.pem"))))
val config2 = ManagerConfig.fromCommandLine(Array(
"--tls-hosts=foo.com?cert=/my/cert/file.crt&key=/my/key/file.pem&intermediates=/my/interm/file.pem"))
assert(config2.tlsHosts == Map(
"foo.com" -> CertConfig.Files(
cert = new File("/my/cert/file.crt"),
key = new File("/my/key/file.pem"),
intermediates = Some(new File("/my/interm/file.pem")))))
}
test("tls-hosts must provide cert and key if either is provided") {
intercept[Exception] {
ManagerConfig.fromCommandLine(Array("--tls-hosts=foo.com?cert=/my/cert/file.crt"))
}
intercept[Exception] {
ManagerConfig.fromCommandLine(Array("--tls-hosts=foo.com?key=/my/key/file.pem"))
}
}
test("tls-hosts may contain multiple semicolon-separated hosts") {
val config = ManagerConfig.fromCommandLine(Array(
"--tls-hosts=private1;public.com?cert=/my/cert/file.crt&key=/my/key/file.pem;private2"))
assert(config.tlsHosts == Map(
"private1" -> CertConfig.SelfSigned,
"private2" -> CertConfig.SelfSigned,
"public.com" -> CertConfig.Files(
cert = new File("/my/cert/file.crt"),
key = new File("/my/key/file.pem"))))
}
}
| labrad/scalabrad | manager/src/test/scala/org/labrad/manager/ManagerConfigTest.scala | Scala | mit | 8,020 |
package sample.model.asset
import java.time.LocalDate
import sample.context._
import sample.util.Calculator
import scalikejdbc._
import scalikejdbc.jsr310.Implicits._
/**
 * Represents the asset concept of an account.
 * Handles the entities under the asset package in a cross-cutting way.
 * low: in real development this becomes considerably more complex depending on the service, due to multiple currencies and in-flight/on-hold cashflow actions.
*/
case class Asset(/** account ID */ id: String) {
/**
   * Determines whether a transfer withdrawal is possible.
   * <p>0 <= account balance + unrealized cashflows - (amount held by pending withdrawal requests + requested withdrawal amount)
   * low: since this is only a check, no scale is specified; specify it properly when returning the available amount.
*/
def canWithdraw(currency: String, absAmount: BigDecimal, valueDay: LocalDate)(implicit s: DBSession, dh: DomainHelper): Boolean = {
0 <=
(calcUnprocessedCio(
calcUnrealizeCf(calcCashBalance(currency), currency, valueDay), currency)
- absAmount
).decimal.signum
}
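  // Worked example (illustrative only): with a cash balance of 1000, +200 of unrealized cashflows
  // up to the value day and 300 already held by unprocessed withdrawal requests, a new request of
  // up to 900 passes the check, since 1000 + 200 - (300 + 900) = 0 and the remainder must be >= 0.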
private def calcCashBalance(currency: String)(implicit s: DBSession, dh: DomainHelper): Calculator =
Calculator(CashBalance.getOrNew(id, currency).amount)
private def calcUnrealizeCf(base: Calculator, currency: String, valueDay: LocalDate)(implicit s: DBSession, dh: DomainHelper): Calculator =
Cashflow.findUnrealize(id, currency, valueDay).foldLeft(base)(
(calc, cf) => calc + cf.amount)
private def calcUnprocessedCio(base: Calculator, currency: String)(implicit s: DBSession, dh: DomainHelper): Calculator =
CashInOut.findUnprocessed(id, currency, true).foldLeft(base)(
(calc, cio) => calc - cio.absAmount)
} | jkazama/sample-boot-scala | src/main/scala/sample/model/asset/Asset.scala | Scala | mit | 1,737 |
import com.typesafe.sbt.less.Import.LessKeys
import com.typesafe.sbt.web.Import._
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
import play.sbt.routes.RoutesKeys._
import playscalajs.PlayScalaJS.autoImport._
import sbt.Keys._
import sbt._
object Settings {
val applicationName = "scalajs-play-demo"
val applicationVersion = "1.0.0"
lazy val elideOptions = settingKey[Seq[String]]("Set limit for elidable functions")
lazy val applicationSettings = Seq(
name := applicationName,
version := applicationVersion
)
val sharedSettings = Seq(
scalaVersion := versions.common.scala,
scalacOptions ++= Seq(
"-Xlint",
"-unchecked",
"-deprecation",
"-feature"
),
resolvers ++= Seq(Resolver.jcenterRepo)
)
lazy val clientSettings = applicationSettings ++ sharedSettings ++ Seq(
libraryDependencies ++= dependencies.clientDependencies.value,
elideOptions := Seq(),
scalacOptions ++= elideOptions.value,
jsDependencies ++= dependencies.jsDependencies.value,
skip in packageJSDependencies := false,
persistLauncher := true,
persistLauncher in Test := false,
testFrameworks += new TestFramework("utest.runner.Framework")
)
lazy val serverSettings = applicationSettings ++ sharedSettings ++ Seq(
libraryDependencies ++= dependencies.serverDependencies.value,
commands += ReleaseCmd,
pipelineStages := Seq(scalaJSProd),
LessKeys.compress in Assets := true,
includeFilter in(Assets, LessKeys.less) := "*.less",
excludeFilter in(Assets, LessKeys.less) := "_*.less",
routesGenerator := InjectedRoutesGenerator
)
// Command for building a release
lazy val ReleaseCmd = Command.command("release") {
state => "set elideOptions in client := Seq(\\"-Xelide-below\\", \\"WARNING\\")" ::
"client/clean" ::
"client/test" ::
"server/clean" ::
"server/test" ::
"server/dist" ::
"set elideOptions in client := Seq()" ::
state
}
}
object dependencies {
val sharedDependencies = Def.setting(Seq(
"com.lihaoyi" %%% "autowire" % versions.common.autowire,
"me.chrons" %%% "boopickle" % versions.common.booPickle,
"com.lihaoyi" %%% "scalarx" % versions.common.scalaRx,
"com.lihaoyi" %%% "utest" % versions.common.uTest
))
val serverDependencies = Def.setting(Seq(
"com.softwaremill.macwire" %% "macros" % versions.server.macwire % "provided",
"com.softwaremill.macwire" %% "util" % versions.server.macwire,
"com.softwaremill.macwire" %% "proxy" % versions.server.macwire,
"com.mohiva" %% "play-silhouette" % versions.server.silhouette,
"com.mohiva" %% "play-silhouette-testkit" % versions.server.silhouette % "test",
"com.vmunier" %% "play-scalajs-scripts" % versions.server.playScripts
))
val clientDependencies = Def.setting(Seq(
"com.github.japgolly.scalajs-react" %%% "core" % versions.client.scalajsReact,
"com.github.japgolly.scalajs-react" %%% "extra" % versions.client.scalajsReact,
"com.github.japgolly.scalajs-react" %%% "ext-scalaz71" % versions.client.scalajsReact,
"com.github.japgolly.scalajs-react" %%% "ext-monocle" % versions.client.scalajsReact,
"com.github.japgolly.scalacss" %%% "ext-react" % versions.client.scalaCSS,
"org.scala-js" %%% "scalajs-dom" % versions.client.scalaDom
))
val jsDependencies = Def.setting(Seq(
"org.webjars.npm" % "react" % versions.js.react / "react-with-addons.js" minified "react-with-addons.min.js" commonJSName "React",
"org.webjars.npm" % "react-dom" % versions.js.react / "react-dom.js" commonJSName "ReactDOM" minified "react-dom.min.js" dependsOn "react-with-addons.js",
"org.webjars" % "jquery" % versions.js.jQuery / "jquery.js" minified "jquery.min.js",
RuntimeDOM % "test"
))
}
object versions {
object common {
val scala = "2.11.7"
val scalaRx = "0.2.8"
val autowire = "0.2.5"
val booPickle = "1.1.1"
val uTest = "0.3.1"
}
object client {
val scalaDom = "0.8.2"
val scalajsReact = "0.10.1"
val scalaCSS = "0.3.1"
}
object js {
val jQuery = "2.1.4"
val react = "0.14.2"
}
object server {
val silhouette = "3.0.4"
val macwire = "2.2.2"
val playScripts = "0.3.0"
}
}
| b0c1/scalajs-play-core-react | project/Settings.scala | Scala | apache-2.0 | 4,256 |
package im.actor.server.webactions
import akka.actor.ActorSystem
import im.actor.api.rpc.collections.{ ApiStringValue, ApiInt32Value, ApiMapValueItem, ApiMapValue }
import scala.concurrent.Future
object CorrectWebaction {
val uri = "https://google.com/"
val regex = "https://mail.google.com"
val completeUri = "https://mail.google.com/mail/u/0/#inbox"
}
class CorrectWebaction(system: ActorSystem) extends Webaction(system) {
import system.dispatcher
override def uri(params: ApiMapValue): String = CorrectWebaction.uri
override def regex: String = CorrectWebaction.regex
override def complete(userId: Int, url: String): Future[WebactionResult] = Future {
Webaction.success(ApiMapValue(Vector(
ApiMapValueItem("userId", ApiInt32Value(userId)),
ApiMapValueItem("url", ApiStringValue(url.reverse))
)))
}
}
| EaglesoftZJ/actor-platform | actor-server/actor-testkit/src/main/scala/im/actor/server/webactions/CorrectWebaction.scala | Scala | agpl-3.0 | 845 |
/*
* Copyright 2017 Exon IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package by.exonit.redmine.client
import org.joda.time.DateTime
trait WikiPageIdLike extends Identifiable[String]
case class WikiPageId(id: String) extends WikiPageIdLike
trait WikiPageLike extends WikiPageIdLike {
def version: BigInt
def createdOn: DateTime
def updatedOn: DateTime
def parent: Option[WikiPageIdLike]
}
case class WikiPage(
id: String,
version: BigInt,
createdOn: DateTime,
updatedOn: DateTime,
parent: Option[WikiPageId]
) extends WikiPageLike
case class WikiPageDetails(
id: String,
version: BigInt,
createdOn: DateTime,
updatedOn: DateTime,
parent: Option[WikiPageId],
text: String,
author: Option[UserLink],
comments: Option[String]
) extends WikiPageLike
object WikiPage {
case class New(
title: String,
text: String,
parent: Option[WikiPageIdLike] = None,
comments: Option[String] = None
)
case class Update(
text: Option[String] = None,
parent: Option[Option[WikiPageIdLike]] = None,
comments: Option[String] = None
)
}
| exon-it/redmine-scala-client | client-api/src/main/scala/by/exonit/redmine/client/Wiki.scala | Scala | apache-2.0 | 1,613 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.transform.vision.image.augmentation
import java.util
import com.intel.analytics.bigdl.transform.vision.image.opencv.OpenCVMat
import com.intel.analytics.bigdl.transform.vision.image.{FeatureTransformer, ImageFeature}
import com.intel.analytics.bigdl.utils.RandomGenerator._
import org.opencv.core.{Core, Mat}
import org.opencv.imgproc.Imgproc
/**
* Adjust image saturation
*/
class Saturation(deltaLow: Double, deltaHigh: Double)
extends FeatureTransformer {
require(deltaHigh >= deltaLow, "saturation upper must be >= lower.")
require(deltaLow >= 0, "saturation lower must be non-negative.")
override def transformMat(feature: ImageFeature): Unit = {
Saturation.transform(feature.opencvMat(), feature.opencvMat(), RNG.uniform(deltaLow, deltaHigh))
}
}
object Saturation {
def apply(deltaLow: Double, deltaHigh: Double): Saturation = new Saturation(deltaLow, deltaHigh)
def transform(input: OpenCVMat, output: OpenCVMat, delta: Double): OpenCVMat = {
    if (Math.abs(delta - 1) > 1e-3) { // adjust only when delta differs noticeably from 1
// Convert to HSV colorspace
Imgproc.cvtColor(input, output, Imgproc.COLOR_BGR2HSV)
// Split the image to 3 channels.
val channels = new util.ArrayList[Mat]()
Core.split(output, channels)
// Adjust the saturation.
channels.get(1).convertTo(channels.get(1), -1, delta, 0)
Core.merge(channels, output)
(0 until channels.size()).foreach(channels.get(_).release())
// Back to BGR colorspace.
Imgproc.cvtColor(output, output, Imgproc.COLOR_HSV2BGR)
} else {
if (input != output) input.copyTo(output)
}
output
}
}
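// Minimal usage sketch (illustrative; assumes the usual FeatureTransformer pipeline API):
//   val saturation = Saturation(0.8, 1.2)
//   // each image's HSV S channel is scaled by a random factor drawn from [0.8, 1.2]
//   val augmented = saturation(imageFrame)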
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/transform/vision/image/augmentation/Saturation.scala | Scala | apache-2.0 | 2,249 |
import org.scalatest.{Matchers, FunSuite}
/** @version 1.2.0 */
class DifferenceOfSquaresTest extends FunSuite with Matchers {
test("square of sum 1") {
DifferenceOfSquares.squareOfSum(1) should be (1)
}
test("square of sum 5") {
pending
DifferenceOfSquares.squareOfSum(5) should be (225)
}
test("square of sum 100") {
pending
DifferenceOfSquares.squareOfSum(100) should be (25502500)
}
test("sum of squares 1") {
pending
DifferenceOfSquares.sumOfSquares(1) should be (1)
}
test("sum of squares 5") {
pending
DifferenceOfSquares.sumOfSquares(5) should be (55)
}
test("sum of squares 100") {
pending
DifferenceOfSquares.sumOfSquares(100) should be (338350)
}
test("difference of squares 1") {
pending
DifferenceOfSquares.differenceOfSquares(1) should be (0)
}
test("difference of squares 5") {
pending
DifferenceOfSquares.differenceOfSquares(5) should be (170)
}
test("difference of squares 100") {
pending
DifferenceOfSquares.differenceOfSquares(100) should be (25164150)
}
} | exercism/xscala | exercises/practice/difference-of-squares/src/test/scala/DifferenceOfSquaresTest.scala | Scala | mit | 1,089 |
package unfiltered.request
import java.io.{File => JFile}
trait MultiPartMatcher[T] {
def unapply(req: T): Option[T]
}
/** Multipart file upload utilities should extract data
* using this common format */
case class MultipartData[W](
params: String => Seq[String], files: String => W)
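// Hedged sketch (field names assumed): given data: MultipartData[Seq[FileWrapper]],
// data.params("description") yields the submitted values for that field (Nil when absent)
// and data.files("attachment") yields the uploaded file wrappers under that name.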
/** Describes an uploaded file, its content type, and
* a means of copying its content to another file */
trait FileWrapper {
val name: String
val contentType: String
def write(out: JFile): Option[JFile]
}
/** Describes some abstract file which exists on
* disk or in memory */
trait AbstractDiskFile extends FileWrapper {
def inMemory: Boolean
def bytes: Array[Byte]
def size: Long
val name: String
val contentType: String
}
/** Describes a file whose content may be written to a stream */
trait AbstractStreamedFile extends FileWrapper {
def stream[T]: (java.io.InputStream => T) => T
}
/** Base trait for disk-based multi part form data extraction */
trait AbstractDiskExtractor[R] {
/** @return the number of bytes to load a file into memory
* before writing to disk */
def memLimit: Int
/** @return the directory to write temp files to */
def tempDir: JFile
/**
* Given a req, extract the multipart form params into a
* (Map[String, Seq[String]], Map[String, Seq[FileItem]], request).
* The Map is assigned a default value of Nil, so param("p") would
* return Nil if there is no such parameter, or (as normal for
* servlets) a single empty string if the parameter was
* supplied without a value. */
def apply(req: R): MultipartData[Seq[AbstractDiskFile]]
}
trait DiskExtractor {
val memLimit = Int.MaxValue
val tempDir = new JFile(".")
}
/** Stream-based multi-part form data extractor */
trait StreamedExtractor[R] {
import java.io.{InputStream => JInputStream}
/**
* Provides extraction similar to MultiPartParams.Disk, except
* the second map will contain Map[String, Seq[StreamedFileWrapper]] rather
* than Map[String, Seq[DiskFileWrapper]].
* @note the seq returned by keys will only return the `first`
* named value. This is based on a limitation on apache commons file upload
* streaming interface. To read from the stream iterator,
* you must read before #next is called or the stream read will fail. */
def apply(req: R): MultipartData[Seq[AbstractStreamedFile]]
def withStreamedFile[T](istm: JInputStream)(f: java.io.InputStream => T): T = {
try { f(istm) } finally { istm.close }
}
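  // Hedged sketch (names assumed): each streamed part must be consumed before advancing to the
  // next one, e.g.
  //   file.stream { in => java.nio.file.Files.copy(in, target.toPath) }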
}
trait TupleGenerator {
/** generates a tuple of (Map[String, List[A]], Map[String, List[B]]) */
protected def genTuple[A, B, C](iter: Iterator[C])(f: ((Map[String, List[A]], Map[String, List[B]]), C) => (Map[String, List[A]], Map[String, List[B]])) =
((Map.empty[String, List[A]].withDefaultValue(Nil), Map.empty[String, List[B]].withDefaultValue(Nil)) /: iter)(f(_,_))
}
| jarin/unfiltered | uploads/src/main/scala/request/uploads.scala | Scala | mit | 2,896 |
class A {
1 match {
case t => /*caret*/
}
}
/*
abstract
case
class
def
do
false
final
for
if
implicit
import
lazy
new
null
object
override
private
protected
return
sealed
super
this
throw
trait
true
try
type
val
var
while
*/ | ilinum/intellij-scala | testdata/keywordCompletion/modifiers/caseClause.scala | Scala | apache-2.0 | 232 |
package DistRDF2ML_Evaluation
import java.io.{File, PrintWriter}
import java.util.Calendar
import net.sansa_stack.examples.spark.ml.DistRDF2ML.DistRDF2ML_Evaluation
import net.sansa_stack.ml.spark.featureExtraction.{SmartVectorAssembler, SparqlFrame}
import net.sansa_stack.ml.spark.utils.ML2Graph
import net.sansa_stack.rdf.common.io.riot.error.{ErrorParseMode, WarningParseMode}
import net.sansa_stack.rdf.spark.io.NTripleReader
import net.sansa_stack.rdf.spark.model.TripleOperations
import org.apache.jena.graph
import org.apache.jena.graph.Triple
import org.apache.jena.sys.JenaSystem
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.classification.RandomForestClassifier
import org.apache.spark.ml.evaluation.{MulticlassClassificationEvaluator, RegressionEvaluator}
import org.apache.spark.ml.feature.{IndexToString, StringIndexer}
import org.apache.spark.ml.regression.RandomForestRegressor
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.functions.{col, collect_list, explode}
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import scala.io.Source
object DistRDF2ML_Regression {
def main(args: Array[String]): Unit = {
// readIn
val inputPath: String = args(0) // http://www.cs.toronto.edu/~oktie/linkedmdb/linkedmdb-18-05-2009-dump.nt
println("\\nSETUP SPARK SESSION")
val spark = {
SparkSession.builder
.appName(s"SampleFeatureExtractionPipeline")
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config("spark.kryo.registrator", String.join(", ",
"net.sansa_stack.rdf.spark.io.JenaKryoRegistrator",
"net.sansa_stack.query.spark.sparqlify.KryoRegistratorSparqlify"))
.getOrCreate()
}
spark.sparkContext.setLogLevel("ERROR")
JenaSystem.init()
println("\\nREAD IN DATA")
/**
* Read in dataset of Jena Triple representing the Knowledge Graph
*/
val dataset: Dataset[graph.Triple] = {
NTripleReader.load(
spark,
inputPath,
stopOnBadTerm = ErrorParseMode.SKIP,
stopOnWarnings = WarningParseMode.IGNORE
)
.toDS()
.cache()
}
// dataset.rdd.coalesce(1).saveAsNTriplesFile(args(0).replace(".", " ") + "clean.nt")
println(f"\\ndata consists of ${dataset.count()} triples")
dataset.take(n = 10).foreach(println(_))
println("\\nFEATURE EXTRACTION OVER SPARQL")
/**
* The sparql query used to gather features
*/
val sparqlString = """
SELECT
?movie
?movie__down_genre__down_film_genre_name
?movie__down_title
(<http://www.w3.org/2001/XMLSchema#int>(?movie__down_runtime) as ?movie__down_runtime_asInt)
?movie__down_runtime
?movie__down_actor__down_actor_name
WHERE {
?movie <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://data.linkedmdb.org/movie/film> .
?movie <http://data.linkedmdb.org/movie/genre> ?movie__down_genre . ?movie__down_genre <http://data.linkedmdb.org/movie/film_genre_name> ?movie__down_desiredGenre__down_film_genre_name .
OPTIONAL { ?movie <http://purl.org/dc/terms/title> ?movie__down_title . }
OPTIONAL { ?movie <http://data.linkedmdb.org/movie/runtime> ?movie__down_runtime . }
OPTIONAL { ?movie <http://data.linkedmdb.org/movie/actor> ?movie__down_actor . ?movie__down_actor <http://data.linkedmdb.org/movie/actor_name> ?movie__down_actor__down_actor_name . }
OPTIONAL { ?movie <http://data.linkedmdb.org/movie/genre> ?movie__down_genre . ?movie__down_genre <http://data.linkedmdb.org/movie/film_genre_name> ?movie__down_genre__down_film_genre_name . }
FILTER (?movie__down_desiredGenre__down_film_genre_name = 'Superhero' || ?movie__down_desiredGenre__down_film_genre_name = 'Fantasy' )
}"""
/**
* transformer that collects the features from the Dataset[Triple] into a common Spark DataFrame,
* collapsed by the movie column
*/
val sparqlFrame = new SparqlFrame()
.setSparqlQuery(sparqlString)
.setCollapsByKey(true)
.setCollapsColumnName("movie")
/**
* dataframe with the resulting features,
* in this case collapsed by the movie column
*/
val extractedFeaturesDf = sparqlFrame
.transform(dataset)
.cache()
/**
* feature descriptions of the resulting collapsed dataframe
*/
val featureDescriptions = sparqlFrame.getFeatureDescriptions()
println(s"Feature descriptions are:\\n${featureDescriptions.mkString(",\\n")}")
extractedFeaturesDf.show(10, false)
// extractedFeaturesDf.schema.foreach(println(_))
println("FEATURE EXTRACTION POSTPROCESSING")
/**
* Here we adjust columns in the dataframe which do not fit our expectations, e.g.
* the runtime is not annotated as a numeric type in the RDF data
* but is easily castable
*/
val postprocessedFeaturesDf = extractedFeaturesDf
.withColumn("movie__down_runtime(ListOf_NonCategorical_Int)", col("movie__down_runtime(ListOf_NonCategorical_String)").cast("array<int>"))
.drop("movie__down_runtime(ListOf_NonCategorical_String)")
.withColumn("movie__down_runtime(Single_NonCategorical_Int)", col("movie__down_runtime(ListOf_NonCategorical_Int)").getItem(0))
.drop("movie__down_runtime(ListOf_NonCategorical_Int)")
postprocessedFeaturesDf.show(10, false)
// postprocessedFeaturesDf.withColumn("tmp", col("movie__down_runtime(ListOf_NonCategorical_Int)").getItem(0)).show(false)
println("\\nSMART VECTOR ASSEMBLER")
val smartVectorAssembler = new SmartVectorAssembler()
.setEntityColumn("movie")
.setLabelColumn("movie__down_runtime(Single_NonCategorical_Int)")
.setNullReplacement("string", "")
.setNullReplacement("digit", -1)
.setWord2VecSize(5)
.setWord2VecMinCount(1)
// .setWord2vecTrainingDfSizeRatio(svaWord2vecTrainingDfSizeRatio)
val assembledDf: DataFrame = smartVectorAssembler
.transform(postprocessedFeaturesDf)
.cache()
assembledDf.show(10, false)
println("\\nAPPLY Common SPARK MLlib Example Algorithm")
/**
* drop rows where label is null
*/
val mlDf = assembledDf
.filter(col("label").isNotNull)
mlDf.show(10, false)
// From here on, the process is based on Apache Spark MLlib samples: https://spark.apache.org/docs/latest/ml-classification-regression.html#random-forest-regression
// Split the data into training and test sets (30% held out for testing).
val Array(trainingData, testData) = mlDf.randomSplit(Array(0.7, 0.3))
// Train a RandomForest model.
val rf = new RandomForestRegressor()
.setLabelCol("label")
.setFeaturesCol("features")
// Train model. This also runs the indexer.
val model = rf.fit(trainingData)
// Make predictions.
val predictions = model.transform(testData)
// Select example rows to display.
predictions.select("entityID", "prediction", "label", "features").show(10)
predictions.show()
val ml2Graph = new ML2Graph()
.setEntityColumn("entityID")
.setValueColumn("prediction")
val metagraph: RDD[Triple] = ml2Graph.transform(predictions)
metagraph.take(10).foreach(println(_))
metagraph
.coalesce(1)
.saveAsNTriplesFile(args(0) + "someFolder")
}
}
| SANSA-Stack/SANSA-RDF | sansa-examples/sansa-examples-spark/src/main/scala/net/sansa_stack/examples/spark/ml/DistRDF2ML/DistRDF2ML_Regression.scala | Scala | apache-2.0 | 7,358 |
package com.wbillingsley.handy.appbase
case class IdentityLookup(service:String,
value:Option[String],
username:Option[String])
| wbillingsley/handy | handy-appbase-core/src/main/scala/com/wbillingsley/handy/appbase/IdentityLookup.scala | Scala | mit | 181 |
package com.eevolution.context.dictionary.domain.model
import ai.x.play.json.Jsonx
import com.eevolution.context.dictionary.api.{ActiveEnabled, DomainModel, Identifiable, Traceable}
import org.joda.time.DateTime
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/e-Evolution
* Created by [email protected] , www.e-evolution.com on 18/02/17.
* Modified by [email protected], www.e-evolution.com on 05/10/17.
*/
/**
* Change Log Entity
* @param changeLogId Change Log ID
* @param sessionId Session ID
* @param entityId Entity ID
* @param attributeId Attribute ID
* @param tenantId Tenant ID
* @param organizationId Organization ID
* @param isActive Is Active
* @param created Created
* @param createdBy Created By
* @param updated Updated
* @param updatedBy Updated By
* @param recordId Record ID
* @param oldValue Old Value
* @param newValue New Value
* @param undo Undo
* @param redo Redo
* @param isCustomization Is Customization
* @param trxName TRX Name
* @param description Description
* @param eventChangeLog Event Change Log
* @param uuid UUID
*/
case class ChangeLog(changeLogId: Int,
sessionId: Int,
entityId: Int,
attributeId: Int,
tenantId: Int,
organizationId: Int,
isActive : Boolean = true,
created : DateTime = DateTime.now,
createdBy : Int ,
updated : DateTime = DateTime.now,
updatedBy : Int ,
recordId : Int,
oldValue : String,
newValue : String,
undo : Boolean ,
redo : Boolean ,
isCustomization : Boolean ,
trxName : String,
description: Option[String],
eventChangeLog : String,
uuid: String
) extends DomainModel
with ActiveEnabled
with Identifiable
with Traceable {
override type ActiveEnabled = this.type
override type Identifiable = this.type
override type Traceable = this.type
override def Id: Int = changeLogId
override val entityName: String = "AD_ChangeLog"
override val identifier: String = "AD_ChangeLog_ID"
}
object ChangeLog {
implicit lazy val jsonFormat = Jsonx.formatCaseClass[ChangeLog]
def create(changeLogId: Int,
sessionId: Int,
entityId: Int,
attributeId: Int,
tenantId: Int,
organizationId: Int,
isActive : Boolean,
created : DateTime,
createdBy : Int ,
updated :DateTime ,
updatedBy : Int,
recordId : Int,
oldValue : String,
newValue : String,
undo : Boolean,
redo : Boolean,
isCustomization : Boolean,
trxName : String,
description: String,
eventChangeLog : String,
uuid: String) = ChangeLog(changeLogId, sessionId, entityId, attributeId, tenantId, organizationId,
isActive, created, createdBy, updated, updatedBy, recordId, oldValue, newValue, undo, redo, isCustomization,
trxName, None, eventChangeLog, uuid)
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/model/ChangeLog.scala | Scala | gpl-3.0 | 4,151 |
package mesosphere.marathon.core.task.tracker
import mesosphere.marathon.core.task.TaskStateOp
import scala.concurrent.Future
/**
* Notifies the [[TaskTracker]] of task creation and termination.
*/
trait TaskCreationHandler {
/**
* Create a new task.
*
* If the task exists already, the existing task will be overwritten so make sure
* that you generate unique IDs.
*/
def created(taskStateOp: TaskStateOp): Future[Unit]
/**
* Remove the task for the given app with the given ID completely.
*
* If the task does not exist, the returned Future will not fail.
*/
def terminated(taskStateOp: TaskStateOp.ForceExpunge): Future[_]
}
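// Illustrative usage sketch (hypothetical values; the concrete TaskStateOp constructors are not shown in this file):
//   val creationHandler: TaskCreationHandler = ...
//   creationHandler.created(launchOp)                       // persist a newly created task
//     .flatMap(_ => creationHandler.terminated(expungeOp))  // later, remove it completely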
| ss75710541/marathon | src/main/scala/mesosphere/marathon/core/task/tracker/TaskCreationHandler.scala | Scala | apache-2.0 | 680 |
package org.scalawiki.bots
import java.time.{LocalDate, ZoneOffset, ZonedDateTime}
import com.typesafe.config.{Config, ConfigFactory}
import net.ceedubs.ficus.Ficus._
import net.ceedubs.ficus.readers.ArbitraryTypeReader._
import org.scalawiki.dto.{Namespace, Page, User}
import org.scalawiki.query.QueryLibrary
import org.scalawiki.time.TimeRange
import org.scalawiki.{ActionLibrary, MwBot}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
case class Message(subject: String, body: String)
/**
* Send messages to users either via talk page or email
*
* @param conf configuration
*/
class MessageBot(val conf: Config) extends ActionLibrary with QueryLibrary {
/**
* Mediawiki host, e.g. en.wikipedia.org
*/
val host = conf.getString("host")
/**
* Page that contains links to user pages of users we are going to notify
*/
val userListPage = conf.getString("users.list")
/**
* optional start and end of time range that user contributions are queried
*/
val (start: Option[ZonedDateTime], end: Option[ZonedDateTime]) = (
conf.as[Option[LocalDate]]("users.start").map(_.atStartOfDay(ZoneOffset.UTC)),
conf.as[Option[LocalDate]]("users.end").map(_.atStartOfDay(ZoneOffset.UTC))
)
val range = TimeRange(start, end)
/**
* Email message
*/
val mail = conf.as[Message]("email")
/**
* Talk page message
*/
val talkPageMessage = conf.as[Message]("talk-page")
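// Illustrative shape of the expected configuration (hypothetical values, HOCON syntax), derived from
// the keys read above; the actual flashmob.conf may differ:
//   host = "uk.wikipedia.org"
//   users.list = "Wikipedia:Some event/participants"
//   users.start = "2016-01-01"   // optional
//   users.end = "2016-02-01"     // optional
//   email { subject = "...", body = "..." }
//   talk-page { subject = "...", body = "..." }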
implicit lazy val bot = MwBot.fromHost(host)
def run() = {
for (users <- fetchUsers(userListPage))
processUsers(users, conf)
}
def fetchUsers(userListPage: String): Future[Seq[User]] = {
for {
userPages <- bot.run(pageLinks(userListPage, Namespace.USER))
userInfos <- bot.run(userProps(userPagesToUserNames(userPages)))
} yield pagesToUsers(userInfos).collect { case u: User => u }
}
def processUsers(users: Seq[User], conf: Config) = {
val pages = users.map(u => userCreatedPages(u.name.get, range))
val folded = Future.fold(pages)(Seq.empty[(String, Set[String])])(_ :+ _).map(_.toMap)
for (createdPagesByUser <- folded) {
val withContribution = users.filter(u => createdPagesByUser(u.name.get).nonEmpty)
val (withEmail, withoutEmail) = withContribution.partition(_.emailable.getOrElse(false))
logUsers(users, withEmail, withoutEmail)
val mailedBefore = FileUtils.read("emails.txt")
val userNames = withEmail.flatMap(_.name).toSet -- mailedBefore.toSet
messageUsers(withoutEmail, talkPageMessage)
mailUsers(userNames, mail)
}
}
def logUsers(users: Seq[User], withEmail: Seq[User], withoutEmail: Seq[User]): Unit = {
println("AllUsers: " + users.size)
println("WithEmail: " + withEmail.size)
println("WithoutEmail: " + withoutEmail.size)
}
def messageUsers(withoutEmail: Seq[User], msg: Message): Unit = {
withoutEmail.foreach { u =>
val username = u.name.get
message(username, msg.subject, msg.body)
}
}
def mailUsers(toMail: Set[String], mail: Message): Unit = {
toMail.foreach { username =>
val result = email(username, mail.subject, mail.body.format(username))
println(s" $username: $result")
}
}
def userPagesToUserNames(pages: Seq[Page]): Seq[String] =
pages.head.links.map(_.titleWithoutNs)
}
object MessageBot {
def main(args: Array[String]) {
val conf = ConfigFactory.load("flashmob.conf")
new MessageBot(conf).run()
}
}
| intracer/scalawiki | scalawiki-bots/src/main/scala/org/scalawiki/bots/MessageBot.scala | Scala | apache-2.0 | 3,510 |
object Test {
def main(args: Array[String]): Unit = {
// constant fold will fail for non-bootstrapped Dotty
assert(!(Float.NaN > 0.0))
assert(!(Float.NaN < 0.0))
assert(!(Double.NaN > 0.0))
assert(!(Double.NaN < 0.0))
val f: Float = Float.NaN
val d: Double = Double.NaN
assert(!(f > 0.0f))
assert(!(f < 0.0f))
assert(!(d > 0.0))
assert(!(d < 0.0))
// loop forever before the fix
var x = Double.NaN
while(x < 10.0) { x = x + 1; println(x) }
while(x > 10.0) { x = x + 1; println(x) }
while ({ x = x + 1; println(x) ; x < 10.0 }) ()
while ({ x = x + 1; println(x) ; x > 10.0 }) ()
// tests from https://github.com/scala/scala/pull/5207
{
val n = Double.NaN
def ne(x: Double, y: Double) = x != y
val fs: List[(Double, Double) => Boolean] = List(_ < _, _ <= _, _ > _, _ >= _, _ == _, (x, y) => !ne(x, y))
val vs = List[Double](n, 1, -1, 0)
for (f <- fs; v <- vs; (x, y) <- List((n, v), (v, n))) assert(!f(x, y))
}
{
val n = Float.NaN
def ne(x: Float, y: Float) = x != y
val fs: List[(Float, Float) => Boolean] = List(_ < _, _ <= _, _ > _, _ >= _, _ == _, (x, y) => !ne(x, y))
val vs = List[Float](n, 1, -1, 0)
for (f <- fs; v <- vs; (x, y) <- List((n, v), (v, n))) assert(!f(x, y))
}
{
def a[T](x: T, y: T) = x == y
def b[T](x: T, y: T) = x != y
val n = Double.NaN
(a(n, n) :: a(n, 0) :: a (0, n) :: !b(n, n) :: !b(n, 0) :: !b(0, n) :: Nil).foreach(b => assert(!b))
}
{
def a[T](x: T, y: T) = x == y
def b[T](x: T, y: T) = x != y
val n = Float.NaN
(a(n, n) :: a(n, 0) :: a (0, n) :: !b(n, n) :: !b(n, 0) :: !b(0, n) :: Nil).foreach(b => assert(!b))
}
}
}
| som-snytt/dotty | tests/run/i6710.scala | Scala | apache-2.0 | 1,774 |
import org.scalatest._
class Suite extends FunSuite {
test("test factorial") {
assert(Main.factorial(0) === 1)
assert(Main.factorial(1) === 1)
assert(Main.factorial(2) === 2)
assert(Main.factorial(3) === 6)
assert(Main.factorial(4) === 24)
}
}
| carsonpun/learn-scala | practice1/src/test/scala/test.scala | Scala | apache-2.0 | 269 |
/*
* Copyright 2013 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import models.Profile
import play.api.data._
import play.api.data.Forms._
import play.api.data.validation.Constraints._
object ProfileForm {
val EMAIL = "email"
val EMAIL_MIN = 1
val EMAIL_MAX = 256
val NICKNAME = "nickname"
val NICKNAME_MIN = 1
val NICKNAME_MAX = 256
val BIRTHDAY = "birthday"
val BIRTHDAY_PATTERN = "yyyy/MM/dd"
val PASSWORD = "password"
val PASSWORD_MIN = 1
val PASSWORD_MAX = 32
val CONFIRM = "confirm"
val CONFIRM_MIN = 1
val CONFIRM_MAX = 32
val profileForm: Form[Profile] = Form(mapping(
EMAIL -> email.verifying(minLength(EMAIL_MIN), maxLength(EMAIL_MAX)),
NICKNAME -> nonEmptyText(NICKNAME_MIN, NICKNAME_MAX),
BIRTHDAY -> optional(date(BIRTHDAY_PATTERN)))(Profile.apply)(Profile.unapply))
val passwdForm: Form[(String, String)] = Form(tuple(
PASSWORD -> nonEmptyText(PASSWORD_MIN, PASSWORD_MAX),
CONFIRM -> nonEmptyText(CONFIRM_MIN, CONFIRM_MAX)))
}
| agwlvssainokuni/lifelog | lifelog-website/app/controllers/ProfileForm.scala | Scala | apache-2.0 | 1,557 |
package geotwine
object Geohash {
private[geotwine] val Alphabet = "0123456789bcdefghjkmnpqrstuvwxyz"
case class Box(
minLon: Double, minLat: Double, maxLat: Double, maxLon: Double) {
def center: (Double, Double) =
((minLat + maxLat) / 2.0, (minLon + maxLon) / 2.0)
}
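// Illustrative (hypothetical coordinates): Box(-122.5, 37.7, 37.8, -122.4).center == (37.75, -122.45),
// i.e. the (lat, lon) midpoint of the box.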
}
| softprops/geo-twine | src/main/scala/Geohash.scala | Scala | mit | 291 |
package mesosphere.marathon.api.v2
import javax.ws.rs._
import javax.ws.rs.core.{ MediaType, Response }
import com.codahale.metrics.annotation.Timed
import org.apache.log4j.Logger
import mesosphere.marathon.api.{ MarathonMediaType, RestResource }
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ AppDefinition, Timestamp }
import mesosphere.marathon.{ MarathonConf, MarathonSchedulerService }
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
@Consumes(Array(MediaType.APPLICATION_JSON))
class AppVersionsResource(service: MarathonSchedulerService, val config: MarathonConf) extends RestResource {
val log = Logger.getLogger(getClass.getName)
@GET
@Timed
def index(@PathParam("appId") appId: String): Response = {
val id = appId.toRootPath
val versions = service.listAppVersions(id).toSeq
if (versions.isEmpty) unknownApp(id)
else ok(Map("versions" -> versions))
}
@GET
@Timed
@Path("{version}")
def show(@PathParam("appId") appId: String,
@PathParam("version") version: String): Response = {
val id = appId.toRootPath
val timestamp = Timestamp(version)
service.getApp(id, timestamp).map(ok(_)) getOrElse unknownApp(id, Option(timestamp))
}
}
| MrMarvin/marathon | src/main/scala/mesosphere/marathon/api/v2/AppVersionsResource.scala | Scala | apache-2.0 | 1,250 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.attribute.NominalAttribute
import org.apache.spark.ml.classification.LogisticRegressionSuite._
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.feature.StringIndexer
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.param.{ParamMap, ParamsSuite}
import org.apache.spark.ml.util.{DefaultReadWriteTest, MetadataUtils, MLTestingUtils}
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.mllib.linalg.{Vectors => OldVectors}
import org.apache.spark.mllib.regression.{LabeledPoint => OldLabeledPoint}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.Metadata
class OneVsRestSuite extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
import testImplicits._
@transient var dataset: Dataset[_] = _
@transient var rdd: RDD[LabeledPoint] = _
override def beforeAll(): Unit = {
super.beforeAll()
val nPoints = 1000
// The following coefficients and xMean/xVariance are computed from iris dataset with lambda=0.2
// As a result, we are drawing samples from probability distribution of an actual model.
val coefficients = Array(
-0.57997, 0.912083, -0.371077, -0.819866, 2.688191,
-0.16624, -0.84355, -0.048509, -0.301789, 4.170682)
val xMean = Array(5.843, 3.057, 3.758, 1.199)
val xVariance = Array(0.6856, 0.1899, 3.116, 0.581)
rdd = sc.parallelize(generateMultinomialLogisticInput(
coefficients, xMean, xVariance, true, nPoints, 42), 2)
dataset = rdd.toDF()
}
test("params") {
ParamsSuite.checkParams(new OneVsRest)
val lrModel = new LogisticRegressionModel("lr", Vectors.dense(0.0), 0.0)
val model = new OneVsRestModel("ovr", Metadata.empty, Array(lrModel))
ParamsSuite.checkParams(model)
}
test("one-vs-rest: default params") {
val numClasses = 3
val ova = new OneVsRest()
.setClassifier(new LogisticRegression)
assert(ova.getLabelCol === "label")
assert(ova.getPredictionCol === "prediction")
val ovaModel = ova.fit(dataset)
// copied model must have the same parent.
MLTestingUtils.checkCopy(ovaModel)
assert(ovaModel.models.length === numClasses)
val transformedDataset = ovaModel.transform(dataset)
// check for label metadata in prediction col
val predictionColSchema = transformedDataset.schema(ovaModel.getPredictionCol)
assert(MetadataUtils.getNumClasses(predictionColSchema) === Some(3))
val ovaResults = transformedDataset.select("prediction", "label").rdd.map {
row => (row.getDouble(0), row.getDouble(1))
}
val lr = new LogisticRegressionWithLBFGS().setIntercept(true).setNumClasses(numClasses)
lr.optimizer.setRegParam(0.1).setNumIterations(100)
val model = lr.run(rdd.map(OldLabeledPoint.fromML))
val results = model.predict(rdd.map(p => OldVectors.fromML(p.features))).zip(rdd.map(_.label))
// determine the #confusion matrix in each class.
// bound how much error we allow compared to multinomial logistic regression.
val expectedMetrics = new MulticlassMetrics(results)
val ovaMetrics = new MulticlassMetrics(ovaResults)
assert(expectedMetrics.confusionMatrix ~== ovaMetrics.confusionMatrix absTol 400)
}
test("one-vs-rest: pass label metadata correctly during train") {
val numClasses = 3
val ova = new OneVsRest()
ova.setClassifier(new MockLogisticRegression)
val labelMetadata = NominalAttribute.defaultAttr.withName("label").withNumValues(numClasses)
val labelWithMetadata = dataset("label").as("label", labelMetadata.toMetadata())
val features = dataset("features").as("features")
val datasetWithLabelMetadata = dataset.select(labelWithMetadata, features)
ova.fit(datasetWithLabelMetadata)
}
test("SPARK-8092: ensure label features and prediction cols are configurable") {
val labelIndexer = new StringIndexer()
.setInputCol("label")
.setOutputCol("indexed")
val indexedDataset = labelIndexer
.fit(dataset)
.transform(dataset)
.drop("label")
.withColumnRenamed("features", "f")
val ova = new OneVsRest()
ova.setClassifier(new LogisticRegression())
.setLabelCol(labelIndexer.getOutputCol)
.setFeaturesCol("f")
.setPredictionCol("p")
val ovaModel = ova.fit(indexedDataset)
val transformedDataset = ovaModel.transform(indexedDataset)
val outputFields = transformedDataset.schema.fieldNames.toSet
assert(outputFields.contains("p"))
}
test("SPARK-18625 : OneVsRestModel should support setFeaturesCol and setPredictionCol") {
val ova = new OneVsRest().setClassifier(new LogisticRegression)
val ovaModel = ova.fit(dataset)
val dataset2 = dataset.select(col("label").as("y"), col("features").as("fea"))
ovaModel.setFeaturesCol("fea")
ovaModel.setPredictionCol("pred")
val transformedDataset = ovaModel.transform(dataset2)
val outputFields = transformedDataset.schema.fieldNames.toSet
assert(outputFields === Set("y", "fea", "pred"))
}
test("SPARK-8049: OneVsRest shouldn't output temp columns") {
val logReg = new LogisticRegression()
.setMaxIter(1)
val ovr = new OneVsRest()
.setClassifier(logReg)
val output = ovr.fit(dataset).transform(dataset)
assert(output.schema.fieldNames.toSet === Set("label", "features", "prediction"))
}
test("OneVsRest.copy and OneVsRestModel.copy") {
val lr = new LogisticRegression()
.setMaxIter(1)
val ovr = new OneVsRest()
withClue("copy with classifier unset should work") {
ovr.copy(ParamMap(lr.maxIter -> 10))
}
ovr.setClassifier(lr)
val ovr1 = ovr.copy(ParamMap(lr.maxIter -> 10))
require(ovr.getClassifier.getOrDefault(lr.maxIter) === 1, "copy should have no side-effects")
require(ovr1.getClassifier.getOrDefault(lr.maxIter) === 10,
"copy should handle extra classifier params")
val ovrModel = ovr1.fit(dataset).copy(ParamMap(lr.thresholds -> Array(0.9, 0.1)))
ovrModel.models.foreach { case m: LogisticRegressionModel =>
require(m.getThreshold === 0.1, "copy should handle extra model params")
}
}
test("read/write: OneVsRest") {
val lr = new LogisticRegression().setMaxIter(10).setRegParam(0.01)
val ova = new OneVsRest()
.setClassifier(lr)
.setLabelCol("myLabel")
.setFeaturesCol("myFeature")
.setPredictionCol("myPrediction")
val ova2 = testDefaultReadWrite(ova, testParams = false)
assert(ova.uid === ova2.uid)
assert(ova.getFeaturesCol === ova2.getFeaturesCol)
assert(ova.getLabelCol === ova2.getLabelCol)
assert(ova.getPredictionCol === ova2.getPredictionCol)
ova2.getClassifier match {
case lr2: LogisticRegression =>
assert(lr.uid === lr2.uid)
assert(lr.getMaxIter === lr2.getMaxIter)
assert(lr.getRegParam === lr2.getRegParam)
case other =>
throw new AssertionError(s"Loaded OneVsRest expected classifier of type" +
s" LogisticRegression but found ${other.getClass.getName}")
}
}
test("read/write: OneVsRestModel") {
def checkModelData(model: OneVsRestModel, model2: OneVsRestModel): Unit = {
assert(model.uid === model2.uid)
assert(model.getFeaturesCol === model2.getFeaturesCol)
assert(model.getLabelCol === model2.getLabelCol)
assert(model.getPredictionCol === model2.getPredictionCol)
val classifier = model.getClassifier.asInstanceOf[LogisticRegression]
model2.getClassifier match {
case lr2: LogisticRegression =>
assert(classifier.uid === lr2.uid)
assert(classifier.getMaxIter === lr2.getMaxIter)
assert(classifier.getRegParam === lr2.getRegParam)
case other =>
throw new AssertionError(s"Loaded OneVsRestModel expected classifier of type" +
s" LogisticRegression but found ${other.getClass.getName}")
}
assert(model.labelMetadata === model2.labelMetadata)
model.models.zip(model2.models).foreach {
case (lrModel1: LogisticRegressionModel, lrModel2: LogisticRegressionModel) =>
assert(lrModel1.uid === lrModel2.uid)
assert(lrModel1.coefficients === lrModel2.coefficients)
assert(lrModel1.intercept === lrModel2.intercept)
case other =>
throw new AssertionError(s"Loaded OneVsRestModel expected model of type" +
s" LogisticRegressionModel but found ${other.getClass.getName}")
}
}
val lr = new LogisticRegression().setMaxIter(10).setRegParam(0.01)
val ova = new OneVsRest().setClassifier(lr)
val ovaModel = ova.fit(dataset)
val newOvaModel = testDefaultReadWrite(ovaModel, testParams = false)
checkModelData(ovaModel, newOvaModel)
}
test("should support all NumericType labels and not support other types") {
val ovr = new OneVsRest().setClassifier(new LogisticRegression().setMaxIter(1))
MLTestingUtils.checkNumericTypes[OneVsRestModel, OneVsRest](
ovr, spark) { (expected, actual) =>
val expectedModels = expected.models.map(m => m.asInstanceOf[LogisticRegressionModel])
val actualModels = actual.models.map(m => m.asInstanceOf[LogisticRegressionModel])
assert(expectedModels.length === actualModels.length)
expectedModels.zip(actualModels).foreach { case (e, a) =>
assert(e.intercept === a.intercept)
assert(e.coefficients.toArray === a.coefficients.toArray)
}
}
}
}
private class MockLogisticRegression(uid: String) extends LogisticRegression(uid) {
def this() = this("mockLogReg")
setMaxIter(1)
override protected[spark] def train(dataset: Dataset[_]): LogisticRegressionModel = {
val labelSchema = dataset.schema($(labelCol))
// check for label attribute propagation.
assert(MetadataUtils.getNumClasses(labelSchema).forall(_ == 2))
super.train(dataset)
}
}
| Panos-Bletsos/spark-cost-model-optimizer | mllib/src/test/scala/org/apache/spark/ml/classification/OneVsRestSuite.scala | Scala | apache-2.0 | 11,061 |
/*
* Copyright (c) 2017 joesan @ http://github.com/joesan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.inland24.plantsim.core
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.TestKit
import com.inland24.plantsim.config.AppConfig
import org.scalatest.BeforeAndAfterAll
import org.scalatest.featurespec.AnyFeatureSpecLike
import org.scalatest.matchers.must.Matchers
class AppBindingsTest
extends TestKit(ActorSystem("AppBindingsActorSystem"))
with Matchers
with AnyFeatureSpecLike
with BeforeAndAfterAll {
override def afterAll(): Unit = {
super.afterAll()
system.terminate()
}
Feature("AppBindings") {
val appCfg = AppConfig.load()
Scenario("initialize application components against a default environment") {
val appBindings = AppBindings(system)
appBindings.appConfig === appCfg
appBindings.supervisorActor.path.name === s"${appCfg.appName}-supervisor"
}
}
}
| joesan/plant-simulator | test/com/inland24/plantsim/core/AppBindingsTest.scala | Scala | apache-2.0 | 1,498 |
/**
* Copyright 2013-2014. Genome Bridge LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.cs.amplab.adam.models
import edu.berkeley.cs.amplab.adam.avro.{ADAMRecord, ADAMNucleotideContig}
import edu.berkeley.cs.amplab.adam.avro.ADAMRecord
import edu.berkeley.cs.amplab.adam.rdd.AdamContext._
import net.sf.samtools.{SAMFileHeader, SAMFileReader, SAMSequenceRecord, SAMSequenceDictionary}
import org.apache.avro.specific.SpecificRecord
import org.broadinstitute.variant.vcf.{VCFHeader, VCFContigHeaderLine}
import scala.collection._
import scala.math.Ordering.Implicits._
/**
* SequenceDictionary contains the (bijective) map between Ints (the referenceId) and Strings (the referenceName)
* from the header of a BAM file, or the combined result of multiple such SequenceDictionaries.
*/
class SequenceDictionary(recordsIn: Iterable[SequenceRecord]) extends Serializable {
// Intermediate value used to ensure that no referenceName or referenceId is listed twice with a different
// referenceId or referenceName (respectively). Notice the "toSet", which means it's okay to pass an Iterable
// that lists the _same_ SequenceRecord twice.
private val idNamePairs = recordsIn.map(rec => (rec.id, rec.name.toString)).toSet
// check that no referenceId value is listed twice, to two different referenceNames
assert(idNamePairs.groupBy(_._1).map(p => (p._1, p._2.size)).filter(p => p._2 > 1).isEmpty,
"Duplicate ID in %s".format(idNamePairs))
// check that no referenceName is listed twice, to two different referenceIds
assert(idNamePairs.groupBy(_._2).map(p => (p._1, p._2.size)).filter(p => p._2 > 1).isEmpty,
"Duplicate Name in %s".format(idNamePairs))
// Pre-compute the hashCode, based on a sorted version of the idNamePairs list.
private val _hashCode: Int = idNamePairs.toSeq.sortWith(_ < _).foldLeft(0) {
(hash: Int, p: (Int, CharSequence)) => 37 * (hash + p._1) + p._2.hashCode
}
// Maps referenceId -> SequenceRecord
private val recordIndices: mutable.Map[Int, SequenceRecord] =
mutable.Map(recordsIn.map {
rec => (rec.id, rec)
}.toSeq: _*)
// Maps referenceName -> SequenceRecord
private val recordNames: mutable.Map[CharSequence, SequenceRecord] =
mutable.Map(recordsIn.map {
// Call toString explicitly, since otherwise we were picking up an Avro-specific Utf8 value here,
// which was making the containsRefName method below fail in a hard-to-understand way.
rec => (rec.name.toString, rec)
}.toSeq: _*)
def assignments: Map[Int, CharSequence] = recordIndices.map {
case (id: Int, rec: SequenceRecord) =>
(id, rec.name)
}
def apply(id: Int): SequenceRecord = recordIndices(id)
/**
* Returns the sequence record associated with a specific contig name.
*
* @param name Name to search for.
* @return SequenceRecord associated with this record.
*/
def apply(name: CharSequence): SequenceRecord = {
// must explicitly call toString - see note at recordNames creation RE: Avro & Utf8
recordNames(name.toString)
}
/**
* Returns true if this sequence dictionary contains a reference with a specific name.
*
* @param name Reference name to look for.
* @return True if reference is in this dictionary.
*/
def containsRefName(name : CharSequence) : Boolean = {
// must explicitly call toString - see note at recordNames creation RE: Avro & Utf8
recordNames.contains(name.toString)
}
/**
* Produces a Map of Int -> Int which maps the referenceIds from this SequenceDictionary
* into referenceIds compatible with the argument SequenceDictionary ('dict').
*
* There are basically three cases that we have to handle here:
* (1) ids for the same sequence name which are different between the dictionaries. These are
* converted (from this.referenceId into dict.referenceId).
* (2) ids which are in use (for different sequences) between the two dictionaries. In this case,
* we mint a new identifier (using nonoverlappingHash) for the sequence in this dictionary
* that won't conflict with any sequence in either dictionary.
* (3) ids for sequences that aren't in the argument dict, and which don't conflict as in (2),
* can be carried over as-is.
*
* (Note: if the source referenceId isn't in the Map produced by mapTo, we can assume that it
* can be used without change in the new dictionary. The method remap, below, actually implements
* this identity.)
*
* The results of this mapTo should be useable by remap to produce a "compatible" dictionary,
* i.e. for all d1 and d2,
*
* d1.remap(d1.mapTo(d2)).isCompatibleWith(d2)
*
* should be true.
*
* @param dict The target dictionary into whose referenceId space the ids of this dictionary should be mapped.
* @return A Map whose values change the referenceIds in this dictionary; every referenceId in the source
* dictionary should be present in this Map
*/
def mapTo(dict: SequenceDictionary): Map[Int, Int] = {
/*
* we start by assuming that all the sequences in the target dictionary will maintain their
* existing identifiers -- mapTo won't imply any changes to the id/sequence correspondence in
* the target dictionary.
*/
val assign: mutable.Map[Int, CharSequence] = mutable.Map(dict.assignments.toSeq: _*)
/*
* Next, for every source sequence that is _not_ in the target dictionary, there are two cases:
* 1. the source ID is not in use in the target -- in this case, just carry over the existing
* identifier into the assignment.
* 2. the source ID _is_ already in use in the assignment -- in this case, we assign a new identifier
* for the source sequence, and store it in the assignment.
*/
recordNames.keys.filter(!dict.recordNames.contains(_)).foreach {
name =>
val myIdx = recordNames(name).id
if (assign.contains(myIdx)) {
// using dict.nonoverlappingHash (rather than this.nonoverlappingHash) ensures
// that the new identifier won't overlap with any other in the target dictionary
// (and therefore, in the assignment map we're building, above).
assign(dict.nonoverlappingHash(name)) = name
} else {
assign(myIdx) = name
}
}
/*
* At this point, 'assign' holds the desired id->sequence mapping of the "combined" target
* and source dictionaries; to some extent, we've reverse-engineered the results of
*
* this.remap(this.mapTo(dict)) ++ dict
*
* So now, we reverse the mapping (into sequence->id) and use it to convert source identifiers
* into target identifiers.
*/
val rassign: Map[CharSequence, Int] = Map(assign.toSeq.map(p => (p._2, p._1)): _*)
val idxMap: Map[Int, Int] = Map(recordIndices.keys.map(idx => (idx, rassign(recordIndices(idx).name))).toSeq: _*)
assert(idxMap.keys.filter(!recordIndices.contains(_)).isEmpty,
"There were keys in the mapTo Map that weren't actually sequence indices")
assert(recordIndices.keys.filter(!idxMap.contains(_)).isEmpty,
"There were keys which weren't remapped by the mapTo idxMap")
idxMap
}
/**
* See the note to mapTo, above.
* The combined results of remap and mapTo should produce a "compatible" dictionary,
* i.e. for all d1 and d2,
*
* d1.remap(d1.mapTo(d2)).isCompatibleWith(d2)
*
* should be true.
*
* @param idTransform The Map[Int,Int] to transform the identifiers of this dictionary; e.g. the output of
* mapTo.
* @return A new SequenceDictionary with just the referenceIds mapped through the given Map argument.
*/
def remap(idTransform: Map[Int, Int]): SequenceDictionary = {
def remapIndex(i: Int): Int =
if (idTransform.contains(i)) idTransform(i) else i
SequenceDictionary(idNamePairs.map {
case (id, name) =>
recordIndices(id).withReferenceId(remapIndex(id))
}.toSeq: _*)
}
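// Illustrative sketch of the mapTo/remap contract (hypothetical ids and names, not from the original tests):
//   val d1 = SequenceDictionary(SequenceRecord(0, "chr1", 1000L), SequenceRecord(1, "chr2", 2000L))
//   val d2 = SequenceDictionary(SequenceRecord(1, "chr1", 1000L))
//   d1.remap(d1.mapTo(d2)).isCompatibleWith(d2) // holds: "chr1" moves to id 1, "chr2" gets a fresh non-conflicting id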
def records: Seq[SequenceRecord] = recordIndices.values.toSeq
def +(rec: SequenceRecord): SequenceDictionary =
new SequenceDictionary(recordsIn ++ List(rec))
def +=(rec: SequenceRecord): SequenceDictionary = {
recordIndices.put(rec.id, rec)
recordNames.put(rec.name, rec)
this
}
def ++(dict: SequenceDictionary): SequenceDictionary =
new SequenceDictionary(recordsIn ++ dict.records)
def ++(recs: Seq[SequenceRecord]): SequenceDictionary =
recs.foldRight(this)((rec, dict) => dict + rec)
def ++=(recs: Seq[SequenceRecord]): SequenceDictionary = {
recs.foreach {
rec => this += rec
}
this
}
def ++=(dict: SequenceDictionary): SequenceDictionary = {
dict.recordIndices.keys.foreach {
idx => {
val newrec = dict.recordIndices(idx)
recordIndices.put(newrec.id, newrec)
recordNames.put(newrec.name, newrec)
}
}
this
}
/**
* Tests whether two dictionaries are compatible, where "compatible" means that
* shared referenceName values are associated with the same referenceId, and
* shared referenceId values are associated with the same referenceName.
*
* Roughly, two dictionaries are compatible if the ++ operator will succeed when
* called on them together.
*
* @param dict The other dictionary with which to test compatibility
* @return true if the dictionaries are compatible, false otherwise.
*/
def isCompatibleWith(dict: SequenceDictionary): Boolean =
recordIndices.keys.filter(dict.recordIndices.contains).filter(idx => recordIndices(idx) != dict.recordIndices(idx)).isEmpty &&
recordNames.keys.filter(dict.recordNames.contains).filter(name => recordNames(name) != dict.recordNames(name)).isEmpty
def nonoverlappingHash(x: CharSequence): Int =
SequenceDictionary.nonoverlappingHash(x, idx => recordIndices.contains(idx))
override def equals(x: Any): Boolean = {
x match {
case d: SequenceDictionary =>
recordNames == d.recordNames && recordIndices == d.recordIndices
case _ => false
}
}
override def hashCode(): Int = _hashCode
override def toString: String = idNamePairs.toString()
/**
* Converts this ADAM style sequence dictionary into a SAM style sequence dictionary.
*
* @return Returns a SAM formatted sequence dictionary.
*/
def toSAMSequenceDictionary(): SAMSequenceDictionary = {
new SAMSequenceDictionary(recordsIn.map(_.toSAMSequenceRecord).toList)
}
/**
* Returns the reference names stored in this dictionary.
*
* @return Returns the reference names in this dictionary.
*/
def getReferenceNames (): Iterable[String] = {
recordsIn.map(_.name.toString)
}
}
object SequenceDictionary {
def apply(recordsIn: SequenceRecord*) = new SequenceDictionary(recordsIn)
/**
* Extracts a SAM sequence dictionary from a SAM file header and returns an
* ADAM sequence dictionary.
*
* @see fromSAMSequenceDictionary
*
* @param header SAM file header.
* @return Returns an ADAM style sequence dictionary.
*/
def fromSAMHeader(header: SAMFileHeader): SequenceDictionary = {
val samDict = header.getSequenceDictionary
fromSAMSequenceDictionary(samDict)
}
/**
* Extracts a SAM sequence dictionary from a VCF header and returns an
* ADAM sequence dictionary.
*
* @see fromSAMSequenceDictionary
*
* @param header VCF file header.
* @return Returns an ADAM style sequence dictionary.
*/
def fromVCFHeader(header: VCFHeader): SequenceDictionary = {
val contigLines: List[VCFContigHeaderLine] = header.getContigLines()
// map over contig lines,
apply(contigLines.map(l => {
val name = l.getID()
val index = l.getContigIndex()
// TODO: this is clearly not correct. however, the picard version we are currently using does _not_ have a way
// to report contig length from a vcf. we can't fix this without an update being made to hadoop-bam first, so
// i've flagged the hadoop-bam team to let them know -- FAN, 2/5/2014
val length = 1
SequenceRecord(index, name, length.toLong, null)
}): _*)
}
/**
* Converts a picard/samtools SAMSequenceDictionary into an ADAM sequence dictionary.
*
* @see fromSAMHeader
* @see fromVCFHeader
*
* @param samDict SAM style sequence dictionary.
* @return Returns an ADAM style sequence dictionary.
*/
def fromSAMSequenceDictionary(samDict: SAMSequenceDictionary): SequenceDictionary = {
val samDictRecords: List[SAMSequenceRecord] = samDict.getSequences
val seqDict: SequenceDictionary =
SequenceDictionary(samDictRecords.map {
seqRecord: SAMSequenceRecord => SequenceRecord.fromSamSequenceRecord(seqRecord)
}: _*)
seqDict
}
def fromSAMReader(samReader: SAMFileReader): SequenceDictionary =
fromSAMHeader(samReader.getFileHeader)
def nonoverlappingHash(x: CharSequence, conflicts: Int => Boolean): Int = {
var hash = x.hashCode
while (conflicts(hash)) {
hash += 1
}
hash
}
}
/**
* Utility class within the SequenceDictionary; represents unique reference name-to-id correspondence
*
* @param id numeric identifier of the reference sequence (the referenceId)
* @param name name of the reference sequence (contig)
* @param length length of the reference sequence
* @param url URL or assembly identifier for the reference sequence (may be null)
*/
class SequenceRecord(val id: Int, val name: CharSequence, val length: Long, val url: CharSequence) extends Serializable {
assert(name != null, "SequenceRecord.name is null")
assert(name.length > 0, "SequenceRecord.name has length 0")
assert(length > 0, "SequenceRecord.length <= 0")
def withReferenceId(newId: Int): SequenceRecord =
new SequenceRecord(newId, name, length, url)
override def equals(x: Any): Boolean = {
x match {
case y: SequenceRecord =>
id == y.id && name == y.name && length == y.length && url == y.url
case _ => false
}
}
override def hashCode: Int = ((id + name.hashCode) * 37 + length.hashCode) * 37
override def toString: String = "%s->%s=%d".format(id, name, length)
/**
* Converts this sequence record into a SAM sequence record.
*
* @return A SAM formatted sequence record.
*/
def toSAMSequenceRecord (): SAMSequenceRecord = {
val rec = new SAMSequenceRecord(name.toString, length.toInt)
// NOTE: we should set the sam sequence record's id here, but, that is private inside of samtools - FAN, 2/5/2014
// if url is defined, set it
if (url != null) {
rec.setAssembly(url)
}
rec
}
}
object SequenceRecord {
def apply(id: Int, name: CharSequence, length: Long, url: CharSequence = null): SequenceRecord =
new SequenceRecord(id, name, length, url)
/**
* Converts an ADAM contig into a sequence record.
*
* @param ctg Contig to convert.
* @return Contig expressed as a sequence record.
*/
def fromADAMContig (ctg: ADAMNucleotideContig): SequenceRecord = {
apply(ctg.getContigId, ctg.getContigName, ctg.getSequenceLength, ctg.getUrl)
}
/**
* Generates a sequence record from a SAMSequenceRecord.
*
* @param seqRecord SAMSequenceRecord input.
* @return A new ADAM sequence record.
*/
def fromSamSequenceRecord(seqRecord: SAMSequenceRecord): SequenceRecord = {
apply(seqRecord.getSequenceIndex, seqRecord.getSequenceName, seqRecord.getSequenceLength, seqRecord.getAssembly)
}
/**
* Convert an ADAMRecord into one or more SequenceRecords.
* The reason that we can't simply use the "fromSpecificRecord" method, below, is that each ADAMRecord
* can (through the fact that it could be a pair of reads) contain 1 or 2 possible SequenceRecord entries
* for the SequenceDictionary itself. Both have to be extracted, separately.
*
* @param rec The ADAMRecord from which to extract the SequenceRecord entries
* @return a list of all SequenceRecord entries derivable from this record.
*/
def fromADAMRecord(rec: ADAMRecord): Seq[SequenceRecord] = {
assert(rec != null, "ADAMRecord was null")
if (rec.getReadPaired) {
// only process a read pair, if we're looking at the first element of the pair.
if (rec.getFirstOfPair) {
val left =
if (rec.getReadMapped)
List(SequenceRecord(rec.getReferenceId, rec.getReferenceName, rec.getReferenceLength, rec.getReferenceUrl))
else
List()
val right =
if (rec.getMateMapped)
List(SequenceRecord(rec.getMateReferenceId, rec.getMateReference, rec.getMateReferenceLength, rec.getMateReferenceUrl))
else
List()
left ++ right
} else {
List()
}
} else {
if (rec.getReadMapped) {
List(SequenceRecord(rec.getReferenceId, rec.getReferenceName, rec.getReferenceLength, rec.getReferenceUrl))
} else {
// If the read isn't mapped, then ignore the fields altogether.
List()
}
}
}
def fromSpecificRecord(rec: SpecificRecord): SequenceRecord = {
val schema = rec.getSchema
new SequenceRecord(
rec.get(schema.getField("referenceId").pos()).asInstanceOf[Int],
rec.get(schema.getField("referenceName").pos()).asInstanceOf[CharSequence],
rec.get(schema.getField("referenceLength").pos()).asInstanceOf[Long],
rec.get(schema.getField("referenceUrl").pos()).asInstanceOf[CharSequence])
}
}
| fnothaft/adam | adam-core/src/main/scala/edu/berkeley/cs/amplab/adam/models/SequenceDictionary.scala | Scala | apache-2.0 | 17,865 |
/**
* Copyright (C) 2014 TU Berlin ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.peelframework.core.cli.command.experiment
import java.lang.{System => Sys}
import org.peelframework.core.beans.experiment.ExperimentSuite
import org.peelframework.core.beans.system.{Lifespan, System}
import org.peelframework.core.cli.command.Command
import org.peelframework.core.config.{Configurable, loadConfig}
import org.peelframework.core.graph.{Node, createGraph}
import org.peelframework.core.util.console._
import net.sourceforge.argparse4j.impl.Arguments
import net.sourceforge.argparse4j.inf.{Namespace, Subparser}
import org.springframework.context.ApplicationContext
import org.springframework.stereotype.Service
/** Execute a specific experiment. */
@Service("exp:run")
class Run extends Command {
override val name = "exp:run"
override val help = "execute a specific experiment"
override def register(parser: Subparser) = {
// options
parser.addArgument("--just")
.`type`(classOf[Boolean])
.dest("app.suite.experiment.just")
.action(Arguments.storeTrue)
.help("skip system set-up and tear-down")
parser.addArgument("--run")
.`type`(classOf[Integer])
.dest("app.suite.experiment.run")
.metavar("RUN")
.help("run to execute")
// arguments
parser.addArgument("suite")
.`type`(classOf[String])
.dest("app.suite.name")
.metavar("SUITE")
.help("suite containing the experiment")
parser.addArgument("experiment")
.`type`(classOf[String])
.dest("app.suite.experiment.name")
.metavar("EXPERIMENT")
.help("experiment to run")
// option defaults
parser.setDefault("app.suite.experiment.run", 1)
}
override def configure(ns: Namespace) = {
// set ns options and arguments to system properties
Sys.setProperty("app.suite.experiment.just", if (ns.getBoolean("app.suite.experiment.just")) "true" else "false")
Sys.setProperty("app.suite.experiment.run", ns.getInt("app.suite.experiment.run").toString)
Sys.setProperty("app.suite.name", ns.getString("app.suite.name"))
Sys.setProperty("app.suite.experiment.name", ns.getString("app.suite.experiment.name"))
}
override def run(context: ApplicationContext) = {
val suiteName = Sys.getProperty("app.suite.name")
val expName = Sys.getProperty("app.suite.experiment.name")
val expRun = Sys.getProperty("app.suite.experiment.run").toInt
val justRun = Sys.getProperty("app.suite.experiment.just") == "true"
logger.info(s"Running experiment '${Sys.getProperty("app.suite.experiment.name")}' from suite '${Sys.getProperty("app.suite.name")}'")
val suite = context.getBean(Sys.getProperty("app.suite.name"), classOf[ExperimentSuite])
val graph = createGraph(suite)
//TODO check for cycles in the graph
if (graph.isEmpty) throw new RuntimeException("Experiment suite is empty!")
// find experiment
val exps = suite.experiments.filter(_.name == expName)
// load config
for (e <- suite.experiments.filter(_.name == expName)) e.config = loadConfig(graph, e)
// check if experiment exists (the list should contain exactly one element)
if (exps.size != 1) throw new RuntimeException(s"Experiment '$expName' either not found or ambiguous in suite '$suiteName'")
for (exp <- exps; r <- Some(exp.run(expRun, force = true))) {
val allSystems = for (n <- graph.reverse.traverse(); if graph.descendants(exp).contains(n)) yield n
val inpSystems: Set[Node] = for (in <- exp.inputs; sys <- in.dependencies) yield sys
val expSystems = (graph.descendants(exp, exp.inputs) diff Seq(exp)).toSet
try {
logger.info("Executing experiment '%s'".format(exp.name))
// update config
for (n <- graph.descendants(exp)) n match {
case s: Configurable => s.config = exp.config
case _ => Unit
}
if (!justRun) {
logger.info("Setting up / updating systems required for input data sets")
for (n <- inpSystems) n match {
case s: System => if (s.isUp) s.update() else s.setUp()
case _ => Unit
}
logger.info("Materializing experiment input data sets")
for (n <- exp.inputs; path = n.resolve(n.path)) if (!n.fs.exists(path)) {
try {
n.materialize()
} catch {
case e: Throwable => n.fs.rmr(path); throw e // make sure the path is cleaned for the next try
}
} else {
logger.info(s"Skipping already materialized path '$path'".yellow)
}
logger.info("Tearing down redundant systems before conducting experiment runs")
for (n <- inpSystems diff expSystems) n match {
case s: System if !(Lifespan.PROVIDED :: Lifespan.SUITE :: Nil contains s.lifespan) => s.tearDown()
case _ => Unit
}
logger.info("Setting up systems with SUITE lifespan")
for (n <- allSystems) n match {
case s: System if s.lifespan == Lifespan.SUITE && !s.isUp => s.setUp()
case _ => Unit
}
logger.info("Updating systems with PROVIDED lifespan")
for (n <- allSystems) n match {
case s: System if s.lifespan == Lifespan.PROVIDED => s.update()
case _ => Unit
}
logger.info("Setting up systems with EXPERIMENT lifespan")
for (n <- expSystems) n match {
case s: System if s.lifespan == Lifespan.EXPERIMENT => s.setUp()
case _ => Unit
}
} else {
logger.info("Updating all systems")
for (n <- allSystems) n match {
case s: System => s.update()
case _ => Unit
}
}
logger.info("Setting up systems with RUN lifespan")
for (n <- allSystems) n match {
case s: System if s.lifespan == Lifespan.RUN => s.setUp()
case _ => Unit
}
for (n <- exp.outputs) n.clean()
r.execute() // run experiment
} catch {
case e: Throwable =>
logger.error(s"Exception for experiment ${exp.name} in suite ${suite.name}: ${e.getMessage}".red)
throw e
} finally {
if (!justRun) {
logger.info("Tearing down systems with SUITE or EXPERIMENT lifespan")
for (n <- allSystems) n match {
case s: System if Lifespan.SUITE :: Lifespan.EXPERIMENT :: Nil contains s.lifespan => s.tearDown()
case _ => Unit
}
}
logger.info("Tearing down systems with RUN lifespan")
for (n <- allSystems) n match {
case s: System if s.lifespan == Lifespan.RUN => s.tearDown()
case _ => Unit
}
}
}
}
}
| carabolic/peel | peel-core/src/main/scala/org/peelframework/core/cli/command/experiment/Run.scala | Scala | apache-2.0 | 7,356 |
package ingress.submission
import com.rabbitmq.client.AMQP.BasicProperties
import com.rabbitmq.client.{QueueingConsumer, Channel, Connection}
import play.api.test.FakeApplication
import submission.SubmissionService
import submission.messaging.{MessageSender, ConnectionManager}
import utils.WithServer
import play.api._
class WithServerConfig(params:(String,_)*) extends WithServer(app = FakeApplication(additionalConfiguration = params.toMap))
object TestUtils {
def declareQueue = {
val service:SubmissionService = new SubmissionService {
override def messagingService = MessageSender
}
val queueName = service.messagingService.getQueueName
Logger.info(s"cr WithServerConfig declaring queue $queueName")
val conn: Connection = ConnectionManager.factory.newConnection()
val channel: Channel = conn.createChannel()
val declareOk = channel.queueDeclare(queueName,true,false,false,null)
channel.close()
conn.close()
queueName
}
def consumeMessage(queueName:String) = {
val conn = ConnectionManager.factory.newConnection()
val channel = conn.createChannel()
val consumer = new QueueingConsumer(channel)
channel.basicConsume(queueName,true,consumer)
val delivery = consumer.nextDelivery(2000)
val body: String = new String(delivery.getBody)
channel.queueDelete(queueName)
channel.close()
conn.close()
body
}
def deleteQueue(queueName:String) = {
val conn: Connection = ConnectionManager.factory.newConnection()
val channel: Channel = conn.createChannel()
val deleteOk = channel.queueDelete(queueName)
channel.close()
conn.close()
}
def publishMessages(queueName:String,messages:String*) = {
val conn: Connection = ConnectionManager.factory.newConnection()
val channel: Channel = conn.createChannel()
val declareOk = channel.queueDeclare(queueName,true,false,false,null)
channel.confirmSelect()
messages.foreach{ message =>
channel.basicPublish("",queueName,new BasicProperties().builder().deliveryMode(2).build(),message.getBytes)
channel.waitForConfirms()
}
channel.close()
conn.close()
}
}
| Department-for-Work-and-Pensions/ClaimReceived | cr/test/ingress/submission/WithServerConfig.scala | Scala | mit | 2,168 |
package wp
import org.scalatest.FunSuite
class Discipline extends FunSuite with org.typelevel.discipline.scalatest.Discipline
| jawp/wicked-playground | modules/shared/.jvm/src/test/scala/wp/Discipline.scala | Scala | mit | 129 |
package com.forged.server
import com.forged.data.PortfolioProject
import spray.httpx.PlayTwirlSupport._
import spray.routing.HttpServiceActor
/**
* Created by visitor15 on 11/28/15.
*/
class ServerActor extends HttpServiceActor {
override def receive: Receive = runRoute {
path("home") {
get {
complete {
html.home.render()
}
}
} ~
path("old-profile") {
get {
complete {
html.basePage.render("Nick Champagne",
html.header.render(),
html.footer.render(),
html.profile.render())
}
}
} ~
path("portfolio") {
get {
complete {
html.basePage.render("Nick Champagne",
html.header.render(),
html.footer.render(),
html.portfolio.render(Server.generateProjectList()))
}
}
} ~
pathPrefix("theme") {
get {
getFromResourceDirectory("theme")
}
}
}
}
object Server {
def generateProjectList(): List[PortfolioProject] = {
val curiousProject = PortfolioProject("Curious - Khan Academy",
"A native Android application that integrates Khan Academy's public api.",
List("android-curious-khanacademy-green.png", "android-curious-khanacademy-blue.png", "android-curious-topics-menu.png"))
val nuesoftMedical = PortfolioProject("Nuesoft Medical",
"A medical application designed for increasing a patient's control over their data while providing a secure way of transferring data electronically.",
List("nuesoft-medical-login.png", "nuesoft-medical-profile-docs.png", "nuesoft-medical-profile-encryption.png"))
val moneyInformer = PortfolioProject("Money Informer",
"A tool to convert and look up world currency and country information. View financial history charts for different exchange rates and quickly find more information through Wikipedia and Google search.",
List("money-informer-phone-port-main.png", "money-informer-phone-port-history.png", "money-informer-tablet-links.png"))
val astroFileManager = PortfolioProject("ASTRO File manager",
"ASTRO File Manager finds and manages all of your files, no matter where they are located.",
List("astro-phone-listview.png", "astro-phone-nav-drawer.png"))
List(curiousProject, nuesoftMedical, moneyInformer, astroFileManager)
}
} | Visitor15/webPresence-scala | src/main/scala/com/forged/server/ServerActor.scala | Scala | mit | 2,382 |
package controllers
import scala.io.Source
/**
* Currently not in use.
* Could be used for loading templates from the server.
*/
class TemplateHandler {
val filePathTemplates = "server/app/controllers/templates.txt"
val nbrOfTemplates = getAmountOfTemplates
def getTemplate(templateNbr: Int): Option[String] = {
val contentIterator = Source.fromFile(filePathTemplates).getLines
val template = "Template" + templateNbr + "="
var wanted = false
def take(line: String): Boolean = {
if (line.replace(" ", "").startsWith(template))
wanted = true
if (line.trim.isEmpty)
wanted = false
wanted
}
Some(contentIterator.filter(take).mkString("\\n"))
}
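// Expected layout of templates.txt (illustrative sketch, inferred from getTemplate above; the real file may differ):
// each template starts with a "TemplateN=" line and runs until the next blank line, e.g.
//   Template1= Feature: ...
//   Req1 has Spec: ...
//
//   Template2= ...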
def getAmountOfTemplates: Int = {
var amount = 0
Source.fromFile(filePathTemplates).getLines.foreach(line => if (line.replace(" ", "").startsWith("Template=")) amount += 1)
amount
}
}
| reqT/reqT-webapp | server/app/controllers/TemplateHandler.scala | Scala | apache-2.0 | 923 |
def f = {}
def f = {}
println(/* resolved: false */ f)
println(classOf[/* resolved: false */f]) | ilinum/intellij-scala | testdata/resolve2/element/clash/FunctionDefinition.scala | Scala | apache-2.0 | 96 |
/*
* Copyright (C) 2017. envisia GmbH
* All Rights Reserved.
*/
package de.envisia.postgresql.message.backend
import akka.util.ByteString
case class DataRowMessage(values: Array[ByteString]) extends ServerMessage {
override val kind: Int = ServerMessage.DataRow
} | schmitch/akka-pg | src/main/scala/de/envisia/postgresql/message/backend/DataRowMessage.scala | Scala | apache-2.0 | 270 |