code (string, 5–1M chars) | repo_name (string, 5–109 chars) | path (string, 6–208 chars) | language (1 class) | license (15 values) | size (int64, 5–1M)
---|---|---|---|---|---
/*
# Copyright 2016 Georges Lipka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
package com.glipka.easyReactJS.reactBootstrap
import scala.scalajs.js
import scala.scalajs.js._
import com.glipka.easyReactJS.react._
import ReactBootstrap._
@js.native trait PanelProps extends HTMLProps[Panel] with TransitionCallbacks with js.Any {
var bsClass: String = js.native
var bsSize: Sizes = js.native
var bsStyle: String = js.native
var collapsible: Boolean = js.native
var defaultExpanded: Boolean = js.native
var eventKey: Any = js.native
var expanded: Boolean = js.native
var footer: ReactNode = js.native
var header: ReactNode = js.native
}
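// A hypothetical usage sketch (not part of the original source): props for a Scala.js native
// trait like this one are typically built from a dynamic literal and cast, e.g.
//   val props = js.Dynamic.literal(
//     bsStyle = "primary",
//     collapsible = true,
//     header = "My panel"
//   ).asInstanceOf[PanelProps]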
| glipka/Easy-React-With-ScalaJS | src/main/scala/com/glipka/easyReactJS/reactBootstrap/Panel.scala | Scala | apache-2.0 | 1,168 |
package com.ubirch.user.model.rest
import java.util.UUID
/**
* author: cvandrei
* since: 2017-03-29
*/
case class AllowedUsers(groupId: UUID,
allowedUsers: Set[UUID]
)
| ubirch/ubirch-user-service | model-rest/src/main/scala/com/ubirch/user/model/rest/AllowedUsers.scala | Scala | apache-2.0 | 223 |
import q._
package q.qq {
class C extends CC // ambiguous though same underlying
}
| martijnhoekstra/scala | test/files/neg/t2458b/test.scala | Scala | apache-2.0 | 89 |
package com.wavesplatform.settings
import com.typesafe.config.ConfigFactory
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.test.FlatSpec
import net.ceedubs.ficus.Ficus._
import net.ceedubs.ficus.readers.ArbitraryTypeReader._
class WalletSettingsSpecification extends FlatSpec {
"WalletSettings" should "read values from config" in {
val config = loadConfig(ConfigFactory.parseString("""waves.wallet {
| password: "some string as password"
| seed: "BASE58SEED"
|}""".stripMargin))
val settings = config.as[WalletSettings]("waves.wallet")
settings.seed should be(Some(ByteStr.decodeBase58("BASE58SEED").get))
settings.password should be(Some("some string as password"))
}
}
| wavesplatform/Waves | node/src/test/scala/com/wavesplatform/settings/WalletSettingsSpecification.scala | Scala | mit | 747 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.java
import java.util.{Comparator, List => JList, Map => JMap}
import java.lang.{Iterable => JIterable}
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import scala.reflect.ClassTag
import com.google.common.base.Optional
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.mapred.{JobConf, OutputFormat}
import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
import org.apache.spark.{HashPartitioner, Partitioner}
import org.apache.spark.Partitioner._
import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
import org.apache.spark.api.java.JavaUtils.mapAsSerializableJavaMap
import org.apache.spark.api.java.function.{Function => JFunction, Function2 => JFunction2, PairFunction}
import org.apache.spark.partial.{BoundedDouble, PartialResult}
import org.apache.spark.rdd.{OrderedRDDFunctions, RDD}
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.serializer.Serializer
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
class JavaPairRDD[K, V](val rdd: RDD[(K, V)])
(implicit val kClassTag: ClassTag[K], implicit val vClassTag: ClassTag[V])
extends AbstractJavaRDDLike[(K, V), JavaPairRDD[K, V]] {
override def wrapRDD(rdd: RDD[(K, V)]): JavaPairRDD[K, V] = JavaPairRDD.fromRDD(rdd)
override val classTag: ClassTag[(K, V)] = rdd.elementClassTag
import JavaPairRDD._
// Common RDD functions
/** Persist this RDD with the default storage level (`MEMORY_ONLY`). */
def cache(): JavaPairRDD[K, V] = new JavaPairRDD[K, V](rdd.cache())
/**
* Set this RDD's storage level to persist its values across operations after the first time
* it is computed. Can only be called once on each RDD.
*/
def persist(newLevel: StorageLevel): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.persist(newLevel))
/**
* Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
* This method blocks until all blocks are deleted.
*/
def unpersist(): JavaPairRDD[K, V] = wrapRDD(rdd.unpersist())
/**
* Mark the RDD as non-persistent, and remove all blocks for it from memory and disk.
*
* @param blocking Whether to block until all blocks are deleted.
*/
def unpersist(blocking: Boolean): JavaPairRDD[K, V] = wrapRDD(rdd.unpersist(blocking))
// Transformations (return a new RDD)
/**
* Return a new RDD containing the distinct elements in this RDD.
*/
def distinct(): JavaPairRDD[K, V] = new JavaPairRDD[K, V](rdd.distinct())
/**
* Return a new RDD containing the distinct elements in this RDD.
*/
def distinct(numPartitions: Int): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.distinct(numPartitions))
/**
* Return a new RDD containing only the elements that satisfy a predicate.
*/
def filter(f: JFunction[(K, V), java.lang.Boolean]): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.filter(x => f.call(x).booleanValue()))
/**
* Return a new RDD that is reduced into `numPartitions` partitions.
*/
def coalesce(numPartitions: Int): JavaPairRDD[K, V] = fromRDD(rdd.coalesce(numPartitions))
/**
* Return a new RDD that is reduced into `numPartitions` partitions.
*/
def coalesce(numPartitions: Int, shuffle: Boolean): JavaPairRDD[K, V] =
fromRDD(rdd.coalesce(numPartitions, shuffle))
/**
* Return a new RDD that has exactly numPartitions partitions.
*
* Can increase or decrease the level of parallelism in this RDD. Internally, this uses
* a shuffle to redistribute data.
*
* If you are decreasing the number of partitions in this RDD, consider using `coalesce`,
* which can avoid performing a shuffle.
*/
def repartition(numPartitions: Int): JavaPairRDD[K, V] = fromRDD(rdd.repartition(numPartitions))
/**
* Return a sampled subset of this RDD.
*/
def sample(withReplacement: Boolean, fraction: Double): JavaPairRDD[K, V] =
sample(withReplacement, fraction, Utils.random.nextLong)
/**
* Return a sampled subset of this RDD.
*/
def sample(withReplacement: Boolean, fraction: Double, seed: Long): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.sample(withReplacement, fraction, seed))
/**
* Return a subset of this RDD sampled by key (via stratified sampling).
*
* Create a sample of this RDD using variable sampling rates for different keys as specified by
* `fractions`, a key to sampling rate map, via simple random sampling with one pass over the
* RDD, to produce a sample of size that's approximately equal to the sum of
* math.ceil(numItems * samplingRate) over all key values.
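 *
 * A hypothetical sketch (assumes `pairs: JavaPairRDD[String, String]`; illustrative only):
 * {{{
 *   val fractions = new java.util.HashMap[String, Double]()
 *   fractions.put("a", 0.1) // keep ~10% of the pairs with key "a"
 *   fractions.put("b", 0.5) // keep ~50% of the pairs with key "b"
 *   val sampled = pairs.sampleByKey(false, fractions, 42L)
 * }}}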
*/
def sampleByKey(withReplacement: Boolean,
fractions: JMap[K, Double],
seed: Long): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.sampleByKey(withReplacement, fractions.asScala, seed))
/**
* Return a subset of this RDD sampled by key (via stratified sampling).
*
* Create a sample of this RDD using variable sampling rates for different keys as specified by
* `fractions`, a key to sampling rate map, via simple random sampling with one pass over the
* RDD, to produce a sample of size that's approximately equal to the sum of
* math.ceil(numItems * samplingRate) over all key values.
*
* Use Utils.random.nextLong as the default seed for the random number generator.
*/
def sampleByKey(withReplacement: Boolean,
fractions: JMap[K, Double]): JavaPairRDD[K, V] =
sampleByKey(withReplacement, fractions, Utils.random.nextLong)
/**
* Return a subset of this RDD sampled by key (via stratified sampling) containing exactly
* math.ceil(numItems * samplingRate) for each stratum (group of pairs with the same key).
*
* This method differs from [[sampleByKey]] in that we make additional passes over the RDD to
* create a sample size that's exactly equal to the sum of math.ceil(numItems * samplingRate)
* over all key values with a 99.99% confidence. When sampling without replacement, we need one
* additional pass over the RDD to guarantee sample size; when sampling with replacement, we need
* two additional passes.
*/
def sampleByKeyExact(withReplacement: Boolean,
fractions: JMap[K, Double],
seed: Long): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.sampleByKeyExact(withReplacement, fractions.asScala, seed))
/**
* Return a subset of this RDD sampled by key (via stratified sampling) containing exactly
* math.ceil(numItems * samplingRate) for each stratum (group of pairs with the same key).
*
* This method differs from [[sampleByKey]] in that we make additional passes over the RDD to
* create a sample size that's exactly equal to the sum of math.ceil(numItems * samplingRate)
* over all key values with a 99.99% confidence. When sampling without replacement, we need one
* additional pass over the RDD to guarantee sample size; when sampling with replacement, we need
* two additional passes.
*
* Use Utils.random.nextLong as the default seed for the random number generator.
*/
def sampleByKeyExact(withReplacement: Boolean, fractions: JMap[K, Double]): JavaPairRDD[K, V] =
sampleByKeyExact(withReplacement, fractions, Utils.random.nextLong)
/**
* Return the union of this RDD and another one. Any identical elements will appear multiple
* times (use `.distinct()` to eliminate them).
*/
def union(other: JavaPairRDD[K, V]): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.union(other.rdd))
/**
* Return the intersection of this RDD and another one. The output will not contain any duplicate
* elements, even if the input RDDs did.
*
* Note that this method performs a shuffle internally.
*/
def intersection(other: JavaPairRDD[K, V]): JavaPairRDD[K, V] =
new JavaPairRDD[K, V](rdd.intersection(other.rdd))
// first() has to be overridden here so that the generated method has the signature
// 'public scala.Tuple2 first()'; if the trait's definition is used,
// then the method has the signature 'public java.lang.Object first()',
// causing NoSuchMethodErrors at runtime.
override def first(): (K, V) = rdd.first()
// Pair RDD functions
/**
* Generic function to combine the elements for each key using a custom set of aggregation
 * functions. Turns a JavaPairRDD[K, V] into a result of type JavaPairRDD[K, C], for a
* "combined type" C. Note that V and C can be different -- for example, one might group an
* RDD of type (Int, Int) into an RDD of type (Int, List[Int]). Users provide three
* functions:
*
* - `createCombiner`, which turns a V into a C (e.g., creates a one-element list)
* - `mergeValue`, to merge a V into a C (e.g., adds it to the end of a list)
* - `mergeCombiners`, to combine two C's into a single one.
*
 * In addition, users can control the partitioning of the output RDD, the serializer that is used
* for the shuffle, and whether to perform map-side aggregation (if a mapper can produce multiple
* items with the same key).
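 *
 * A hypothetical sketch counting occurrences per key, using the four-argument overload defined
 * further below (assumes `pairs: JavaPairRDD[String, java.lang.Integer]` and Scala 2.12+ SAM
 * conversion of lambdas to the Java function types; illustrative only):
 * {{{
 *   val counts = pairs.combineByKey(
 *     (v: java.lang.Integer) => Int.box(1),                               // createCombiner
 *     (c: java.lang.Integer, v: java.lang.Integer) => Int.box(c + 1),     // mergeValue
 *     (c1: java.lang.Integer, c2: java.lang.Integer) => Int.box(c1 + c2), // mergeCombiners
 *     new HashPartitioner(4))
 * }}}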
*/
def combineByKey[C](createCombiner: JFunction[V, C],
mergeValue: JFunction2[C, V, C],
mergeCombiners: JFunction2[C, C, C],
partitioner: Partitioner,
mapSideCombine: Boolean,
serializer: Serializer): JavaPairRDD[K, C] = {
implicit val ctag: ClassTag[C] = fakeClassTag
fromRDD(rdd.combineByKeyWithClassTag(
createCombiner,
mergeValue,
mergeCombiners,
partitioner,
mapSideCombine,
serializer
))
}
/**
* Generic function to combine the elements for each key using a custom set of aggregation
 * functions. Turns a JavaPairRDD[K, V] into a result of type JavaPairRDD[K, C], for a
* "combined type" C. Note that V and C can be different -- for example, one might group an
* RDD of type (Int, Int) into an RDD of type (Int, List[Int]). Users provide three
* functions:
*
* - `createCombiner`, which turns a V into a C (e.g., creates a one-element list)
* - `mergeValue`, to merge a V into a C (e.g., adds it to the end of a list)
* - `mergeCombiners`, to combine two C's into a single one.
*
* In addition, users can control the partitioning of the output RDD. This method automatically
* uses map-side aggregation in shuffling the RDD.
*/
def combineByKey[C](createCombiner: JFunction[V, C],
mergeValue: JFunction2[C, V, C],
mergeCombiners: JFunction2[C, C, C],
partitioner: Partitioner): JavaPairRDD[K, C] = {
combineByKey(createCombiner, mergeValue, mergeCombiners, partitioner, true, null)
}
/**
* Simplified version of combineByKey that hash-partitions the output RDD and uses map-side
* aggregation.
*/
def combineByKey[C](createCombiner: JFunction[V, C],
mergeValue: JFunction2[C, V, C],
mergeCombiners: JFunction2[C, C, C],
numPartitions: Int): JavaPairRDD[K, C] =
combineByKey(createCombiner, mergeValue, mergeCombiners, new HashPartitioner(numPartitions))
/**
* Merge the values for each key using an associative reduce function. This will also perform
* the merging locally on each mapper before sending results to a reducer, similarly to a
* "combiner" in MapReduce.
*/
def reduceByKey(partitioner: Partitioner, func: JFunction2[V, V, V]): JavaPairRDD[K, V] =
fromRDD(rdd.reduceByKey(partitioner, func))
/**
* Merge the values for each key using an associative reduce function, but return the results
* immediately to the master as a Map. This will also perform the merging locally on each mapper
* before sending results to a reducer, similarly to a "combiner" in MapReduce.
*/
def reduceByKeyLocally(func: JFunction2[V, V, V]): java.util.Map[K, V] =
mapAsSerializableJavaMap(rdd.reduceByKeyLocally(func))
/** Count the number of elements for each key, and return the result to the master as a Map. */
def countByKey(): java.util.Map[K, Long] = mapAsSerializableJavaMap(rdd.countByKey())
/**
* Approximate version of countByKey that can return a partial result if it does
* not finish within a timeout.
*/
def countByKeyApprox(timeout: Long): PartialResult[java.util.Map[K, BoundedDouble]] =
rdd.countByKeyApprox(timeout).map(mapAsSerializableJavaMap)
/**
* Approximate version of countByKey that can return a partial result if it does
* not finish within a timeout.
*/
def countByKeyApprox(timeout: Long, confidence: Double = 0.95)
: PartialResult[java.util.Map[K, BoundedDouble]] =
rdd.countByKeyApprox(timeout, confidence).map(mapAsSerializableJavaMap)
/**
* Aggregate the values of each key, using given combine functions and a neutral "zero value".
* This function can return a different result type, U, than the type of the values in this RDD,
* V. Thus, we need one operation for merging a V into a U and one operation for merging two U's,
* as in scala.TraversableOnce. The former operation is used for merging values within a
* partition, and the latter is used for merging values between partitions. To avoid memory
* allocation, both of these functions are allowed to modify and return their first argument
* instead of creating a new U.
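 *
 * A hypothetical sketch computing a per-key (sum, count) pair (assumes
 * `pairs: JavaPairRDD[String, java.lang.Integer]` and Scala 2.12+ SAM conversion of lambdas
 * to JFunction2; illustrative only):
 * {{{
 *   val sumCounts = pairs.aggregateByKey((0, 0), new HashPartitioner(4),
 *     (acc: (Int, Int), v: java.lang.Integer) => (acc._1 + v, acc._2 + 1),
 *     (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2))
 * }}}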
*/
def aggregateByKey[U](zeroValue: U, partitioner: Partitioner, seqFunc: JFunction2[U, V, U],
combFunc: JFunction2[U, U, U]): JavaPairRDD[K, U] = {
implicit val ctag: ClassTag[U] = fakeClassTag
fromRDD(rdd.aggregateByKey(zeroValue, partitioner)(seqFunc, combFunc))
}
/**
* Aggregate the values of each key, using given combine functions and a neutral "zero value".
* This function can return a different result type, U, than the type of the values in this RDD,
* V. Thus, we need one operation for merging a V into a U and one operation for merging two U's,
* as in scala.TraversableOnce. The former operation is used for merging values within a
* partition, and the latter is used for merging values between partitions. To avoid memory
* allocation, both of these functions are allowed to modify and return their first argument
* instead of creating a new U.
*/
def aggregateByKey[U](zeroValue: U, numPartitions: Int, seqFunc: JFunction2[U, V, U],
combFunc: JFunction2[U, U, U]): JavaPairRDD[K, U] = {
implicit val ctag: ClassTag[U] = fakeClassTag
fromRDD(rdd.aggregateByKey(zeroValue, numPartitions)(seqFunc, combFunc))
}
/**
* Aggregate the values of each key, using given combine functions and a neutral "zero value".
* This function can return a different result type, U, than the type of the values in this RDD,
* V. Thus, we need one operation for merging a V into a U and one operation for merging two U's.
* The former operation is used for merging values within a partition, and the latter is used for
* merging values between partitions. To avoid memory allocation, both of these functions are
* allowed to modify and return their first argument instead of creating a new U.
*/
def aggregateByKey[U](zeroValue: U, seqFunc: JFunction2[U, V, U], combFunc: JFunction2[U, U, U]):
JavaPairRDD[K, U] = {
implicit val ctag: ClassTag[U] = fakeClassTag
fromRDD(rdd.aggregateByKey(zeroValue)(seqFunc, combFunc))
}
/**
* Merge the values for each key using an associative function and a neutral "zero value" which
* may be added to the result an arbitrary number of times, and must not change the result
 * (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication).
*/
def foldByKey(zeroValue: V, partitioner: Partitioner, func: JFunction2[V, V, V])
: JavaPairRDD[K, V] = fromRDD(rdd.foldByKey(zeroValue, partitioner)(func))
/**
* Merge the values for each key using an associative function and a neutral "zero value" which
* may be added to the result an arbitrary number of times, and must not change the result
 * (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication).
*/
def foldByKey(zeroValue: V, numPartitions: Int, func: JFunction2[V, V, V]): JavaPairRDD[K, V] =
fromRDD(rdd.foldByKey(zeroValue, numPartitions)(func))
/**
* Merge the values for each key using an associative function and a neutral "zero value"
* which may be added to the result an arbitrary number of times, and must not change the result
 * (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication).
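 *
 * A hypothetical sketch summing the values per key (assumes
 * `pairs: JavaPairRDD[String, java.lang.Integer]` and Scala 2.12+ SAM conversion of the lambda
 * to JFunction2; illustrative only):
 * {{{
 *   val sums = pairs.foldByKey(Int.box(0),
 *     (a: java.lang.Integer, b: java.lang.Integer) => Int.box(a + b))
 * }}}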
*/
def foldByKey(zeroValue: V, func: JFunction2[V, V, V]): JavaPairRDD[K, V] =
fromRDD(rdd.foldByKey(zeroValue)(func))
/**
* Merge the values for each key using an associative reduce function. This will also perform
* the merging locally on each mapper before sending results to a reducer, similarly to a
* "combiner" in MapReduce. Output will be hash-partitioned with numPartitions partitions.
*/
def reduceByKey(func: JFunction2[V, V, V], numPartitions: Int): JavaPairRDD[K, V] =
fromRDD(rdd.reduceByKey(func, numPartitions))
/**
* Group the values for each key in the RDD into a single sequence. Allows controlling the
* partitioning of the resulting key-value pair RDD by passing a Partitioner.
*
* Note: If you are grouping in order to perform an aggregation (such as a sum or average) over
* each key, using [[JavaPairRDD.reduceByKey]] or [[JavaPairRDD.combineByKey]]
* will provide much better performance.
*/
def groupByKey(partitioner: Partitioner): JavaPairRDD[K, JIterable[V]] =
fromRDD(groupByResultToJava(rdd.groupByKey(partitioner)))
/**
* Group the values for each key in the RDD into a single sequence. Hash-partitions the
 * resulting RDD into `numPartitions` partitions.
*
* Note: If you are grouping in order to perform an aggregation (such as a sum or average) over
* each key, using [[JavaPairRDD.reduceByKey]] or [[JavaPairRDD.combineByKey]]
* will provide much better performance.
*/
def groupByKey(numPartitions: Int): JavaPairRDD[K, JIterable[V]] =
fromRDD(groupByResultToJava(rdd.groupByKey(numPartitions)))
/**
* Return an RDD with the elements from `this` that are not in `other`.
*
 * Uses `this` RDD's partitioner and partition count, because even if `other` is huge the
 * resulting RDD will be no larger than `this`.
*/
def subtract(other: JavaPairRDD[K, V]): JavaPairRDD[K, V] =
fromRDD(rdd.subtract(other))
/**
* Return an RDD with the elements from `this` that are not in `other`.
*/
def subtract(other: JavaPairRDD[K, V], numPartitions: Int): JavaPairRDD[K, V] =
fromRDD(rdd.subtract(other, numPartitions))
/**
* Return an RDD with the elements from `this` that are not in `other`.
*/
def subtract(other: JavaPairRDD[K, V], p: Partitioner): JavaPairRDD[K, V] =
fromRDD(rdd.subtract(other, p))
/**
* Return an RDD with the pairs from `this` whose keys are not in `other`.
*
 * Uses `this` RDD's partitioner and partition count, because even if `other` is huge the
 * resulting RDD will be no larger than `this`.
*/
def subtractByKey[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, V] = {
implicit val ctag: ClassTag[W] = fakeClassTag
fromRDD(rdd.subtractByKey(other))
}
/** Return an RDD with the pairs from `this` whose keys are not in `other`. */
def subtractByKey[W](other: JavaPairRDD[K, W], numPartitions: Int): JavaPairRDD[K, V] = {
implicit val ctag: ClassTag[W] = fakeClassTag
fromRDD(rdd.subtractByKey(other, numPartitions))
}
/** Return an RDD with the pairs from `this` whose keys are not in `other`. */
def subtractByKey[W](other: JavaPairRDD[K, W], p: Partitioner): JavaPairRDD[K, V] = {
implicit val ctag: ClassTag[W] = fakeClassTag
fromRDD(rdd.subtractByKey(other, p))
}
/**
* Return a copy of the RDD partitioned using the specified partitioner.
*/
def partitionBy(partitioner: Partitioner): JavaPairRDD[K, V] =
fromRDD(rdd.partitionBy(partitioner))
/**
 * Return an RDD containing all pairs of elements with matching keys in `this` and `other`. Each
 * pair of elements will be returned as a (k, (v1, v2)) tuple, where (k, v1) is in `this` and
 * (k, v2) is in `other`. Uses the given Partitioner to partition the output RDD.
 */
def join[W](other: JavaPairRDD[K, W], partitioner: Partitioner): JavaPairRDD[K, (V, W)] =
fromRDD(rdd.join(other, partitioner))
/**
* Perform a left outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (v, Some(w))) for w in `other`, or the
* pair (k, (v, None)) if no elements in `other` have key k. Uses the given Partitioner to
* partition the output RDD.
*/
def leftOuterJoin[W](other: JavaPairRDD[K, W], partitioner: Partitioner)
: JavaPairRDD[K, (V, Optional[W])] = {
val joinResult = rdd.leftOuterJoin(other, partitioner)
fromRDD(joinResult.mapValues{case (v, w) => (v, JavaUtils.optionToOptional(w))})
}
/**
* Perform a right outer join of `this` and `other`. For each element (k, w) in `other`, the
* resulting RDD will either contain all pairs (k, (Some(v), w)) for v in `this`, or the
* pair (k, (None, w)) if no elements in `this` have key k. Uses the given Partitioner to
* partition the output RDD.
*/
def rightOuterJoin[W](other: JavaPairRDD[K, W], partitioner: Partitioner)
: JavaPairRDD[K, (Optional[V], W)] = {
val joinResult = rdd.rightOuterJoin(other, partitioner)
fromRDD(joinResult.mapValues{case (v, w) => (JavaUtils.optionToOptional(v), w)})
}
/**
* Perform a full outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (Some(v), Some(w))) for w in `other`, or
* the pair (k, (Some(v), None)) if no elements in `other` have key k. Similarly, for each
* element (k, w) in `other`, the resulting RDD will either contain all pairs
* (k, (Some(v), Some(w))) for v in `this`, or the pair (k, (None, Some(w))) if no elements
* in `this` have key k. Uses the given Partitioner to partition the output RDD.
*/
def fullOuterJoin[W](other: JavaPairRDD[K, W], partitioner: Partitioner)
: JavaPairRDD[K, (Optional[V], Optional[W])] = {
val joinResult = rdd.fullOuterJoin(other, partitioner)
fromRDD(joinResult.mapValues{ case (v, w) =>
(JavaUtils.optionToOptional(v), JavaUtils.optionToOptional(w))
})
}
/**
* Simplified version of combineByKey that hash-partitions the resulting RDD using the existing
* partitioner/parallelism level and using map-side aggregation.
*/
def combineByKey[C](createCombiner: JFunction[V, C],
mergeValue: JFunction2[C, V, C],
mergeCombiners: JFunction2[C, C, C]): JavaPairRDD[K, C] = {
implicit val ctag: ClassTag[C] = fakeClassTag
fromRDD(combineByKey(createCombiner, mergeValue, mergeCombiners, defaultPartitioner(rdd)))
}
/**
* Merge the values for each key using an associative reduce function. This will also perform
* the merging locally on each mapper before sending results to a reducer, similarly to a
* "combiner" in MapReduce. Output will be hash-partitioned with the existing partitioner/
* parallelism level.
*/
def reduceByKey(func: JFunction2[V, V, V]): JavaPairRDD[K, V] = {
fromRDD(reduceByKey(defaultPartitioner(rdd), func))
}
/**
* Group the values for each key in the RDD into a single sequence. Hash-partitions the
* resulting RDD with the existing partitioner/parallelism level.
*
* Note: If you are grouping in order to perform an aggregation (such as a sum or average) over
* each key, using [[JavaPairRDD.reduceByKey]] or [[JavaPairRDD.combineByKey]]
* will provide much better performance.
*/
def groupByKey(): JavaPairRDD[K, JIterable[V]] =
fromRDD(groupByResultToJava(rdd.groupByKey()))
/**
* Return an RDD containing all pairs of elements with matching keys in `this` and `other`. Each
* pair of elements will be returned as a (k, (v1, v2)) tuple, where (k, v1) is in `this` and
* (k, v2) is in `other`. Performs a hash join across the cluster.
*/
def join[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, (V, W)] =
fromRDD(rdd.join(other))
/**
* Return an RDD containing all pairs of elements with matching keys in `this` and `other`. Each
* pair of elements will be returned as a (k, (v1, v2)) tuple, where (k, v1) is in `this` and
* (k, v2) is in `other`. Performs a hash join across the cluster.
*/
def join[W](other: JavaPairRDD[K, W], numPartitions: Int): JavaPairRDD[K, (V, W)] =
fromRDD(rdd.join(other, numPartitions))
/**
* Perform a left outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (v, Some(w))) for w in `other`, or the
* pair (k, (v, None)) if no elements in `other` have key k. Hash-partitions the output
* using the existing partitioner/parallelism level.
*/
def leftOuterJoin[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, (V, Optional[W])] = {
val joinResult = rdd.leftOuterJoin(other)
fromRDD(joinResult.mapValues{case (v, w) => (v, JavaUtils.optionToOptional(w))})
}
/**
* Perform a left outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (v, Some(w))) for w in `other`, or the
* pair (k, (v, None)) if no elements in `other` have key k. Hash-partitions the output
* into `numPartitions` partitions.
*/
def leftOuterJoin[W](other: JavaPairRDD[K, W], numPartitions: Int)
: JavaPairRDD[K, (V, Optional[W])] = {
val joinResult = rdd.leftOuterJoin(other, numPartitions)
fromRDD(joinResult.mapValues{case (v, w) => (v, JavaUtils.optionToOptional(w))})
}
/**
* Perform a right outer join of `this` and `other`. For each element (k, w) in `other`, the
* resulting RDD will either contain all pairs (k, (Some(v), w)) for v in `this`, or the
* pair (k, (None, w)) if no elements in `this` have key k. Hash-partitions the resulting
* RDD using the existing partitioner/parallelism level.
*/
def rightOuterJoin[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, (Optional[V], W)] = {
val joinResult = rdd.rightOuterJoin(other)
fromRDD(joinResult.mapValues{case (v, w) => (JavaUtils.optionToOptional(v), w)})
}
/**
* Perform a right outer join of `this` and `other`. For each element (k, w) in `other`, the
* resulting RDD will either contain all pairs (k, (Some(v), w)) for v in `this`, or the
* pair (k, (None, w)) if no elements in `this` have key k. Hash-partitions the resulting
* RDD into the given number of partitions.
*/
def rightOuterJoin[W](other: JavaPairRDD[K, W], numPartitions: Int)
: JavaPairRDD[K, (Optional[V], W)] = {
val joinResult = rdd.rightOuterJoin(other, numPartitions)
fromRDD(joinResult.mapValues{case (v, w) => (JavaUtils.optionToOptional(v), w)})
}
/**
* Perform a full outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (Some(v), Some(w))) for w in `other`, or
* the pair (k, (Some(v), None)) if no elements in `other` have key k. Similarly, for each
* element (k, w) in `other`, the resulting RDD will either contain all pairs
* (k, (Some(v), Some(w))) for v in `this`, or the pair (k, (None, Some(w))) if no elements
* in `this` have key k. Hash-partitions the resulting RDD using the existing partitioner/
* parallelism level.
*/
def fullOuterJoin[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, (Optional[V], Optional[W])] = {
val joinResult = rdd.fullOuterJoin(other)
fromRDD(joinResult.mapValues{ case (v, w) =>
(JavaUtils.optionToOptional(v), JavaUtils.optionToOptional(w))
})
}
/**
* Perform a full outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (Some(v), Some(w))) for w in `other`, or
* the pair (k, (Some(v), None)) if no elements in `other` have key k. Similarly, for each
* element (k, w) in `other`, the resulting RDD will either contain all pairs
* (k, (Some(v), Some(w))) for v in `this`, or the pair (k, (None, Some(w))) if no elements
* in `this` have key k. Hash-partitions the resulting RDD into the given number of partitions.
*/
def fullOuterJoin[W](other: JavaPairRDD[K, W], numPartitions: Int)
: JavaPairRDD[K, (Optional[V], Optional[W])] = {
val joinResult = rdd.fullOuterJoin(other, numPartitions)
fromRDD(joinResult.mapValues{ case (v, w) =>
(JavaUtils.optionToOptional(v), JavaUtils.optionToOptional(w))
})
}
/**
* Return the key-value pairs in this RDD to the master as a Map.
*/
def collectAsMap(): java.util.Map[K, V] = mapAsSerializableJavaMap(rdd.collectAsMap())
/**
* Pass each value in the key-value pair RDD through a map function without changing the keys;
* this also retains the original RDD's partitioning.
*/
def mapValues[U](f: JFunction[V, U]): JavaPairRDD[K, U] = {
implicit val ctag: ClassTag[U] = fakeClassTag
fromRDD(rdd.mapValues(f))
}
/**
* Pass each value in the key-value pair RDD through a flatMap function without changing the
* keys; this also retains the original RDD's partitioning.
*/
def flatMapValues[U](f: JFunction[V, java.lang.Iterable[U]]): JavaPairRDD[K, U] = {
import scala.collection.JavaConverters._
def fn: (V) => Iterable[U] = (x: V) => f.call(x).asScala
implicit val ctag: ClassTag[U] = fakeClassTag
fromRDD(rdd.flatMapValues(fn))
}
/**
* For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
* list of values for that key in `this` as well as `other`.
*/
def cogroup[W](other: JavaPairRDD[K, W], partitioner: Partitioner)
: JavaPairRDD[K, (JIterable[V], JIterable[W])] =
fromRDD(cogroupResultToJava(rdd.cogroup(other, partitioner)))
/**
* For each key k in `this` or `other1` or `other2`, return a resulting RDD that contains a
* tuple with the list of values for that key in `this`, `other1` and `other2`.
*/
def cogroup[W1, W2](other1: JavaPairRDD[K, W1], other2: JavaPairRDD[K, W2],
partitioner: Partitioner): JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2])] =
fromRDD(cogroupResult2ToJava(rdd.cogroup(other1, other2, partitioner)))
/**
* For each key k in `this` or `other1` or `other2` or `other3`,
* return a resulting RDD that contains a tuple with the list of values
* for that key in `this`, `other1`, `other2` and `other3`.
*/
def cogroup[W1, W2, W3](other1: JavaPairRDD[K, W1],
other2: JavaPairRDD[K, W2],
other3: JavaPairRDD[K, W3],
partitioner: Partitioner)
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2], JIterable[W3])] =
fromRDD(cogroupResult3ToJava(rdd.cogroup(other1, other2, other3, partitioner)))
/**
* For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
* list of values for that key in `this` as well as `other`.
*/
def cogroup[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, (JIterable[V], JIterable[W])] =
fromRDD(cogroupResultToJava(rdd.cogroup(other)))
/**
* For each key k in `this` or `other1` or `other2`, return a resulting RDD that contains a
* tuple with the list of values for that key in `this`, `other1` and `other2`.
*/
def cogroup[W1, W2](other1: JavaPairRDD[K, W1], other2: JavaPairRDD[K, W2])
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2])] =
fromRDD(cogroupResult2ToJava(rdd.cogroup(other1, other2)))
/**
* For each key k in `this` or `other1` or `other2` or `other3`,
* return a resulting RDD that contains a tuple with the list of values
* for that key in `this`, `other1`, `other2` and `other3`.
*/
def cogroup[W1, W2, W3](other1: JavaPairRDD[K, W1],
other2: JavaPairRDD[K, W2],
other3: JavaPairRDD[K, W3])
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2], JIterable[W3])] =
fromRDD(cogroupResult3ToJava(rdd.cogroup(other1, other2, other3)))
/**
* For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
* list of values for that key in `this` as well as `other`.
*/
def cogroup[W](other: JavaPairRDD[K, W], numPartitions: Int)
: JavaPairRDD[K, (JIterable[V], JIterable[W])] =
fromRDD(cogroupResultToJava(rdd.cogroup(other, numPartitions)))
/**
* For each key k in `this` or `other1` or `other2`, return a resulting RDD that contains a
* tuple with the list of values for that key in `this`, `other1` and `other2`.
*/
def cogroup[W1, W2](other1: JavaPairRDD[K, W1], other2: JavaPairRDD[K, W2], numPartitions: Int)
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2])] =
fromRDD(cogroupResult2ToJava(rdd.cogroup(other1, other2, numPartitions)))
/**
* For each key k in `this` or `other1` or `other2` or `other3`,
* return a resulting RDD that contains a tuple with the list of values
* for that key in `this`, `other1`, `other2` and `other3`.
*/
def cogroup[W1, W2, W3](other1: JavaPairRDD[K, W1],
other2: JavaPairRDD[K, W2],
other3: JavaPairRDD[K, W3],
numPartitions: Int)
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2], JIterable[W3])] =
fromRDD(cogroupResult3ToJava(rdd.cogroup(other1, other2, other3, numPartitions)))
/** Alias for cogroup. */
def groupWith[W](other: JavaPairRDD[K, W]): JavaPairRDD[K, (JIterable[V], JIterable[W])] =
fromRDD(cogroupResultToJava(rdd.groupWith(other)))
/** Alias for cogroup. */
def groupWith[W1, W2](other1: JavaPairRDD[K, W1], other2: JavaPairRDD[K, W2])
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2])] =
fromRDD(cogroupResult2ToJava(rdd.groupWith(other1, other2)))
/** Alias for cogroup. */
def groupWith[W1, W2, W3](other1: JavaPairRDD[K, W1],
other2: JavaPairRDD[K, W2],
other3: JavaPairRDD[K, W3])
: JavaPairRDD[K, (JIterable[V], JIterable[W1], JIterable[W2], JIterable[W3])] =
fromRDD(cogroupResult3ToJava(rdd.groupWith(other1, other2, other3)))
/**
* Return the list of values in the RDD for key `key`. This operation is done efficiently if the
* RDD has a known partitioner by only searching the partition that the key maps to.
*/
def lookup(key: K): JList[V] = rdd.lookup(key).asJava
/** Output the RDD to any Hadoop-supported file system. */
def saveAsHadoopFile[F <: OutputFormat[_, _]](
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[F],
conf: JobConf) {
rdd.saveAsHadoopFile(path, keyClass, valueClass, outputFormatClass, conf)
}
/** Output the RDD to any Hadoop-supported file system. */
def saveAsHadoopFile[F <: OutputFormat[_, _]](
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[F]) {
rdd.saveAsHadoopFile(path, keyClass, valueClass, outputFormatClass)
}
/** Output the RDD to any Hadoop-supported file system, compressing with the supplied codec. */
def saveAsHadoopFile[F <: OutputFormat[_, _]](
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[F],
codec: Class[_ <: CompressionCodec]) {
rdd.saveAsHadoopFile(path, keyClass, valueClass, outputFormatClass, codec)
}
/** Output the RDD to any Hadoop-supported file system. */
def saveAsNewAPIHadoopFile[F <: NewOutputFormat[_, _]](
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[F],
conf: Configuration) {
rdd.saveAsNewAPIHadoopFile(path, keyClass, valueClass, outputFormatClass, conf)
}
/**
* Output the RDD to any Hadoop-supported storage system, using
* a Configuration object for that storage system.
*/
def saveAsNewAPIHadoopDataset(conf: Configuration) {
rdd.saveAsNewAPIHadoopDataset(conf)
}
/** Output the RDD to any Hadoop-supported file system. */
def saveAsNewAPIHadoopFile[F <: NewOutputFormat[_, _]](
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[F]) {
rdd.saveAsNewAPIHadoopFile(path, keyClass, valueClass, outputFormatClass)
}
/**
* Output the RDD to any Hadoop-supported storage system, using a Hadoop JobConf object for
* that storage system. The JobConf should set an OutputFormat and any output paths required
* (e.g. a table name to write to) in the same way as it would be configured for a Hadoop
* MapReduce job.
*/
def saveAsHadoopDataset(conf: JobConf) {
rdd.saveAsHadoopDataset(conf)
}
/**
* Repartition the RDD according to the given partitioner and, within each resulting partition,
* sort records by their keys.
*
* This is more efficient than calling `repartition` and then sorting within each partition
* because it can push the sorting down into the shuffle machinery.
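 *
 * A hypothetical sketch (assumes `pairs: JavaPairRDD[java.lang.Integer, String]` with naturally
 * ordered keys; illustrative only):
 * {{{
 *   val shuffledAndSorted = pairs.repartitionAndSortWithinPartitions(new HashPartitioner(8))
 *   // each of the 8 output partitions now holds its records sorted by key
 * }}}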
*/
def repartitionAndSortWithinPartitions(partitioner: Partitioner): JavaPairRDD[K, V] = {
val comp = com.google.common.collect.Ordering.natural().asInstanceOf[Comparator[K]]
repartitionAndSortWithinPartitions(partitioner, comp)
}
/**
* Repartition the RDD according to the given partitioner and, within each resulting partition,
* sort records by their keys.
*
* This is more efficient than calling `repartition` and then sorting within each partition
* because it can push the sorting down into the shuffle machinery.
*/
def repartitionAndSortWithinPartitions(partitioner: Partitioner, comp: Comparator[K])
: JavaPairRDD[K, V] = {
implicit val ordering = comp // Allow implicit conversion of Comparator to Ordering.
fromRDD(
new OrderedRDDFunctions[K, V, (K, V)](rdd).repartitionAndSortWithinPartitions(partitioner))
}
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements in
* ascending order. Calling `collect` or `save` on the resulting RDD will return or output an
* ordered list of records (in the `save` case, they will be written to multiple `part-X` files
* in the filesystem, in order of the keys).
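 *
 * e.g. (illustrative): {{{ pairs.sortByKey().collect() }}} returns the records in ascending
 * key order, assuming naturally ordered keys.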
*/
def sortByKey(): JavaPairRDD[K, V] = sortByKey(true)
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements. Calling
* `collect` or `save` on the resulting RDD will return or output an ordered list of records
* (in the `save` case, they will be written to multiple `part-X` files in the filesystem, in
* order of the keys).
*/
def sortByKey(ascending: Boolean): JavaPairRDD[K, V] = {
val comp = com.google.common.collect.Ordering.natural().asInstanceOf[Comparator[K]]
sortByKey(comp, ascending)
}
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements. Calling
* `collect` or `save` on the resulting RDD will return or output an ordered list of records
* (in the `save` case, they will be written to multiple `part-X` files in the filesystem, in
* order of the keys).
*/
def sortByKey(ascending: Boolean, numPartitions: Int): JavaPairRDD[K, V] = {
val comp = com.google.common.collect.Ordering.natural().asInstanceOf[Comparator[K]]
sortByKey(comp, ascending, numPartitions)
}
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements. Calling
* `collect` or `save` on the resulting RDD will return or output an ordered list of records
* (in the `save` case, they will be written to multiple `part-X` files in the filesystem, in
* order of the keys).
*/
def sortByKey(comp: Comparator[K]): JavaPairRDD[K, V] = sortByKey(comp, true)
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements. Calling
* `collect` or `save` on the resulting RDD will return or output an ordered list of records
* (in the `save` case, they will be written to multiple `part-X` files in the filesystem, in
* order of the keys).
*/
def sortByKey(comp: Comparator[K], ascending: Boolean): JavaPairRDD[K, V] = {
implicit val ordering = comp // Allow implicit conversion of Comparator to Ordering.
fromRDD(new OrderedRDDFunctions[K, V, (K, V)](rdd).sortByKey(ascending))
}
/**
* Sort the RDD by key, so that each partition contains a sorted range of the elements. Calling
* `collect` or `save` on the resulting RDD will return or output an ordered list of records
* (in the `save` case, they will be written to multiple `part-X` files in the filesystem, in
* order of the keys).
*/
def sortByKey(comp: Comparator[K], ascending: Boolean, numPartitions: Int): JavaPairRDD[K, V] = {
implicit val ordering = comp // Allow implicit conversion of Comparator to Ordering.
fromRDD(new OrderedRDDFunctions[K, V, (K, V)](rdd).sortByKey(ascending, numPartitions))
}
/**
* Return an RDD with the keys of each tuple.
*/
def keys(): JavaRDD[K] = JavaRDD.fromRDD[K](rdd.map(_._1))
/**
* Return an RDD with the values of each tuple.
*/
def values(): JavaRDD[V] = JavaRDD.fromRDD[V](rdd.map(_._2))
/**
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
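 *
 * A hypothetical sketch (assumes `pairs: JavaPairRDD[String, String]`; illustrative only):
 * {{{
 *   val approxDistinct = pairs.countApproxDistinctByKey(0.05, new HashPartitioner(4))
 * }}}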
*
* @param relativeSD Relative accuracy. Smaller values create counters that require more space.
* It must be greater than 0.000017.
* @param partitioner partitioner of the resulting RDD.
*/
def countApproxDistinctByKey(relativeSD: Double, partitioner: Partitioner): JavaPairRDD[K, Long] =
{
fromRDD(rdd.countApproxDistinctByKey(relativeSD, partitioner))
}
/**
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* @param relativeSD Relative accuracy. Smaller values create counters that require more space.
* It must be greater than 0.000017.
* @param numPartitions number of partitions of the resulting RDD.
*/
def countApproxDistinctByKey(relativeSD: Double, numPartitions: Int): JavaPairRDD[K, Long] = {
fromRDD(rdd.countApproxDistinctByKey(relativeSD, numPartitions))
}
/**
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* @param relativeSD Relative accuracy. Smaller values create counters that require more space.
* It must be greater than 0.000017.
*/
def countApproxDistinctByKey(relativeSD: Double): JavaPairRDD[K, Long] = {
fromRDD(rdd.countApproxDistinctByKey(relativeSD))
}
/** Assign a name to this RDD */
def setName(name: String): JavaPairRDD[K, V] = {
rdd.setName(name)
this
}
}
object JavaPairRDD {
private[spark]
def groupByResultToJava[K: ClassTag, T](rdd: RDD[(K, Iterable[T])]): RDD[(K, JIterable[T])] = {
rddToPairRDDFunctions(rdd).mapValues(_.asJava)
}
private[spark]
def cogroupResultToJava[K: ClassTag, V, W](
rdd: RDD[(K, (Iterable[V], Iterable[W]))]): RDD[(K, (JIterable[V], JIterable[W]))] = {
rddToPairRDDFunctions(rdd).mapValues(x => (x._1.asJava, x._2.asJava))
}
private[spark]
def cogroupResult2ToJava[K: ClassTag, V, W1, W2](
rdd: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2]))])
: RDD[(K, (JIterable[V], JIterable[W1], JIterable[W2]))] = {
rddToPairRDDFunctions(rdd).mapValues(x => (x._1.asJava, x._2.asJava, x._3.asJava))
}
private[spark]
def cogroupResult3ToJava[K: ClassTag, V, W1, W2, W3](
rdd: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2], Iterable[W3]))])
: RDD[(K, (JIterable[V], JIterable[W1], JIterable[W2], JIterable[W3]))] = {
rddToPairRDDFunctions(rdd).mapValues(x => (x._1.asJava, x._2.asJava, x._3.asJava, x._4.asJava))
}
def fromRDD[K: ClassTag, V: ClassTag](rdd: RDD[(K, V)]): JavaPairRDD[K, V] = {
new JavaPairRDD[K, V](rdd)
}
implicit def toRDD[K, V](rdd: JavaPairRDD[K, V]): RDD[(K, V)] = rdd.rdd
private[spark]
implicit def toScalaFunction2[T1, T2, R](fun: JFunction2[T1, T2, R]): Function2[T1, T2, R] = {
(x: T1, x1: T2) => fun.call(x, x1)
}
private[spark] implicit def toScalaFunction[T, R](fun: JFunction[T, R]): T => R = x => fun.call(x)
private[spark]
implicit def pairFunToScalaFun[A, B, C](x: PairFunction[A, B, C]): A => (B, C) = y => x.call(y)
/** Convert a JavaRDD of key-value pairs to JavaPairRDD. */
def fromJavaRDD[K, V](rdd: JavaRDD[(K, V)]): JavaPairRDD[K, V] = {
implicit val ctagK: ClassTag[K] = fakeClassTag
implicit val ctagV: ClassTag[V] = fakeClassTag
new JavaPairRDD[K, V](rdd.rdd)
}
}
| chenc10/Spark-PAF | core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala | Scala | apache-2.0 | 46,160 |
/*
* Copyright (c) 2016 Webtrends (http://www.webtrends.com)
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.libs.iteratee
import scala.concurrent.ExecutionContext
object TestExecutionContext {
/**
* Create a `TestExecutionContext` that delegates to the iteratee package's default `ExecutionContext`.
*/
def apply(): TestExecutionContext = new TestExecutionContext(Execution.trampoline)
}
/**
* An `ExecutionContext` that counts its executions.
*
* @param delegate The underlying `ExecutionContext` to delegate execution to.
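 *
 * A minimal usage sketch (illustrative only):
 * {{{
 *   val tec = TestExecutionContext()
 *   tec.preparable {
 *     val ec = tec.prepare()
 *     ec.execute(new Runnable { def run(): Unit = () })
 *     assert(tec.executionCount == 1)
 *   }
 * }}}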
*/
class TestExecutionContext(delegate: ExecutionContext) extends ExecutionContext {
top =>
val count = new java.util.concurrent.atomic.AtomicInteger()
val local = new ThreadLocal[java.lang.Boolean]
def preparable[A](body: => A): A = {
local.set(true)
try body finally local.set(null)
}
def execute(runnable: Runnable): Unit = {
throw new RuntimeException("Cannot execute unprepared TestExecutionContext")
}
def reportFailure(t: Throwable): Unit = {
println(t)
}
override def prepare(): ExecutionContext = {
val isLocal = Option(local.get()).getOrElse(false: java.lang.Boolean)
if (!isLocal) throw new RuntimeException("Can only prepare TestExecutionContext within 'preparable' scope")
val preparedDelegate = delegate.prepare()
return new ExecutionContext {
def execute(runnable: Runnable): Unit = {
count.getAndIncrement()
preparedDelegate.execute(runnable)
}
def reportFailure(t: Throwable): Unit = {
println(t)
}
}
}
def executionCount: Int = count.get()
}
| pcross616/wookiee | wookiee-core/src/test/scala/com/webtrends/harness/libs/iteratee/TestExecutionContext.scala | Scala | apache-2.0 | 2,298 |
package it.unich.jandom.domains.objects
import org.scalatest.FunSpec
import org.scalatest.prop.TableFor1
/**
 * This suite implements tests for those object domains which precisely track
* definite nullness.
* Created by amato on 4/1/14.
*/
trait PreciseDefiniteNullness extends ObjectDomainSuite {
describe("The bottom element") {
it("has all variable definitively null") {
forAll (someFibersAndVars) { (fiber, i) =>
assert(dom.bottom(fiber).mustBeNull(i))
}
}
}
describe("The assignNull method") {
it("makes variable definitively null") {
forAll (somePropertiesAndVars) { (p,i) =>
assert(p.assignNull(i).mustBeNull(i))
}
}
it("should produce a lesser property than top") {
forAll (someFibersAndVars) { (f,i) =>
val top = dom.top(f)
assert(top.assignNull(i) < top)
}
}
}
describe("The testNull method") {
it("is the identity on bottom") {
forAll(someFibersAndVars) { (fiber, j) =>
assert(dom.bottom(fiber).testNull(j).isBottom)
}
}
}
describe("The testNotNull method") {
it("is identity on bottom") {
forAll(someFibersAndVars) { (fiber, j) =>
assert(dom.bottom(fiber).testNotNull(j).isBottom)
}
}
}
}
| francescaScozzari/Jandom | core/src/test/scala/it/unich/jandom/domains/objects/PreciseDefiniteNullness.scala | Scala | lgpl-3.0 | 1,289 |
package sledtr.section
import sledtr.plugin._
import sledtr.shelf._
import sledtr.MyPreDef._
abstract class SectionCompanion extends SelectedCompanion[Section] {
def apply(title: String, chapter: Chapter, url_list: List[String], map: ConfigMap): Section
def canFormat(url: String): Boolean
} | K2Da/sledtr | src/main/scala/sledtr/section/FormatterCompanion.scala | Scala | gpl-3.0 | 296 |
class Hello {
type T = String
val hello: String = "Hello"
var name: T = "John"
def greet: String =
hello + " " + name
}
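// Usage sketch (illustrative, not part of the original example): `new Hello().greet`
// evaluates to "Hello John".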
| grzegorzbalcerek/scala-book-examples | examples/ClassMembers1.scala | Scala | mit | 132 |
/*
* Copyright 2007-2008 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb.util
import _root_.org.specs._
import _root_.org.specs.runner._
import _root_.org.specs.util.WaitFor
import _root_.scala.actors.Actor
import _root_.java.util.concurrent._
import Helpers._
class ActorPingUnitTest extends JUnit4(ActorPingUnit)
object ActorPingUnit extends Specification with PingedService with WaitFor {
def pingService = {
service.start
ActorPing.schedule(service, Alive, TimeSpan(10))
waitFor(100.ms)
service.pinged must beTrue
}
"The ActorPing object" can {
"be restarted twice" in {
ActorPing.restart
ActorPing.restart
pingService
}
"be shutdown and restarted" in {
ActorPing.shutdown
ActorPing.restart
pingService
}
"be shutdown twice" in {
ActorPing.shutdown
ActorPing.shutdown
pingService must throwAn[ActorPingException]
}
}
}
| beni55/liftweb | lift-util/src/test/scala/net/liftweb/util/ActorPingUnit.scala | Scala | apache-2.0 | 1,489 |
package objsets
import org.junit._
import org.junit.Assert.assertEquals
class TweetSetSuite {
trait TestSets {
val set1 = new Empty
val set2 = set1.incl(new Tweet("a", "a body", 20))
val set3 = set2.incl(new Tweet("b", "b body", 20))
val c = new Tweet("c", "c body", 7)
val d = new Tweet("d", "d body", 9)
val set4c = set3.incl(c)
val set4d = set3.incl(d)
val set5 = set4c.incl(d)
val set6 = set1.incl(new Tweet("e", "e body", 1))
.incl(new Tweet("f", "f body", 2))
.incl(new Tweet("g", "g body", 5))
.incl(new Tweet("h", "h body", 7))
.incl(new Tweet("bb", "bbbody", 100))
.incl(new Tweet("cc", "ccbody", 12))
}
def asSet(tweets: TweetSet): Set[Tweet] = {
var res = Set[Tweet]()
tweets.foreach(res += _)
res
}
def size(set: TweetSet): Int = asSet(set).size
@Test def `filter: on empty set`: Unit =
new TestSets {
assertEquals(0, size(set1.filter(tw => tw.user == "a")))
}
@Test def `filter: on three nodes tree`: Unit =
new TestSets {
val set = new NonEmpty(new Tweet("a", "a", 1),
new NonEmpty(new Tweet("b", "b", 2), new Empty, new Empty),
new NonEmpty(new Tweet("c", "c", 3), new Empty, new Empty))
val filteredSet = set.filter((_) => true)
assertEquals(3, size(filteredSet))
}
@Test def `filter: a on set5`: Unit =
new TestSets {
assertEquals(1, size(set5.filter(tw => tw.user == "a")))
}
@Test def `filter: twenty on set5`: Unit =
new TestSets {
assertEquals(2, size(set5.filter(tw => tw.retweets == 20)))
}
@Test def `union: set4c and set4d`: Unit =
new TestSets {
assertEquals(4, size(set4c.union(set4d)))
}
@Test def `union: with empty set1`: Unit =
new TestSets {
assertEquals(4, size(set5.union(set1)))
}
@Test def `union: with empty set2`: Unit =
new TestSets {
assertEquals(4, size(set1.union(set5)))
}
@Test def `mostRetweeted: with set5`: Unit =
new TestSets {
val mostRetweeted = set5.mostRetweeted
assertEquals(20, mostRetweeted.retweets)
}
@Test def `mostRetweeted: with empty set`: Unit =
new TestSets {
try {
val mostRetweeted = set1.mostRetweeted
assert(false)
} catch {
case e: NoSuchElementException => () // expected: mostRetweeted throws on an empty set
}
}
@Test def `mostRetweeted: with set2`: Unit =
new TestSets {
val mostRetweeted = set2.mostRetweeted
assertEquals("a", mostRetweeted.user)
assertEquals("a body", mostRetweeted.text)
assertEquals(20, mostRetweeted.retweets)
}
@Test def `mostRetweeted: with set3`: Unit =
new TestSets {
val mostRetweeted = set3.mostRetweeted
assertEquals(20, mostRetweeted.retweets)
}
@Test def `mostRetweeted: with set4c`: Unit =
new TestSets {
val mostRetweeted = set4c.mostRetweeted
assertEquals(20, mostRetweeted.retweets)
}
@Test def `mostRetweeted: with set4d`: Unit =
new TestSets {
val mostRetweeted = set4c.mostRetweeted
assertEquals(20, mostRetweeted.retweets)
}
@Test def `descending: set5`: Unit =
new TestSets {
val trends = set5.descendingByRetweet
assert(!trends.isEmpty)
assert(trends.head.user == "a" || trends.head.user == "b")
}
@Test def `descending: set6`: Unit =
new TestSets {
val trends = set6.descendingByRetweet
assert(!trends.isEmpty)
assert(trends.head.user == "bb" && trends.head.retweets == 100)
assert(trends.tail.head.user == "cc" && trends.tail.head.retweets == 12)
}
@Test def `GoogleVsApple: googleSets is computed`: Unit =
new TestSets {
TweetReader.allTweets
GoogleVsApple.googleTweets
GoogleVsApple.appleTweets
GoogleVsApple.trending
}
@Rule def individualTestTimeout = new org.junit.rules.Timeout(10 * 1000)
}
| rusucosmin/courses | fp/4-object-oriented-sets-rusucosmin/src/test/scala/objsets/TweetSetSuite.scala | Scala | mit | 3,902 |
package com.twitter.finagle.exp.swift
import com.facebook.swift.codec.{ThriftField, ThriftStruct}
import com.facebook.swift.service.{ThriftException, ThriftMethod, ThriftService}
import com.twitter.util.Future
import scala.beans.BeanProperty
import scala.annotation.meta._
@ThriftStruct
case class Test1Exc(
@ThriftField(1) @BeanProperty var m: String,
@ThriftField(2) @BeanProperty var howmany: java.lang.Integer
) extends Exception(m) {
def this() = this("", 0)
}
@ThriftService("Test1")
trait Test1 {
@ThriftMethod(exception = Array(
new ThriftException(`type` = classOf[Test1Exc], id=1)))
def ab(
a: String,
b: java.util.Map[String, java.lang.Integer]
): Future[java.util.Map[String, java.lang.Integer]]
@ThriftMethod
def ping(msg: String): Future[String]
}
| finagle/finagle-swift | core/src/test/scala/com/twitter/finagle/swift/ifaces.scala | Scala | apache-2.0 | 738 |
package org.jetbrains.plugins.scala.lang.psi.stubs.index
import com.intellij.psi.stubs.StringStubIndexExtension
import com.intellij.psi.PsiClass
/**
* User: Alefas
* Date: 10.02.12
*/
class ScAllClassNamesIndex extends StringStubIndexExtension[PsiClass] {
def getKey = ScAllClassNamesIndex.KEY
}
object ScAllClassNamesIndex {
val KEY = ScalaIndexKeys.ALL_CLASS_NAMES
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/index/ScAllClassNamesIndex.scala | Scala | apache-2.0 | 380 |
package org.atmosphere.cpr
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
import akka.actor.ActorSystem
import grizzled.slf4j.Logger
import org.scalatra.atmosphere.{ ScalatraBroadcaster, WireFormat }
import scala.collection.JavaConverters._
import scala.collection.concurrent.{ Map => ConcurrentMap }
object ScalatraBroadcasterFactory {
}
class ScalatraBroadcasterFactory(cfg: AtmosphereConfig, bCfg: BroadcasterConf)(implicit wireFormat: WireFormat, system: ActorSystem) extends BroadcasterFactory {
BroadcasterFactory.setBroadcasterFactory(this, cfg)
private[this] val logger = Logger[ScalatraBroadcasterFactory]
private[this] val store: ConcurrentMap[Any, Broadcaster] = new ConcurrentHashMap[Any, Broadcaster]().asScala
private def createBroadcaster[T <: Broadcaster](c: Class[T], id: Any): T = {
try {
val b: T = if (classOf[ScalatraBroadcaster].isAssignableFrom(c)) {
bCfg.broadcasterClass.getConstructor(classOf[WireFormat], classOf[ActorSystem]).newInstance(wireFormat, system).asInstanceOf[T]
} else {
cfg.framework().newClassInstance(c, c)
}
b.initialize(id.toString, bCfg.uri, cfg)
bCfg.extraSetup(b)
b.setSuspendPolicy(-1, Broadcaster.POLICY.FIFO)
if (b.getBroadcasterConfig == null) {
b.setBroadcasterConfig(new BroadcasterConfig(cfg.framework().broadcasterFilters, cfg, id.toString).init())
}
b.setBroadcasterLifeCyclePolicy(BroadcasterLifeCyclePolicy.NEVER)
broadcasterListeners.asScala foreach { l =>
b.addBroadcasterListener(l)
l.onPostCreate(b)
}
b
} catch {
case ex: Exception => throw new DefaultBroadcasterFactory.BroadcasterCreationException(ex)
}
}
def add(b: Broadcaster, id: Any): Boolean = store.put(id, b).isEmpty
def destroy() {
val s = cfg.getInitParameter(ApplicationConfig.SHARED)
if (s != null && s.equalsIgnoreCase("TRUE")) {
logger.warn("Factory shared, will not be destroyed. That can possibly cause memory leaks if" +
"Broadcaster where created. Make sure you destroy them manually.")
}
var bc: BroadcasterConfig = null
store foreach {
case (k, b) =>
b.resumeAll()
b.destroy()
bc = b.getBroadcasterConfig
}
if (bc != null) bc.forceDestroy()
store.clear()
BroadcasterFactory.factory = null
}
def get(): Broadcaster = lookup(UUID.randomUUID().toString)
def get(id: Any): Broadcaster = lookup(id, createIfNull = true)
def get[T <: Broadcaster](c: Class[T], id: Any): T = lookup(c, id)
def lookup[T <: Broadcaster](c: Class[T], id: scala.Any): T = lookup(c, id, false)
def lookup[T <: Broadcaster](c: Class[T], id: scala.Any, createIfNull: Boolean): T = {
val bOpt = store get id
if (bOpt.isDefined && !c.isAssignableFrom(bOpt.get.getClass)) {
val msg = "Invalid lookup class " + c.getName + ". Cached class is: " + bOpt.get.getClass.getName
logger.warn(msg)
throw new IllegalStateException(msg)
}
if ((bOpt.isEmpty && createIfNull) || (bOpt.isDefined && bOpt.get.isDestroyed)) {
if (bOpt.isDefined) {
val b = bOpt.get
logger.debug("Removing destroyed Broadcaster %s" format b.getID)
store.remove(b.getID, b)
}
      if (store.putIfAbsent(id, createBroadcaster(c, id)).isEmpty) {
logger.debug("Added Broadcaster %s. Factory size: %s.".format(id, store.size))
}
}
store.get(id) match {
case Some(b) => b.asInstanceOf[T]
case None => null.asInstanceOf[T]
}
}
def lookup[T <: Broadcaster](id: scala.Any): T = lookup(id, createIfNull = false)
def lookup[T <: Broadcaster](id: scala.Any, createIfNull: Boolean): T = {
lookup(classOf[ScalatraBroadcaster], id, createIfNull).asInstanceOf[T]
}
def lookupAll(): java.util.Collection[Broadcaster] = {
store.values.toList.asJavaCollection
}
def remove(b: Broadcaster, id: Any): Boolean = {
val removed: Boolean = store.remove(id, b)
if (removed) {
logger.debug("Removing Broadcaster {} factory size now {} ", id, store.size)
}
removed
}
def remove(id: Any): Boolean = store.remove(id).isDefined
def removeAllAtmosphereResource(r: AtmosphereResource) {
// Remove inside all Broadcaster as well.
try {
if (store.nonEmpty) {
try {
store.valuesIterator foreach { b =>
if (b.getAtmosphereResources.contains(r))
b.removeAtmosphereResource(r)
}
} catch {
case ex: IllegalStateException => logger.debug(ex.getMessage, ex)
}
}
} catch {
case ex: Exception => {
logger.warn(ex.getMessage, ex)
}
}
}
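  // Usage sketch (hypothetical broadcaster id): get-or-create via `get`, plain
  // lookup without creation via `lookup` (which yields null when absent).
  //
  //   val b = factory.get("/chat/room1")
  //   val existing = factory.lookup[ScalatraBroadcaster]("/chat/room1")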
}
| 0xfaded/scalatra | atmosphere/src/main/scala/org/atmosphere/cpr/ScalatraBroadcasterFactory.scala | Scala | bsd-2-clause | 4,733 |
package com.twitter.gizzard
import com.twitter.ostrich.stats.{DevNullStats, StatsCollection, Stats => OStats, StatsSummary, StatsProvider, Counter, Metric}
import com.twitter.logging.Logger
import com.twitter.util.{Local, Time}
import scala.collection.mutable
import java.util.Random
class FilteredStatsProvider(default: StatsProvider) extends StatsProvider {
val test: PartialFunction[String, StatsProvider] = { case s: String => default }
def addGauge(name: String)(gauge: => Double) = test(name).addGauge(name)(gauge)
def clearGauge(name: String) = test(name).clearGauge(name)
def setLabel(name: String, value: String) = test(name).setLabel(name, value)
def clearLabel(name: String) = test(name).clearLabel(name)
def getCounter(name: String) = test(name).getCounter(name)
def getMetric(name: String) = test(name).getMetric(name)
def getGauge(name: String) = test(name).getGauge(name)
def getLabel(name: String) = test(name).getLabel(name)
def getCounters() = default.getCounters()
def getMetrics() = default.getMetrics()
def getGauges() = default.getGauges()
def getLabels() = default.getLabels()
def clearAll() = default.clearAll()
}
object Stats extends FilteredStatsProvider(OStats) {
def transaction: TransactionalStatsProvider = {
transactionOpt.getOrElse(DevNullTransactionalStats)
}
def transactionOpt = transStack.headOption
def beginTransaction() {
val newTransaction = transactionOpt match {
case Some(t) => t.createChild()
case None => new TransactionalStatsCollection(rng.nextInt(Integer.MAX_VALUE))
}
setTransaction(newTransaction)
}
def endTransaction() = {
transactionOpt match {
case Some(t) => transStack.pop()
case None => DevNullTransactionalStats
}
}
def setTransaction(collection: TransactionalStatsProvider) {
transStack.push(collection)
}
def withTransaction[T <: Any](f: => T): (T, TransactionalStatsProvider) = {
beginTransaction()
    val rv = try { f } catch { case e: Throwable => { endTransaction(); throw e } }
val t = endTransaction()
(rv, t)
}
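  // Usage sketch (doWork is hypothetical): run a block inside a fresh transaction
  // and inspect the collected trace afterwards.
  //
  //   val (result, trace) = Stats.withTransaction {
  //     Stats.transaction.record("starting work")
  //     doWork()
  //   }
  //   trace.toSeq.foreach(println)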
// We use a com.twitter.util.Local instead of a ThreadLocal to hold our transaction
// stack so it can be threaded through a Future-based execution context.
private val localTransStack = new Local[mutable.Stack[TransactionalStatsProvider]]
// Initialize our transaction stack on the current thread if it hasn't already been.
private def transStack = {
localTransStack() getOrElse {
val newStack = new mutable.Stack[TransactionalStatsProvider]
localTransStack.update(newStack)
newStack
}
}
private val rng = new Random
}
case class TraceRecord(id: Long, timestamp: Time, message: String)
trait TransactionalStatsConsumer {
def apply(t: TransactionalStatsProvider)
}
abstract class LoggingTransactionalStatsConsumer(log: Logger) extends TransactionalStatsConsumer {
def this(logName: String) = this(Logger.get(logName))
def transactionToString(t: TransactionalStatsProvider): String
def apply(t: TransactionalStatsProvider) {
log.info(transactionToString(t))
}
}
class HumanReadableTransactionalStatsConsumer(log: Logger) extends LoggingTransactionalStatsConsumer(log) {
def this(logName: String) = this(Logger.get(logName))
def transactionToString(t: TransactionalStatsProvider) = {
val buf = new StringBuilder
buf.append("Trace "+t.id)
t.name.foreach { name =>
buf.append(" (name: "+name+")")
}
buf.append("\n")
t.toSeq.map { record =>
buf.append(" ["+record.timestamp.inMillis+"] "+record.message+"\n")
}
t.children.map { child =>
buf.append(" Child Thread "+child.id+":\n")
child.toSeq.map { record =>
buf.append(" ["+record.timestamp.inMillis+"] "+record.message+"\n")
}
}
buf.toString
}
}
object SampledTransactionalStatsConsumer {
val rng = new Random
}
class ConditionalTransactionalStatsConsumer(
consumer: TransactionalStatsConsumer,
f: TransactionalStatsProvider => Boolean) extends TransactionalStatsConsumer {
def apply(t: TransactionalStatsProvider) {
if (f(t)) consumer(t)
}
}
class SampledTransactionalStatsConsumer(consumer: TransactionalStatsConsumer, sampleRate: Double)
extends ConditionalTransactionalStatsConsumer(consumer, { t =>
SampledTransactionalStatsConsumer.rng.nextFloat() < sampleRate
})
class SlowTransactionalStatsConsumer(consumer: TransactionalStatsConsumer, threshold: Long)
extends ConditionalTransactionalStatsConsumer(consumer, { t =>
t.get("duration").map { _.asInstanceOf[Long] > threshold }.getOrElse(false)
})
class AuditingTransactionalStatsConsumer(consumer: TransactionalStatsConsumer, methodNames: Set[String])
extends ConditionalTransactionalStatsConsumer(consumer, { t =>
t.name.map { name => methodNames.contains(name) } getOrElse false
})
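// Composition sketch (hypothetical log name and threshold, in whatever units the
// "duration" variable was recorded in): log only slow transactions, sampled at 10%.
//
//   val consumer = new SlowTransactionalStatsConsumer(
//     new SampledTransactionalStatsConsumer(
//       new HumanReadableTransactionalStatsConsumer("slow_query"), 0.1),
//     500)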
trait TransactionalStatsProvider {
def record(message: => String)
def set(key: String, value: AnyRef)
def get(key: String): Option[AnyRef]
def name_=(name: String)
def name: Option[String]
def toSeq: Seq[TraceRecord]
def createChild(): TransactionalStatsProvider
def children: Seq[TransactionalStatsProvider]
def id: Long
def clearAll()
}
class TransactionalStatsCollection(val id: Long) extends TransactionalStatsProvider {
private val messages = new mutable.ArrayBuffer[TraceRecord]
private val childs = new mutable.ArrayBuffer[TransactionalStatsCollection]
private val vars = new mutable.HashMap[String, AnyRef]
def record(message: => String) {
messages += TraceRecord(id, Time.now, message)
}
def set(key: String, value: AnyRef) { vars.put(key, value) }
def get(key: String) = { vars.get(key) }
def name: Option[String] = { vars.get("name").map { _.asInstanceOf[String] } }
def name_=(n: String) { vars("name") = n }
def toSeq = messages.toSeq
def children = childs.toSeq
def createChild() = {
val rv = new TransactionalStatsCollection(childs.size+1)
childs += rv
rv
}
def clearAll() {
messages.clear()
childs.clear()
vars.clear()
}
}
object DevNullTransactionalStats extends TransactionalStatsProvider {
def clearAll() {}
def record(message: => String) {}
def name = None
def name_=(name: String) {}
def toSeq = Seq()
def createChild() = DevNullTransactionalStats
def children = Seq()
def id = 0L
def set(key: String, value: AnyRef) {}
def get(key: String) = None
}
| kmiku7/gizzard | src/main/scala/com/twitter/gizzard/Stats.scala | Scala | apache-2.0 | 6,456 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.ws.scalaopenid
import play.api.test._
import scala.concurrent.ExecutionContext
//#dependency
import javax.inject.Inject
import scala.concurrent.Future
import play.api._
import play.api.mvc._
import play.api.data._
import play.api.data.Forms._
import play.api.libs.openid._
class IdController @Inject() (val openIdClient: OpenIdClient, c: ControllerComponents)(
implicit val ec: ExecutionContext
) extends AbstractController(c)
//#dependency
class ScalaOpenIdSpec extends PlaySpecification {
"Scala OpenId" should {
"be injectable" in new WithApplication() with Injecting {
val controller =
new IdController(inject[OpenIdClient], inject[ControllerComponents])(inject[ExecutionContext]) with Logging {
//#flow
def login = Action {
Ok(views.html.login())
}
def loginPost = Action.async { implicit request =>
Form(
single(
"openid" -> nonEmptyText
)
).bindFromRequest.fold(
{ error =>
logger.info(s"bad request ${error.toString}")
Future.successful(BadRequest(error.toString))
}, { openId =>
openIdClient
.redirectURL(openId, routes.Application.openIdCallback.absoluteURL())
.map(url => Redirect(url))
.recover { case t: Throwable => Redirect(routes.Application.login) }
}
)
}
def openIdCallback = Action.async { implicit request: Request[AnyContent] =>
openIdClient
.verifiedId(request)
.map(info => Ok(info.id + "\\n" + info.attributes))
.recover {
case t: Throwable =>
// Here you should look at the error, and give feedback to the user
Redirect(routes.Application.login)
}
}
//#flow
def extended(openId: String)(implicit request: RequestHeader) = {
//#extended
openIdClient.redirectURL(
openId,
routes.Application.openIdCallback.absoluteURL(),
Seq("email" -> "http://schema.openid.net/contact/email")
)
//#extended
}
}
controller must beAnInstanceOf[IdController]
}
}
}
object routes {
object Application {
val login = Call("GET", "login")
val openIdCallback = Call("GET", "callback")
}
}
package views {
object html {
def login() = "loginpage"
}
}
| benmccann/playframework | documentation/manual/working/scalaGuide/main/ws/code/ScalaOpenIdSpec.scala | Scala | apache-2.0 | 2,665 |
package com.twitter.finagle
import com.twitter.finagle.naming.{NameInterpreter, NamerExceededMaxDepthException, namerMaxDepth}
import com.twitter.util._
import scala.util.control.NonFatal
/**
* A namer is a context in which a [[com.twitter.finagle.NameTree
* NameTree]] is bound. The context is provided by the
* [[com.twitter.finagle.Namer#lookup lookup]] method, which
* translates [[com.twitter.finagle.Path Paths]] into
* [[com.twitter.finagle.NameTree NameTrees]]. Namers may represent
* external processes, for example lookups through DNS or to ZooKeeper,
* and thus lookup results are represented by a [[com.twitter.util.Activity Activity]].
*/
abstract class Namer { self =>
/**
* Translate a [[com.twitter.finagle.Path Path]] into a
* [[com.twitter.finagle.NameTree NameTree]].
*/
def lookup(path: Path): Activity[NameTree[Name]]
/**
* Bind the given tree with this namer. Bind recursively follows
* paths by looking them up in this namer. A recursion depth of up
* to 100 is allowed.
*/
def bind(tree: NameTree[Path]): Activity[NameTree[Name.Bound]] =
Namer.bind(this.lookup, tree)
}
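// A minimal custom Namer sketch (not part of the original file): every lookup
// resolves negatively, illustrating the `lookup` contract above.
//
//   class NegNamer extends Namer {
//     def lookup(path: Path): Activity[NameTree[Name]] = Activity.value(NameTree.Neg)
//   }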
private case class FailingNamer(exc: Throwable) extends Namer {
def lookup(path: Path): Activity[NameTree[Name]] =
Activity.exception(exc)
}
object Namer {
import NameTree._
private[finagle] val namerOfKind: (String => Namer) = Memoize { kind =>
try Class.forName(kind).newInstance().asInstanceOf[Namer]
catch {
case NonFatal(exc) => FailingNamer(exc)
}
}
// Key to encode name tree weights in Addr metadata
private[twitter] val AddrWeightKey = "namer_nametree_weight"
/**
* The global [[com.twitter.finagle.Namer Namer]]. It binds paths of the form
*
* {{{
* /$/classname/path...
* }}}
*
* By reflecting in the Java class `classname` whose expected type is a
* [[com.twitter.finagle.Namer Namer]] with a zero-arg constructor,
* and passing the residual path to it. Lookups fail when `classname` does
* not exist or cannot be constructed.
*
* The global namer also handles paths of the form
*
* {{{
* /$/nil/...
* }}}
*
* to force empty resolutions.
*/
val global: Namer = new Namer {
private[this] object InetPath {
private[this] def resolve(host: String, port: Int): Var[Addr] =
Resolver.eval(s"inet!$host:$port") match {
case Name.Bound(va) => va
case n: Name.Path =>
Var.value(
Addr.Failed(new IllegalStateException(s"InetResolver returned an unbound name: $n."))
)
}
def unapply(path: Path): Option[(Var[Addr], Path)] = path match {
case Path.Utf8("$", "inet", host, IntegerString(port), residual @ _*) =>
Some((resolve(host, port), Path.Utf8(residual: _*)))
case Path.Utf8("$", "inet", IntegerString(port), residual @ _*) =>
// no host provided means localhost
Some((resolve("", port), Path.Utf8(residual: _*)))
case _ => None
}
}
private[this] object FailPath {
val prefix = Path.Utf8("$", "fail")
def unapply(path: Path): Boolean =
path startsWith prefix
}
private[this] object NilPath {
val prefix = Path.Utf8("$", "nil")
def unapply(path: Path): Boolean =
path startsWith prefix
}
private[this] object NamerPath {
def unapply(path: Path): Option[(Namer, Path)] = path match {
case Path.Utf8("$", kind, rest @ _*) => Some((namerOfKind(kind), Path.Utf8(rest: _*)))
case _ => None
}
}
def lookup(path: Path): Activity[NameTree[Name]] = path match {
// Clients may depend on Name.Bound ids being Paths which resolve
// back to the same Name.Bound.
case InetPath(va, residual) =>
val id = path.take(path.size - residual.size)
Activity.value(Leaf(Name.Bound(va, id, residual)))
case FailPath() => Activity.value(Fail)
case NilPath() => Activity.value(Empty)
case NamerPath(namer, rest) => namer.lookup(rest)
case _ => Activity.value(Neg)
}
override def toString = "Namer.global"
}
/**
* Resolve a path to an address set (taking `dtab` into account).
*/
def resolve(dtab: Dtab, path: Path): Var[Addr] =
NameInterpreter.bind(dtab, path).map(_.eval).run.flatMap {
case Activity.Ok(None) => Var.value(Addr.Neg)
case Activity.Ok(Some(names)) => Name.all(names).addr
case Activity.Pending => Var.value(Addr.Pending)
case Activity.Failed(exc) => Var.value(Addr.Failed(exc))
}
/**
* Resolve a path to an address set (taking [[Dtab.local]] into account).
*/
def resolve(path: Path): Var[Addr] = resolve(Dtab.base ++ Dtab.local, path)
/**
* Resolve a path to an address set (taking [[Dtab.local]] into account).
*/
def resolve(path: String): Var[Addr] =
Try { Path.read(path) } match {
case Return(path) => resolve(path)
case Throw(e) => Var.value(Addr.Failed(e))
}
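  // Usage sketch (hypothetical host/port), exercising the /$/inet form handled
  // by Namer.global above:
  //
  //   val addr: Var[Addr] = Namer.resolve("/$/inet/localhost/8080")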
private object IntegerString {
def unapply(s: String): Option[Int] =
Try(s.toInt).toOption
}
private object DoubleString {
def unapply(s: String): Option[Double] =
Try(s.toDouble).toOption
}
/**
* Bind the given tree by recursively following paths and looking them
* up with the provided `lookup` function. A recursion depth of up to
* 100 is allowed.
*/
def bind(
lookup: Path => Activity[NameTree[Name]],
tree: NameTree[Path]
): Activity[NameTree[Name.Bound]] =
bind(lookup, 0, None)(tree map { path =>
Name.Path(path)
})
private[this] def bindUnion(
lookup: Path => Activity[NameTree[Name]],
depth: Int,
trees: Seq[Weighted[Name]]
): Activity[NameTree[Name.Bound]] = {
val weightedTreeVars: Seq[Var[Activity.State[NameTree.Weighted[Name.Bound]]]] = trees.map {
case Weighted(w, t) =>
val treesAct: Activity[NameTree[Name.Bound]] = bind(lookup, depth, Some(w))(t)
treesAct.map(Weighted(w, _)).run
}
val stateVar: Var[Activity.State[NameTree[Name.Bound]]] = Var.collect(weightedTreeVars).map {
seq: Seq[Activity.State[NameTree.Weighted[Name.Bound]]] =>
// - if there's at least one activity in Ok state, return the union of them
// - if all activities are pending, the union is pending.
// - if no subtree is Ok, and there are failures, retain the first failure.
val oks = seq.collect {
case Activity.Ok(t) => t
}
if (oks.isEmpty) {
seq
.collectFirst {
case f @ Activity.Failed(_) => f
}
.getOrElse(Activity.Pending)
} else {
Activity.Ok(Union.fromSeq(oks).simplified)
}
}
new Activity(stateVar)
}
// values of the returned activity are simplified and contain no Alt nodes
private def bind(
lookup: Path => Activity[NameTree[Name]],
depth: Int,
weight: Option[Double]
)(tree: NameTree[Name]
): Activity[NameTree[Name.Bound]] =
if (depth > namerMaxDepth())
Activity.exception(
new NamerExceededMaxDepthException(
s"Max recursion level: ${namerMaxDepth()} reached in Namer lookup"))
else
tree match {
case Leaf(Name.Path(path)) => lookup(path).flatMap(bind(lookup, depth + 1, weight))
case Leaf(bound @ Name.Bound(addr)) =>
// Add the weight of the parent to the addr's metadata
// Note: this assumes a single level of tree weights
val addrWithWeight = addr.map { addr =>
(addr, weight) match {
case (Addr.Bound(addrs, metadata), Some(weight)) =>
Addr.Bound(addrs, metadata + ((AddrWeightKey, weight)))
case _ => addr
}
}
Activity.value(Leaf(Name.Bound(addrWithWeight, bound.id, bound.path)))
case Fail => Activity.value(Fail)
case Neg => Activity.value(Neg)
case Empty => Activity.value(Empty)
case Union() => Activity.value(Neg)
case Union(Weighted(weight, tree)) => bind(lookup, depth, Some(weight))(tree)
case Union(trees @ _*) => bindUnion(lookup, depth, trees)
case Alt() => Activity.value(Neg)
case Alt(tree) => bind(lookup, depth, weight)(tree)
case Alt(trees @ _*) =>
def loop(trees: Seq[NameTree[Name]]): Activity[NameTree[Name.Bound]] =
trees match {
case Nil => Activity.value(Neg)
case Seq(head, tail @ _*) =>
bind(lookup, depth, weight)(head).flatMap {
case Fail => Activity.value(Fail)
case Neg => loop(tail)
case head => Activity.value(head)
}
}
loop(trees)
}
}
/**
* Abstract [[Namer]] class for Java compatibility.
*/
abstract class AbstractNamer extends Namer
/**
* Base-trait for Namers that bind to a local Service.
*
* Implementers with a 0-argument constructor may be named and
* auto-loaded with `/$/pkg.cls` syntax.
*
* Note that this can't actually be accomplished in a type-safe manner
* since the naming step obscures the service's type to observers.
*/
trait ServiceNamer[Req, Rep] extends Namer {
protected def lookupService(path: Path): Option[Service[Req, Rep]]
def lookup(path: Path): Activity[NameTree[Name]] = lookupService(path) match {
case None =>
Activity.value(NameTree.Neg)
case Some(svc) =>
val factory = ServiceFactory(() => Future.value(svc))
val addr = Addr.Bound(Address(factory))
val name = Name.Bound(Var.value(addr), factory, path)
Activity.value(NameTree.Leaf(name))
}
}
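// A minimal ServiceNamer sketch (hypothetical; assumes finagle-core's Service.mk):
// binds every path to a constant echo service. With its zero-arg constructor it
// could be auto-loaded as /$/pkg.EchoNamer per the note above.
//
//   class EchoNamer extends ServiceNamer[Int, Int] {
//     protected def lookupService(path: Path) = Some(Service.mk[Int, Int](Future.value))
//   }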
package namer {
final class global extends Namer {
def lookup(path: Path) = Namer.global.lookup(path)
}
}
| luciferous/finagle | finagle-core/src/main/scala/com/twitter/finagle/Namer.scala | Scala | apache-2.0 | 9,827 |
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.xpath
/** Compiled XPath expression.
*
* Instance creation is achieved through the [[Query$ companion object]].
*/
trait Query[A] extends Serializable { self =>
def eval(n: Node): A
def map[B](f: A => B): Query[B] = Query(n => f(self.eval(n)))
def flatMap[B](f: A => Query[B]): Query[B] = Query(n => f(self.eval(n)).eval(n))
}
/** Provides convenient methods for XPath expression compilation.
*
* Actual compilation is done through instances of [[Compiler]]. Methods declared here will simply summon the right
* implicit value.
*/
object Query {
def apply[A](f: Node => A): Query[A] = new Query[A] {
override def eval(n: Node): A = f(n)
}
def apply[A: Compiler](expr: XPathExpression): Query[DecodeResult[A]] =
Compiler[A].compile(expr)
/** Compiles the specified XPath expression. */
def compile[A: Compiler](str: String)(implicit xpath: XPathCompiler): CompileResult[Query[DecodeResult[A]]] =
xpath.compile(str).map(Compiler[A].compile)
/** Compiles the specified XPath expression. */
def unsafeCompile[A: Compiler](str: String)(implicit xpath: XPathCompiler): Query[DecodeResult[A]] =
Query.compile(str).getOrElse(sys.error(s"Not a valid XPath expression: '$str'."))
}
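// Usage sketch (assumption: implicit XPathCompiler and Compiler[String] instances
// are in scope, e.g. via kantan.xpath's default implicits):
//
//   val titles = Query.compile[String]("//title") // CompileResult[Query[DecodeResult[String]]]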
| nrinaudo/kantan.xpath | core/src/main/scala/kantan/xpath/Query.scala | Scala | apache-2.0 | 1,844 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.index
import com.google.common.primitives.{Bytes, Longs, Shorts}
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.data.Range
import org.apache.hadoop.io.Text
import org.geotools.factory.Hints
import org.locationtech.geomesa.accumulo.data.stats.GeoMesaStats
import org.locationtech.geomesa.accumulo.data.tables.{GeoMesaTable, Z3Table}
import org.locationtech.geomesa.accumulo.iterators._
import org.locationtech.geomesa.curve.Z3SFC
import org.locationtech.geomesa.filter._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools._
import org.locationtech.geomesa.utils.index.VisibilityLevel
import org.locationtech.sfcurve.zorder.Z3
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import org.opengis.filter.spatial._
class Z3IdxStrategy(val filter: QueryFilter) extends Strategy with LazyLogging with IndexFilterHelpers {
/**
* Plans the query - strategy implementations need to define this
*/
override def getQueryPlan(queryPlanner: QueryPlanner, hints: Hints, output: ExplainerOutputType) = {
import QueryHints.{LOOSE_BBOX, RichHints}
import Z3IdxStrategy._
import org.locationtech.geomesa.filter.FilterHelper._
val ds = queryPlanner.ds
val sft = queryPlanner.sft
val dtgField = sft.getDtgField
val (geomFilters, temporalFilters) = {
val (g, t) = filter.primary.partition(isSpatialFilter)
if (g.isEmpty) {
// allow for date only queries - if no geom, use whole world
(Seq(ff.bbox(sft.getGeomField, -180, -90, 180, 90, "EPSG:4326")), t)
} else {
(g, t)
}
}
output(s"Geometry filters: ${filtersToString(geomFilters)}")
output(s"Temporal filters: ${filtersToString(temporalFilters)}")
// standardize the two key query arguments: polygon and date-range
// TODO GEOMESA-1215 this can handle OR'd geoms, but the query splitter won't currently send them
val geometryToCover =
filter.singlePrimary.flatMap(extractSingleGeometry(_, sft.getGeomField)).getOrElse(WholeWorldPolygon)
// since we don't apply a temporal filter, we pass handleExclusiveBounds to
// make sure we exclude the non-inclusive endpoints of a during filter.
// note that this isn't completely accurate, as we only index down to the second
val interval = {
// TODO GEOMESA-1215 this can handle OR'd intervals, but the query splitter won't currently send them
val intervals = for { dtg <- dtgField; filter <- andOption(temporalFilters) } yield {
extractIntervals(filter, dtg)
}
// note: because our filters were and'ed, there will be at most one interval
intervals.flatMap(_.headOption).getOrElse {
throw new RuntimeException(s"Couldn't extract interval from filters '${filtersToString(temporalFilters)}'")
}
}
output(s"GeomsToCover: $geometryToCover")
output(s"Interval: $interval")
val looseBBox = if (hints.containsKey(LOOSE_BBOX)) Boolean.unbox(hints.get(LOOSE_BBOX)) else ds.config.looseBBox
val ecql: Option[Filter] = if (!looseBBox || sft.nonPoints) {
// if the user has requested strict bounding boxes, we apply the full filter
// if this is a non-point geometry, the index is coarse-grained, so we apply the full filter
filter.filter
} else {
// for normal bboxes, the index is fine enough that we don't need to apply the filter on top of it
// this may cause some minor errors at extremely fine resolution, but the performance is worth it
// if we have a complicated geometry predicate, we need to pass it through to be evaluated
val complexGeomFilter = filterListAsAnd(geomFilters.filter(isComplicatedSpatialFilter))
(complexGeomFilter, filter.secondary) match {
case (Some(gf), Some(fs)) => filterListAsAnd(Seq(gf, fs))
case (None, fs) => fs
case (gf, None) => gf
}
}
val (iterators, kvsToFeatures, colFamily, hasDupes) = if (hints.isBinQuery) {
// if possible, use the pre-computed values
// can't use if there are non-st filters or if custom fields are requested
val (iters, cf) =
if (filter.secondary.isEmpty && BinAggregatingIterator.canUsePrecomputedBins(sft, hints)) {
// TODO GEOMESA-1254 per-attribute vis + bins
val idOffset = Z3Table.getIdRowOffset(sft)
(Seq(BinAggregatingIterator.configurePrecomputed(sft, ecql, hints, idOffset, sft.nonPoints)), Z3Table.BIN_CF)
} else {
val iter = BinAggregatingIterator.configureDynamic(sft, ecql, hints, sft.nonPoints)
(Seq(iter), Z3Table.FULL_CF)
}
(iters, BinAggregatingIterator.kvsToFeatures(), cf, false)
} else if (hints.isDensityQuery) {
val iter = Z3DensityIterator.configure(sft, ecql, hints)
(Seq(iter), KryoLazyDensityIterator.kvsToFeatures(), Z3Table.FULL_CF, false)
} else if (hints.isStatsIteratorQuery) {
val iter = KryoLazyStatsIterator.configure(sft, ecql, hints, sft.nonPoints)
(Seq(iter), KryoLazyStatsIterator.kvsToFeatures(sft), Z3Table.FULL_CF, false)
} else if (hints.isMapAggregatingQuery) {
val iter = KryoLazyMapAggregatingIterator.configure(sft, ecql, hints, sft.nonPoints)
(Seq(iter), queryPlanner.defaultKVsToFeatures(hints), Z3Table.FULL_CF, false)
} else {
val iters = KryoLazyFilterTransformIterator.configure(sft, ecql, hints).toSeq
(iters, queryPlanner.defaultKVsToFeatures(hints), Z3Table.FULL_CF, sft.nonPoints)
}
val z3table = ds.getTableName(sft.getTypeName, Z3Table)
val numThreads = ds.getSuggestedThreads(sft.getTypeName, Z3Table)
// setup Z3 iterator
val env = geometryToCover.getEnvelopeInternal
val (lx, ly, ux, uy) = (env.getMinX, env.getMinY, env.getMaxX, env.getMaxY)
val (epochWeekStart, lt) = Z3Table.getWeekAndSeconds(interval._1)
val (epochWeekEnd, ut) = Z3Table.getWeekAndSeconds(interval._2)
val weeks = scala.Range.inclusive(epochWeekStart, epochWeekEnd).map(_.toShort)
// time range for a chunk is 0 to 1 week (in seconds)
val (tStart, tEnd) = (Z3SFC.time.min.toInt, Z3SFC.time.max.toInt)
val getRanges: (Seq[Array[Byte]], (Double, Double), (Double, Double), (Long, Long)) => Seq[Range] =
if (sft.isPoints) getPointRanges else getGeomRanges
val hasSplits = Z3Table.hasSplits(sft)
val prefixes = if (hasSplits) {
val wBytes = weeks.map(Shorts.toByteArray)
Z3Table.SPLIT_ARRAYS.flatMap(s => wBytes.map(b => Array(s(0), b(0), b(1))))
} else {
weeks.map(Shorts.toByteArray)
}
// the z3 index breaks time into 1 week chunks, so create a range for each week in our range
val ranges = if (weeks.length == 1) {
getRanges(prefixes, (lx, ux), (ly, uy), (lt, ut))
} else {
val head +: middle :+ last = prefixes.toList
val headRanges = getRanges(Seq(head), (lx, ux), (ly, uy), (lt, tEnd))
val lastRanges = getRanges(Seq(last), (lx, ux), (ly, uy), (tStart, ut))
val middleRanges = if (middle.isEmpty) Seq.empty else getRanges(middle, (lx, ux), (ly, uy), (tStart, tEnd))
headRanges ++ middleRanges ++ lastRanges
}
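    // Worked example (hypothetical weeks w1..w3): the head week is queried over
    // [lt, tEnd], full middle weeks over [tStart, tEnd], and the last week over
    // [tStart, ut].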
// index space values for comparing in the iterator
def decode(x: Double, y: Double, t: Int): (Int, Int, Int) = if (sft.isPoints) {
Z3SFC.index(x, y, t).decode
} else {
Z3(Z3SFC.index(x, y, t).z & Z3Table.GEOM_Z_MASK).decode
}
val (xmin, ymin, tmin) = decode(lx, ly, lt)
val (xmax, ymax, tmax) = decode(ux, uy, ut)
val (tLo, tHi) = (Z3SFC.time.normalize(tStart), Z3SFC.time.normalize(tEnd))
val wmin = weeks.head
val wmax = weeks.last
val zIter = Z3Iterator.configure(sft.isPoints, xmin, xmax, ymin, ymax, tmin, tmax, wmin, wmax, tLo, tHi, hasSplits, Z3_ITER_PRIORITY)
val perAttributeIter = sft.getVisibilityLevel match {
case VisibilityLevel.Feature => Seq.empty
case VisibilityLevel.Attribute => Seq(KryoVisibilityRowEncoder.configure(sft, Z3Table))
}
val cf = if (perAttributeIter.isEmpty) colFamily else GeoMesaTable.AttributeColumnFamily
val iters = perAttributeIter ++ Seq(zIter) ++ iterators
BatchScanPlan(filter, z3table, ranges, iters, Seq(cf), kvsToFeatures, numThreads, hasDupes)
}
def getPointRanges(prefixes: Seq[Array[Byte]], x: (Double, Double), y: (Double, Double), t: (Long, Long)): Seq[Range] = {
    Z3SFC.ranges(x, y, t).flatMap { indexRange =>
val startBytes = Longs.toByteArray(indexRange.lower)
val endBytes = Longs.toByteArray(indexRange.upper)
prefixes.map { prefix =>
val start = new Text(Bytes.concat(prefix, startBytes))
val end = Range.followingPrefix(new Text(Bytes.concat(prefix, endBytes)))
new Range(start, true, end, false)
}
}
}
def getGeomRanges(prefixes: Seq[Array[Byte]], x: (Double, Double), y: (Double, Double), t: (Long, Long)): Seq[Range] = {
Z3SFC.ranges(x, y, t, 8 * Z3Table.GEOM_Z_NUM_BYTES).flatMap { indexRange =>
val startBytes = Longs.toByteArray(indexRange.lower).take(Z3Table.GEOM_Z_NUM_BYTES)
val endBytes = Longs.toByteArray(indexRange.upper).take(Z3Table.GEOM_Z_NUM_BYTES)
prefixes.map { prefix =>
val start = new Text(Bytes.concat(prefix, startBytes))
val end = Range.followingPrefix(new Text(Bytes.concat(prefix, endBytes)))
new Range(start, true, end, false)
}
}
}
}
object Z3IdxStrategy extends StrategyProvider {
val Z3_ITER_PRIORITY = 23
val FILTERING_ITER_PRIORITY = 25
override protected def statsBasedCost(sft: SimpleFeatureType,
filter: QueryFilter,
transform: Option[SimpleFeatureType],
stats: GeoMesaStats): Option[Long] = {
// https://geomesa.atlassian.net/browse/GEOMESA-1166
// TODO check date range and use z2 instead if too big
// TODO also if very small bbox, z2 has ~10 more bits of lat/lon info
filter.singlePrimary match {
case Some(f) => stats.getCount(sft, f, exact = false)
case None => Some(Long.MaxValue)
}
}
/**
* More than id lookups (at 1), high-cardinality attributes (at 1).
* Less than unknown cardinality attributes (at 999).
* With a spatial component, less than z2, otherwise more than z2 (at 400)
*/
override protected def indexBasedCost(sft: SimpleFeatureType,
filter: QueryFilter,
transform: Option[SimpleFeatureType]): Long = {
if (filter.primary.exists(isSpatialFilter)) 200L else 401L
}
def isComplicatedSpatialFilter(f: Filter): Boolean = {
f match {
case _: BBOX => false
case _: DWithin => true
case _: Contains => true
case _: Crosses => true
case _: Intersects => true
case _: Overlaps => true
case _: Within => true
case _ => false // Beyond, Disjoint, DWithin, Equals, Touches
}
}
}
| mdzimmerman/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/index/Z3IdxStrategy.scala | Scala | apache-2.0 | 11,611 |
package im.actor.server
import akka.actor._
import akka.contrib.pattern.DistributedPubSubExtension
import akka.kernel.Bootable
import akka.stream.ActorMaterializer
import im.actor.server.activation.gate.{ GateCodeActivation, GateConfig }
import im.actor.server.activation.internal.{ ActivationConfig, InternalCodeActivation }
import im.actor.server.api.CommonSerialization
import im.actor.server.api.frontend.Frontend
import im.actor.server.api.http.{ HttpApiConfig, HttpApiFrontend }
import im.actor.server.api.rpc.RpcApiService
import im.actor.server.api.rpc.service.auth.AuthServiceImpl
import im.actor.server.api.rpc.service.configs.ConfigsServiceImpl
import im.actor.server.api.rpc.service.contacts.ContactsServiceImpl
import im.actor.server.api.rpc.service.files.FilesServiceImpl
import im.actor.server.api.rpc.service.groups.{ GroupInviteConfig, GroupsServiceImpl }
import im.actor.server.api.rpc.service.messaging.MessagingServiceImpl
import im.actor.server.api.rpc.service.profile.ProfileServiceImpl
import im.actor.server.api.rpc.service.pubgroups.PubgroupsServiceImpl
import im.actor.server.api.rpc.service.push.PushServiceImpl
import im.actor.server.api.rpc.service.sequence.{ SequenceServiceConfig, SequenceServiceImpl }
import im.actor.server.api.rpc.service.users.UsersServiceImpl
import im.actor.server.api.rpc.service.weak.WeakServiceImpl
import im.actor.server.api.rpc.service.webhooks.IntegrationsServiceImpl
import im.actor.server.commons.ActorConfig
import im.actor.server.db.DbExtension
import im.actor.server.dialog.privat.{ PrivateDialog, PrivateDialogExtension }
import im.actor.server.email.{ EmailConfig, EmailSender }
import im.actor.server.enrich.{ RichMessageConfig, RichMessageWorker }
import im.actor.server.group._
import im.actor.server.oauth.{ GoogleProvider, OAuth2GoogleConfig }
import im.actor.server.dialog.group.{ GroupDialog, GroupDialogExtension }
import im.actor.server.presences.{ GroupPresenceManager, PresenceManager }
import im.actor.server.push._
import im.actor.server.session.{ Session, SessionConfig, SessionMessage }
import im.actor.server.sms.TelesignSmsEngine
import im.actor.server.social.SocialExtension
import im.actor.server.user._
class Main extends Bootable {
SessionMessage.register()
CommonSerialization.register()
UserProcessor.register()
GroupProcessor.register()
GroupDialog.register()
PrivateDialog.register()
val serverConfig = ActorConfig.load()
// FIXME: get rid of unsafe get's
val activationConfig = ActivationConfig.load.get
val emailConfig = EmailConfig.fromConfig(serverConfig.getConfig("services.email")).toOption.get
val gateConfig = GateConfig.load.get
val groupInviteConfig = GroupInviteConfig.load(serverConfig.getConfig("enabled-modules.messaging.groups.invite"))
val webappConfig = HttpApiConfig.load(serverConfig.getConfig("webapp")).toOption.get
val oauth2GoogleConfig = OAuth2GoogleConfig.load(serverConfig.getConfig("services.google.oauth"))
val richMessageConfig = RichMessageConfig.load(serverConfig.getConfig("enabled-modules.enricher")).get
val smsConfig = serverConfig.getConfig("sms")
val sequenceConfig = SequenceServiceConfig.load().get
implicit val sessionConfig = SessionConfig.load(serverConfig.getConfig("session"))
implicit val system = ActorSystem(serverConfig.getString("actor-system-name"), serverConfig)
implicit val executor = system.dispatcher
implicit val materializer = ActorMaterializer()
implicit val db = DbExtension(system).db
def startup() = {
DbExtension(system).migrate()
UserMigrator.migrateAll()
GroupMigrator.migrateAll()
implicit val weakUpdManagerRegion = WeakUpdatesManager.startRegion()
implicit val presenceManagerRegion = PresenceManager.startRegion()
implicit val groupPresenceManagerRegion = GroupPresenceManager.startRegion()
implicit val socialManagerRegion = SocialExtension(system).region
implicit val userProcessorRegion = UserExtension(system).processorRegion
implicit val userViewRegion = UserExtension(system).viewRegion
implicit val groupProcessorRegion = GroupExtension(system).processorRegion
implicit val groupViewRegion = GroupExtension(system).viewRegion
    implicit val groupDialogRegion = GroupDialogExtension(system).region // no need to be implicit
implicit val privateDialogRegion = PrivateDialogExtension(system).region
val mediator = DistributedPubSubExtension(system).mediator
val activationContext = serverConfig.getString("services.activation.default-service") match {
case "internal" ⇒ InternalCodeActivation.newContext(
activationConfig,
new TelesignSmsEngine(serverConfig.getConfig("services.telesign")),
new EmailSender(emailConfig)
)
case "actor-activation" ⇒ new GateCodeActivation(gateConfig)
case _ ⇒ throw new Exception("""Invalid activation.default-service value provided: valid options: "internal", actor-activation""")
}
Session.startRegion(
Some(Session.props(mediator))
)
implicit val sessionRegion = Session.startRegionProxy()
RichMessageWorker.startWorker(richMessageConfig, mediator)
implicit val oauth2Service = new GoogleProvider(oauth2GoogleConfig)
val services = Seq(
new AuthServiceImpl(activationContext, mediator),
new ContactsServiceImpl,
MessagingServiceImpl(mediator),
new GroupsServiceImpl(groupInviteConfig),
new PubgroupsServiceImpl,
new SequenceServiceImpl(sequenceConfig),
new WeakServiceImpl,
new UsersServiceImpl,
new FilesServiceImpl,
new ConfigsServiceImpl,
new PushServiceImpl,
new ProfileServiceImpl,
new IntegrationsServiceImpl(webappConfig)
)
system.actorOf(RpcApiService.props(services), "rpcApiService")
Frontend.start(serverConfig)
HttpApiFrontend.start(serverConfig)
}
def shutdown() = {
system.shutdown()
DbExtension(system).ds.close()
}
}
object Main {
def main(args: Array[String]): Unit = {
new Main()
.startup()
}
}
| xiaotaijun/actor-platform | actor-server/actor-runner/src/main/scala/im/actor/server/Main.scala | Scala | mit | 6,062 |
package org.bitcoins.core.bloom
import org.bitcoins.core.gen.BloomFilterGenerator
import org.scalacheck.{Prop, Properties}
/**
* Created by chris on 8/3/16.
*/
class BloomFilterSpec extends Properties("BloomFilterSpec") {
property("Serialization symmetry") =
Prop.forAll(BloomFilterGenerator.bloomFilter) { filter =>
BloomFilter(filter.hex) == filter
}
property("No false negatives") =
Prop.forAll(BloomFilterGenerator.loadedBloomFilter) {
case (loadedFilter: BloomFilter, byteVectors: Seq[Seq[Byte]]) =>
        byteVectors.forall(bytes => loadedFilter.contains(bytes))
}
}
| SuredBits/bitcoin-s-sidechains | src/test/scala/org/bitcoins/core/bloom/BloomFilterSpec.scala | Scala | mit | 683 |
package com.alexitc.coinalerts.services.validators
import com.alexitc.coinalerts.errors.{
InvalidEmailFormatError,
InvalidEmailLengthError,
InvalidPasswordLengthError,
UnsupportedLangError
}
import com.alexitc.coinalerts.models._
import com.alexitc.playsonify.core.ApplicationResult
import org.apache.commons.validator.routines.EmailValidator
import org.scalactic.Accumulation.withGood
import org.scalactic.{Bad, Good, One, Or}
class UserValidator {
def validateCreateUserModel(createUserModel: CreateUserModel): ApplicationResult[CreateUserModel] = {
withGood(validateEmailFormat(createUserModel.email), validatePasswordFormat(createUserModel.password)) { (_, _) =>
createUserModel
}
}
def validateSetUserPreferencesModel(
preferencesModel: SetUserPreferencesModel): ApplicationResult[SetUserPreferencesModel] = {
val preferencesMaybe = UserPreferences.AvailableLangs
.find(_ == preferencesModel.lang)
.map(_ => preferencesModel)
Or.from(preferencesMaybe, One(UnsupportedLangError))
}
private val MaxEmailLength = 64
private val PasswordLengthRange = 8 to 30
private def validateEmailFormat(email: UserEmail): ApplicationResult[UserEmail] = {
val isValid = EmailValidator.getInstance().isValid(email.string)
if (isValid) {
if (email.string.length <= MaxEmailLength) {
Good(email)
} else {
Bad(InvalidEmailLengthError(MaxEmailLength)).accumulating
}
} else {
Bad(InvalidEmailFormatError).accumulating
}
}
private def validatePasswordFormat(password: UserPassword): ApplicationResult[UserPassword] = {
if (PasswordLengthRange.contains(password.string.length)) {
// TODO: enforce strong password?
Good(password)
} else {
Bad(InvalidPasswordLengthError(PasswordLengthRange)).accumulating
}
}
}
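// Usage sketch (hypothetical CreateUserModel shape): errors for the email and the
// password accumulate rather than short-circuiting.
//
//   new UserValidator().validateCreateUserModel(
//     CreateUserModel(UserEmail("[email protected]"), UserPassword("short")))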
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/services/validators/UserValidator.scala | Scala | gpl-3.0 | 1,853 |
import sbt._
import Keys._
import Import._
object build extends Build {
val defaultSettings = Seq(
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _ ),
incOptions := incOptions.value.withNameHashing(true),
scalaVersion := "2.11.6"
)
lazy val root = Project(
base = file("."),
id = "macro",
aggregate = Seq(macroProvider, macroClient),
settings = Defaults.defaultSettings ++ defaultSettings
)
lazy val macroProvider = Project(
base = file("macro-provider"),
id = "macro-provider",
settings = Defaults.defaultSettings ++ defaultSettings
)
lazy val macroClient = Project(
base = file("macro-client"),
id = "macro-client",
dependencies = Seq(macroProvider),
settings = Defaults.defaultSettings ++ defaultSettings
)
}
| dansanduleac/sbt | sbt/src/sbt-test/source-dependencies/macro-arg-dep-nested-2-11/project/build.scala | Scala | bsd-3-clause | 801 |
import scala.sys.process._
import java.io._
case class Config(name: String, repo: String)
val samples = List(
Config("name", "repo.git"))
s"mkdir /home/chisun/clone".!
samples foreach { c =>
println("git clone ... " + c.name)
s"git clone ssh://[email protected]:29427/${c.repo} -b LG_apps_master /home/chisun/clone/${c.name}".!
}
println("ok")
| agiledevteam/metr | scripts/run_clone.scala | Scala | mit | 365 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.types._
/**
* A literal value that is not foldable. Used in expression codegen testing to test code path
* that behave differently based on foldable values.
*/
case class NonFoldableLiteral(value: Any, dataType: DataType) extends LeafExpression {
override def foldable: Boolean = false
override def nullable: Boolean = true
override def toString: String = if (value != null) value.toString else "null"
override def eval(input: InternalRow): Any = value
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
Literal.create(value, dataType).doGenCode(ctx, ev)
}
}
object NonFoldableLiteral {
def apply(value: Any): NonFoldableLiteral = {
val lit = Literal(value)
NonFoldableLiteral(lit.value, lit.dataType)
}
def create(value: Any, dataType: DataType): NonFoldableLiteral = {
val lit = Literal.create(value, dataType)
NonFoldableLiteral(lit.value, lit.dataType)
}
}
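// Usage sketch: exercise non-constant-folded codegen paths in expression tests,
// e.g. Add(NonFoldableLiteral(1), Literal(2)) instead of two foldable literals.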
| bravo-zhang/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NonFoldableLiteral.scala | Scala | apache-2.0 | 1,922 |
package s {
sealed trait C[+A]
case class C00[+A]() extends C[A]
case class C10[+A](x: A) extends C[A]
case class C20[+A](x: A, y: A) extends C[A]
case class C01[+A](xs: A*) extends C[A]
case class C11[+A](x: A, ys: A*) extends C[A]
case class C21[+A](x: A, y: A, zs: A*) extends C[A]
object E00 { def unapply[A](x: Any): Boolean = ??? }
object E10 { def unapply[A](x: Any): Option[A] = ??? }
object E20 { def unapply[A](x: Any): Option[(A, A)] = ??? }
object E01 { def unapplySeq[A](x: Any): Option[Seq[A]] = ??? }
object E11 { def unapplySeq[A](x: Any): Option[(A, Seq[A])] = ??? }
object E21 { def unapplySeq[A](x: Any): Option[(A, A, Seq[A])] = ??? }
object F00 { def unapply[A](x: C[A]): Boolean = ??? }
object F10 { def unapply[A](x: C[A]): Option[A] = ??? }
object F20 { def unapply[A](x: C[A]): Option[(A, A)] = ??? }
object F01 { def unapplySeq[A](x: C[A]): Option[Seq[A]] = ??? }
object F11 { def unapplySeq[A](x: C[A]): Option[(A, Seq[A])] = ??? }
object F21 { def unapplySeq[A](x: C[A]): Option[(A, A, Seq[A])] = ??? }
object G00 { def unapply[A](x: C00[A]): Boolean = ??? }
object G10 { def unapply[A](x: C10[A]): Option[A] = ??? }
object G20 { def unapply[A](x: C20[A]): Option[(A, A)] = ??? }
object G01 { def unapplySeq[A](x: C01[A]): Option[Seq[A]] = ??? }
object G11 { def unapplySeq[A](x: C11[A]): Option[(A, Seq[A])] = ??? }
object G21 { def unapplySeq[A](x: C21[A]): Option[(A, A, Seq[A])] = ??? }
}
import s._
package pos {
object Test {
def ga1(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
def ga2(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
def ga3(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
def ga4(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
def ga5(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
def ga6(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
def gb1[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb2[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb3[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb4[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb5[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb6[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gc1[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc2[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc3[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc4[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc5[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc6[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd1[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd2[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd3[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd4[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd5[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd6[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
}
}
package neg {
object Fail {
def gb1[A](x: C00[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb2[A](x: C10[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb3[A](x: C20[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb4[A](x: C01[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb5[A](x: C11[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gb6[A](x: C21[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs : _*) => xs.head ; case E11(x, ys : _*) => x ; case E21(x, y, zs : _*) => x }
def gc1[A](x: C00[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc2[A](x: C10[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc3[A](x: C20[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc4[A](x: C01[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc5[A](x: C11[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gc6[A](x: C21[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs : _*) => xs.head ; case F11(x, ys : _*) => x ; case F21(x, y, zs : _*) => x }
def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs : _*) => xs.head ; case G11(x, ys : _*) => x ; case G21(x, y, zs : _*) => x }
def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs : _*) => xs.head ; case G11(x, ys : _*) => x ; case G21(x, y, zs : _*) => x }
def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs : _*) => xs.head ; case G11(x, ys : _*) => x ; case G21(x, y, zs : _*) => x }
def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs : _*) => xs.head ; case G11(x, ys : _*) => x ; case G21(x, y, zs : _*) => x }
def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs : _*) => xs.head ; case G11(x, ys : _*) => x ; case G21(x, y, zs : _*) => x }
def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs : _*) => xs.head ; case G11(x, ys : _*) => x ; case G21(x, y, zs : _*) => x }
}
}
object Test {
def main(args: Array[String]): Unit = {
}
}
| yusuke2255/dotty | tests/pending/run/patmat-behavior.scala | Scala | bsd-3-clause | 9,573 |
import java.io.File
import scalaxb.compiler.Config
import scalaxb.compiler.xsd.Driver
import scalaxb.compiler.ConfigEntry._
object XmlSchemaTest extends TestBase {
// override val module = new Driver with Verbose
val inFile = new File("integration/src/test/resources/XMLSchema.xsd")
val config = Config.default.update(PackageNames(Map(Some("http://www.w3.org/2001/XMLSchema") -> Some("org.w3.xmlschema")))).
update(Outdir(tmp)).
update(ClassPrefix("X")).
update(ParamPrefix("m"))
lazy val generated = module.process(inFile, config)
"XMLSchema.scala file must compile so that Schema can be used" in {
(List("import scalaxb._",
"import org.w3.xmlschema._",
"""val document = <xs:schema targetNamespace="http://www.example.com/IPO"
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:ipo="http://www.example.com/IPO">
<xs:complexType name="Address">
<xs:sequence>
<xs:element name="name" type="xs:string"/>
<xs:element name="street" type="xs:string"/>
<xs:element name="city" type="xs:string"/>
</xs:sequence>
<xs:attribute name="attr" type="xs:string"/>
</xs:complexType>
</xs:schema>""", // "
"""toXML[XSchema](fromXML[XSchema](document),
Some("http://www.w3.org/2001/XMLSchema"), Some("schema"), document.scope).toString""" // "
),
generated) must evaluateTo("""<xs:schema targetNamespace="http://www.example.com/IPO" """ +
"""xmlns:ipo="http://www.example.com/IPO" xmlns:xs="http://www.w3.org/2001/XMLSchema">""" +
"""<xs:complexType name="Address">""" +
"""<xs:sequence><xs:element type="xs:string" name="name"/>""" +
"""<xs:element type="xs:string" name="street"/>""" +
"""<xs:element type="xs:string" name="city"/>""" +
"""</xs:sequence>""" +
"""<xs:attribute type="xs:string" name="attr"/>""" +
"""</xs:complexType></xs:schema>""", // "
outdir = "./tmp")
}
}
| briantopping/scalaxb | integration/src/test/scala/XmlSchemaTest.scala | Scala | mit | 2,011 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import java.io.File
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.implicitConversions
import scala.util.Random
import org.apache.hadoop.conf.Configuration
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{doThrow, reset, spy}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StreamBlockId
import org.apache.spark.streaming.receiver.BlockManagerBasedStoreResult
import org.apache.spark.streaming.scheduler.{AllocatedBlocks, _}
import org.apache.spark.streaming.util._
import org.apache.spark.streaming.util.WriteAheadLogSuite._
import org.apache.spark.util.{Clock, ManualClock, SystemClock, Utils}
class ReceivedBlockTrackerSuite
extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
val hadoopConf = new Configuration()
val streamId = 1
var allReceivedBlockTrackers = new ArrayBuffer[ReceivedBlockTracker]()
var checkpointDirectory: File = null
var conf: SparkConf = null
before {
conf = new SparkConf().setMaster("local[2]").setAppName("ReceivedBlockTrackerSuite")
checkpointDirectory = Utils.createTempDir()
}
after {
allReceivedBlockTrackers.foreach { _.stop() }
Utils.deleteRecursively(checkpointDirectory)
}
test("block addition, and block to batch allocation") {
val receivedBlockTracker = createTracker(setCheckpointDir = false)
    receivedBlockTracker.isWriteAheadLogEnabled should be (false) // should be disabled by default
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
val blockInfos = generateBlockInfos()
blockInfos.map(receivedBlockTracker.addBlock)
// Verify added blocks are unallocated blocks
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (true)
// Allocate the blocks to a batch and verify that all of them have been allocated
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
receivedBlockTracker.getBlocksOfBatch(1) shouldEqual Map(streamId -> blockInfos)
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldBe empty
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (false)
// Allocate no blocks to another batch
receivedBlockTracker.allocateBlocksToBatch(2)
receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
receivedBlockTracker.getBlocksOfBatch(2) shouldEqual Map(streamId -> Seq.empty)
    // Verify that allocating blocks to an already-allocated batch is a no-op;
    // it returns the same blocks as previously allocated.
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
blockInfos.map(receivedBlockTracker.addBlock)
receivedBlockTracker.allocateBlocksToBatch(2)
receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
}
test("block addition, and block to batch allocation with many blocks") {
val receivedBlockTracker = createTracker()
receivedBlockTracker.isWriteAheadLogEnabled should be (true)
val blockInfos = generateBlockInfos(100000)
blockInfos.map(receivedBlockTracker.addBlock)
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (false)
receivedBlockTracker.getBlocksOfBatch(1) shouldEqual Map(streamId -> blockInfos)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
val expectedWrittenData1 = blockInfos.map(BlockAdditionEvent) :+
BatchAllocationEvent(1, AllocatedBlocks(Map(streamId -> blockInfos)))
getWrittenLogData() shouldEqual expectedWrittenData1
getWriteAheadLogFiles() should have size 1
receivedBlockTracker.stop()
}
test("recovery with write ahead logs should remove only allocated blocks from received queue") {
val manualClock = new ManualClock
val batchTime = manualClock.getTimeMillis()
val tracker1 = createTracker(clock = manualClock)
tracker1.isWriteAheadLogEnabled should be (true)
val allocatedBlockInfos = generateBlockInfos()
val unallocatedBlockInfos = generateBlockInfos()
val receivedBlockInfos = allocatedBlockInfos ++ unallocatedBlockInfos
receivedBlockInfos.foreach { b => tracker1.writeToLog(BlockAdditionEvent(b)) }
val allocatedBlocks = AllocatedBlocks(Map(streamId -> allocatedBlockInfos))
tracker1.writeToLog(BatchAllocationEvent(batchTime, allocatedBlocks))
tracker1.stop()
val tracker2 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker2.getBlocksOfBatch(batchTime) shouldEqual allocatedBlocks.streamIdToAllocatedBlocks
tracker2.getUnallocatedBlocks(streamId) shouldEqual unallocatedBlockInfos
tracker2.stop()
}
test("block allocation to batch should not loose blocks from received queue") {
val tracker1 = spy(createTracker())
tracker1.isWriteAheadLogEnabled should be (true)
tracker1.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
// Add blocks
val blockInfos = generateBlockInfos()
blockInfos.map(tracker1.addBlock)
tracker1.getUnallocatedBlocks(streamId) shouldEqual blockInfos
    // Try to allocate the blocks to a batch and verify that it fails.
    // The blocks should stay in the received queue when the WAL write fails.
doThrow(new RuntimeException("Not able to write BatchAllocationEvent"))
.when(tracker1).writeToLog(any(classOf[BatchAllocationEvent]))
val errMsg = intercept[RuntimeException] {
tracker1.allocateBlocksToBatch(1)
}
assert(errMsg.getMessage === "Not able to write BatchAllocationEvent")
tracker1.getUnallocatedBlocks(streamId) shouldEqual blockInfos
tracker1.getBlocksOfBatch(1) shouldEqual Map.empty
tracker1.getBlocksOfBatchAndStream(1, streamId) shouldEqual Seq.empty
// Allocate the blocks to a batch and verify that all of them have been allocated
reset(tracker1)
tracker1.allocateBlocksToBatch(2)
tracker1.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
tracker1.hasUnallocatedReceivedBlocks should be (false)
tracker1.getBlocksOfBatch(2) shouldEqual Map(streamId -> blockInfos)
tracker1.getBlocksOfBatchAndStream(2, streamId) shouldEqual blockInfos
tracker1.stop()
// Recover from WAL to see the correctness
val tracker2 = createTracker(recoverFromWriteAheadLog = true)
tracker2.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
tracker2.hasUnallocatedReceivedBlocks should be (false)
tracker2.getBlocksOfBatch(2) shouldEqual Map(streamId -> blockInfos)
tracker2.getBlocksOfBatchAndStream(2, streamId) shouldEqual blockInfos
tracker2.stop()
}
test("recovery and cleanup with write ahead logs") {
val manualClock = new ManualClock
// Set the time increment level to twice the rotation interval so that every increment creates
// a new log file
def incrementTime(): Unit = {
val timeIncrementMillis = 2000L
manualClock.advance(timeIncrementMillis)
}
// Generate and add blocks to the given tracker
def addBlockInfos(tracker: ReceivedBlockTracker): Seq[ReceivedBlockInfo] = {
val blockInfos = generateBlockInfos()
blockInfos.map(tracker.addBlock)
blockInfos
}
    // Print the data present in the write ahead log files in the log directory
def printLogFiles(message: String): Unit = {
val fileContents = getWriteAheadLogFiles().map { file =>
(s"\n>>>>> $file: <<<<<\n${getWrittenLogData(file).mkString("\n")}")
}.mkString("\n")
logInfo(s"\n\n=====================\n$message\n$fileContents\n=====================\n")
}
// Set WAL configuration
conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)
// Start tracker and add blocks
val tracker1 = createTracker(clock = manualClock)
tracker1.isWriteAheadLogEnabled should be (true)
val blockInfos1 = addBlockInfos(tracker1)
tracker1.getUnallocatedBlocks(streamId).toList shouldEqual blockInfos1
// Verify whether write ahead log has correct contents
val expectedWrittenData1 = blockInfos1.map(BlockAdditionEvent)
getWrittenLogData() shouldEqual expectedWrittenData1
getWriteAheadLogFiles() should have size 1
tracker1.stop()
incrementTime()
// Recovery without recovery from WAL and verify list of unallocated blocks is empty
val tracker1_ = createTracker(clock = manualClock, recoverFromWriteAheadLog = false)
tracker1_.getUnallocatedBlocks(streamId) shouldBe empty
tracker1_.hasUnallocatedReceivedBlocks should be (false)
tracker1_.stop()
// Restart tracker and verify recovered list of unallocated blocks
val tracker2 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
val unallocatedBlocks = tracker2.getUnallocatedBlocks(streamId).toList
unallocatedBlocks shouldEqual blockInfos1
unallocatedBlocks.foreach { block =>
block.isBlockIdValid() should be (false)
}
// Allocate blocks to batch and verify whether the unallocated blocks got allocated
val batchTime1 = manualClock.getTimeMillis()
tracker2.allocateBlocksToBatch(batchTime1)
tracker2.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
tracker2.getBlocksOfBatch(batchTime1) shouldEqual Map(streamId -> blockInfos1)
// Add more blocks and allocate to another batch
incrementTime()
val batchTime2 = manualClock.getTimeMillis()
val blockInfos2 = addBlockInfos(tracker2)
tracker2.allocateBlocksToBatch(batchTime2)
tracker2.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
tracker2.stop()
// Verify whether log has correct contents
val expectedWrittenData2 = expectedWrittenData1 ++
Seq(createBatchAllocation(batchTime1, blockInfos1)) ++
blockInfos2.map(BlockAdditionEvent) ++
Seq(createBatchAllocation(batchTime2, blockInfos2))
getWrittenLogData() shouldEqual expectedWrittenData2
// Restart tracker and verify recovered state
incrementTime()
val tracker3 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
tracker3.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
tracker3.getUnallocatedBlocks(streamId) shouldBe empty
// Cleanup first batch but not second batch
val oldestLogFile = getWriteAheadLogFiles().head
incrementTime()
tracker3.cleanupOldBatches(batchTime2, waitForCompletion = true)
    // Verify that the batch allocations have been cleaned, and the cleanup has been written to the log
tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual Seq.empty
getWrittenLogData(getWriteAheadLogFiles().last) should contain(createBatchCleanup(batchTime1))
// Verify that at least one log file gets deleted
eventually(timeout(10.seconds), interval(10.millisecond)) {
getWriteAheadLogFiles() should not contain oldestLogFile
}
printLogFiles("After clean")
tracker3.stop()
// Restart tracker and verify recovered state, specifically whether info about the first
// batch has been removed, but not the second batch
incrementTime()
val tracker4 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker4.getUnallocatedBlocks(streamId) shouldBe empty
tracker4.getBlocksOfBatchAndStream(batchTime1, streamId) shouldBe empty // should be cleaned
tracker4.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
tracker4.stop()
}
test("disable write ahead log when checkpoint directory is not set") {
// When checkpoint is disabled, then the write ahead log is disabled
val tracker1 = createTracker(setCheckpointDir = false)
tracker1.isWriteAheadLogEnabled should be (false)
}
test("parallel file deletion in FileBasedWriteAheadLog is robust to deletion error") {
conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)
val addBlocks = generateBlockInfos()
val batch1 = addBlocks.slice(0, 1)
val batch2 = addBlocks.slice(1, 3)
val batch3 = addBlocks.slice(3, addBlocks.length)
assert(getWriteAheadLogFiles().length === 0)
// list of timestamps for files
val t = Seq.tabulate(5)(i => i * 1000)
writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
assert(getWriteAheadLogFiles().length === 1)
    // The goal is to create several log files which should have been cleaned up.
    // If we face any issue during recovery because these old files exist, then we need to make
    // deletion more robust, rather than a parallelized fire-and-forget operation.
val batch1Allocation = createBatchAllocation(t(1), batch1)
writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
writeEventsManually(getLogFileName(t(2)), Seq(createBatchCleanup(t(1))))
val batch2Allocation = createBatchAllocation(t(3), batch2)
writeEventsManually(getLogFileName(t(3)), batch2.map(BlockAdditionEvent) :+ batch2Allocation)
writeEventsManually(getLogFileName(t(4)), batch3.map(BlockAdditionEvent))
// We should have 5 different log files as we called `writeEventsManually` with 5 different
// timestamps
assert(getWriteAheadLogFiles().length === 5)
// Create the tracker to recover from the log files. We're going to ask the tracker to clean
// things up, and then we're going to rewrite that data, and recover using a different tracker.
// They should have identical data no matter what
val tracker = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
def compareTrackers(base: ReceivedBlockTracker, subject: ReceivedBlockTracker): Unit = {
subject.getBlocksOfBatchAndStream(t(3), streamId) should be(
base.getBlocksOfBatchAndStream(t(3), streamId))
subject.getBlocksOfBatchAndStream(t(1), streamId) should be(
base.getBlocksOfBatchAndStream(t(1), streamId))
subject.getBlocksOfBatchAndStream(t(0), streamId) should be(Nil)
}
// ask the tracker to clean up some old files
tracker.cleanupOldBatches(t(3), waitForCompletion = true)
assert(getWriteAheadLogFiles().length === 3)
val tracker2 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker2)
// rewrite first file
writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
assert(getWriteAheadLogFiles().length === 4)
// make sure trackers are consistent
val tracker3 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker3)
// rewrite second file
writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
assert(getWriteAheadLogFiles().length === 5)
// make sure trackers are consistent
val tracker4 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker4)
}
  /**
   * Create a tracker object with the optionally provided clock. Use a manual clock if you
   * want to control time by incrementing it manually, e.g. to test log cleanup.
   */
def createTracker(
setCheckpointDir: Boolean = true,
recoverFromWriteAheadLog: Boolean = false,
clock: Clock = new SystemClock): ReceivedBlockTracker = {
val cpDirOption = if (setCheckpointDir) Some(checkpointDirectory.toString) else None
    val tracker = new ReceivedBlockTracker(
conf, hadoopConf, Seq(streamId), clock, recoverFromWriteAheadLog, cpDirOption)
allReceivedBlockTrackers += tracker
tracker
}
  /** Generate block infos using random ids */
def generateBlockInfos(blockCount: Int = 5): Seq[ReceivedBlockInfo] = {
List.fill(blockCount)(ReceivedBlockInfo(streamId, Some(0L), None,
BlockManagerBasedStoreResult(StreamBlockId(streamId, math.abs(Random.nextInt)), Some(0L))))
}
/**
* Write received block tracker events to a file manually.
*/
def writeEventsManually(filePath: String, events: Seq[ReceivedBlockTrackerLogEvent]): Unit = {
val writer = HdfsUtils.getOutputStream(filePath, hadoopConf)
events.foreach { event =>
val bytes = Utils.serialize(event)
writer.writeInt(bytes.size)
writer.write(bytes)
}
writer.close()
}
/** Get all the data written in the given write ahead log file. */
def getWrittenLogData(logFile: String): Seq[ReceivedBlockTrackerLogEvent] = {
getWrittenLogData(Seq(logFile))
}
/** Get the log file name for the given log start time. */
def getLogFileName(time: Long, rollingIntervalSecs: Int = 1): String = {
checkpointDirectory.toString + File.separator + "receivedBlockMetadata" +
File.separator + s"log-$time-${time + rollingIntervalSecs * 1000}"
}
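  // Illustrative only: with the default rollingIntervalSecs = 1, getLogFileName(5000)
  // yields ".../receivedBlockMetadata/log-5000-6000" under the checkpoint directory.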
/**
* Get all the data written in the given write ahead log files. By default, it will read all
* files in the test log directory.
*/
def getWrittenLogData(logFiles: Seq[String] = getWriteAheadLogFiles)
: Seq[ReceivedBlockTrackerLogEvent] = {
logFiles.flatMap {
file => new FileBasedWriteAheadLogReader(file, hadoopConf).toSeq
}.flatMap { byteBuffer =>
val validBuffer = if (WriteAheadLogUtils.isBatchingEnabled(conf, isDriver = true)) {
Utils.deserialize[Array[Array[Byte]]](byteBuffer.array()).map(ByteBuffer.wrap)
} else {
Array(byteBuffer)
}
validBuffer.map(b => Utils.deserialize[ReceivedBlockTrackerLogEvent](b.array()))
}.toList
}
/** Get all the write ahead log files in the test directory */
def getWriteAheadLogFiles(): Seq[String] = {
import ReceivedBlockTracker._
val logDir = checkpointDirToLogDir(checkpointDirectory.toString)
getLogFilesInDirectory(logDir).map { _.toString }
}
/** Create batch allocation object from the given info */
def createBatchAllocation(time: Long, blockInfos: Seq[ReceivedBlockInfo])
: BatchAllocationEvent = {
BatchAllocationEvent(time, AllocatedBlocks(Map((streamId -> blockInfos))))
}
/** Create batch clean object from the given info */
def createBatchCleanup(time: Long, moreTimes: Long*): BatchCleanupEvent = {
BatchCleanupEvent((Seq(time) ++ moreTimes).map(Time.apply))
}
implicit def millisToTime(milliseconds: Long): Time = Time(milliseconds)
implicit def timeToMillis(time: Time): Long = time.milliseconds
}
| wangmiao1981/spark | streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala | Scala | apache-2.0 | 20,087 |
package org.jetbrains.sbt.annotator.dependency
import com.intellij.openapi.command.WriteCommandAction
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.text.StringUtil
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiElement, PsiFile}
import org.jetbrains.plugins.scala.extensions.PsiFileExt
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScPatternDefinition
import org.jetbrains.plugins.scala.lang.psi.api.{ScalaElementVisitor, ScalaFile}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.sbt.annotator.dependency.SbtDependenciesVisitor._
import org.jetbrains.sbt.resolvers.ArtifactInfo
/**
* Created by afonichkin on 7/21/17.
*/
object AddSbtDependencyUtils {
val LIBRARY_DEPENDENCIES: String = "libraryDependencies"
val SETTINGS: String = "settings"
val SEQ: String = "Seq"
val SBT_PROJECT_TYPE = "_root_.sbt.Project"
val SBT_SEQ_TYPE = "_root_.scala.collection.Seq"
val SBT_SETTING_TYPE = "_root_.sbt.Def.Setting"
private val InfixOpsSet = Set(":=", "+=", "++=")
def getPossiblePlacesToAddFromProjectDefinition(proj: ScPatternDefinition): Seq[PsiElement] = {
var res: Seq[PsiElement] = List()
def action(psiElement: PsiElement): Unit = {
psiElement match {
case e: ScInfixExpr if e.left.getText == LIBRARY_DEPENDENCIES && isAddableLibraryDependencies(e) => res ++= Seq(e)
case call: ScMethodCall if call.deepestInvokedExpr.getText == SEQ => res ++= Seq(call)
case typedSeq: ScTypedStmt if typedSeq.isSequenceArg =>
typedSeq.expr match {
case call: ScMethodCall if call.deepestInvokedExpr.getText == SEQ => res ++= Seq(typedSeq)
case _ =>
}
case settings: ScMethodCall if isAddableSettings(settings) =>
settings.getEffectiveInvokedExpr match {
case expr: ScReferenceExpression if expr.refName == SETTINGS => res ++= Seq(settings)
case _ =>
}
case _ =>
}
}
processPatternDefinition(proj)(action)
res
}
def getTopLevelSbtProjects(psiSbtFile: ScalaFile): Seq[ScPatternDefinition] = {
var res: Seq[ScPatternDefinition] = List()
psiSbtFile.acceptChildren(new ScalaElementVisitor {
override def visitPatternDefinition(pat: ScPatternDefinition): Unit = {
if (pat.expr.isEmpty)
return
if (pat.expr.get.`type`().getOrAny.canonicalText != SBT_PROJECT_TYPE)
return
res = res ++ Seq(pat)
super.visitPatternDefinition(pat)
}
})
res
}
def getTopLevelLibraryDependencies(psiSbtFile: ScalaFile): Seq[ScInfixExpr] = {
var res: Seq[ScInfixExpr] = List()
psiSbtFile.acceptChildren(new ScalaElementVisitor {
override def visitInfixExpression(infix: ScInfixExpr): Unit = {
if (infix.left.getText == LIBRARY_DEPENDENCIES && infix.getParent.isInstanceOf[PsiFile]) {
res = res ++ Seq(infix)
}
}
})
res
}
def getTopLevelPlaceToAdd(psiFile: ScalaFile)(implicit project: Project): Option[DependencyPlaceInfo] = {
val line: Int = StringUtil.offsetToLineNumber(psiFile.charSequence, psiFile.getTextLength) + 1
getRelativePath(psiFile).map { relpath =>
DependencyPlaceInfo(relpath, psiFile.getTextLength, line, psiFile, Seq())
}
}
def addDependency(expr: PsiElement, info: ArtifactInfo)(implicit project: Project): Option[PsiElement] = {
expr match {
case e: ScInfixExpr if e.left.getText == LIBRARY_DEPENDENCIES => addDependencyToLibraryDependencies(e, info)
case call: ScMethodCall if call.deepestInvokedExpr.getText == SEQ => addDependencyToSeq(call, info)
case typedSeq: ScTypedStmt if typedSeq.isSequenceArg => addDependencyToTypedSeq(typedSeq, info)
case settings: ScMethodCall if isAddableSettings(settings) =>
settings.getEffectiveInvokedExpr match {
case expr: ScReferenceExpression if expr.refName == SETTINGS =>
Option(addDependencyToSettings(settings, info)(project))
case _ => None
}
case file: PsiFile =>
Option(addDependencyToFile(file, info)(project))
case _ => None
}
}
def addDependencyToLibraryDependencies(infix: ScInfixExpr, info: ArtifactInfo)(implicit project: Project): Option[PsiElement] = {
val psiFile = infix.getContainingFile
infix.operation.refName match {
case "+=" =>
val dependency: ScExpression = infix.right
val seqCall: ScMethodCall = generateSeqPsiMethodCall(info)(project)
doInSbtWriteCommandAction({
seqCall.args.addExpr(dependency.copy().asInstanceOf[ScExpression])
seqCall.args.addExpr(generateArtifactPsiExpression(info)(project))
infix.operation.replace(ScalaPsiElementFactory.createElementFromText("++=")(project))
dependency.replace(seqCall)
}, psiFile)(project)
Option(infix.right)
case "++=" =>
val dependencies: ScExpression = infix.right
dependencies match {
        case call: ScMethodCall if call.deepestInvokedExpr.getText == SEQ =>
val addedExpr = generateArtifactPsiExpression(info)(project)
doInSbtWriteCommandAction(call.args.addExpr(addedExpr), psiFile)(project)
Option(addedExpr)
case _ => None
}
case _ => None
}
}
def addDependencyToSeq(seqCall: ScMethodCall, info: ArtifactInfo)(implicit project: Project): Option[PsiElement] = {
def isValid(expr: ScInfixExpr) = InfixOpsSet.contains(expr.operation.refName)
val parentDef = Option(PsiTreeUtil.getParentOfType(seqCall, classOf[ScInfixExpr]))
val addedExpr = parentDef match {
case Some(expr) if isValid(expr) && expr.left.textMatches(LIBRARY_DEPENDENCIES) =>
generateArtifactPsiExpression(info)
case _ => generateLibraryDependency(info)
}
doInSbtWriteCommandAction(seqCall.args.addExpr(addedExpr), seqCall.getContainingFile)
Some(addedExpr)
}
def addDependencyToTypedSeq(typedSeq: ScTypedStmt, info: ArtifactInfo)(implicit project: Project): Option[PsiElement] =
typedSeq.expr match {
case seqCall: ScMethodCall =>
val addedExpr = generateLibraryDependency(info)(project)
doInSbtWriteCommandAction({
seqCall.args.addExpr(addedExpr)
}, seqCall.getContainingFile)
Option(addedExpr)
case _ => None
}
def addDependencyToFile(file: PsiFile, info: ArtifactInfo)(implicit project: Project): PsiElement = {
var addedExpr: PsiElement = null
doInSbtWriteCommandAction({
file.addAfter(generateNewLine(project), file.getLastChild)
addedExpr = file.addAfter(generateLibraryDependency(info), file.getLastChild)
}, file)
addedExpr
}
def addDependencyToSettings(settings: ScMethodCall, info: ArtifactInfo)(implicit project: Project): PsiElement = {
val addedExpr = generateLibraryDependency(info)(project)
doInSbtWriteCommandAction({
settings.args.addExpr(addedExpr)
}, settings.getContainingFile)(project)
addedExpr
}
def isAddableSettings(settings: ScMethodCall): Boolean = {
val args = settings.args.exprsArray
if (args.length == 1) {
args(0) match {
case typedStmt: ScTypedStmt if typedStmt.isSequenceArg =>
typedStmt.expr match {
case _: ScMethodCall => false
case _: ScReferenceExpression => false
case _ => true
}
case _ => true
}
} else true
}
def isAddableLibraryDependencies(libDeps: ScInfixExpr): Boolean =
libDeps.operation.refName match {
case "+=" => true
case "++=" => libDeps.right match {
      // In this case we return false so the dependency is not repeated several times
case call: ScMethodCall if call.deepestInvokedExpr.getText == SEQ => false
case _ => true
}
case _ => false
}
private def doInSbtWriteCommandAction[T](f: => T, psiSbtFile: PsiFile)(implicit project: ProjectContext): T =
WriteCommandAction
.writeCommandAction(psiSbtFile)
.compute(() => f)
private def generateSeqPsiMethodCall(info: ArtifactInfo)(implicit ctx: ProjectContext): ScMethodCall =
ScalaPsiElementFactory.createElementFromText(s"$SEQ()").asInstanceOf[ScMethodCall]
private def generateLibraryDependency(info: ArtifactInfo)(implicit ctx: ProjectContext): ScInfixExpr =
ScalaPsiElementFactory.createElementFromText(s"$LIBRARY_DEPENDENCIES += ${generateArtifactText(info)}").asInstanceOf[ScInfixExpr]
private def generateArtifactPsiExpression(info: ArtifactInfo)(implicit ctx: ProjectContext): ScExpression =
ScalaPsiElementFactory.createElementFromText(generateArtifactText(info))(ctx).asInstanceOf[ScExpression]
  private def generateNewLine(implicit ctx: ProjectContext): PsiElement = ScalaPsiElementFactory.createElementFromText("\n")
private def generateArtifactText(info: ArtifactInfo): String = {
    if (info.artifactId.matches("^.+_\\d+\\.\\d+$"))
      s""""${info.groupId}" %% "${info.artifactId.replaceAll("_\\d+\\.\\d+$", "")}" % "${info.version}""""
else
s""""${info.groupId}" % "${info.artifactId}" % "${info.version}""""
}
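  // Illustrative only (assuming ArtifactInfo exposes groupId/artifactId/version as used
  // above): an artifact id with a Scala-version suffix such as "cats-core_2.12" is
  // rendered with %% and the suffix stripped:
  //   "org.typelevel" %% "cats-core" % "2.9.0"
  // while a plain artifact id such as "guava" is rendered with a single %.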
def getRelativePath(elem: PsiElement)(implicit project: ProjectContext): Option[String] = {
for {
path <- Option(elem.getContainingFile.getVirtualFile.getCanonicalPath)
if path.startsWith(project.getBasePath)
} yield
path.substring(project.getBasePath.length + 1)
}
def toDependencyPlaceInfo(elem: PsiElement, affectedProjects: Seq[String])(implicit ctx: ProjectContext): Option[DependencyPlaceInfo] = {
val offset =
elem match {
case call: ScMethodCall =>
call.getEffectiveInvokedExpr match {
case expr: ScReferenceExpression => expr.nameId.getTextOffset
case _ => elem.getTextOffset
}
case _ => elem.getTextOffset
}
val line: Int = StringUtil.offsetToLineNumber(elem.getContainingFile.charSequence, offset) + 1
getRelativePath(elem).map { relpath =>
DependencyPlaceInfo(relpath, offset, line, elem, affectedProjects)
}
}
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/annotator/dependency/AddSbtDependencyUtils.scala | Scala | apache-2.0 | 10,366 |
/******************************************************************************
* Copyright (c) 2014, Equal Experts Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the Midas Project.
******************************************************************************/
package com.ee.midas.model
import com.ee.midas.utils.{DirectoryWatcher, Loggable}
import java.nio.file.StandardWatchEventKinds._
import java.util.concurrent.TimeUnit
class ApplicationWatcher (application: Application, val watchEvery: Long = 100, val unit : TimeUnit = TimeUnit.MILLISECONDS) extends Watcher[Application] with Loggable
with ApplicationParsers {
val configDir = application.configDir
private val watcher: DirectoryWatcher = {
val dirWatchMsg = s"Setting up Directory Watcher for Application in ${configDir}..."
println(dirWatchMsg)
logInfo(dirWatchMsg)
new DirectoryWatcher(application.configDir.getPath, List(ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY),
watchEvery, stopWatchingOnException = false)(watchEvents => {
watchEvents.foreach { watchEvent =>
logInfo(s"Received ${watchEvent.kind()}, Context = ${watchEvent.context()}")
}
parse(application.configDir) match {
case scala.util.Success(newlyParsedApp) => application.update(newlyParsedApp)
case scala.util.Failure(t) =>
logError(s"Failed to parse Application Config because ${t.getMessage}")
logError(s"Will Continue To Use Old Application Config")
}
})
}
def startWatching = watcher.start
def stopWatching = watcher.stopWatching
def isActive = watcher.isActive
}
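// Typical lifecycle, sketched from the public members above (not part of the original file):
//   val watcher = new ApplicationWatcher(application)
//   watcher.startWatching  // begins reacting to create/modify/delete events in configDir
//   ...
//   watcher.stopWatching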
| EqualExperts/Midas | src/main/scala/com/ee/midas/model/ApplicationWatcher.scala | Scala | bsd-2-clause | 3,074 |
package database
import java.sql.{Date, Time, Timestamp}
import java.util.UUID
import models.{ReportCardRescheduled, UniqueDbEntity}
import org.joda.time.DateTime
import slick.jdbc.PostgresProfile.api._
import utils.date.DateTimeOps._
class ReportCardRescheduledTable(tag: Tag) extends Table[ReportCardRescheduledDb](tag, "REPORT_CARD_RESCHEDULED") with UniqueTable with DateStartEndTable with RoomIdTable with ReportCardEntryIdTable {
def reason = column[Option[String]]("REASON")
override def * = (reportCardEntry, date, start, end, room, reason, lastModified, invalidated, id) <> ((ReportCardRescheduledDb.apply _).tupled, ReportCardRescheduledDb.unapply)
}
case class ReportCardRescheduledDb(
reportCardEntry: UUID,
date: Date,
start: Time,
end: Time,
room: UUID,
reason: Option[String] = None,
lastModified: Timestamp = DateTime.now.timestamp,
invalidated: Option[Timestamp] = None,
id: UUID = UUID.randomUUID
) extends UniqueDbEntity {
override def toUniqueEntity = ReportCardRescheduled(
date.localDate,
start.localTime,
end.localTime,
room,
reason,
lastModified.dateTime,
id
)
override def equals(that: scala.Any) = that match {
case ReportCardRescheduledDb(rc, dt, st, et, r, rs, _, _, i) =>
rc == reportCardEntry &&
dt.localDate == date.localDate &&
st.localTime == start.localTime &&
et.localTime == end.localTime &&
r == room &&
rs == reason &&
i == id
case _ => false
}
}
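// Illustrative Slick usage (sketch; `db` and `entryId` are assumed to exist elsewhere):
//   val rescheduled = TableQuery[ReportCardRescheduledTable]
//   db.run(rescheduled.filter(_.reportCardEntry === entryId).result)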
| THK-ADV/lwm-reloaded | app/database/ReportCardRescheduledTable.scala | Scala | mit | 1,511 |
package me.gregd.cineworld.integration.cineworld.raw
import java.time.LocalDate
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import me.gregd.cineworld.domain.transformer.CineworldTransformer
import me.gregd.cineworld.integration.cineworld.CineworldIntegrationService
import me.gregd.cineworld.util.{NoOpCache, RealClock}
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.{FunSuite, Matchers}
import play.api.libs.ws.ahc.AhcWSClient
import stub.Stubs
import util.WSClient
class CineworldRepositoryTest extends FunSuite with ScalaFutures with IntegrationPatience with Matchers with WSClient {
val cineworld = new CineworldIntegrationService(wsClient, NoOpCache.cache, Stubs.cineworld.config, RealClock)
test("retrieveCinemas") {
val cinemas = cineworld.retrieveCinemas().futureValue
val (london, rest) = cinemas
.map(CineworldTransformer.toCinema(_, None))
.partition(_.name.startsWith("London - "))
london should not be empty
rest should not be empty
}
test("retrieve7DayListings") {
val listings = cineworld.retrieveListings("8112", LocalDate.now()).futureValue
listings.films should not be empty
listings.events should not be empty
}
}
| Grogs/cinema-service | domain/src/test/scala/me/gregd/cineworld/integration/cineworld/raw/CineworldRepositoryTest.scala | Scala | gpl-3.0 | 1,257 |
package rovak.steamkit.steam.msg
case class LogOnDetails(
username: String,
password: String,
/**
   * Gets or sets the Steam Guard auth code used to log in. This is the code sent to the user's email.
*/
authCode: String = "",
/**
* Gets or sets the sentry file hash for this logon attempt, or null if no sentry file is available.
*/
sentryFileHash: Option[Array[Byte]] = None,
/**
* Gets or sets the account instance. 1 for the PC instance or 2 for the Console (PS3) instance.
*/
accountInstance: Int = 1,
/**
* Gets or sets a value indicating whether to request the Steam2 ticket.
* This is an optional request only needed for Steam2 content downloads.
*/
requestSteam2Ticket: Boolean = false)
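// Illustrative construction (values are placeholders, not real credentials):
//   val details = LogOnDetails(username = "alice", password = "hunter2", authCode = "ABC12")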
| Rovak/scala-steamkit | steam/src/main/scala/rovak/steamkit/steam/msg/LogOnDetails.scala | Scala | mit | 744 |
package com.mycompany.scalcium.sherlock
import java.util.Properties
import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.JavaConversions.propertiesAsScalaMap
import scala.collection.mutable.Stack
import edu.stanford.nlp.ling.CoreAnnotations.SentencesAnnotation
import edu.stanford.nlp.ling.CoreAnnotations.TokensAnnotation
import edu.stanford.nlp.pipeline.Annotation
import edu.stanford.nlp.pipeline.StanfordCoreNLP
class NERFinder {
val props = new Properties()
props("annotators") = "tokenize, ssplit, pos, lemma, ner"
props("ssplit.isOneSentence") = "true"
val pipeline = new StanfordCoreNLP(props)
def find(sent: (Int, Int, Int, String)):
List[(Int, Int, Int, String, String)] = {
val fileId = sent._1
val paraId = sent._2
val sentId = sent._3
val sentText = sent._4
val annot = new Annotation(sentText)
pipeline.annotate(annot)
val tokTags = annot.get(classOf[SentencesAnnotation])
.head // only one sentence in input
.get(classOf[TokensAnnotation])
.map(token => {
val begin = token.beginPosition()
val end = token.endPosition()
val nerToken = sentText.substring(begin, end)
val nerTag = token.ner()
(nerToken, nerTag)
})
// consolidate NER for multiple tokens into one, so
// for example: Ronald/PERSON Adair/PERSON becomes
// "Ronald Adair"/PERSON
val spans = Stack[(String, String)]()
tokTags.foreach(tokTag => {
if (spans.isEmpty) spans.push(tokTag)
else if (tokTag._2.equals("O")) spans.push(tokTag)
else {
val prevEntry = spans.pop
if (prevEntry._2.equals(tokTag._2))
spans.push((Array(prevEntry._1, tokTag._1).mkString(" "),
tokTag._2))
else {
spans.push(prevEntry)
spans.push(tokTag)
}
}
})
spans.reverse
.filter(tokTag => !tokTag._2.equals("O"))
.map(tokTag => (fileId, paraId, sentId, tokTag._1, tokTag._2))
.toList
}
}
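// Illustrative only; exact tags depend on the loaded NER model:
//   find((1, 2, 3, "Ronald Adair was in London."))
// would yield entries such as (1, 2, 3, "Ronald Adair", "PERSON") and
// (1, 2, 3, "London", "LOCATION"), with adjacent same-tag tokens merged as above.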
| sujitpal/scalcium | src/main/scala/com/mycompany/scalcium/sherlock/NERFinder.scala | Scala | apache-2.0 | 2,421 |
package com.criteo.dev.cluster.aws
import com.criteo.dev.cluster._
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Mounts unmounted disks.
*/
object ConfigureDiskAction {
private val logger = LoggerFactory.getLogger(ConfigureDiskAction.getClass)
def apply(conf: Map[String, String], cluster: JcloudCluster) : List[String] = {
logger.info(s"Mounting disks on ${cluster.size} host(s) in parallel")
val nodes = cluster.slaves + cluster.master
val allFutures = nodes.map(n => GeneralUtilities.getFuture {
configureDisk(conf, NodeFactory.getAwsNode(conf, n))
})
val aggFuture = Future.sequence(allFutures)
val result: mutable.Set[List[String]] = Await.result(aggFuture, Duration.Inf)
result.toIterator.next()
}
/**
* Lists all unmounted partitions and mounts them.
*/
def configureDisk(conf: Map[String, String], node: Node) : List[String] = {
val result = SshAction(node, "lsblk", returnResult = true).stripLineEnd
logger.info(s"Block information on ${node.ip}:")
    val lines = result.split("\n").map(_.trim)
    require(lines(0).trim.split("\\s+")(6).equalsIgnoreCase("MOUNTPOINT"),
      s"Mount point not in expected position in lsblk output: ${lines(0)}")
    // This is a bit delicate: assuming the unmounted devices are listed at the end,
    // take lines from the end up to the first one that has a mount entry.
    val toMount = lines.reverse.takeWhile(l => l.split("\\s+").length <= 6).map(l => l.split("\\s+")(0))
val mountCommands = toMount.zipWithIndex.flatMap { case (tm, i) =>
List(s"sudo /sbin/mkfs.ext4 /dev/$tm",
s"sudo mkdir -p /${GeneralConstants.data}/$i",
s"sudo mount /dev/$tm /${GeneralConstants.data}/$i")
}.toList
SshMultiAction(node, mountCommands)
0.to(toMount.length - 1).map(i => s"/${GeneralConstants.data}/$i").toList
}
}
| szehonCriteo/berilia | src/main/scala/com/criteo/dev/cluster/aws/ConfigureDiskAction.scala | Scala | apache-2.0 | 2,022 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.planner.logical.plans.rewriter
import org.neo4j.cypher.internal.frontend.v2_3.ast._
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.Cardinality
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.plans.{IdName, NodeHashJoin, Selection}
import org.neo4j.cypher.internal.compiler.v2_3.planner.{CardinalityEstimation, LogicalPlanningTestSupport, PlannerQuery, QueryGraph, Selections}
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
class PredicateRemovalThroughJoinsTest extends CypherFunSuite with LogicalPlanningTestSupport {
val aHasLabel = identHasLabel("a", "LABEL")
val rhsLeaf = newMockedLogicalPlan("a")
val pred1: Expression = Equals(SignedDecimalIntegerLiteral("42")_, SignedDecimalIntegerLiteral("42")_)_
val pred2: Expression = Equals(SignedDecimalIntegerLiteral("44")_, SignedDecimalIntegerLiteral("44")_)_
test("same predicate on both sides - Selection is removed entirely") {
// Given
val lhsSelection = selectionOp("a", aHasLabel)
val rhsSelection = Selection(Seq(aHasLabel), rhsLeaf)(solved)
val join = NodeHashJoin(Set(IdName("a")), lhsSelection, rhsSelection)(solved)
// When
val result = join.endoRewrite(predicateRemovalThroughJoins)
// Then the Selection operator is removed from the RHS
result should equal(NodeHashJoin(Set(IdName("a")), lhsSelection, rhsLeaf)(solved))
}
test("multiple predicates on both sides - only one is common on both sides and is removed") {
// Given
val lhsSelection = selectionOp("a", aHasLabel, pred1)
val rhsSelection = Selection(Seq(aHasLabel, pred2), rhsLeaf)(solved)
val join = NodeHashJoin(Set(IdName("a")), lhsSelection, rhsSelection)(solved)
// When rewritten
val result = join.endoRewrite(predicateRemovalThroughJoins)
// Then the predicate is removed from the RHS selection operator
val newRhsSelection = Selection(Seq(pred2), rhsLeaf)(solved)
result should equal(
NodeHashJoin(Set(IdName("a")), lhsSelection, newRhsSelection)(solved))
}
test("same predicate on both sides, but not depending on the join ids - nothing should be removed") {
// Given
val lhsSelection = selectionOp("a", pred1)
val rhsSelection = Selection(Seq(pred1), rhsLeaf)(solved)
val originalJoin = NodeHashJoin(Set(IdName("a")), lhsSelection, rhsSelection)(solved)
// When rewritten
val result = originalJoin.endoRewrite(predicateRemovalThroughJoins)
// Then nothing is removed
result should equal(originalJoin)
}
private def selectionOp(id: String, predicates: Expression*) = {
val selections = Selections.from(predicates: _*)
val lhsLeaf = newMockedLogicalPlan("a")
val solved: PlannerQuery = PlannerQuery.empty.withGraph(QueryGraph(selections = selections))
Selection(Seq(aHasLabel), lhsLeaf)(CardinalityEstimation.lift(solved, Cardinality(0)))
}
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/planner/logical/plans/rewriter/PredicateRemovalThroughJoinsTest.scala | Scala | apache-2.0 | 3,735 |
package ru.pavlenov.scala.homework.coursera
import ru.pavlenov.scala.libs.peptide.Peptide
import ru.pavlenov.scala.utils.File
/**
* ⓭ + 07
 * What kind is it? by Pavlenov Semen 24.10.14.
* Implement Trim (reproduced below).
* https://stepic.org/lesson/CS-Trimming-the-Peptide-Leaderboard-4913/step/3?course=Bioinformatics-Algorithms&unit=1085
*
* Given:
* A collection of peptides Leaderboard, a collection of integers Spectrum, and an integer N.
*
* Return:
* The N highest-scoring linear peptides on Leaderboard with respect to Spectrum.
*/
object Step2_13_3 {
def start() {
println("Implement Trim (reproduced below).")
println("from https://stepic.org/lesson/CS-Trimming-the-Peptide-Leaderboard-4913/step/3?course=Bioinformatics-Algorithms&unit=1085")
println("==========================")
val data = File.fromData(this)
val leaderboard = data(0).split(" ").toList
val spectrum = data(1).split(" ").map(_.toInt).toList
val n = data(2).toInt
println(Peptide.trim(leaderboard, spectrum, n).mkString(" "))
}
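  // Sample from the problem statement (quoted from memory; illustrative only):
  //   Leaderboard: LAST ALST TLLT TQAS
  //   Spectrum:    0 71 87 101 113 158 184 188 259 271 372
  //   N:           2
  //   expected output: LAST ALST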
}
| laser13/rosalind | src/scala/ru/pavlenov/scala/homework/coursera/Step2_13_3.scala | Scala | apache-2.0 | 1,070 |
package x
object Main {
  def main(args: Array[String]): Unit =
val arr = new MyArr[Int]()
val r = X.process{
arr.map1( zDebug =>
await(CBM.pure(1).map(a => zDebug + a))
)
}
println("r")
}
| dotty-staging/dotty | tests/pos-macros/i9894/Test_2.scala | Scala | apache-2.0 | 224 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import java.net.URI
import java.time.ZoneOffset
import java.util.Date
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, Cast, ExprId, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateFormatter, DateTimeUtils, TimestampFormatter}
import org.apache.spark.sql.catalyst.util.quoteIdentifier
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
/**
* A function defined in the catalog.
*
* @param identifier name of the function
* @param className fully qualified class name, e.g. "org.apache.spark.util.MyFunc"
* @param resources resource types and Uris used by the function
*/
case class CatalogFunction(
identifier: FunctionIdentifier,
className: String,
resources: Seq[FunctionResource])
/**
* Storage format, used to describe how a partition or a table is stored.
*/
case class CatalogStorageFormat(
locationUri: Option[URI],
inputFormat: Option[String],
outputFormat: Option[String],
serde: Option[String],
compressed: Boolean,
properties: Map[String, String]) {
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("Storage(", ", ", ")")
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
locationUri.foreach(l => map.put("Location", l.toString))
serde.foreach(map.put("Serde Library", _))
inputFormat.foreach(map.put("InputFormat", _))
outputFormat.foreach(map.put("OutputFormat", _))
if (compressed) map.put("Compressed", "")
SQLConf.get.redactOptions(properties) match {
case props if props.isEmpty => // No-op
case props =>
map.put("Storage Properties", props.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]"))
}
map
}
}
object CatalogStorageFormat {
/** Empty storage format for default values and copies. */
val empty = CatalogStorageFormat(locationUri = None, inputFormat = None,
outputFormat = None, serde = None, compressed = false, properties = Map.empty)
}
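// Illustrative only: building on `empty` for a hypothetical Hive Parquet table
// (the serde class name is the standard Hive one, not something this file defines):
//   CatalogStorageFormat.empty.copy(
//     locationUri = Some(new URI("file:/tmp/warehouse/tbl")),
//     serde = Some("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"))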
/**
* A partition (Hive style) defined in the catalog.
*
* @param spec partition spec values indexed by column name
* @param storage storage format of the partition
* @param parameters some parameters for the partition
* @param createTime creation time of the partition, in milliseconds
* @param lastAccessTime last access time, in milliseconds
* @param stats optional statistics (number of rows, total size, etc.)
*/
case class CatalogTablePartition(
spec: CatalogTypes.TablePartitionSpec,
storage: CatalogStorageFormat,
parameters: Map[String, String] = Map.empty,
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
stats: Option[CatalogStatistics] = None) {
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
map.put("Partition Values", s"[$specString]")
map ++= storage.toLinkedHashMap
if (parameters.nonEmpty) {
map.put("Partition Parameters", s"{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
}
map.put("Created Time", new Date(createTime).toString)
val lastAccess = {
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
}
map.put("Last Access", lastAccess)
stats.foreach(s => map.put("Partition Statistics", s.simpleString))
map
}
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("CatalogPartition(\\n\\t", "\\n\\t", ")")
}
/** Readable string representation for the CatalogTablePartition. */
def simpleString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("", "\\n", "")
}
/** Return the partition location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
throw new AnalysisException(s"Partition [$specString] did not specify locationUri")
}
/**
* Given the partition schema, returns a row with that schema holding the partition values.
*/
  def toRow(partitionSchema: StructType, defaultTimeZoneId: String): InternalRow = {
val caseInsensitiveProperties = CaseInsensitiveMap(storage.properties)
val timeZoneId = caseInsensitiveProperties.getOrElse(
      DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId)
InternalRow.fromSeq(partitionSchema.map { field =>
val partValue = if (spec(field.name) == ExternalCatalogUtils.DEFAULT_PARTITION_NAME) {
null
} else {
spec(field.name)
}
Cast(Literal(partValue), field.dataType, Option(timeZoneId)).eval()
})
}
}
/**
* A container for bucketing information.
* Bucketing is a technology for decomposing data sets into more manageable parts, and the number
* of buckets is fixed so it does not fluctuate with data.
*
* @param numBuckets number of buckets.
* @param bucketColumnNames the names of the columns that used to generate the bucket id.
* @param sortColumnNames the names of the columns that used to sort data in each bucket.
*/
case class BucketSpec(
numBuckets: Int,
bucketColumnNames: Seq[String],
sortColumnNames: Seq[String]) {
def conf: SQLConf = SQLConf.get
if (numBuckets <= 0 || numBuckets > conf.bucketingMaxBuckets) {
throw new AnalysisException(
s"Number of buckets should be greater than 0 but less than or equal to " +
s"bucketing.maxBuckets (`${conf.bucketingMaxBuckets}`). Got `$numBuckets`")
}
override def toString: String = {
val bucketString = s"bucket columns: [${bucketColumnNames.mkString(", ")}]"
val sortString = if (sortColumnNames.nonEmpty) {
s", sort columns: [${sortColumnNames.mkString(", ")}]"
} else {
""
}
s"$numBuckets buckets, $bucketString$sortString"
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
mutable.LinkedHashMap[String, String](
"Num Buckets" -> numBuckets.toString,
"Bucket Columns" -> bucketColumnNames.map(quoteIdentifier).mkString("[", ", ", "]"),
"Sort Columns" -> sortColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
)
}
}
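// Illustrative only: a table hashed into 8 buckets by user_id, sorted within each
// bucket by ts (column names are made up for the example):
//   BucketSpec(numBuckets = 8, bucketColumnNames = Seq("user_id"), sortColumnNames = Seq("ts"))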
/**
* A table defined in the catalog.
*
* Note that Hive's metastore also tracks skewed columns. We should consider adding that in the
* future once we have a better understanding of how we want to handle skewed columns.
*
* @param provider the name of the data source provider for this table, e.g. parquet, json, etc.
* Can be None if this table is a View, should be "hive" for hive serde tables.
* @param unsupportedFeatures is a list of string descriptions of features that are used by the
* underlying table but not supported by Spark SQL yet.
* @param tracksPartitionsInCatalog whether this table's partition metadata is stored in the
* catalog. If false, it is inferred automatically based on file
* structure.
* @param schemaPreservesCase Whether or not the schema resolved for this table is case-sensitive.
* When using a Hive Metastore, this flag is set to false if a case-
* sensitive schema was unable to be read from the table properties.
* Used to trigger case-sensitive schema inference at query time, when
* configured.
* @param ignoredProperties is a list of table properties that are used by the underlying table
* but ignored by Spark SQL yet.
* @param createVersion records the version of Spark that created this table metadata. The default
* is an empty string. We expect it will be read from the catalog or filled by
* ExternalCatalog.createTable. For temporary views, the value will be empty.
*/
case class CatalogTable(
identifier: TableIdentifier,
tableType: CatalogTableType,
storage: CatalogStorageFormat,
schema: StructType,
provider: Option[String] = None,
partitionColumnNames: Seq[String] = Seq.empty,
bucketSpec: Option[BucketSpec] = None,
owner: String = "",
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
createVersion: String = "",
properties: Map[String, String] = Map.empty,
stats: Option[CatalogStatistics] = None,
viewText: Option[String] = None,
comment: Option[String] = None,
unsupportedFeatures: Seq[String] = Seq.empty,
tracksPartitionsInCatalog: Boolean = false,
schemaPreservesCase: Boolean = true,
ignoredProperties: Map[String, String] = Map.empty,
viewOriginalText: Option[String] = None) {
import CatalogTable._
/**
* schema of this table's partition columns
*/
def partitionSchema: StructType = {
val partitionFields = schema.takeRight(partitionColumnNames.length)
assert(partitionFields.map(_.name) == partitionColumnNames)
StructType(partitionFields)
}
/**
* schema of this table's data columns
*/
def dataSchema: StructType = {
val dataFields = schema.dropRight(partitionColumnNames.length)
StructType(dataFields)
}
/** Return the database this table was specified to belong to, assuming it exists. */
def database: String = identifier.database.getOrElse {
throw new AnalysisException(s"table $identifier did not specify database")
}
/** Return the table location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
throw new AnalysisException(s"table $identifier did not specify locationUri")
}
/** Return the fully qualified name of this table, assuming the database was specified. */
def qualifiedName: String = identifier.unquotedString
/**
* Return the current catalog and namespace (concatenated as a Seq[String]) of when the view was
* created.
*/
def viewCatalogAndNamespace: Seq[String] = {
if (properties.contains(VIEW_CATALOG_AND_NAMESPACE)) {
val numParts = properties(VIEW_CATALOG_AND_NAMESPACE).toInt
(0 until numParts).map { index =>
properties.getOrElse(
s"$VIEW_CATALOG_AND_NAMESPACE_PART_PREFIX$index",
throw new AnalysisException("Corrupted table name context in catalog: " +
s"$numParts parts expected, but part $index is missing.")
)
}
} else if (properties.contains(VIEW_DEFAULT_DATABASE)) {
// Views created before Spark 3.0 can only access tables in the session catalog.
Seq(CatalogManager.SESSION_CATALOG_NAME, properties(VIEW_DEFAULT_DATABASE))
} else {
Nil
}
}
/**
* Return the output column names of the query that creates a view, the column names are used to
* resolve a view, should be empty if the CatalogTable is not a View or created by older versions
* of Spark(before 2.2.0).
*/
def viewQueryColumnNames: Seq[String] = {
for {
numCols <- properties.get(VIEW_QUERY_OUTPUT_NUM_COLUMNS).toSeq
index <- 0 until numCols.toInt
} yield properties.getOrElse(
s"$VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX$index",
throw new AnalysisException("Corrupted view query output column names in catalog: " +
s"$numCols parts expected, but part $index is missing.")
)
}
/** Syntactic sugar to update a field in `storage`. */
def withNewStorage(
locationUri: Option[URI] = storage.locationUri,
inputFormat: Option[String] = storage.inputFormat,
outputFormat: Option[String] = storage.outputFormat,
compressed: Boolean = false,
serde: Option[String] = storage.serde,
properties: Map[String, String] = storage.properties): CatalogTable = {
copy(storage = CatalogStorageFormat(
locationUri, inputFormat, outputFormat, serde, compressed, properties))
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val tableProperties = properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
val lastAccess = {
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
}
identifier.database.foreach(map.put("Database", _))
map.put("Table", identifier.table)
if (owner != null && owner.nonEmpty) map.put("Owner", owner)
map.put("Created Time", new Date(createTime).toString)
map.put("Last Access", lastAccess)
map.put("Created By", "Spark " + createVersion)
map.put("Type", tableType.name)
provider.foreach(map.put("Provider", _))
bucketSpec.foreach(map ++= _.toLinkedHashMap)
comment.foreach(map.put("Comment", _))
if (tableType == CatalogTableType.VIEW) {
viewText.foreach(map.put("View Text", _))
viewOriginalText.foreach(map.put("View Original Text", _))
if (viewCatalogAndNamespace.nonEmpty) {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
map.put("View Catalog and Namespace", viewCatalogAndNamespace.quoted)
}
if (viewQueryColumnNames.nonEmpty) {
map.put("View Query Output Columns", viewQueryColumnNames.mkString("[", ", ", "]"))
}
}
if (properties.nonEmpty) map.put("Table Properties", tableProperties)
stats.foreach(s => map.put("Statistics", s.simpleString))
map ++= storage.toLinkedHashMap
if (tracksPartitionsInCatalog) map.put("Partition Provider", "Catalog")
if (partitionColumnNames.nonEmpty) map.put("Partition Columns", partitionColumns)
if (schema.nonEmpty) map.put("Schema", schema.treeString)
map
}
override def toString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("CatalogTable(\\n", "\\n", ")")
}
/** Readable string representation for the CatalogTable. */
def simpleString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("", "\\n", "")
}
}
object CatalogTable {
val VIEW_PREFIX = "view."
// Starting from Spark 3.0, we don't use this property any more. `VIEW_CATALOG_AND_NAMESPACE` is
// used instead.
val VIEW_DEFAULT_DATABASE = VIEW_PREFIX + "default.database"
val VIEW_CATALOG_AND_NAMESPACE = VIEW_PREFIX + "catalogAndNamespace.numParts"
val VIEW_CATALOG_AND_NAMESPACE_PART_PREFIX = VIEW_PREFIX + "catalogAndNamespace.part."
// Convert the current catalog and namespace to properties.
def catalogAndNamespaceToProps(
currentCatalog: String,
currentNamespace: Seq[String]): Map[String, String] = {
val props = new mutable.HashMap[String, String]
val parts = currentCatalog +: currentNamespace
if (parts.nonEmpty) {
props.put(VIEW_CATALOG_AND_NAMESPACE, parts.length.toString)
parts.zipWithIndex.foreach { case (name, index) =>
props.put(s"$VIEW_CATALOG_AND_NAMESPACE_PART_PREFIX$index", name)
}
}
props.toMap
}
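  // Example (hypothetical values): catalogAndNamespaceToProps("spark_catalog", Seq("db")) returns
  //   "view.catalogAndNamespace.numParts" -> "2",
  //   "view.catalogAndNamespace.part.0"   -> "spark_catalog",
  //   "view.catalogAndNamespace.part.1"   -> "db",
  // which viewCatalogAndNamespace in CatalogTable decodes back into Seq("spark_catalog", "db").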
val VIEW_QUERY_OUTPUT_PREFIX = VIEW_PREFIX + "query.out."
val VIEW_QUERY_OUTPUT_NUM_COLUMNS = VIEW_QUERY_OUTPUT_PREFIX + "numCols"
val VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX = VIEW_QUERY_OUTPUT_PREFIX + "col."
}
/**
* This class of statistics is used in [[CatalogTable]] to interact with metastore.
* We define this new class instead of directly using [[Statistics]] here because there are no
* concepts of attributes in catalog.
*/
case class CatalogStatistics(
sizeInBytes: BigInt,
rowCount: Option[BigInt] = None,
colStats: Map[String, CatalogColumnStat] = Map.empty) {
/**
* Convert [[CatalogStatistics]] to [[Statistics]], and match column stats to attributes based
* on column names.
*/
def toPlanStats(planOutput: Seq[Attribute], planStatsEnabled: Boolean): Statistics = {
if (planStatsEnabled && rowCount.isDefined) {
val attrStats = AttributeMap(planOutput
.flatMap(a => colStats.get(a.name).map(a -> _.toPlanStat(a.name, a.dataType))))
// Estimate size as number of rows * row size.
val size = EstimationUtils.getOutputSize(planOutput, rowCount.get, attrStats)
Statistics(sizeInBytes = size, rowCount = rowCount, attributeStats = attrStats)
} else {
// When plan statistics are disabled or the table doesn't have other statistics,
// we apply the size-only estimation strategy and only propagate sizeInBytes in statistics.
Statistics(sizeInBytes = sizeInBytes)
}
}
/** Readable string representation for the CatalogStatistics. */
def simpleString: String = {
val rowCountString = if (rowCount.isDefined) s", ${rowCount.get} rows" else ""
s"$sizeInBytes bytes$rowCountString"
}
}
/**
* This class of statistics for a column is used in [[CatalogTable]] to interact with metastore.
*/
case class CatalogColumnStat(
distinctCount: Option[BigInt] = None,
min: Option[String] = None,
max: Option[String] = None,
nullCount: Option[BigInt] = None,
avgLen: Option[Long] = None,
maxLen: Option[Long] = None,
histogram: Option[Histogram] = None,
version: Int = CatalogColumnStat.VERSION) {
/**
* Returns a map from string to string that can be used to serialize the column stats.
* The key is the name of the column and name of the field (e.g. "colName.distinctCount"),
* and the value is the string representation for the value.
* min/max values are stored as Strings. They can be deserialized using
* [[CatalogColumnStat.fromExternalString]].
*
* As part of the protocol, the returned map always contains a key called "version".
* Any of the fields that are null (None) won't appear in the map.
*/
def toMap(colName: String): Map[String, String] = {
val map = new scala.collection.mutable.HashMap[String, String]
map.put(s"${colName}.${CatalogColumnStat.KEY_VERSION}", CatalogColumnStat.VERSION.toString)
distinctCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_DISTINCT_COUNT}", v.toString)
}
nullCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_NULL_COUNT}", v.toString)
}
avgLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_AVG_LEN}", v.toString) }
maxLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_LEN}", v.toString) }
min.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MIN_VALUE}", v) }
max.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_VALUE}", v) }
histogram.foreach { h =>
map.put(s"${colName}.${CatalogColumnStat.KEY_HISTOGRAM}", HistogramSerializer.serialize(h))
}
map.toMap
}
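  // Example (hypothetical column "age"): a stat with distinctCount = 10 and nullCount = 0
  // serializes via toMap("age") to
  //   "age.version"       -> "2",
  //   "age.distinctCount" -> "10",
  //   "age.nullCount"     -> "0";
  // CatalogColumnStat.fromMap reverses this encoding.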
/** Convert [[CatalogColumnStat]] to [[ColumnStat]]. */
def toPlanStat(
colName: String,
dataType: DataType): ColumnStat =
ColumnStat(
distinctCount = distinctCount,
min = min.map(CatalogColumnStat.fromExternalString(_, colName, dataType, version)),
max = max.map(CatalogColumnStat.fromExternalString(_, colName, dataType, version)),
nullCount = nullCount,
avgLen = avgLen,
maxLen = maxLen,
histogram = histogram,
version = version)
}
object CatalogColumnStat extends Logging {
// List of string keys used to serialize CatalogColumnStat
val KEY_VERSION = "version"
private val KEY_DISTINCT_COUNT = "distinctCount"
private val KEY_MIN_VALUE = "min"
private val KEY_MAX_VALUE = "max"
private val KEY_NULL_COUNT = "nullCount"
private val KEY_AVG_LEN = "avgLen"
private val KEY_MAX_LEN = "maxLen"
private val KEY_HISTOGRAM = "histogram"
val VERSION = 2
private def getTimestampFormatter(): TimestampFormatter = {
TimestampFormatter(format = "yyyy-MM-dd HH:mm:ss.SSSSSS", zoneId = ZoneOffset.UTC)
}
  /**
   * Converts from the string representation of a value of the given data type to the
   * corresponding Catalyst value.
   */
def fromExternalString(s: String, name: String, dataType: DataType, version: Int): Any = {
dataType match {
case BooleanType => s.toBoolean
case DateType if version == 1 => DateTimeUtils.fromJavaDate(java.sql.Date.valueOf(s))
case DateType => DateFormatter(ZoneOffset.UTC).parse(s)
case TimestampType if version == 1 =>
DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf(s))
case TimestampType => getTimestampFormatter().parse(s)
case ByteType => s.toByte
case ShortType => s.toShort
case IntegerType => s.toInt
case LongType => s.toLong
case FloatType => s.toFloat
case DoubleType => s.toDouble
case _: DecimalType => Decimal(s)
// This version of Spark does not use min/max for binary/string types so we ignore it.
case BinaryType | StringType => null
case _ =>
throw new AnalysisException("Column statistics deserialization is not supported for " +
s"column $name of data type: $dataType.")
}
}
  /**
   * Converts the given Catalyst value to the string representation of the corresponding
   * external data type.
   */
def toExternalString(v: Any, colName: String, dataType: DataType): String = {
val externalValue = dataType match {
case DateType => DateFormatter(ZoneOffset.UTC).format(v.asInstanceOf[Int])
case TimestampType => getTimestampFormatter().format(v.asInstanceOf[Long])
case BooleanType | _: IntegralType | FloatType | DoubleType => v
case _: DecimalType => v.asInstanceOf[Decimal].toJavaBigDecimal
// This version of Spark does not use min/max for binary/string types so we ignore it.
case _ =>
throw new AnalysisException("Column statistics serialization is not supported for " +
s"column $colName of data type: $dataType.")
}
externalValue.toString
}
/**
* Creates a [[CatalogColumnStat]] object from the given map.
* This is used to deserialize column stats from some external storage.
* The serialization side is defined in [[CatalogColumnStat.toMap]].
*/
def fromMap(
table: String,
colName: String,
map: Map[String, String]): Option[CatalogColumnStat] = {
try {
Some(CatalogColumnStat(
distinctCount = map.get(s"${colName}.${KEY_DISTINCT_COUNT}").map(v => BigInt(v.toLong)),
min = map.get(s"${colName}.${KEY_MIN_VALUE}"),
max = map.get(s"${colName}.${KEY_MAX_VALUE}"),
nullCount = map.get(s"${colName}.${KEY_NULL_COUNT}").map(v => BigInt(v.toLong)),
avgLen = map.get(s"${colName}.${KEY_AVG_LEN}").map(_.toLong),
maxLen = map.get(s"${colName}.${KEY_MAX_LEN}").map(_.toLong),
histogram = map.get(s"${colName}.${KEY_HISTOGRAM}").map(HistogramSerializer.deserialize),
version = map(s"${colName}.${KEY_VERSION}").toInt
))
} catch {
case NonFatal(e) =>
logWarning(s"Failed to parse column statistics for column ${colName} in table $table", e)
None
}
}
}
case class CatalogTableType private(name: String)
object CatalogTableType {
val EXTERNAL = new CatalogTableType("EXTERNAL")
val MANAGED = new CatalogTableType("MANAGED")
val VIEW = new CatalogTableType("VIEW")
val tableTypes = Seq(EXTERNAL, MANAGED, VIEW)
}
/**
* A database defined in the catalog.
*/
case class CatalogDatabase(
name: String,
description: String,
locationUri: URI,
properties: Map[String, String])
object CatalogTypes {
/**
* Specifications of a table partition. Mapping column name to column value.
*/
type TablePartitionSpec = Map[String, String]
/**
* Initialize an empty spec.
*/
lazy val emptyTablePartitionSpec: TablePartitionSpec = Map.empty[String, String]
}
/**
* A placeholder for a table relation, which will be replaced by concrete relation like
* `LogicalRelation` or `HiveTableRelation`, during analysis.
*/
case class UnresolvedCatalogRelation(tableMeta: CatalogTable) extends LeafNode {
assert(tableMeta.identifier.database.isDefined)
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* A `LogicalPlan` that represents a hive table.
*
* TODO: remove this after we completely make hive as a data source.
*/
case class HiveTableRelation(
tableMeta: CatalogTable,
dataCols: Seq[AttributeReference],
partitionCols: Seq[AttributeReference],
tableStats: Option[Statistics] = None,
@transient prunedPartitions: Option[Seq[CatalogTablePartition]] = None)
extends LeafNode with MultiInstanceRelation {
assert(tableMeta.identifier.database.isDefined)
assert(tableMeta.partitionSchema.sameType(partitionCols.toStructType))
assert(tableMeta.dataSchema.sameType(dataCols.toStructType))
// The partition column should always appear after data columns.
override def output: Seq[AttributeReference] = dataCols ++ partitionCols
def isPartitioned: Boolean = partitionCols.nonEmpty
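  // Canonicalization sketch: fields that do not affect plan semantics (storage format,
  // createTime) are cleared and attribute ExprIds are renumbered positionally, so two
  // relations over the same table compare equal regardless of the ids assigned at analysis.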
override def doCanonicalize(): HiveTableRelation = copy(
tableMeta = tableMeta.copy(
storage = CatalogStorageFormat.empty,
createTime = -1
),
dataCols = dataCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index))
},
partitionCols = partitionCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index + dataCols.length))
}
)
override def computeStats(): Statistics = {
tableMeta.stats.map(_.toPlanStats(output, conf.cboEnabled || conf.planStatsEnabled))
.orElse(tableStats)
.getOrElse {
throw new IllegalStateException("table stats must be specified.")
}
}
override def newInstance(): HiveTableRelation = copy(
dataCols = dataCols.map(_.newInstance()),
partitionCols = partitionCols.map(_.newInstance()))
}
| goldmedal/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala | Scala | apache-2.0 | 27,322 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal
import org.eclipse.core.runtime.preferences.InstanceScope
import org.eclipse.debug.core.DebugPlugin
import org.eclipse.ui.preferences.ScopedPreferenceStore
trait ScalaDebugRunningTest {
// debug tests need this
disableStatusHandlers()
def disableStatusHandlers(): Unit = {
// disable UI-dependent checks done during pre-launch. Gets rid of annoying exceptions during tests
    val prefs = new ScopedPreferenceStore(InstanceScope.INSTANCE, DebugPlugin.getUniqueIdentifier)
prefs.setValue("org.eclipse.debug.core.PREF_ENABLE_STATUS_HANDLERS", false)
}
val TYPENAME_FC_LS = "stepping.ForComprehensionListString"
val TYPENAME_FC_LS2 = "stepping.ForComprehensionListString2"
val TYPENAME_FC_LO = "stepping.ForComprehensionListObject"
val TYPENAME_FC_LI = "stepping.ForComprehensionListInt"
val TYPENAME_FC_LIO = "stepping.ForComprehensionListIntOptimized"
val TYPENAME_AF_LI = "stepping.AnonFunOnListInt"
val TYPENAME_AF_LS = "stepping.AnonFunOnListString"
val TYPENAME_VARIABLES = "debug.Variables"
val TYPENAME_SIMPLE_STEPPING = "stepping.SimpleStepping"
val TYPENAME_STEP_FILTERS = "stepping.StepFilters"
val TYPENAME_HELLOWORLD = "debug.HelloWorld"
val TYPENAME_SAYHELLOWORLD = "debug.SayHelloWorld"
} | andrey-ilinykh/scala-ide | org.scala-ide.sdt.debug.tests/src/org/scalaide/debug/internal/ScalaDebugRunningTest.scala | Scala | bsd-3-clause | 1,345 |
package com.overviewdocs.models
/** A page of search results from DocumentCloud.
*/
case class DocumentCloudImportIdList(
id: Int,
documentCloudImportId: Int,
/** 0-based page of search results. */
pageNumber: Int,
/** IDs from DocumentCloud: our to-fetch list, encoded as a String.
*
* It contains:
*
* * `documentCloudId`
* * `title`
* * `nPages`
* * `fullTextUrl`
* * `pageTextUrlTemplate` (replace `{page}` with page number)
*
* See com.overviewdocs.documentcloud.IdList for encoding logic.
*/
idsString: String,
/** The number of DocumentCloud documents in this list.
*
* We cache this so we can avoid a RAM hit when summing.
*/
nDocuments: Int,
/** The number of DocumentCloud pages in this list.
*
* We cache this so we can avoid a RAM hit when summing.
*/
nPages: Int
)
object DocumentCloudImportIdList {
case class CreateAttributes(
documentCloudImportId: Int,
pageNumber: Int,
idsString: String,
nDocuments: Int,
nPages: Int
)
}
| overview/overview-server | common/src/main/scala/com/overviewdocs/models/DocumentCloudImportIdList.scala | Scala | agpl-3.0 | 1,059 |
package com.twitter.finagle.netty4.param
import com.twitter.concurrent.NamedPoolThreadFactory
import com.twitter.finagle.Stack
import com.twitter.finagle.netty4.{nativeEpoll, numWorkers}
import com.twitter.finagle.util.BlockingTimeTrackingThreadFactory
import io.netty.channel.EventLoopGroup
import io.netty.channel.epoll.EpollEventLoopGroup
import io.netty.channel.nio.NioEventLoopGroup
import java.util.concurrent.Executors
/**
* A class eligible for configuring the [[io.netty.channel.EventLoopGroup]] used
* to execute I/O work for finagle clients and servers. The default is global and shared
* among clients and servers such that we can inline work on the I/O threads. Modifying
* the default has performance and instrumentation implications and should only be
* done so with care. If there is particular work you would like to schedule off
* the I/O threads, consider scheduling that work on a separate thread pool
* more granularly (e.g. [[com.twitter.util.FuturePool]] is a good tool for this).
*/
case class WorkerPool(eventLoopGroup: EventLoopGroup)
object WorkerPool {
implicit val workerPoolParam: Stack.Param[WorkerPool] = Stack.Param {
val threadFactory = new BlockingTimeTrackingThreadFactory(
new NamedPoolThreadFactory("finagle/netty4", makeDaemons = true)
)
// Netty will create `numWorkers` children in the `EventLoopGroup`. Each `EventLoop` will
// pin itself to a thread acquired from the `executor` and will multiplex over channels.
// Thus, with this configuration, we should not acquire more than `numWorkers`
// threads from the `executor`.
val executor = Executors.newCachedThreadPool(threadFactory)
val eventLoopGroup =
if (nativeEpoll.enabled) new EpollEventLoopGroup(numWorkers(), executor)
else new NioEventLoopGroup(numWorkers(), executor)
WorkerPool(eventLoopGroup)
}
}
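// A minimal sketch (hypothetical usage): rather than replacing the shared worker pool,
// offload blocking work from an I/O thread onto a dedicated FuturePool, e.g.
//   val blockingPool = com.twitter.util.FuturePool.unboundedPool
//   val result = blockingPool { expensiveBlockingCall() } // expensiveBlockingCall is hypothetical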
| koshelev/finagle | finagle-netty4/src/main/scala/com/twitter/finagle/netty4/param/WorkerPool.scala | Scala | apache-2.0 | 1,872 |
/**
*
*/
package models
import java.net.URL
import java.util.Date
import java._
import play.api.templates.Html
import utils.Utils4Devel
import java.util.Calendar
/**
* @author stefan.illgen
*/
class Item {
// defaults
  val HREF: URL = new URL("http://anonymous.org")
  val IMG: URL = new URL("http://anonymous.org")
  val CREATED_ON: Date = Utils4Devel.createDate(1982, 2, 20, 23, 30, 35)
val CREATOR = "Anonymous"
val CONTENT = Html("There was no markdown scripted yet.")
// bean stuff
private var _creator: Option[String] = Utils4Devel.toOption(CREATOR)
def creator(): Option[String] = _creator
def getCreator(): String = _creator getOrElse CREATOR
def setCreator(creator: String) = _creator = Utils4Devel.toOption(creator)
private var _createdOn: Option[Date] = Utils4Devel.toOption(CREATED_ON)
def createdOn(): Option[Date] = _createdOn
def getCreatedOn(): Date = _createdOn getOrElse CREATED_ON
def setCreatedOn(createdOn: Date) = _createdOn = Utils4Devel.toOption(createdOn)
private var _href: Option[URL] = Utils4Devel.toOption(HREF)
def href(): Option[URL] = _href
def getHref(): URL = _href getOrElse HREF
def setHref(href: URL) = _href = Utils4Devel.toOption(href)
private var _img: Option[URL] = Utils4Devel.toOption(IMG)
def img(): Option[URL] = _img
def getImg(): URL = _img getOrElse IMG
def setImg(img: URL) = _img = Utils4Devel.toOption(img)
private var _content: Option[Html] = Utils4Devel.toOption(CONTENT)
def content(): Option[Html] = _content
def getContent(): Html = _content getOrElse CONTENT
def setContent(content: Html) = _content = Utils4Devel.toOption(content)
} | stefanil/org.devel.skills.play2 | app/models/Item.scala | Scala | apache-2.0 | 1,660 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2015-2017, Carlos Quiroz **
** **
\\* */
package squants.electro
import squants._
import squants.motion.Newtons
/**
* Quantity and units for Permeability
*
* https://en.wikipedia.org/wiki/Permeability_(electromagnetism)
*
* @author cquiroz
* @since 1.4
*
* @param value value in [[squants.electro.HenriesPerMeter]]
*/
final class Permeability private (val value: Double, val unit: PermeabilityUnit)
extends Quantity[Permeability] {
val dimension = Permeability
def *(that: Length): Inductance = Henry(this.toHenriesPerMeter * that.toMeters)
def toHenriesPerMeter = to(HenriesPerMeter)
def toNewtonsPerAmpereSquared = to(NewtonsPerAmperesSquared)
}
object Permeability extends Dimension[Permeability] {
private[electro] def apply[A](n: A, unit: PermeabilityUnit)(implicit num: Numeric[A]) = new Permeability(num.toDouble(n), unit)
val apply = parse _
val name = "Permeability"
val primaryUnit = HenriesPerMeter
val siUnit = HenriesPerMeter
val units = Set[UnitOfMeasure[Permeability]](HenriesPerMeter, NewtonsPerAmperesSquared)
}
trait PermeabilityUnit extends UnitOfMeasure[Permeability] with UnitConverter {
def apply[A](n: A)(implicit num: Numeric[A]) = Permeability(n, this)
}
object HenriesPerMeter extends PermeabilityUnit with PrimaryUnit with SiUnit {
val symbol = s"${Henry.symbol}/${Meters.symbol}"
}
object NewtonsPerAmperesSquared extends PermeabilityUnit with PrimaryUnit with SiUnit {
val symbol = s"${Newtons.symbol}/${Amperes.symbol}²"
}
object PermeabilityConversions {
lazy val henriesPerMeter = HenriesPerMeter(1)
lazy val newtonsPerAmperesSquared = NewtonsPerAmperesSquared(1)
implicit class PermeabilityConversions[A](n: A)(implicit num: Numeric[A]) {
def henriesPerMeter = HenriesPerMeter(n)
def newtonsPerAmperesSquared = NewtonsPerAmperesSquared(n)
}
implicit object PermeabilityNumeric extends AbstractQuantityNumeric[Permeability](Permeability.primaryUnit)
}
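// Example (sketch):
//   val mu0 = HenriesPerMeter(4 * math.Pi * 1e-7) // vacuum permeability
//   mu0 * Meters(2) // == Henry(8 * math.Pi * 1e-7), since (H/m) * m = H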
| underscorenico/squants | shared/src/main/scala/squants/electro/Permeability.scala | Scala | apache-2.0 | 2,442 |
package scalariform.formatter
import preferences.FormattingPreferences._
import scalariform.formatter.preferences._
import scalariform.parser._
import scalariform.formatter._
// format: OFF
class CompactControlReadabilityTest extends AbstractExpressionFormatterTest {
implicit val formattingPreferences = FormattingPreferences.setPreference(CompactControlReadability, true)
"""if(a){
|foo
|} else {
|bar }""" ==>
"""if (a) {
| foo
|}
|else {
| bar
|}"""
"""if(a){
|foo
|}
|else {
|bar }""" ==>
"""if (a) {
| foo
|}
|else {
| bar
|}"""
"""if(a){
|foo
|} else {
|
|bar }""" ==>
"""if (a) {
| foo
|}
|else {
|
| bar
|}"""
"""try{
| foo
|} catch {
| bar
|}""" ==>
"""try {
| foo
|}
|catch {
| bar
|}"""
"""try{
| foo
|} finally {
| bar
|}""" ==>
"""try {
| foo
|}
|finally {
| bar
|}"""
"""try{
| foo
|}
|finally {
| bar
|}""" ==>
"""try {
| foo
|}
|finally {
| bar
|}"""
"""try{
| foo
|} finally {
|
| bar
|}""" ==>
"""try {
| foo
|}
|finally {
|
| bar
|}"""
"if (y > 0) positive else if (y < 0) negative else zero" ==> "if (y > 0) positive else if (y < 0) negative else zero"
"try x catch y finally z" ==> "try x catch y finally z"
} | yu-iskw/scalariform | scalariform/src/test/scala/scalariform/formatter/CompactControlReadabilityTest.scala | Scala | mit | 1,490 |
package org.fiware.cosmos.orion.flink.connector.tests
import java.net.InetSocketAddress
import io.netty.buffer.Unpooled
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.http.{DefaultFullHttpRequest, HttpMethod, HttpVersion}
import io.netty.util.CharsetUtil
import org.apache.http.client.methods.{HttpPatch, HttpPost, HttpPut}
import org.fiware.cosmos.orion.flink.connector._
import org.junit.{Assert, Test}
import org.mockito.Mockito.mock
object UtilsLD {
final val Port = 9201
final val SleepTime = 20000
final val SleepTimeShort = 6000
final val ServerAddress = "http://localhost:" + this.Port
final val OrionAddress = "http://localhost:2026"
final val ContentType = "Content-Type"
final val ContentType2 = "Content-Type2"
final val Content = "Content"
final val Accept = "Accept"
final val UserAgent = "User-Agent"
final val Json = "application/json"
final val Orion = "orion/0.10.0"
final val UTF8 = "application/json; charset=utf-8"
final val FiwareService = "Fiware-Service"
final val FiwareServicePath = "Fiware-ServicePath"
final val Demo = "demo"
final val Test = "/test"
final val BadContent = "BAD CONTENT"
final val OtherUrl = "http://localhost:9302"
}
class OrionConnectorTestLD extends BaseTest{
  def createMockFullHttpRequest(str: String = simulatedNotificationLD.notification()): DefaultFullHttpRequest = {
val bytes = str.getBytes(CharsetUtil.UTF_8)
val content = Unpooled.copiedBuffer(bytes)
val fhr = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, UtilsLD.ServerAddress, content)
fhr.headers().set(UtilsLD.ContentType, UtilsLD.UTF8)
fhr.headers().set(UtilsLD.ContentType2, UtilsLD.UTF8)
fhr.headers().set(UtilsLD.Accept, UtilsLD.Json)
fhr.headers().set(UtilsLD.UserAgent, UtilsLD.Orion)
fhr.headers().set(UtilsLD.FiwareService, UtilsLD.Demo)
fhr.headers().set(UtilsLD.FiwareServicePath, UtilsLD.Test)
fhr
}
  def createMockFullHttpRequestGet(): DefaultFullHttpRequest = {
val fhr = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, UtilsLD.ServerAddress)
// val headers = new HttpHeaders(UtilsLD.ContentType, "application/json; charset=utf-8")
fhr.headers().set(UtilsLD.ContentType, UtilsLD.Json)
fhr.headers().set(UtilsLD.ContentType2, UtilsLD.Json)
fhr.headers().set(UtilsLD.Accept, UtilsLD.Json)
fhr.headers().set(UtilsLD.UserAgent, UtilsLD.Orion)
fhr.headers().set(UtilsLD.FiwareService, UtilsLD.Demo)
fhr.headers().set(UtilsLD.FiwareServicePath, UtilsLD.Test)
fhr
}
@Test def correctNotification: Unit = {
val sc = new DummySourceContextLD()
val ohh = new OrionHttpHandlerLD(sc)
val req = createMockFullHttpRequest()
val mockCtx = mock(classOf[ChannelHandlerContext])
// ohh.channelRead(mockCtx, req)
    val res = ohh.parseMessage(req)
Assert.assertTrue(res.isInstanceOf[NgsiEventLD])
}
@Test
def incorrectNotification: Unit = {
val sc = new DummySourceContextLD()
val ohh = new OrionHttpHandlerLD(sc)
val req = createMockFullHttpRequest("{}")
val mockCtx = mock(classOf[ChannelHandlerContext])
// ohh.channelRead(mockCtx, req)
    val res = ohh.parseMessage(req)
Assert.assertNull(res)
}
@Test(expected=classOf[java.lang.Exception])
def getNotification: Unit = {
val sc = new DummySourceContextLD()
val ohh = new OrionHttpHandlerLD(sc)
val req = createMockFullHttpRequestGet()
val mockCtx = mock(classOf[ChannelHandlerContext])
ohh.channelRead(mockCtx, req)
}
@Test def postNotification: Unit = {
val sc = new DummySourceContextLD()
val ohh = new OrionHttpHandlerLD(sc)
val req = createMockFullHttpRequest()
val mockCtx = mock(classOf[ChannelHandlerContext])
ohh.channelRead(mockCtx, req)
}
@Test def buildHttpPostSinkEntity : Unit = {
val os = OrionSinkObject(UtilsLD.Content, UtilsLD.OrionAddress, ContentType.Plain, HTTPMethod.POST)
val httpMsg = OrionSink.createHttpMsg(os)
val content = scala.io.Source.fromInputStream(httpMsg.getEntity.getContent).mkString
Assert.assertEquals(httpMsg.getHeaders(UtilsLD.ContentType)(0).getValue, ContentType.Plain.toString())
Assert.assertEquals(httpMsg.getMethod(), "POST")
Assert.assertEquals(content, UtilsLD.Content)
}
@Test def buildHttpPutSinkEntity : Unit = {
val os = OrionSinkObject(UtilsLD.Content, UtilsLD.OrionAddress, ContentType.JSON, HTTPMethod.PUT)
val httpMsg = OrionSink.createHttpMsg(os)
val content = scala.io.Source.fromInputStream(httpMsg.getEntity.getContent).mkString
Assert.assertEquals(httpMsg.getHeaders(UtilsLD.ContentType)(0).getValue, ContentType.JSON.toString())
Assert.assertEquals(httpMsg.getMethod(), "PUT")
Assert.assertEquals(content, UtilsLD.Content)
}
@Test def buildHttpPatchSinkEntity : Unit = {
val os = OrionSinkObject(UtilsLD.Content, UtilsLD.OrionAddress, ContentType.JSON, HTTPMethod.PATCH)
val httpMsg = OrionSink.createHttpMsg(os)
val content = scala.io.Source.fromInputStream(httpMsg.getEntity.getContent).mkString
Assert.assertEquals(httpMsg.getHeaders(UtilsLD.ContentType)(0).getValue, ContentType.JSON.toString())
Assert.assertEquals(httpMsg.getMethod(), "PATCH")
Assert.assertEquals(content, UtilsLD.Content)
}
@Test def getHTTPMethod : Unit = {
Assert.assertTrue(OrionSink.getMethod(HTTPMethod.POST,"").isInstanceOf[HttpPost])
Assert.assertTrue(OrionSink.getMethod(HTTPMethod.PUT,"").isInstanceOf[HttpPut])
Assert.assertTrue(OrionSink.getMethod(HTTPMethod.PATCH,"").isInstanceOf[HttpPatch])
}
@Test (expected=classOf[java.lang.Exception]) def nettyServerCallbackUrl : Unit = {
val sc = new DummySourceContextLD()
val os = new OrionHttpServerLD(sc)
Assert.assertEquals(os.startNettyServer(UtilsLD.Port,Some("http://callback")).getPort(),UtilsLD.Port)
}
@Test def nettyServerNoCallbackUrl : Unit = {
val sc = new DummySourceContextLD()
val os : OrionHttpServerLD = new OrionHttpServerLD(sc)
new Thread(new Runnable {
def run() {
Thread.sleep(UtilsLD.SleepTime)
os.close()
}
}).run()
    val currentAddr: InetSocketAddress = os.startNettyServer(UtilsLD.Port, None)
Assert.assertEquals(currentAddr.getPort(), UtilsLD.Port)
}
@Test def orionSource() : Unit = {
    run(() => FlinkJobTestLD.main(Array()))
Thread.sleep(UtilsLD.SleepTime*2)
for ( x <- 0 to 10){
val json = simulatedNotificationLD.notification(10*x,x).toString
sendPostRequest(UtilsLD.OtherUrl,json)
Thread.sleep(UtilsLD.SleepTimeShort*2)
}
Thread.sleep(UtilsLD.SleepTimeShort)
Assert.assertEquals(simulatedNotificationLD.maxTempVal,100*1,0)
Assert.assertEquals(simulatedNotificationLD.maxPresVal,10*1,0)
}
@Test def orionSourceBadRequest() : Unit = {
    run(() => FlinkJobTestLD.main(Array()))
Thread.sleep(UtilsLD.SleepTime)
val originalValue = simulatedNotificationLD.maxTempVal
for ( x <- 0 to 10){
sendPostRequest(UtilsLD.OtherUrl,UtilsLD.BadContent)
Thread.sleep(UtilsLD.SleepTimeShort)
}
Thread.sleep(UtilsLD.SleepTimeShort)
Assert.assertEquals(simulatedNotificationLD.maxTempVal,originalValue,0)
}
}
| Fiware/context.Cosmos | src/test/scala/org.fiware.cosmos.orion.flink.connector.tests/OrionConnectorTestLD.scala | Scala | agpl-3.0 | 7,262 |
package com.github.gigurra.math
import org.scalatest._
import org.scalatest.mock._
import scala.language.postfixOps
class Tuple2VecSpec
extends WordSpec
with MockitoSugar
with Matchers
with OneInstancePerTest {
"Tuple2Vec" should {
"Convert tuple2 to vec2" in {
val v2: Vec2 = (1.0f, 2.0f)
v2 shouldBe Vec2(1.0f, 2.0f)
}
"Convert tuple3 to vec3" in {
val v3: Vec3 = (1.0f, 2.0f, 3.0f)
v3 shouldBe Vec3(1.0f, 2.0f, 3.0f)
}
"Convert tuple4 to vec4" in {
val v4: Vec4 = (1.0f, 2.0f, 3.0f, 4.0f)
v4 shouldBe Vec4(1.0f, 2.0f, 3.0f, 4.0f)
}
}
}
| GiGurra/scala-libgurra | src/test/scala/com/github/gigurra/math/Tuple2VecSpec.scala | Scala | mit | 619 |
/* Copyright 2014 Ireneusz Stawarczyk
*
* This file is part of scala-fractals.
*
* scala-fractals is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* scala-fractals is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with scala-fractals. If not, see <http://www.gnu.org/licenses/>.
*/
package edu.scala.fractal.mandelbrot
import java.awt.Color
import scala.math._
object ColoringAlgorithm {
  /** Traditional continuous coloring implementation. Gives better-looking fractals.
*/
def continuousRgb(escapeValues : (Double, Double, Int), maxIterations : Int) : Int = {
escapeValues match {
case (re, im, nrOfIterations) =>
if (maxIterations == nrOfIterations) Color.BLACK.getRGB()
else {
val distance = sqrt(re * re + im * im)
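        // Smooth (normalized) iteration count: n + 1 - log(log|z|) / log 2 removes the
        // banding produced by the integer escape count; dividing by maxIterations maps the
        // value into roughly [0, 1] so it can drive the hue component below.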
val color = (nrOfIterations + 1 - log(log(distance))/log(2)).toFloat / maxIterations
Color.HSBtoRGB(0.60f + 10 * color, 0.7f, 0.7f)
}
}
}
  /** Traditional approach: based on the number of iterations it takes to escape from the
   * fractal set, we determine the RGB colour of the screen point. Additionally, the HSV model
   * (HSB in Java) is used to achieve continuous colours.
   *
   * For points that reach the maximum number of iterations we assume they are part of the set
   * and draw them with black colour.
*/
def escapeTimeRgb(nrOfIterations : Int, maxIterations : Int) : Int = {
if (maxIterations == nrOfIterations) Color.BLACK.getRGB()
else Color.HSBtoRGB((nrOfIterations.toFloat / maxIterations), 0.7f, 0.7f)
}
} | irek-stawarczyk/scala-fractals | src/main/scala/edu/scala/fractal/mandelbrot/ColoringAlgorithm.scala | Scala | gpl-3.0 | 1,996 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package filters
import javax.inject._
import akka.stream.Materializer
import play.api.mvc.{Filter, RequestHeader, Result, Results}
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import scala.util.{Success, Try}
import cmwell.ws.Settings._
import trafficshaping._
/**
* Created by michael on 6/29/16.
*/
class TrafficShapingFilter @Inject()(implicit override val mat: Materializer, ec: ExecutionContext) extends Filter {
def reqType(req: RequestHeader): String = {
val opOpt = req.getQueryString("op")
val segs = req.path.split("/")
val path = Try(req.path.split("/")(1))
(opOpt, path) match {
case (Some(op), _) => op
case (None, Success(p)) if p.startsWith("_") => p
case _ => "get"
}
}
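  // Examples (sketch): "/foo?op=search" -> "search"; "/_in" -> "_in"; "/foo/bar" -> "get".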
def collectData(resultFuture: Future[Result], ip: String, requestType: String, startTime: Long): Unit = {
if (ip != "127.0.0.1") {
resultFuture.foreach { r =>
val requestDuration = System.currentTimeMillis() - startTime
TrafficShaper.addRequest(ip, requestType, requestDuration)
}
}
}
def collectData(ip: String, requestType: String, startTime: Long): Unit = {
if (ip != "127.0.0.1") {
val requestDuration = System.currentTimeMillis() - startTime
TrafficShaper.addRequest(ip, requestType, requestDuration)
}
}
  def needsTrafficShaping(ip: String, requestType: String): Boolean = {
val untrackedRequests = Vector("_in", "_ow")
!untrackedRequests.contains(requestType)
}
override def apply(next: (RequestHeader) => Future[Result])(request: RequestHeader): Future[Result] = {
import Math._
val ip = request.attrs(Attrs.UserIP)
lazy val resultFuture = next(request)
val startTime = request.attrs(Attrs.RequestReceivedTimestamp)
val maxDurationMillis = maxRequestTimeSec * 1000
val penalty = TrafficShaper.penalty(ip)
val requestType = reqType(request)
    if (TrafficShaper.isEnabled && needsTrafficShaping(ip, requestType))
penalty match {
case NoPenalty =>
collectData(resultFuture, ip, requestType, startTime)
resultFuture
case DelayPenalty =>
collectData(resultFuture, ip, requestType, startTime)
resultFuture.flatMap { res =>
val currentTime = System.currentTimeMillis()
val reqDurationMillis = currentTime - startTime
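          // Worked example (hypothetical numbers): with maxDurationMillis = 10000, a request
          // that took 2000 ms gets penalty = min(2000, 8000).max(0) = 2000 ms. The delay
          // mirrors the request's own duration up to half the budget, then shrinks, reaching
          // 0 once the request has consumed the entire budget.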
val penalty = min(reqDurationMillis, maxDurationMillis - reqDurationMillis).max(0)
cmwell.util.concurrent.delayedTask(penalty.millis) { res }
}
case FullBlockPenalty =>
cmwell.util.concurrent.delayedTask(maxDurationMillis.millis) {
collectData(ip, requestType, startTime)
Results.ServiceUnavailable("Please reduce the amount of requests")
}
} else
resultFuture
}
}
| dudi3001/CM-Well | server/cmwell-ws/app/filters/TrafficShapingFilter.scala | Scala | apache-2.0 | 3,555 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.streaming.dstream
import spark.streaming.{Duration, DStream, Time}
import spark.RDD
import spark.SparkContext._
private[streaming]
class FlatMapValuedDStream[K: ClassManifest, V: ClassManifest, U: ClassManifest](
parent: DStream[(K, V)],
flatMapValueFunc: V => TraversableOnce[U]
) extends DStream[(K, U)](parent.ssc) {
override def dependencies = List(parent)
override def slideDuration: Duration = parent.slideDuration
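  // Example (sketch): if the parent batch contains ("k", "a b") and
  // flatMapValueFunc = (v: String) => v.split(" "), compute() yields an RDD with
  // ("k", "a") and ("k", "b"); the key is preserved while each value is expanded.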
override def compute(validTime: Time): Option[RDD[(K, U)]] = {
parent.getOrCompute(validTime).map(_.flatMapValues[U](flatMapValueFunc))
}
}
| wgpshashank/spark | streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala | Scala | apache-2.0 | 1,404 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.time
import org.scalatest._
import scala.concurrent.duration._
class DurationSpec extends FunSpec with Matchers {
import scala.language.postfixOps
def span(passed: Span): Span = passed
def duration(passed: Duration): Duration = passed
def finiteDuration(passed: FiniteDuration): FiniteDuration = passed
describe("A Span") {
it("can be specified with a finite scala.concurrent.Duration via an implicit conversion") {
span(100 millis) shouldEqual Span(100, Millis)
span(100 nanos) shouldEqual Span(100, Nanoseconds)
}
it("can be specified with an infinite scala.concurrent.Duration via an implicit conversion") {
span(Duration.Inf) shouldEqual Span.Max
span(Duration.MinusInf) shouldEqual Span.Zero
}
it("can be specified with an undefined scala.concurrent.Duration via an implicit conversion") {
span(Duration.Undefined) shouldEqual Span.Max
}
}
describe("A Duration") {
it("can be specified with a Span via an implicit conversion") {
duration(Span(100, Millis)) shouldEqual (100 millis)
duration(Span(100, Nanoseconds)) shouldEqual (100 nanos)
}
}
describe("A FiniteDuration") {
it("can be specified with a Span via an implicit conversion") {
finiteDuration(Span(100, Millis)) shouldEqual (100 millis)
finiteDuration(Span(100, Nanoseconds)) shouldEqual (100 nanos)
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/time/DurationSpec.scala | Scala | apache-2.0 | 2,014 |
package com.originate.scalypher
import com.originate.scalypher.util.Exceptions.CharacterNotAllowedInLabel
import scala.language.implicitConversions
case class Label(name: String) {
def toQuery: String =
s":$name"
def escapedName: String =
if (name contains "`") throw new CharacterNotAllowedInLabel('`', name)
else if (name contains " ") s"`$name`"
else name
}
object Label {
implicit def stringToLabel(string: String): Label =
Label(string)
}
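// Examples (sketch): Label("Person").toQuery == ":Person";
// Label("My Label").escapedName == "`My Label`"; a name containing a backtick
// makes escapedName throw CharacterNotAllowedInLabel.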
| Originate/scalypher | src/main/scala/Label.scala | Scala | mit | 475 |
package com.dwolla.cloudflare.domain.model
import com.dwolla.cloudflare.domain.dto.ResponseInfoDTO
import org.http4s.Status
object Exceptions {
private def unexpectedCloudflareErrorExceptionMessage(errors: List[Error], messages: List[Message]): String = {
val base =
s"""An unexpected Cloudflare error occurred. Errors:
|
| - ${errors.mkString("\\n - ")}
| """.stripMargin
if (messages.nonEmpty)
base ++
s"""
| Messages:
|
| - ${messages.mkString("\\n - ")}
|""".stripMargin
else base
}
case class UnexpectedCloudflareErrorException(errors: List[Error], messages: List[Message] = List.empty) extends RuntimeException(unexpectedCloudflareErrorExceptionMessage(errors, messages))
case class UnexpectedCloudflareResponseStatus(status: Status) extends RuntimeException(s"Received $status response from Cloudflare, but don't know how to handle it")
case object RecordAlreadyExists extends RuntimeException("Cloudflare already has a matching record, and refuses to create a new one.")
case class AccessDenied(errorChain: List[ResponseInfoDTO] = List.empty)
extends RuntimeException(s"The given credentials were invalid${
if (errorChain.nonEmpty)
errorChain
.map(ResponseInfoDTO.unapply)
.map(e => s" - ${e.getOrElse("None")}")
.mkString("\\n\\n See the following errors:\\n", "\\n", "\\n")
else ""
}")
}
| Dwolla/scala-cloudflare | client/src/main/scala/com/dwolla/cloudflare/domain/model/Exceptions.scala | Scala | mit | 1,479 |
class T8679 {
def foo1(): Int = 3
def foo2(): Int = 4
def foo3(): Int = 5
def foo4(): Int = 6
def foo5(): Int = 7
def foo6(): Int = 8
def foo7(): Int = 9
def foo8(): Int = 10
def foo9(): Int = 11
def foo10(): Int = 12
def foo11(): Int = 13
def foo12(): Int = 14
def foo13(): Int = 15
def foo14(): Int = 16
def foo15(): Int = 17
def foo16(): Int = 18
def foo17(): Int = 19
def foo18(): Int = 20
def foo19(): Int = 21
def foo20(): Int = 22
def foo21(): Int = 23
def foo22(): Int = 24
def foo23(): Int = 25
def foo24(): Int = 26
def foo25(): Int = 27
def foo26(): Int = 28
def foo27(): Int = 29
def foo28(): Int = 30
def foo29(): Int = 31
def foo30(): Int = 32
def foo31(): Int = 33
def foo32(): Int = 34
def foo33(): Int = 35
def foo34(): Int = 36
def foo35(): Int = 37
def foo36(): Int = 38
def foo37(): Int = 39
def foo38(): Int = 40
def foo39(): Int = 41
def foo40(): Int = 42
def foo41(): Int = 43
def foo42(): Int = 44
def foo43(): Int = 45
def foo44(): Int = 46
def foo45(): Int = 47
def foo46(): Int = 48
def foo47(): Int = 49
def foo48(): Int = 50
def foo49(): Int = 51
def foo50(): Int = 52
def foo51(): Int = 53
def foo52(): Int = 54
def foo53(): Int = 55
def foo54(): Int = 56
def foo55(): Int = 57
def foo56(): Int = 58
def foo57(): Int = 59
def foo58(): Int = 60
def foo59(): Int = 61
def foo60(): Int = 62
def foo61(): Int = 63
def foo62(): Int = 64
def foo63(): Int = 65
def foo64(): Int = 66
def foo65(): Int = 67
def foo66(): Int = 68
def foo67(): Int = 69
def foo68(): Int = 70
def foo69(): Int = 71
def foo70(): Int = 72
def foo71(): Int = 73
def foo72(): Int = 74
def foo73(): Int = 75
def foo74(): Int = 76
def foo75(): Int = 77
def foo76(): Int = 78
def foo77(): Int = 79
def foo78(): Int = 80
def foo79(): Int = 81
def foo80(): Int = 82
def foo81(): Int = 83
def foo82(): Int = 84
def foo83(): Int = 85
def foo84(): Int = 86
def foo85(): Int = 87
def foo86(): Int = 88
def foo87(): Int = 89
def foo88(): Int = 90
def foo89(): Int = 91
def foo90(): Int = 92
def foo91(): Int = 93
def foo92(): Int = 94
def foo93(): Int = 95
def foo94(): Int = 96
def foo95(): Int = 97
def foo96(): Int = 98
def foo97(): Int = 99
def foo98(): Int = 100
def foo99(): Int = 101
def foo100(): Int = 102
def foo101(): Int = 103
def foo102(): Int = 104
def foo103(): Int = 105
def foo104(): Int = 106
def foo105(): Int = 107
def foo106(): Int = 108
def foo107(): Int = 109
def foo108(): Int = 110
def foo109(): Int = 111
def foo110(): Int = 112
def foo111(): Int = 113
def foo112(): Int = 114
def foo113(): Int = 115
def foo114(): Int = 116
def foo115(): Int = 117
def foo116(): Int = 118
def foo117(): Int = 119
def foo118(): Int = 120
def foo119(): Int = 121
def foo120(): Int = 122
def foo121(): Int = 123
def foo122(): Int = 124
def foo123(): Int = 125
def foo124(): Int = 126
def foo125(): Int = 127
def foo126(): Int = 128
def foo127(): Int = 129
def foo128(): Int = 130
def foo129(): Int = 131
def foo130(): Int = 132
def foo131(): Int = 133
def foo132(): Int = 134
def foo133(): Int = 135
def foo134(): Int = 136
def foo135(): Int = 137
def foo136(): Int = 138
def foo137(): Int = 139
def foo138(): Int = 140
def foo139(): Int = 141
def foo140(): Int = 142
def foo141(): Int = 143
def foo142(): Int = 144
def foo143(): Int = 145
def foo144(): Int = 146
def foo145(): Int = 147
def foo146(): Int = 148
def foo147(): Int = 149
def foo148(): Int = 150
def foo149(): Int = 151
def foo150(): Int = 152
def foo151(): Int = 153
def foo152(): Int = 154
def foo153(): Int = 155
def foo154(): Int = 156
def foo155(): Int = 157
def foo156(): Int = 158
def foo157(): Int = 159
def foo158(): Int = 160
def foo159(): Int = 161
def foo160(): Int = 162
def foo161(): Int = 163
def foo162(): Int = 164
def foo163(): Int = 165
def foo164(): Int = 166
def foo165(): Int = 167
def foo166(): Int = 168
def foo167(): Int = 169
def foo168(): Int = 170
def foo169(): Int = 171
def foo170(): Int = 172
def foo171(): Int = 173
def foo172(): Int = 174
def foo173(): Int = 175
def foo174(): Int = 176
def foo175(): Int = 177
def foo176(): Int = 178
def foo177(): Int = 179
def foo178(): Int = 180
def foo179(): Int = 181
def foo180(): Int = 182
def foo181(): Int = 183
def foo182(): Int = 184
def foo183(): Int = 185
def foo184(): Int = 186
def foo185(): Int = 187
def foo186(): Int = 188
def foo187(): Int = 189
def foo188(): Int = 190
def foo189(): Int = 191
def foo190(): Int = 192
def foo191(): Int = 193
def foo192(): Int = 194
def foo193(): Int = 195
def foo194(): Int = 196
def foo195(): Int = 197
def foo196(): Int = 198
def foo197(): Int = 199
def foo198(): Int = 200
def foo199(): Int = 201
def foo200(): Int = 202
def foo201(): Int = 203
def foo202(): Int = 204
def foo203(): Int = 205
def foo204(): Int = 206
def foo205(): Int = 207
def foo206(): Int = 208
def foo207(): Int = 209
def foo208(): Int = 210
def foo209(): Int = 211
def foo210(): Int = 212
def foo211(): Int = 213
def foo212(): Int = 214
def foo213(): Int = 215
def foo214(): Int = 216
def foo215(): Int = 217
def foo216(): Int = 218
def foo217(): Int = 219
def foo218(): Int = 220
def foo219(): Int = 221
def foo220(): Int = 222
def foo221(): Int = 223
def foo222(): Int = 224
def foo223(): Int = 225
def foo224(): Int = 226
def foo225(): Int = 227
def foo226(): Int = 228
def foo227(): Int = 229
def foo228(): Int = 230
def foo229(): Int = 231
def foo230(): Int = 232
def foo231(): Int = 233
def foo232(): Int = 234
def foo233(): Int = 235
def foo234(): Int = 236
def foo235(): Int = 237
def foo236(): Int = 238
def foo237(): Int = 239
def foo238(): Int = 240
def foo239(): Int = 241
def foo240(): Int = 242
def foo241(): Int = 243
def foo242(): Int = 244
def foo243(): Int = 245
def foo244(): Int = 246
def foo245(): Int = 247
def foo246(): Int = 248
def foo247(): Int = 249
def foo248(): Int = 250
def foo249(): Int = 251
def foo250(): Int = 252
def foo251(): Int = 253
def foo252(): Int = 254
def foo253(): Int = 255
def foo254(): Int = 256
def foo255(): Int = 257
def foo256(): Int = 258
def foo257(): Int = 259
def foo258(): Int = 260
def foo259(): Int = 261
def foo260(): Int = 262
def foo261(): Int = 263
def foo262(): Int = 264
def foo263(): Int = 265
def foo264(): Int = 266
def foo265(): Int = 267
def foo266(): Int = 268
def foo267(): Int = 269
def foo268(): Int = 270
def foo269(): Int = 271
def foo270(): Int = 272
def foo271(): Int = 273
def foo272(): Int = 274
def foo273(): Int = 275
def foo274(): Int = 276
def foo275(): Int = 277
def foo276(): Int = 278
def foo277(): Int = 279
def foo278(): Int = 280
def foo279(): Int = 281
def foo280(): Int = 282
def foo281(): Int = 283
def foo282(): Int = 284
def foo283(): Int = 285
def foo284(): Int = 286
def foo285(): Int = 287
def foo286(): Int = 288
def foo287(): Int = 289
def foo288(): Int = 290
def foo289(): Int = 291
def foo290(): Int = 292
def foo291(): Int = 293
def foo292(): Int = 294
def foo293(): Int = 295
def foo294(): Int = 296
def foo295(): Int = 297
def foo296(): Int = 298
def foo297(): Int = 299
def foo298(): Int = 300
def foo299(): Int = 301
def foo300(): Int = 302
def foo301(): Int = 303
def foo302(): Int = 304
def foo303(): Int = 305
def foo304(): Int = 306
def foo305(): Int = 307
def foo306(): Int = 308
def foo307(): Int = 309
def foo308(): Int = 310
def foo309(): Int = 311
def foo310(): Int = 312
def foo311(): Int = 313
def foo312(): Int = 314
def foo313(): Int = 315
def foo314(): Int = 316
def foo315(): Int = 317
def foo316(): Int = 318
def foo317(): Int = 319
def foo318(): Int = 320
def foo319(): Int = 321
def foo320(): Int = 322
def foo321(): Int = 323
def foo322(): Int = 324
def foo323(): Int = 325
def foo324(): Int = 326
def foo325(): Int = 327
def foo326(): Int = 328
def foo327(): Int = 329
def foo328(): Int = 330
def foo329(): Int = 331
def foo330(): Int = 332
def foo331(): Int = 333
def foo332(): Int = 334
def foo333(): Int = 335
def foo334(): Int = 336
def foo335(): Int = 337
def foo336(): Int = 338
def foo337(): Int = 339
def foo338(): Int = 340
def foo339(): Int = 341
def foo340(): Int = 342
def foo341(): Int = 343
def foo342(): Int = 344
def foo343(): Int = 345
def foo344(): Int = 346
def foo345(): Int = 347
def foo346(): Int = 348
def foo347(): Int = 349
def foo348(): Int = 350
def foo349(): Int = 351
def foo350(): Int = 352
def foo351(): Int = 353
def foo352(): Int = 354
def foo353(): Int = 355
def foo354(): Int = 356
def foo355(): Int = 357
def foo356(): Int = 358
def foo357(): Int = 359
def foo358(): Int = 360
def foo359(): Int = 361
def foo360(): Int = 362
def foo361(): Int = 363
def foo362(): Int = 364
def foo363(): Int = 365
def foo364(): Int = 366
def foo365(): Int = 367
def foo366(): Int = 368
def foo367(): Int = 369
def foo368(): Int = 370
def foo369(): Int = 371
def foo370(): Int = 372
def foo371(): Int = 373
def foo372(): Int = 374
def foo373(): Int = 375
def foo374(): Int = 376
def foo375(): Int = 377
def foo376(): Int = 378
def foo377(): Int = 379
def foo378(): Int = 380
def foo379(): Int = 381
def foo380(): Int = 382
def foo381(): Int = 383
def foo382(): Int = 384
def foo383(): Int = 385
def foo384(): Int = 386
def foo385(): Int = 387
def foo386(): Int = 388
def foo387(): Int = 389
def foo388(): Int = 390
def foo389(): Int = 391
def foo390(): Int = 392
def foo391(): Int = 393
def foo392(): Int = 394
def foo393(): Int = 395
def foo394(): Int = 396
def foo395(): Int = 397
def foo396(): Int = 398
def foo397(): Int = 399
def foo398(): Int = 400
def foo399(): Int = 401
def foo400(): Int = 402
def foo401(): Int = 403
def foo402(): Int = 404
def foo403(): Int = 405
def foo404(): Int = 406
def foo405(): Int = 407
def foo406(): Int = 408
def foo407(): Int = 409
def foo408(): Int = 410
def foo409(): Int = 411
def foo410(): Int = 412
def foo411(): Int = 413
def foo412(): Int = 414
def foo413(): Int = 415
def foo414(): Int = 416
def foo415(): Int = 417
def foo416(): Int = 418
def foo417(): Int = 419
def foo418(): Int = 420
def foo419(): Int = 421
def foo420(): Int = 422
def foo421(): Int = 423
def foo422(): Int = 424
def foo423(): Int = 425
def foo424(): Int = 426
def foo425(): Int = 427
def foo426(): Int = 428
def foo427(): Int = 429
def foo428(): Int = 430
def foo429(): Int = 431
def foo430(): Int = 432
def foo431(): Int = 433
def foo432(): Int = 434
def foo433(): Int = 435
def foo434(): Int = 436
def foo435(): Int = 437
def foo436(): Int = 438
def foo437(): Int = 439
def foo438(): Int = 440
def foo439(): Int = 441
def foo440(): Int = 442
def foo441(): Int = 443
def foo442(): Int = 444
def foo443(): Int = 445
def foo444(): Int = 446
def foo445(): Int = 447
def foo446(): Int = 448
def foo447(): Int = 449
def foo448(): Int = 450
def foo449(): Int = 451
def foo450(): Int = 452
def foo451(): Int = 453
def foo452(): Int = 454
def foo453(): Int = 455
def foo454(): Int = 456
def foo455(): Int = 457
def foo456(): Int = 458
def foo457(): Int = 459
def foo458(): Int = 460
def foo459(): Int = 461
def foo460(): Int = 462
def foo461(): Int = 463
def foo462(): Int = 464
def foo463(): Int = 465
def foo464(): Int = 466
def foo465(): Int = 467
def foo466(): Int = 468
def foo467(): Int = 469
def foo468(): Int = 470
def foo469(): Int = 471
def foo470(): Int = 472
def foo471(): Int = 473
def foo472(): Int = 474
def foo473(): Int = 475
def foo474(): Int = 476
def foo475(): Int = 477
def foo476(): Int = 478
def foo477(): Int = 479
def foo478(): Int = 480
def foo479(): Int = 481
def foo480(): Int = 482
def foo481(): Int = 483
def foo482(): Int = 484
def foo483(): Int = 485
def foo484(): Int = 486
def foo485(): Int = 487
def foo486(): Int = 488
def foo487(): Int = 489
def foo488(): Int = 490
def foo489(): Int = 491
def foo490(): Int = 492
def foo491(): Int = 493
def foo492(): Int = 494
def foo493(): Int = 495
def foo494(): Int = 496
def foo495(): Int = 497
def foo496(): Int = 498
def foo497(): Int = 499
def foo498(): Int = 500
def foo499(): Int = 501
def foo500(): Int = 502
def foo501(): Int = 503
def foo502(): Int = 504
def foo503(): Int = 505
def foo504(): Int = 506
def foo505(): Int = 507
def foo506(): Int = 508
def foo507(): Int = 509
def foo508(): Int = 510
def foo509(): Int = 511
def foo510(): Int = 512
def foo511(): Int = 513
def foo512(): Int = 514
def foo513(): Int = 515
def foo514(): Int = 516
def foo515(): Int = 517
def foo516(): Int = 518
def foo517(): Int = 519
def foo518(): Int = 520
def foo519(): Int = 521
def foo520(): Int = 522
def foo521(): Int = 523
def foo522(): Int = 524
def foo523(): Int = 525
def foo524(): Int = 526
def foo525(): Int = 527
def foo526(): Int = 528
def foo527(): Int = 529
def foo528(): Int = 530
def foo529(): Int = 531
def foo530(): Int = 532
def foo531(): Int = 533
def foo532(): Int = 534
def foo533(): Int = 535
def foo534(): Int = 536
def foo535(): Int = 537
def foo536(): Int = 538
def foo537(): Int = 539
def foo538(): Int = 540
def foo539(): Int = 541
def foo540(): Int = 542
def foo541(): Int = 543
def foo542(): Int = 544
def foo543(): Int = 545
def foo544(): Int = 546
def foo545(): Int = 547
def foo546(): Int = 548
def foo547(): Int = 549
def foo548(): Int = 550
def foo549(): Int = 551
def foo550(): Int = 552
def foo551(): Int = 553
def foo552(): Int = 554
def foo553(): Int = 555
def foo554(): Int = 556
def foo555(): Int = 557
def foo556(): Int = 558
def foo557(): Int = 559
def foo558(): Int = 560
def foo559(): Int = 561
def foo560(): Int = 562
def foo561(): Int = 563
def foo562(): Int = 564
def foo563(): Int = 565
def foo564(): Int = 566
def foo565(): Int = 567
def foo566(): Int = 568
def foo567(): Int = 569
def foo568(): Int = 570
def foo569(): Int = 571
def foo570(): Int = 572
def foo571(): Int = 573
def foo572(): Int = 574
def foo573(): Int = 575
def foo574(): Int = 576
def foo575(): Int = 577
def foo576(): Int = 578
def foo577(): Int = 579
def foo578(): Int = 580
def foo579(): Int = 581
def foo580(): Int = 582
def foo581(): Int = 583
def foo582(): Int = 584
def foo583(): Int = 585
def foo584(): Int = 586
def foo585(): Int = 587
def foo586(): Int = 588
def foo587(): Int = 589
def foo588(): Int = 590
def foo589(): Int = 591
def foo590(): Int = 592
def foo591(): Int = 593
def foo592(): Int = 594
def foo593(): Int = 595
def foo594(): Int = 596
def foo595(): Int = 597
def foo596(): Int = 598
def foo597(): Int = 599
def foo598(): Int = 600
def foo599(): Int = 601
def foo600(): Int = 602
def foo601(): Int = 603
def foo602(): Int = 604
def foo603(): Int = 605
def foo604(): Int = 606
def foo605(): Int = 607
def foo606(): Int = 608
def foo607(): Int = 609
def foo608(): Int = 610
def foo609(): Int = 611
def foo610(): Int = 612
def foo611(): Int = 613
def foo612(): Int = 614
def foo613(): Int = 615
def foo614(): Int = 616
def foo615(): Int = 617
def foo616(): Int = 618
def foo617(): Int = 619
def foo618(): Int = 620
def foo619(): Int = 621
def foo620(): Int = 622
def foo621(): Int = 623
def foo622(): Int = 624
def foo623(): Int = 625
def foo624(): Int = 626
def foo625(): Int = 627
def foo626(): Int = 628
def foo627(): Int = 629
def foo628(): Int = 630
def foo629(): Int = 631
def foo630(): Int = 632
def foo631(): Int = 633
def foo632(): Int = 634
def foo633(): Int = 635
def foo634(): Int = 636
def foo635(): Int = 637
def foo636(): Int = 638
def foo637(): Int = 639
def foo638(): Int = 640
def foo639(): Int = 641
def foo640(): Int = 642
def foo641(): Int = 643
def foo642(): Int = 644
def foo643(): Int = 645
def foo644(): Int = 646
def foo645(): Int = 647
def foo646(): Int = 648
def foo647(): Int = 649
def foo648(): Int = 650
def foo649(): Int = 651
def foo650(): Int = 652
def foo651(): Int = 653
def foo652(): Int = 654
def foo653(): Int = 655
def foo654(): Int = 656
def foo655(): Int = 657
def foo656(): Int = 658
def foo657(): Int = 659
def foo658(): Int = 660
def foo659(): Int = 661
def foo660(): Int = 662
def foo661(): Int = 663
def foo662(): Int = 664
def foo663(): Int = 665
def foo664(): Int = 666
def foo665(): Int = 667
def foo666(): Int = 668
def foo667(): Int = 669
def foo668(): Int = 670
def foo669(): Int = 671
def foo670(): Int = 672
def foo671(): Int = 673
def foo672(): Int = 674
def foo673(): Int = 675
def foo674(): Int = 676
def foo675(): Int = 677
def foo676(): Int = 678
def foo677(): Int = 679
def foo678(): Int = 680
def foo679(): Int = 681
def foo680(): Int = 682
def foo681(): Int = 683
def foo682(): Int = 684
def foo683(): Int = 685
def foo684(): Int = 686
def foo685(): Int = 687
def foo686(): Int = 688
def foo687(): Int = 689
def foo688(): Int = 690
def foo689(): Int = 691
def foo690(): Int = 692
def foo691(): Int = 693
def foo692(): Int = 694
def foo693(): Int = 695
def foo694(): Int = 696
def foo695(): Int = 697
def foo696(): Int = 698
def foo697(): Int = 699
def foo698(): Int = 700
def foo699(): Int = 701
def foo700(): Int = 702
def foo701(): Int = 703
def foo702(): Int = 704
def foo703(): Int = 705
def foo704(): Int = 706
def foo705(): Int = 707
def foo706(): Int = 708
def foo707(): Int = 709
def foo708(): Int = 710
def foo709(): Int = 711
def foo710(): Int = 712
def foo711(): Int = 713
def foo712(): Int = 714
def foo713(): Int = 715
def foo714(): Int = 716
def foo715(): Int = 717
def foo716(): Int = 718
def foo717(): Int = 719
def foo718(): Int = 720
def foo719(): Int = 721
def foo720(): Int = 722
def foo721(): Int = 723
def foo722(): Int = 724
def foo723(): Int = 725
def foo724(): Int = 726
def foo725(): Int = 727
def foo726(): Int = 728
def foo727(): Int = 729
def foo728(): Int = 730
def foo729(): Int = 731
def foo730(): Int = 732
def foo731(): Int = 733
def foo732(): Int = 734
def foo733(): Int = 735
def foo734(): Int = 736
def foo735(): Int = 737
def foo736(): Int = 738
def foo737(): Int = 739
def foo738(): Int = 740
def foo739(): Int = 741
def foo740(): Int = 742
def foo741(): Int = 743
def foo742(): Int = 744
def foo743(): Int = 745
def foo744(): Int = 746
def foo745(): Int = 747
def foo746(): Int = 748
def foo747(): Int = 749
def foo748(): Int = 750
def foo749(): Int = 751
def foo750(): Int = 752
def foo751(): Int = 753
def foo752(): Int = 754
def foo753(): Int = 755
def foo754(): Int = 756
def foo755(): Int = 757
def foo756(): Int = 758
def foo757(): Int = 759
def foo758(): Int = 760
def foo759(): Int = 761
def foo760(): Int = 762
def foo761(): Int = 763
def foo762(): Int = 764
def foo763(): Int = 765
def foo764(): Int = 766
def foo765(): Int = 767
def foo766(): Int = 768
def foo767(): Int = 769
def foo768(): Int = 770
def foo769(): Int = 771
def foo770(): Int = 772
def foo771(): Int = 773
def foo772(): Int = 774
def foo773(): Int = 775
def foo774(): Int = 776
def foo775(): Int = 777
def foo776(): Int = 778
def foo777(): Int = 779
def foo778(): Int = 780
def foo779(): Int = 781
def foo780(): Int = 782
def foo781(): Int = 783
def foo782(): Int = 784
def foo783(): Int = 785
def foo784(): Int = 786
def foo785(): Int = 787
def foo786(): Int = 788
def foo787(): Int = 789
def foo788(): Int = 790
def foo789(): Int = 791
def foo790(): Int = 792
def foo791(): Int = 793
def foo792(): Int = 794
def foo793(): Int = 795
def foo794(): Int = 796
def foo795(): Int = 797
def foo796(): Int = 798
def foo797(): Int = 799
def foo798(): Int = 800
def foo799(): Int = 801
def foo800(): Int = 802
def foo801(): Int = 803
def foo802(): Int = 804
def foo803(): Int = 805
def foo804(): Int = 806
def foo805(): Int = 807
def foo806(): Int = 808
def foo807(): Int = 809
def foo808(): Int = 810
def foo809(): Int = 811
def foo810(): Int = 812
def foo811(): Int = 813
def foo812(): Int = 814
def foo813(): Int = 815
def foo814(): Int = 816
def foo815(): Int = 817
def foo816(): Int = 818
def foo817(): Int = 819
def foo818(): Int = 820
def foo819(): Int = 821
def foo820(): Int = 822
def foo821(): Int = 823
def foo822(): Int = 824
def foo823(): Int = 825
def foo824(): Int = 826
def foo825(): Int = 827
def foo826(): Int = 828
def foo827(): Int = 829
def foo828(): Int = 830
def foo829(): Int = 831
def foo830(): Int = 832
def foo831(): Int = 833
def foo832(): Int = 834
def foo833(): Int = 835
def foo834(): Int = 836
def foo835(): Int = 837
def foo836(): Int = 838
def foo837(): Int = 839
def foo838(): Int = 840
def foo839(): Int = 841
def foo840(): Int = 842
def foo841(): Int = 843
def foo842(): Int = 844
def foo843(): Int = 845
def foo844(): Int = 846
def foo845(): Int = 847
def foo846(): Int = 848
def foo847(): Int = 849
def foo848(): Int = 850
def foo849(): Int = 851
def foo850(): Int = 852
def foo851(): Int = 853
def foo852(): Int = 854
def foo853(): Int = 855
def foo854(): Int = 856
def foo855(): Int = 857
def foo856(): Int = 858
def foo857(): Int = 859
def foo858(): Int = 860
def foo859(): Int = 861
def foo860(): Int = 862
def foo861(): Int = 863
def foo862(): Int = 864
def foo863(): Int = 865
def foo864(): Int = 866
def foo865(): Int = 867
def foo866(): Int = 868
def foo867(): Int = 869
def foo868(): Int = 870
def foo869(): Int = 871
def foo870(): Int = 872
def foo871(): Int = 873
def foo872(): Int = 874
def foo873(): Int = 875
def foo874(): Int = 876
def foo875(): Int = 877
def foo876(): Int = 878
def foo877(): Int = 879
def foo878(): Int = 880
def foo879(): Int = 881
def foo880(): Int = 882
def foo881(): Int = 883
def foo882(): Int = 884
def foo883(): Int = 885
def foo884(): Int = 886
def foo885(): Int = 887
def foo886(): Int = 888
def foo887(): Int = 889
def foo888(): Int = 890
def foo889(): Int = 891
def foo890(): Int = 892
def foo891(): Int = 893
def foo892(): Int = 894
def foo893(): Int = 895
def foo894(): Int = 896
def foo895(): Int = 897
def foo896(): Int = 898
def foo897(): Int = 899
def foo898(): Int = 900
def foo899(): Int = 901
def foo900(): Int = 902
def foo901(): Int = 903
def foo902(): Int = 904
def foo903(): Int = 905
def foo904(): Int = 906
def foo905(): Int = 907
def foo906(): Int = 908
def foo907(): Int = 909
def foo908(): Int = 910
def foo909(): Int = 911
def foo910(): Int = 912
def foo911(): Int = 913
def foo912(): Int = 914
def foo913(): Int = 915
def foo914(): Int = 916
def foo915(): Int = 917
def foo916(): Int = 918
def foo917(): Int = 919
def foo918(): Int = 920
def foo919(): Int = 921
def foo920(): Int = 922
def foo921(): Int = 923
def foo922(): Int = 924
def foo923(): Int = 925
def foo924(): Int = 926
def foo925(): Int = 927
def foo926(): Int = 928
def foo927(): Int = 929
def foo928(): Int = 930
def foo929(): Int = 931
def foo930(): Int = 932
def foo931(): Int = 933
def foo932(): Int = 934
def foo933(): Int = 935
def foo934(): Int = 936
def foo935(): Int = 937
def foo936(): Int = 938
def foo937(): Int = 939
def foo938(): Int = 940
def foo939(): Int = 941
def foo940(): Int = 942
def foo941(): Int = 943
def foo942(): Int = 944
def foo943(): Int = 945
def foo944(): Int = 946
def foo945(): Int = 947
def foo946(): Int = 948
def foo947(): Int = 949
def foo948(): Int = 950
def foo949(): Int = 951
def foo950(): Int = 952
def foo951(): Int = 953
def foo952(): Int = 954
def foo953(): Int = 955
def foo954(): Int = 956
def foo955(): Int = 957
def foo956(): Int = 958
def foo957(): Int = 959
def foo958(): Int = 960
def foo959(): Int = 961
def foo960(): Int = 962
def foo961(): Int = 963
def foo962(): Int = 964
def foo963(): Int = 965
def foo964(): Int = 966
def foo965(): Int = 967
def foo966(): Int = 968
def foo967(): Int = 969
def foo968(): Int = 970
def foo969(): Int = 971
def foo970(): Int = 972
def foo971(): Int = 973
def foo972(): Int = 974
def foo973(): Int = 975
def foo974(): Int = 976
def foo975(): Int = 977
def foo976(): Int = 978
def foo977(): Int = 979
def foo978(): Int = 980
def foo979(): Int = 981
def foo980(): Int = 982
def foo981(): Int = 983
def foo982(): Int = 984
def foo983(): Int = 985
def foo984(): Int = 986
def foo985(): Int = 987
def foo986(): Int = 988
def foo987(): Int = 989
def foo988(): Int = 990
def foo989(): Int = 991
def foo990(): Int = 992
def foo991(): Int = 993
def foo992(): Int = 994
def foo993(): Int = 995
def foo994(): Int = 996
def foo995(): Int = 997
def foo996(): Int = 998
def foo997(): Int = 999
def foo998(): Int = 1000
def foo999(): Int = 1001
def foo1000(): Int = 1002
def foo1001(): Int = 1003
def foo1002(): Int = 1004
def foo1003(): Int = 1005
def foo1004(): Int = 1006
def foo1005(): Int = 1007
def foo1006(): Int = 1008
def foo1007(): Int = 1009
def foo1008(): Int = 1010
def foo1009(): Int = 1011
def foo1010(): Int = 1012
def foo1011(): Int = 1013
def foo1012(): Int = 1014
def foo1013(): Int = 1015
def foo1014(): Int = 1016
def foo1015(): Int = 1017
def foo1016(): Int = 1018
def foo1017(): Int = 1019
def foo1018(): Int = 1020
def foo1019(): Int = 1021
def foo1020(): Int = 1022
def foo1021(): Int = 1023
def foo1022(): Int = 1024
def foo1023(): Int = 1025
def foo1024(): Int = 1026
def foo1025(): Int = 1027
def foo1026(): Int = 1028
def foo1027(): Int = 1029
def foo1028(): Int = 1030
def foo1029(): Int = 1031
def foo1030(): Int = 1032
def foo1031(): Int = 1033
def foo1032(): Int = 1034
def foo1033(): Int = 1035
def foo1034(): Int = 1036
def foo1035(): Int = 1037
def foo1036(): Int = 1038
def foo1037(): Int = 1039
def foo1038(): Int = 1040
def foo1039(): Int = 1041
def foo1040(): Int = 1042
def foo1041(): Int = 1043
def foo1042(): Int = 1044
def foo1043(): Int = 1045
def foo1044(): Int = 1046
def foo1045(): Int = 1047
def foo1046(): Int = 1048
def foo1047(): Int = 1049
def foo1048(): Int = 1050
def foo1049(): Int = 1051
def foo1050(): Int = 1052
def foo1051(): Int = 1053
def foo1052(): Int = 1054
def foo1053(): Int = 1055
def foo1054(): Int = 1056
def foo1055(): Int = 1057
def foo1056(): Int = 1058
def foo1057(): Int = 1059
def foo1058(): Int = 1060
def foo1059(): Int = 1061
def foo1060(): Int = 1062
def foo1061(): Int = 1063
def foo1062(): Int = 1064
def foo1063(): Int = 1065
def foo1064(): Int = 1066
def foo1065(): Int = 1067
def foo1066(): Int = 1068
def foo1067(): Int = 1069
def foo1068(): Int = 1070
def foo1069(): Int = 1071
def foo1070(): Int = 1072
def foo1071(): Int = 1073
def foo1072(): Int = 1074
def foo1073(): Int = 1075
def foo1074(): Int = 1076
def foo1075(): Int = 1077
def foo1076(): Int = 1078
def foo1077(): Int = 1079
def foo1078(): Int = 1080
def foo1079(): Int = 1081
def foo1080(): Int = 1082
def foo1081(): Int = 1083
def foo1082(): Int = 1084
def foo1083(): Int = 1085
def foo1084(): Int = 1086
def foo1085(): Int = 1087
def foo1086(): Int = 1088
def foo1087(): Int = 1089
def foo1088(): Int = 1090
def foo1089(): Int = 1091
def foo1090(): Int = 1092
def foo1091(): Int = 1093
def foo1092(): Int = 1094
def foo1093(): Int = 1095
def foo1094(): Int = 1096
def foo1095(): Int = 1097
def foo1096(): Int = 1098
def foo1097(): Int = 1099
def foo1098(): Int = 1100
def foo1099(): Int = 1101
def foo1100(): Int = 1102
def foo1101(): Int = 1103
def foo1102(): Int = 1104
def foo1103(): Int = 1105
def foo1104(): Int = 1106
def foo1105(): Int = 1107
def foo1106(): Int = 1108
def foo1107(): Int = 1109
def foo1108(): Int = 1110
def foo1109(): Int = 1111
def foo1110(): Int = 1112
def foo1111(): Int = 1113
def foo1112(): Int = 1114
def foo1113(): Int = 1115
def foo1114(): Int = 1116
def foo1115(): Int = 1117
def foo1116(): Int = 1118
def foo1117(): Int = 1119
def foo1118(): Int = 1120
def foo1119(): Int = 1121
def foo1120(): Int = 1122
def foo1121(): Int = 1123
def foo1122(): Int = 1124
def foo1123(): Int = 1125
def foo1124(): Int = 1126
def foo1125(): Int = 1127
def foo1126(): Int = 1128
def foo1127(): Int = 1129
def foo1128(): Int = 1130
def foo1129(): Int = 1131
def foo1130(): Int = 1132
def foo1131(): Int = 1133
def foo1132(): Int = 1134
def foo1133(): Int = 1135
def foo1134(): Int = 1136
def foo1135(): Int = 1137
def foo1136(): Int = 1138
def foo1137(): Int = 1139
def foo1138(): Int = 1140
def foo1139(): Int = 1141
def foo1140(): Int = 1142
def foo1141(): Int = 1143
def foo1142(): Int = 1144
def foo1143(): Int = 1145
def foo1144(): Int = 1146
def foo1145(): Int = 1147
def foo1146(): Int = 1148
def foo1147(): Int = 1149
def foo1148(): Int = 1150
def foo1149(): Int = 1151
def foo1150(): Int = 1152
def foo1151(): Int = 1153
def foo1152(): Int = 1154
def foo1153(): Int = 1155
def foo1154(): Int = 1156
def foo1155(): Int = 1157
def foo1156(): Int = 1158
def foo1157(): Int = 1159
def foo1158(): Int = 1160
def foo1159(): Int = 1161
def foo1160(): Int = 1162
def foo1161(): Int = 1163
def foo1162(): Int = 1164
def foo1163(): Int = 1165
def foo1164(): Int = 1166
def foo1165(): Int = 1167
def foo1166(): Int = 1168
def foo1167(): Int = 1169
def foo1168(): Int = 1170
def foo1169(): Int = 1171
def foo1170(): Int = 1172
def foo1171(): Int = 1173
def foo1172(): Int = 1174
def foo1173(): Int = 1175
def foo1174(): Int = 1176
def foo1175(): Int = 1177
def foo1176(): Int = 1178
def foo1177(): Int = 1179
def foo1178(): Int = 1180
def foo1179(): Int = 1181
def foo1180(): Int = 1182
def foo1181(): Int = 1183
def foo1182(): Int = 1184
def foo1183(): Int = 1185
def foo1184(): Int = 1186
def foo1185(): Int = 1187
def foo1186(): Int = 1188
def foo1187(): Int = 1189
def foo1188(): Int = 1190
def foo1189(): Int = 1191
def foo1190(): Int = 1192
def foo1191(): Int = 1193
def foo1192(): Int = 1194
def foo1193(): Int = 1195
def foo1194(): Int = 1196
def foo1195(): Int = 1197
def foo1196(): Int = 1198
def foo1197(): Int = 1199
def foo1198(): Int = 1200
def foo1199(): Int = 1201
def foo1200(): Int = 1202
def foo1201(): Int = 1203
def foo1202(): Int = 1204
def foo1203(): Int = 1205
def foo1204(): Int = 1206
def foo1205(): Int = 1207
def foo1206(): Int = 1208
def foo1207(): Int = 1209
def foo1208(): Int = 1210
def foo1209(): Int = 1211
def foo1210(): Int = 1212
def foo1211(): Int = 1213
def foo1212(): Int = 1214
def foo1213(): Int = 1215
def foo1214(): Int = 1216
def foo1215(): Int = 1217
def foo1216(): Int = 1218
def foo1217(): Int = 1219
def foo1218(): Int = 1220
def foo1219(): Int = 1221
def foo1220(): Int = 1222
def foo1221(): Int = 1223
def foo1222(): Int = 1224
def foo1223(): Int = 1225
def foo1224(): Int = 1226
def foo1225(): Int = 1227
def foo1226(): Int = 1228
def foo1227(): Int = 1229
def foo1228(): Int = 1230
def foo1229(): Int = 1231
def foo1230(): Int = 1232
def foo1231(): Int = 1233
def foo1232(): Int = 1234
def foo1233(): Int = 1235
def foo1234(): Int = 1236
def foo1235(): Int = 1237
def foo1236(): Int = 1238
def foo1237(): Int = 1239
def foo1238(): Int = 1240
def foo1239(): Int = 1241
def foo1240(): Int = 1242
def foo1241(): Int = 1243
def foo1242(): Int = 1244
def foo1243(): Int = 1245
def foo1244(): Int = 1246
def foo1245(): Int = 1247
def foo1246(): Int = 1248
def foo1247(): Int = 1249
def foo1248(): Int = 1250
def foo1249(): Int = 1251
def foo1250(): Int = 1252
def foo1251(): Int = 1253
def foo1252(): Int = 1254
def foo1253(): Int = 1255
def foo1254(): Int = 1256
def foo1255(): Int = 1257
def foo1256(): Int = 1258
def foo1257(): Int = 1259
def foo1258(): Int = 1260
def foo1259(): Int = 1261
def foo1260(): Int = 1262
def foo1261(): Int = 1263
def foo1262(): Int = 1264
def foo1263(): Int = 1265
def foo1264(): Int = 1266
def foo1265(): Int = 1267
def foo1266(): Int = 1268
def foo1267(): Int = 1269
def foo1268(): Int = 1270
def foo1269(): Int = 1271
def foo1270(): Int = 1272
def foo1271(): Int = 1273
def foo1272(): Int = 1274
def foo1273(): Int = 1275
def foo1274(): Int = 1276
def foo1275(): Int = 1277
def foo1276(): Int = 1278
def foo1277(): Int = 1279
def foo1278(): Int = 1280
def foo1279(): Int = 1281
def foo1280(): Int = 1282
def foo1281(): Int = 1283
def foo1282(): Int = 1284
def foo1283(): Int = 1285
def foo1284(): Int = 1286
def foo1285(): Int = 1287
def foo1286(): Int = 1288
def foo1287(): Int = 1289
def foo1288(): Int = 1290
def foo1289(): Int = 1291
def foo1290(): Int = 1292
def foo1291(): Int = 1293
def foo1292(): Int = 1294
def foo1293(): Int = 1295
def foo1294(): Int = 1296
def foo1295(): Int = 1297
def foo1296(): Int = 1298
def foo1297(): Int = 1299
def foo1298(): Int = 1300
def foo1299(): Int = 1301
def foo1300(): Int = 1302
def foo1301(): Int = 1303
def foo1302(): Int = 1304
def foo1303(): Int = 1305
def foo1304(): Int = 1306
def foo1305(): Int = 1307
def foo1306(): Int = 1308
def foo1307(): Int = 1309
def foo1308(): Int = 1310
def foo1309(): Int = 1311
def foo1310(): Int = 1312
def foo1311(): Int = 1313
def foo1312(): Int = 1314
def foo1313(): Int = 1315
def foo1314(): Int = 1316
def foo1315(): Int = 1317
def foo1316(): Int = 1318
def foo1317(): Int = 1319
def foo1318(): Int = 1320
def foo1319(): Int = 1321
def foo1320(): Int = 1322
def foo1321(): Int = 1323
def foo1322(): Int = 1324
def foo1323(): Int = 1325
def foo1324(): Int = 1326
def foo1325(): Int = 1327
def foo1326(): Int = 1328
def foo1327(): Int = 1329
def foo1328(): Int = 1330
def foo1329(): Int = 1331
def foo1330(): Int = 1332
def foo1331(): Int = 1333
def foo1332(): Int = 1334
def foo1333(): Int = 1335
def foo1334(): Int = 1336
def foo1335(): Int = 1337
def foo1336(): Int = 1338
def foo1337(): Int = 1339
def foo1338(): Int = 1340
def foo1339(): Int = 1341
def foo1340(): Int = 1342
def foo1341(): Int = 1343
def foo1342(): Int = 1344
def foo1343(): Int = 1345
def foo1344(): Int = 1346
def foo1345(): Int = 1347
def foo1346(): Int = 1348
def foo1347(): Int = 1349
def foo1348(): Int = 1350
def foo1349(): Int = 1351
def foo1350(): Int = 1352
def foo1351(): Int = 1353
def foo1352(): Int = 1354
def foo1353(): Int = 1355
def foo1354(): Int = 1356
def foo1355(): Int = 1357
def foo1356(): Int = 1358
def foo1357(): Int = 1359
def foo1358(): Int = 1360
def foo1359(): Int = 1361
def foo1360(): Int = 1362
def foo1361(): Int = 1363
def foo1362(): Int = 1364
def foo1363(): Int = 1365
def foo1364(): Int = 1366
def foo1365(): Int = 1367
def foo1366(): Int = 1368
def foo1367(): Int = 1369
def foo1368(): Int = 1370
def foo1369(): Int = 1371
def foo1370(): Int = 1372
def foo1371(): Int = 1373
def foo1372(): Int = 1374
def foo1373(): Int = 1375
def foo1374(): Int = 1376
def foo1375(): Int = 1377
def foo1376(): Int = 1378
def foo1377(): Int = 1379
def foo1378(): Int = 1380
def foo1379(): Int = 1381
def foo1380(): Int = 1382
def foo1381(): Int = 1383
def foo1382(): Int = 1384
def foo1383(): Int = 1385
def foo1384(): Int = 1386
def foo1385(): Int = 1387
def foo1386(): Int = 1388
def foo1387(): Int = 1389
def foo1388(): Int = 1390
def foo1389(): Int = 1391
def foo1390(): Int = 1392
def foo1391(): Int = 1393
def foo1392(): Int = 1394
def foo1393(): Int = 1395
def foo1394(): Int = 1396
def foo1395(): Int = 1397
def foo1396(): Int = 1398
def foo1397(): Int = 1399
def foo1398(): Int = 1400
def foo1399(): Int = 1401
def foo1400(): Int = 1402
def foo1401(): Int = 1403
def foo1402(): Int = 1404
def foo1403(): Int = 1405
def foo1404(): Int = 1406
def foo1405(): Int = 1407
def foo1406(): Int = 1408
def foo1407(): Int = 1409
def foo1408(): Int = 1410
def foo1409(): Int = 1411
def foo1410(): Int = 1412
def foo1411(): Int = 1413
def foo1412(): Int = 1414
def foo1413(): Int = 1415
def foo1414(): Int = 1416
def foo1415(): Int = 1417
def foo1416(): Int = 1418
def foo1417(): Int = 1419
def foo1418(): Int = 1420
def foo1419(): Int = 1421
def foo1420(): Int = 1422
def foo1421(): Int = 1423
def foo1422(): Int = 1424
def foo1423(): Int = 1425
def foo1424(): Int = 1426
def foo1425(): Int = 1427
def foo1426(): Int = 1428
def foo1427(): Int = 1429
def foo1428(): Int = 1430
def foo1429(): Int = 1431
def foo1430(): Int = 1432
def foo1431(): Int = 1433
def foo1432(): Int = 1434
def foo1433(): Int = 1435
def foo1434(): Int = 1436
def foo1435(): Int = 1437
def foo1436(): Int = 1438
def foo1437(): Int = 1439
def foo1438(): Int = 1440
def foo1439(): Int = 1441
def foo1440(): Int = 1442
def foo1441(): Int = 1443
def foo1442(): Int = 1444
def foo1443(): Int = 1445
def foo1444(): Int = 1446
def foo1445(): Int = 1447
def foo1446(): Int = 1448
def foo1447(): Int = 1449
def foo1448(): Int = 1450
def foo1449(): Int = 1451
def foo1450(): Int = 1452
def foo1451(): Int = 1453
def foo1452(): Int = 1454
def foo1453(): Int = 1455
def foo1454(): Int = 1456
def foo1455(): Int = 1457
def foo1456(): Int = 1458
def foo1457(): Int = 1459
def foo1458(): Int = 1460
def foo1459(): Int = 1461
def foo1460(): Int = 1462
def foo1461(): Int = 1463
def foo1462(): Int = 1464
def foo1463(): Int = 1465
def foo1464(): Int = 1466
def foo1465(): Int = 1467
def foo1466(): Int = 1468
def foo1467(): Int = 1469
def foo1468(): Int = 1470
def foo1469(): Int = 1471
def foo1470(): Int = 1472
def foo1471(): Int = 1473
def foo1472(): Int = 1474
def foo1473(): Int = 1475
def foo1474(): Int = 1476
def foo1475(): Int = 1477
def foo1476(): Int = 1478
def foo1477(): Int = 1479
def foo1478(): Int = 1480
def foo1479(): Int = 1481
def foo1480(): Int = 1482
def foo1481(): Int = 1483
def foo1482(): Int = 1484
def foo1483(): Int = 1485
def foo1484(): Int = 1486
def foo1485(): Int = 1487
def foo1486(): Int = 1488
def foo1487(): Int = 1489
def foo1488(): Int = 1490
def foo1489(): Int = 1491
def foo1490(): Int = 1492
def foo1491(): Int = 1493
def foo1492(): Int = 1494
def foo1493(): Int = 1495
def foo1494(): Int = 1496
def foo1495(): Int = 1497
def foo1496(): Int = 1498
def foo1497(): Int = 1499
def foo1498(): Int = 1500
def foo1499(): Int = 1501
def foo1500(): Int = 1502
def foo1501(): Int = 1503
def foo1502(): Int = 1504
def foo1503(): Int = 1505
def foo1504(): Int = 1506
def foo1505(): Int = 1507
def foo1506(): Int = 1508
def foo1507(): Int = 1509
def foo1508(): Int = 1510
def foo1509(): Int = 1511
def foo1510(): Int = 1512
def foo1511(): Int = 1513
def foo1512(): Int = 1514
def foo1513(): Int = 1515
def foo1514(): Int = 1516
def foo1515(): Int = 1517
def foo1516(): Int = 1518
def foo1517(): Int = 1519
def foo1518(): Int = 1520
def foo1519(): Int = 1521
def foo1520(): Int = 1522
def foo1521(): Int = 1523
def foo1522(): Int = 1524
def foo1523(): Int = 1525
def foo1524(): Int = 1526
def foo1525(): Int = 1527
def foo1526(): Int = 1528
def foo1527(): Int = 1529
def foo1528(): Int = 1530
def foo1529(): Int = 1531
def foo1530(): Int = 1532
def foo1531(): Int = 1533
def foo1532(): Int = 1534
def foo1533(): Int = 1535
def foo1534(): Int = 1536
def foo1535(): Int = 1537
def foo1536(): Int = 1538
def foo1537(): Int = 1539
def foo1538(): Int = 1540
def foo1539(): Int = 1541
def foo1540(): Int = 1542
def foo1541(): Int = 1543
def foo1542(): Int = 1544
def foo1543(): Int = 1545
def foo1544(): Int = 1546
def foo1545(): Int = 1547
def foo1546(): Int = 1548
def foo1547(): Int = 1549
def foo1548(): Int = 1550
def foo1549(): Int = 1551
def foo1550(): Int = 1552
def foo1551(): Int = 1553
def foo1552(): Int = 1554
def foo1553(): Int = 1555
def foo1554(): Int = 1556
def foo1555(): Int = 1557
def foo1556(): Int = 1558
def foo1557(): Int = 1559
def foo1558(): Int = 1560
def foo1559(): Int = 1561
def foo1560(): Int = 1562
def foo1561(): Int = 1563
def foo1562(): Int = 1564
def foo1563(): Int = 1565
def foo1564(): Int = 1566
def foo1565(): Int = 1567
def foo1566(): Int = 1568
def foo1567(): Int = 1569
def foo1568(): Int = 1570
def foo1569(): Int = 1571
def foo1570(): Int = 1572
def foo1571(): Int = 1573
def foo1572(): Int = 1574
def foo1573(): Int = 1575
def foo1574(): Int = 1576
def foo1575(): Int = 1577
def foo1576(): Int = 1578
def foo1577(): Int = 1579
def foo1578(): Int = 1580
def foo1579(): Int = 1581
def foo1580(): Int = 1582
def foo1581(): Int = 1583
def foo1582(): Int = 1584
def foo1583(): Int = 1585
def foo1584(): Int = 1586
def foo1585(): Int = 1587
def foo1586(): Int = 1588
def foo1587(): Int = 1589
def foo1588(): Int = 1590
def foo1589(): Int = 1591
def foo1590(): Int = 1592
def foo1591(): Int = 1593
def foo1592(): Int = 1594
def foo1593(): Int = 1595
def foo1594(): Int = 1596
def foo1595(): Int = 1597
def foo1596(): Int = 1598
def foo1597(): Int = 1599
def foo1598(): Int = 1600
def foo1599(): Int = 1601
def foo1600(): Int = 1602
def foo1601(): Int = 1603
def foo1602(): Int = 1604
def foo1603(): Int = 1605
def foo1604(): Int = 1606
def foo1605(): Int = 1607
def foo1606(): Int = 1608
def foo1607(): Int = 1609
def foo1608(): Int = 1610
def foo1609(): Int = 1611
def foo1610(): Int = 1612
def foo1611(): Int = 1613
def foo1612(): Int = 1614
def foo1613(): Int = 1615
def foo1614(): Int = 1616
def foo1615(): Int = 1617
def foo1616(): Int = 1618
def foo1617(): Int = 1619
def foo1618(): Int = 1620
def foo1619(): Int = 1621
def foo1620(): Int = 1622
def foo1621(): Int = 1623
def foo1622(): Int = 1624
def foo1623(): Int = 1625
def foo1624(): Int = 1626
def foo1625(): Int = 1627
def foo1626(): Int = 1628
def foo1627(): Int = 1629
def foo1628(): Int = 1630
def foo1629(): Int = 1631
def foo1630(): Int = 1632
def foo1631(): Int = 1633
def foo1632(): Int = 1634
def foo1633(): Int = 1635
def foo1634(): Int = 1636
def foo1635(): Int = 1637
def foo1636(): Int = 1638
def foo1637(): Int = 1639
def foo1638(): Int = 1640
def foo1639(): Int = 1641
def foo1640(): Int = 1642
def foo1641(): Int = 1643
def foo1642(): Int = 1644
def foo1643(): Int = 1645
def foo1644(): Int = 1646
def foo1645(): Int = 1647
def foo1646(): Int = 1648
def foo1647(): Int = 1649
def foo1648(): Int = 1650
def foo1649(): Int = 1651
def foo1650(): Int = 1652
def foo1651(): Int = 1653
def foo1652(): Int = 1654
def foo1653(): Int = 1655
def foo1654(): Int = 1656
def foo1655(): Int = 1657
def foo1656(): Int = 1658
def foo1657(): Int = 1659
def foo1658(): Int = 1660
def foo1659(): Int = 1661
def foo1660(): Int = 1662
def foo1661(): Int = 1663
def foo1662(): Int = 1664
def foo1663(): Int = 1665
def foo1664(): Int = 1666
def foo1665(): Int = 1667
def foo1666(): Int = 1668
def foo1667(): Int = 1669
def foo1668(): Int = 1670
def foo1669(): Int = 1671
def foo1670(): Int = 1672
def foo1671(): Int = 1673
def foo1672(): Int = 1674
def foo1673(): Int = 1675
def foo1674(): Int = 1676
def foo1675(): Int = 1677
def foo1676(): Int = 1678
def foo1677(): Int = 1679
def foo1678(): Int = 1680
def foo1679(): Int = 1681
def foo1680(): Int = 1682
def foo1681(): Int = 1683
def foo1682(): Int = 1684
def foo1683(): Int = 1685
def foo1684(): Int = 1686
def foo1685(): Int = 1687
def foo1686(): Int = 1688
def foo1687(): Int = 1689
def foo1688(): Int = 1690
def foo1689(): Int = 1691
def foo1690(): Int = 1692
def foo1691(): Int = 1693
def foo1692(): Int = 1694
def foo1693(): Int = 1695
def foo1694(): Int = 1696
def foo1695(): Int = 1697
def foo1696(): Int = 1698
def foo1697(): Int = 1699
def foo1698(): Int = 1700
def foo1699(): Int = 1701
def foo1700(): Int = 1702
def foo1701(): Int = 1703
def foo1702(): Int = 1704
def foo1703(): Int = 1705
def foo1704(): Int = 1706
def foo1705(): Int = 1707
def foo1706(): Int = 1708
def foo1707(): Int = 1709
def foo1708(): Int = 1710
def foo1709(): Int = 1711
def foo1710(): Int = 1712
def foo1711(): Int = 1713
def foo1712(): Int = 1714
def foo1713(): Int = 1715
def foo1714(): Int = 1716
def foo1715(): Int = 1717
def foo1716(): Int = 1718
def foo1717(): Int = 1719
def foo1718(): Int = 1720
def foo1719(): Int = 1721
def foo1720(): Int = 1722
def foo1721(): Int = 1723
def foo1722(): Int = 1724
def foo1723(): Int = 1725
def foo1724(): Int = 1726
def foo1725(): Int = 1727
def foo1726(): Int = 1728
def foo1727(): Int = 1729
def foo1728(): Int = 1730
def foo1729(): Int = 1731
def foo1730(): Int = 1732
def foo1731(): Int = 1733
def foo1732(): Int = 1734
def foo1733(): Int = 1735
def foo1734(): Int = 1736
def foo1735(): Int = 1737
def foo1736(): Int = 1738
def foo1737(): Int = 1739
def foo1738(): Int = 1740
def foo1739(): Int = 1741
def foo1740(): Int = 1742
def foo1741(): Int = 1743
def foo1742(): Int = 1744
def foo1743(): Int = 1745
def foo1744(): Int = 1746
def foo1745(): Int = 1747
def foo1746(): Int = 1748
def foo1747(): Int = 1749
def foo1748(): Int = 1750
def foo1749(): Int = 1751
def foo1750(): Int = 1752
def foo1751(): Int = 1753
def foo1752(): Int = 1754
def foo1753(): Int = 1755
def foo1754(): Int = 1756
def foo1755(): Int = 1757
def foo1756(): Int = 1758
def foo1757(): Int = 1759
def foo1758(): Int = 1760
def foo1759(): Int = 1761
def foo1760(): Int = 1762
def foo1761(): Int = 1763
def foo1762(): Int = 1764
def foo1763(): Int = 1765
def foo1764(): Int = 1766
def foo1765(): Int = 1767
def foo1766(): Int = 1768
def foo1767(): Int = 1769
def foo1768(): Int = 1770
def foo1769(): Int = 1771
def foo1770(): Int = 1772
def foo1771(): Int = 1773
def foo1772(): Int = 1774
def foo1773(): Int = 1775
def foo1774(): Int = 1776
def foo1775(): Int = 1777
def foo1776(): Int = 1778
def foo1777(): Int = 1779
def foo1778(): Int = 1780
def foo1779(): Int = 1781
def foo1780(): Int = 1782
def foo1781(): Int = 1783
def foo1782(): Int = 1784
def foo1783(): Int = 1785
def foo1784(): Int = 1786
def foo1785(): Int = 1787
def foo1786(): Int = 1788
def foo1787(): Int = 1789
def foo1788(): Int = 1790
def foo1789(): Int = 1791
def foo1790(): Int = 1792
def foo1791(): Int = 1793
def foo1792(): Int = 1794
def foo1793(): Int = 1795
def foo1794(): Int = 1796
def foo1795(): Int = 1797
def foo1796(): Int = 1798
def foo1797(): Int = 1799
def foo1798(): Int = 1800
def foo1799(): Int = 1801
def foo1800(): Int = 1802
def foo1801(): Int = 1803
def foo1802(): Int = 1804
def foo1803(): Int = 1805
def foo1804(): Int = 1806
def foo1805(): Int = 1807
def foo1806(): Int = 1808
def foo1807(): Int = 1809
def foo1808(): Int = 1810
def foo1809(): Int = 1811
def foo1810(): Int = 1812
def foo1811(): Int = 1813
def foo1812(): Int = 1814
def foo1813(): Int = 1815
def foo1814(): Int = 1816
def foo1815(): Int = 1817
def foo1816(): Int = 1818
def foo1817(): Int = 1819
def foo1818(): Int = 1820
def foo1819(): Int = 1821
def foo1820(): Int = 1822
def foo1821(): Int = 1823
def foo1822(): Int = 1824
def foo1823(): Int = 1825
def foo1824(): Int = 1826
def foo1825(): Int = 1827
def foo1826(): Int = 1828
def foo1827(): Int = 1829
def foo1828(): Int = 1830
def foo1829(): Int = 1831
def foo1830(): Int = 1832
def foo1831(): Int = 1833
def foo1832(): Int = 1834
def foo1833(): Int = 1835
def foo1834(): Int = 1836
def foo1835(): Int = 1837
def foo1836(): Int = 1838
def foo1837(): Int = 1839
def foo1838(): Int = 1840
def foo1839(): Int = 1841
def foo1840(): Int = 1842
def foo1841(): Int = 1843
def foo1842(): Int = 1844
def foo1843(): Int = 1845
def foo1844(): Int = 1846
def foo1845(): Int = 1847
def foo1846(): Int = 1848
def foo1847(): Int = 1849
def foo1848(): Int = 1850
def foo1849(): Int = 1851
def foo1850(): Int = 1852
def foo1851(): Int = 1853
def foo1852(): Int = 1854
def foo1853(): Int = 1855
def foo1854(): Int = 1856
def foo1855(): Int = 1857
def foo1856(): Int = 1858
def foo1857(): Int = 1859
def foo1858(): Int = 1860
def foo1859(): Int = 1861
def foo1860(): Int = 1862
def foo1861(): Int = 1863
def foo1862(): Int = 1864
def foo1863(): Int = 1865
def foo1864(): Int = 1866
def foo1865(): Int = 1867
def foo1866(): Int = 1868
def foo1867(): Int = 1869
def foo1868(): Int = 1870
def foo1869(): Int = 1871
def foo1870(): Int = 1872
def foo1871(): Int = 1873
def foo1872(): Int = 1874
def foo1873(): Int = 1875
def foo1874(): Int = 1876
def foo1875(): Int = 1877
def foo1876(): Int = 1878
def foo1877(): Int = 1879
def foo1878(): Int = 1880
def foo1879(): Int = 1881
def foo1880(): Int = 1882
def foo1881(): Int = 1883
def foo1882(): Int = 1884
def foo1883(): Int = 1885
def foo1884(): Int = 1886
def foo1885(): Int = 1887
def foo1886(): Int = 1888
def foo1887(): Int = 1889
def foo1888(): Int = 1890
def foo1889(): Int = 1891
def foo1890(): Int = 1892
def foo1891(): Int = 1893
def foo1892(): Int = 1894
def foo1893(): Int = 1895
def foo1894(): Int = 1896
def foo1895(): Int = 1897
def foo1896(): Int = 1898
def foo1897(): Int = 1899
def foo1898(): Int = 1900
def foo1899(): Int = 1901
def foo1900(): Int = 1902
def foo1901(): Int = 1903
def foo1902(): Int = 1904
def foo1903(): Int = 1905
def foo1904(): Int = 1906
def foo1905(): Int = 1907
def foo1906(): Int = 1908
def foo1907(): Int = 1909
def foo1908(): Int = 1910
def foo1909(): Int = 1911
def foo1910(): Int = 1912
def foo1911(): Int = 1913
def foo1912(): Int = 1914
def foo1913(): Int = 1915
def foo1914(): Int = 1916
def foo1915(): Int = 1917
def foo1916(): Int = 1918
def foo1917(): Int = 1919
def foo1918(): Int = 1920
def foo1919(): Int = 1921
def foo1920(): Int = 1922
def foo1921(): Int = 1923
def foo1922(): Int = 1924
def foo1923(): Int = 1925
def foo1924(): Int = 1926
def foo1925(): Int = 1927
def foo1926(): Int = 1928
def foo1927(): Int = 1929
def foo1928(): Int = 1930
def foo1929(): Int = 1931
def foo1930(): Int = 1932
def foo1931(): Int = 1933
def foo1932(): Int = 1934
def foo1933(): Int = 1935
def foo1934(): Int = 1936
def foo1935(): Int = 1937
def foo1936(): Int = 1938
def foo1937(): Int = 1939
def foo1938(): Int = 1940
def foo1939(): Int = 1941
def foo1940(): Int = 1942
def foo1941(): Int = 1943
def foo1942(): Int = 1944
def foo1943(): Int = 1945
def foo1944(): Int = 1946
def foo1945(): Int = 1947
def foo1946(): Int = 1948
def foo1947(): Int = 1949
def foo1948(): Int = 1950
def foo1949(): Int = 1951
def foo1950(): Int = 1952
def foo1951(): Int = 1953
def foo1952(): Int = 1954
def foo1953(): Int = 1955
def foo1954(): Int = 1956
def foo1955(): Int = 1957
def foo1956(): Int = 1958
def foo1957(): Int = 1959
def foo1958(): Int = 1960
def foo1959(): Int = 1961
def foo1960(): Int = 1962
def foo1961(): Int = 1963
def foo1962(): Int = 1964
def foo1963(): Int = 1965
def foo1964(): Int = 1966
def foo1965(): Int = 1967
def foo1966(): Int = 1968
def foo1967(): Int = 1969
def foo1968(): Int = 1970
def foo1969(): Int = 1971
def foo1970(): Int = 1972
def foo1971(): Int = 1973
def foo1972(): Int = 1974
def foo1973(): Int = 1975
def foo1974(): Int = 1976
def foo1975(): Int = 1977
def foo1976(): Int = 1978
def foo1977(): Int = 1979
def foo1978(): Int = 1980
def foo1979(): Int = 1981
def foo1980(): Int = 1982
def foo1981(): Int = 1983
def foo1982(): Int = 1984
def foo1983(): Int = 1985
def foo1984(): Int = 1986
def foo1985(): Int = 1987
def foo1986(): Int = 1988
def foo1987(): Int = 1989
def foo1988(): Int = 1990
def foo1989(): Int = 1991
def foo1990(): Int = 1992
def foo1991(): Int = 1993
def foo1992(): Int = 1994
def foo1993(): Int = 1995
def foo1994(): Int = 1996
def foo1995(): Int = 1997
def foo1996(): Int = 1998
def foo1997(): Int = 1999
def foo1998(): Int = 2000
def foo1999(): Int = 2001
def foo2000(): Int = 2002
def foo2001(): Int = 2003
def foo2002(): Int = 2004
def foo2003(): Int = 2005
def foo2004(): Int = 2006
def foo2005(): Int = 2007
def foo2006(): Int = 2008
def foo2007(): Int = 2009
def foo2008(): Int = 2010
def foo2009(): Int = 2011
def foo2010(): Int = 2012
def foo2011(): Int = 2013
def foo2012(): Int = 2014
def foo2013(): Int = 2015
def foo2014(): Int = 2016
def foo2015(): Int = 2017
def foo2016(): Int = 2018
def foo2017(): Int = 2019
def foo2018(): Int = 2020
def foo2019(): Int = 2021
def foo2020(): Int = 2022
def foo2021(): Int = 2023
def foo2022(): Int = 2024
def foo2023(): Int = 2025
def foo2024(): Int = 2026
def foo2025(): Int = 2027
def foo2026(): Int = 2028
def foo2027(): Int = 2029
def foo2028(): Int = 2030
def foo2029(): Int = 2031
def foo2030(): Int = 2032
def foo2031(): Int = 2033
def foo2032(): Int = 2034
def foo2033(): Int = 2035
def foo2034(): Int = 2036
def foo2035(): Int = 2037
def foo2036(): Int = 2038
def foo2037(): Int = 2039
def foo2038(): Int = 2040
def foo2039(): Int = 2041
def foo2040(): Int = 2042
def foo2041(): Int = 2043
def foo2042(): Int = 2044
def foo2043(): Int = 2045
def foo2044(): Int = 2046
def foo2045(): Int = 2047
def foo2046(): Int = 2048
def foo2047(): Int = 2049
def foo2048(): Int = 2050
def foo2049(): Int = 2051
def foo2050(): Int = 2052
def foo2051(): Int = 2053
def foo2052(): Int = 2054
def foo2053(): Int = 2055
def foo2054(): Int = 2056
def foo2055(): Int = 2057
def foo2056(): Int = 2058
def foo2057(): Int = 2059
def foo2058(): Int = 2060
def foo2059(): Int = 2061
def foo2060(): Int = 2062
def foo2061(): Int = 2063
def foo2062(): Int = 2064
def foo2063(): Int = 2065
def foo2064(): Int = 2066
def foo2065(): Int = 2067
def foo2066(): Int = 2068
def foo2067(): Int = 2069
def foo2068(): Int = 2070
def foo2069(): Int = 2071
def foo2070(): Int = 2072
def foo2071(): Int = 2073
def foo2072(): Int = 2074
def foo2073(): Int = 2075
def foo2074(): Int = 2076
def foo2075(): Int = 2077
def foo2076(): Int = 2078
def foo2077(): Int = 2079
def foo2078(): Int = 2080
def foo2079(): Int = 2081
def foo2080(): Int = 2082
def foo2081(): Int = 2083
def foo2082(): Int = 2084
def foo2083(): Int = 2085
def foo2084(): Int = 2086
def foo2085(): Int = 2087
def foo2086(): Int = 2088
def foo2087(): Int = 2089
def foo2088(): Int = 2090
def foo2089(): Int = 2091
def foo2090(): Int = 2092
def foo2091(): Int = 2093
def foo2092(): Int = 2094
def foo2093(): Int = 2095
def foo2094(): Int = 2096
def foo2095(): Int = 2097
def foo2096(): Int = 2098
def foo2097(): Int = 2099
def foo2098(): Int = 2100
def foo2099(): Int = 2101
def foo2100(): Int = 2102
def foo2101(): Int = 2103
def foo2102(): Int = 2104
def foo2103(): Int = 2105
def foo2104(): Int = 2106
def foo2105(): Int = 2107
def foo2106(): Int = 2108
def foo2107(): Int = 2109
def foo2108(): Int = 2110
def foo2109(): Int = 2111
def foo2110(): Int = 2112
def foo2111(): Int = 2113
def foo2112(): Int = 2114
def foo2113(): Int = 2115
def foo2114(): Int = 2116
def foo2115(): Int = 2117
def foo2116(): Int = 2118
def foo2117(): Int = 2119
def foo2118(): Int = 2120
def foo2119(): Int = 2121
def foo2120(): Int = 2122
def foo2121(): Int = 2123
def foo2122(): Int = 2124
def foo2123(): Int = 2125
def foo2124(): Int = 2126
def foo2125(): Int = 2127
def foo2126(): Int = 2128
def foo2127(): Int = 2129
def foo2128(): Int = 2130
def foo2129(): Int = 2131
def foo2130(): Int = 2132
def foo2131(): Int = 2133
def foo2132(): Int = 2134
def foo2133(): Int = 2135
def foo2134(): Int = 2136
def foo2135(): Int = 2137
def foo2136(): Int = 2138
def foo2137(): Int = 2139
def foo2138(): Int = 2140
def foo2139(): Int = 2141
def foo2140(): Int = 2142
def foo2141(): Int = 2143
def foo2142(): Int = 2144
def foo2143(): Int = 2145
def foo2144(): Int = 2146
def foo2145(): Int = 2147
def foo2146(): Int = 2148
def foo2147(): Int = 2149
def foo2148(): Int = 2150
def foo2149(): Int = 2151
def foo2150(): Int = 2152
def foo2151(): Int = 2153
def foo2152(): Int = 2154
def foo2153(): Int = 2155
def foo2154(): Int = 2156
def foo2155(): Int = 2157
def foo2156(): Int = 2158
def foo2157(): Int = 2159
def foo2158(): Int = 2160
def foo2159(): Int = 2161
def foo2160(): Int = 2162
def foo2161(): Int = 2163
def foo2162(): Int = 2164
def foo2163(): Int = 2165
def foo2164(): Int = 2166
def foo2165(): Int = 2167
def foo2166(): Int = 2168
def foo2167(): Int = 2169
def foo2168(): Int = 2170
def foo2169(): Int = 2171
def foo2170(): Int = 2172
def foo2171(): Int = 2173
def foo2172(): Int = 2174
def foo2173(): Int = 2175
def foo2174(): Int = 2176
def foo2175(): Int = 2177
def foo2176(): Int = 2178
def foo2177(): Int = 2179
def foo2178(): Int = 2180
def foo2179(): Int = 2181
def foo2180(): Int = 2182
def foo2181(): Int = 2183
def foo2182(): Int = 2184
def foo2183(): Int = 2185
def foo2184(): Int = 2186
def foo2185(): Int = 2187
def foo2186(): Int = 2188
def foo2187(): Int = 2189
def foo2188(): Int = 2190
def foo2189(): Int = 2191
def foo2190(): Int = 2192
def foo2191(): Int = 2193
def foo2192(): Int = 2194
def foo2193(): Int = 2195
def foo2194(): Int = 2196
def foo2195(): Int = 2197
def foo2196(): Int = 2198
def foo2197(): Int = 2199
def foo2198(): Int = 2200
def foo2199(): Int = 2201
def foo2200(): Int = 2202
def foo2201(): Int = 2203
def foo2202(): Int = 2204
def foo2203(): Int = 2205
def foo2204(): Int = 2206
def foo2205(): Int = 2207
def foo2206(): Int = 2208
def foo2207(): Int = 2209
def foo2208(): Int = 2210
def foo2209(): Int = 2211
def foo2210(): Int = 2212
def foo2211(): Int = 2213
def foo2212(): Int = 2214
def foo2213(): Int = 2215
def foo2214(): Int = 2216
def foo2215(): Int = 2217
def foo2216(): Int = 2218
def foo2217(): Int = 2219
def foo2218(): Int = 2220
def foo2219(): Int = 2221
def foo2220(): Int = 2222
def foo2221(): Int = 2223
def foo2222(): Int = 2224
def foo2223(): Int = 2225
def foo2224(): Int = 2226
def foo2225(): Int = 2227
def foo2226(): Int = 2228
def foo2227(): Int = 2229
def foo2228(): Int = 2230
def foo2229(): Int = 2231
def foo2230(): Int = 2232
def foo2231(): Int = 2233
def foo2232(): Int = 2234
def foo2233(): Int = 2235
def foo2234(): Int = 2236
def foo2235(): Int = 2237
def foo2236(): Int = 2238
def foo2237(): Int = 2239
def foo2238(): Int = 2240
def foo2239(): Int = 2241
def foo2240(): Int = 2242
def foo2241(): Int = 2243
def foo2242(): Int = 2244
def foo2243(): Int = 2245
def foo2244(): Int = 2246
def foo2245(): Int = 2247
def foo2246(): Int = 2248
def foo2247(): Int = 2249
def foo2248(): Int = 2250
def foo2249(): Int = 2251
def foo2250(): Int = 2252
def foo2251(): Int = 2253
def foo2252(): Int = 2254
def foo2253(): Int = 2255
def foo2254(): Int = 2256
def foo2255(): Int = 2257
def foo2256(): Int = 2258
def foo2257(): Int = 2259
def foo2258(): Int = 2260
def foo2259(): Int = 2261
def foo2260(): Int = 2262
def foo2261(): Int = 2263
def foo2262(): Int = 2264
def foo2263(): Int = 2265
def foo2264(): Int = 2266
def foo2265(): Int = 2267
def foo2266(): Int = 2268
def foo2267(): Int = 2269
def foo2268(): Int = 2270
def foo2269(): Int = 2271
def foo2270(): Int = 2272
def foo2271(): Int = 2273
def foo2272(): Int = 2274
def foo2273(): Int = 2275
def foo2274(): Int = 2276
def foo2275(): Int = 2277
def foo2276(): Int = 2278
def foo2277(): Int = 2279
def foo2278(): Int = 2280
def foo2279(): Int = 2281
def foo2280(): Int = 2282
def foo2281(): Int = 2283
def foo2282(): Int = 2284
def foo2283(): Int = 2285
def foo2284(): Int = 2286
def foo2285(): Int = 2287
def foo2286(): Int = 2288
def foo2287(): Int = 2289
def foo2288(): Int = 2290
def foo2289(): Int = 2291
def foo2290(): Int = 2292
def foo2291(): Int = 2293
def foo2292(): Int = 2294
def foo2293(): Int = 2295
def foo2294(): Int = 2296
def foo2295(): Int = 2297
def foo2296(): Int = 2298
def foo2297(): Int = 2299
def foo2298(): Int = 2300
def foo2299(): Int = 2301
def foo2300(): Int = 2302
def foo2301(): Int = 2303
def foo2302(): Int = 2304
def foo2303(): Int = 2305
def foo2304(): Int = 2306
def foo2305(): Int = 2307
def foo2306(): Int = 2308
def foo2307(): Int = 2309
def foo2308(): Int = 2310
def foo2309(): Int = 2311
def foo2310(): Int = 2312
def foo2311(): Int = 2313
def foo2312(): Int = 2314
def foo2313(): Int = 2315
def foo2314(): Int = 2316
def foo2315(): Int = 2317
def foo2316(): Int = 2318
def foo2317(): Int = 2319
def foo2318(): Int = 2320
def foo2319(): Int = 2321
def foo2320(): Int = 2322
def foo2321(): Int = 2323
def foo2322(): Int = 2324
def foo2323(): Int = 2325
def foo2324(): Int = 2326
def foo2325(): Int = 2327
def foo2326(): Int = 2328
def foo2327(): Int = 2329
def foo2328(): Int = 2330
def foo2329(): Int = 2331
def foo2330(): Int = 2332
def foo2331(): Int = 2333
def foo2332(): Int = 2334
def foo2333(): Int = 2335
def foo2334(): Int = 2336
def foo2335(): Int = 2337
def foo2336(): Int = 2338
def foo2337(): Int = 2339
def foo2338(): Int = 2340
def foo2339(): Int = 2341
def foo2340(): Int = 2342
def foo2341(): Int = 2343
def foo2342(): Int = 2344
def foo2343(): Int = 2345
def foo2344(): Int = 2346
def foo2345(): Int = 2347
def foo2346(): Int = 2348
def foo2347(): Int = 2349
def foo2348(): Int = 2350
def foo2349(): Int = 2351
def foo2350(): Int = 2352
def foo2351(): Int = 2353
def foo2352(): Int = 2354
def foo2353(): Int = 2355
def foo2354(): Int = 2356
def foo2355(): Int = 2357
def foo2356(): Int = 2358
def foo2357(): Int = 2359
def foo2358(): Int = 2360
def foo2359(): Int = 2361
def foo2360(): Int = 2362
def foo2361(): Int = 2363
def foo2362(): Int = 2364
def foo2363(): Int = 2365
def foo2364(): Int = 2366
def foo2365(): Int = 2367
def foo2366(): Int = 2368
def foo2367(): Int = 2369
def foo2368(): Int = 2370
def foo2369(): Int = 2371
def foo2370(): Int = 2372
def foo2371(): Int = 2373
def foo2372(): Int = 2374
def foo2373(): Int = 2375
def foo2374(): Int = 2376
def foo2375(): Int = 2377
def foo2376(): Int = 2378
def foo2377(): Int = 2379
def foo2378(): Int = 2380
def foo2379(): Int = 2381
def foo2380(): Int = 2382
def foo2381(): Int = 2383
def foo2382(): Int = 2384
def foo2383(): Int = 2385
def foo2384(): Int = 2386
def foo2385(): Int = 2387
def foo2386(): Int = 2388
def foo2387(): Int = 2389
def foo2388(): Int = 2390
def foo2389(): Int = 2391
def foo2390(): Int = 2392
def foo2391(): Int = 2393
def foo2392(): Int = 2394
def foo2393(): Int = 2395
def foo2394(): Int = 2396
def foo2395(): Int = 2397
def foo2396(): Int = 2398
def foo2397(): Int = 2399
def foo2398(): Int = 2400
def foo2399(): Int = 2401
def foo2400(): Int = 2402
def foo2401(): Int = 2403
def foo2402(): Int = 2404
def foo2403(): Int = 2405
def foo2404(): Int = 2406
def foo2405(): Int = 2407
def foo2406(): Int = 2408
def foo2407(): Int = 2409
def foo2408(): Int = 2410
def foo2409(): Int = 2411
def foo2410(): Int = 2412
def foo2411(): Int = 2413
def foo2412(): Int = 2414
def foo2413(): Int = 2415
def foo2414(): Int = 2416
def foo2415(): Int = 2417
def foo2416(): Int = 2418
def foo2417(): Int = 2419
def foo2418(): Int = 2420
def foo2419(): Int = 2421
def foo2420(): Int = 2422
def foo2421(): Int = 2423
def foo2422(): Int = 2424
def foo2423(): Int = 2425
def foo2424(): Int = 2426
def foo2425(): Int = 2427
def foo2426(): Int = 2428
def foo2427(): Int = 2429
def foo2428(): Int = 2430
def foo2429(): Int = 2431
def foo2430(): Int = 2432
def foo2431(): Int = 2433
def foo2432(): Int = 2434
def foo2433(): Int = 2435
def foo2434(): Int = 2436
def foo2435(): Int = 2437
def foo2436(): Int = 2438
def foo2437(): Int = 2439
def foo2438(): Int = 2440
def foo2439(): Int = 2441
def foo2440(): Int = 2442
def foo2441(): Int = 2443
def foo2442(): Int = 2444
def foo2443(): Int = 2445
def foo2444(): Int = 2446
def foo2445(): Int = 2447
def foo2446(): Int = 2448
def foo2447(): Int = 2449
def foo2448(): Int = 2450
def foo2449(): Int = 2451
def foo2450(): Int = 2452
def foo2451(): Int = 2453
def foo2452(): Int = 2454
def foo2453(): Int = 2455
def foo2454(): Int = 2456
def foo2455(): Int = 2457
def foo2456(): Int = 2458
def foo2457(): Int = 2459
def foo2458(): Int = 2460
def foo2459(): Int = 2461
def foo2460(): Int = 2462
def foo2461(): Int = 2463
def foo2462(): Int = 2464
def foo2463(): Int = 2465
def foo2464(): Int = 2466
def foo2465(): Int = 2467
def foo2466(): Int = 2468
def foo2467(): Int = 2469
def foo2468(): Int = 2470
def foo2469(): Int = 2471
def foo2470(): Int = 2472
def foo2471(): Int = 2473
def foo2472(): Int = 2474
def foo2473(): Int = 2475
def foo2474(): Int = 2476
def foo2475(): Int = 2477
def foo2476(): Int = 2478
def foo2477(): Int = 2479
def foo2478(): Int = 2480
def foo2479(): Int = 2481
def foo2480(): Int = 2482
def foo2481(): Int = 2483
def foo2482(): Int = 2484
def foo2483(): Int = 2485
def foo2484(): Int = 2486
def foo2485(): Int = 2487
def foo2486(): Int = 2488
def foo2487(): Int = 2489
def foo2488(): Int = 2490
def foo2489(): Int = 2491
def foo2490(): Int = 2492
def foo2491(): Int = 2493
def foo2492(): Int = 2494
def foo2493(): Int = 2495
def foo2494(): Int = 2496
def foo2495(): Int = 2497
def foo2496(): Int = 2498
def foo2497(): Int = 2499
def foo2498(): Int = 2500
def foo2499(): Int = 2501
def foo2500(): Int = 2502
def foo2501(): Int = 2503
def foo2502(): Int = 2504
def foo2503(): Int = 2505
def foo2504(): Int = 2506
def foo2505(): Int = 2507
def foo2506(): Int = 2508
def foo2507(): Int = 2509
def foo2508(): Int = 2510
def foo2509(): Int = 2511
def foo2510(): Int = 2512
def foo2511(): Int = 2513
def foo2512(): Int = 2514
def foo2513(): Int = 2515
def foo2514(): Int = 2516
def foo2515(): Int = 2517
def foo2516(): Int = 2518
def foo2517(): Int = 2519
def foo2518(): Int = 2520
def foo2519(): Int = 2521
def foo2520(): Int = 2522
def foo2521(): Int = 2523
def foo2522(): Int = 2524
def foo2523(): Int = 2525
def foo2524(): Int = 2526
def foo2525(): Int = 2527
def foo2526(): Int = 2528
def foo2527(): Int = 2529
def foo2528(): Int = 2530
def foo2529(): Int = 2531
def foo2530(): Int = 2532
def foo2531(): Int = 2533
def foo2532(): Int = 2534
def foo2533(): Int = 2535
def foo2534(): Int = 2536
def foo2535(): Int = 2537
def foo2536(): Int = 2538
def foo2537(): Int = 2539
def foo2538(): Int = 2540
def foo2539(): Int = 2541
def foo2540(): Int = 2542
def foo2541(): Int = 2543
def foo2542(): Int = 2544
def foo2543(): Int = 2545
def foo2544(): Int = 2546
def foo2545(): Int = 2547
def foo2546(): Int = 2548
def foo2547(): Int = 2549
def foo2548(): Int = 2550
def foo2549(): Int = 2551
def foo2550(): Int = 2552
def foo2551(): Int = 2553
def foo2552(): Int = 2554
def foo2553(): Int = 2555
def foo2554(): Int = 2556
def foo2555(): Int = 2557
def foo2556(): Int = 2558
def foo2557(): Int = 2559
def foo2558(): Int = 2560
def foo2559(): Int = 2561
def foo2560(): Int = 2562
def foo2561(): Int = 2563
def foo2562(): Int = 2564
def foo2563(): Int = 2565
def foo2564(): Int = 2566
def foo2565(): Int = 2567
def foo2566(): Int = 2568
def foo2567(): Int = 2569
def foo2568(): Int = 2570
def foo2569(): Int = 2571
def foo2570(): Int = 2572
def foo2571(): Int = 2573
def foo2572(): Int = 2574
def foo2573(): Int = 2575
def foo2574(): Int = 2576
def foo2575(): Int = 2577
def foo2576(): Int = 2578
def foo2577(): Int = 2579
def foo2578(): Int = 2580
def foo2579(): Int = 2581
def foo2580(): Int = 2582
def foo2581(): Int = 2583
def foo2582(): Int = 2584
def foo2583(): Int = 2585
def foo2584(): Int = 2586
def foo2585(): Int = 2587
def foo2586(): Int = 2588
def foo2587(): Int = 2589
def foo2588(): Int = 2590
def foo2589(): Int = 2591
def foo2590(): Int = 2592
def foo2591(): Int = 2593
def foo2592(): Int = 2594
def foo2593(): Int = 2595
def foo2594(): Int = 2596
def foo2595(): Int = 2597
def foo2596(): Int = 2598
def foo2597(): Int = 2599
def foo2598(): Int = 2600
def foo2599(): Int = 2601
def foo2600(): Int = 2602
def foo2601(): Int = 2603
def foo2602(): Int = 2604
def foo2603(): Int = 2605
def foo2604(): Int = 2606
def foo2605(): Int = 2607
def foo2606(): Int = 2608
def foo2607(): Int = 2609
def foo2608(): Int = 2610
def foo2609(): Int = 2611
def foo2610(): Int = 2612
def foo2611(): Int = 2613
def foo2612(): Int = 2614
def foo2613(): Int = 2615
def foo2614(): Int = 2616
def foo2615(): Int = 2617
def foo2616(): Int = 2618
def foo2617(): Int = 2619
def foo2618(): Int = 2620
def foo2619(): Int = 2621
def foo2620(): Int = 2622
def foo2621(): Int = 2623
def foo2622(): Int = 2624
def foo2623(): Int = 2625
def foo2624(): Int = 2626
def foo2625(): Int = 2627
def foo2626(): Int = 2628
def foo2627(): Int = 2629
def foo2628(): Int = 2630
def foo2629(): Int = 2631
def foo2630(): Int = 2632
def foo2631(): Int = 2633
def foo2632(): Int = 2634
def foo2633(): Int = 2635
def foo2634(): Int = 2636
def foo2635(): Int = 2637
def foo2636(): Int = 2638
def foo2637(): Int = 2639
def foo2638(): Int = 2640
def foo2639(): Int = 2641
def foo2640(): Int = 2642
def foo2641(): Int = 2643
def foo2642(): Int = 2644
def foo2643(): Int = 2645
def foo2644(): Int = 2646
def foo2645(): Int = 2647
def foo2646(): Int = 2648
def foo2647(): Int = 2649
def foo2648(): Int = 2650
def foo2649(): Int = 2651
def foo2650(): Int = 2652
def foo2651(): Int = 2653
def foo2652(): Int = 2654
def foo2653(): Int = 2655
def foo2654(): Int = 2656
def foo2655(): Int = 2657
def foo2656(): Int = 2658
def foo2657(): Int = 2659
def foo2658(): Int = 2660
def foo2659(): Int = 2661
def foo2660(): Int = 2662
def foo2661(): Int = 2663
def foo2662(): Int = 2664
def foo2663(): Int = 2665
def foo2664(): Int = 2666
def foo2665(): Int = 2667
def foo2666(): Int = 2668
def foo2667(): Int = 2669
def foo2668(): Int = 2670
def foo2669(): Int = 2671
def foo2670(): Int = 2672
def foo2671(): Int = 2673
def foo2672(): Int = 2674
def foo2673(): Int = 2675
def foo2674(): Int = 2676
def foo2675(): Int = 2677
def foo2676(): Int = 2678
def foo2677(): Int = 2679
def foo2678(): Int = 2680
def foo2679(): Int = 2681
def foo2680(): Int = 2682
def foo2681(): Int = 2683
def foo2682(): Int = 2684
def foo2683(): Int = 2685
def foo2684(): Int = 2686
def foo2685(): Int = 2687
def foo2686(): Int = 2688
def foo2687(): Int = 2689
def foo2688(): Int = 2690
def foo2689(): Int = 2691
def foo2690(): Int = 2692
def foo2691(): Int = 2693
def foo2692(): Int = 2694
def foo2693(): Int = 2695
def foo2694(): Int = 2696
def foo2695(): Int = 2697
def foo2696(): Int = 2698
def foo2697(): Int = 2699
def foo2698(): Int = 2700
def foo2699(): Int = 2701
def foo2700(): Int = 2702
def foo2701(): Int = 2703
def foo2702(): Int = 2704
def foo2703(): Int = 2705
def foo2704(): Int = 2706
def foo2705(): Int = 2707
def foo2706(): Int = 2708
def foo2707(): Int = 2709
def foo2708(): Int = 2710
def foo2709(): Int = 2711
def foo2710(): Int = 2712
def foo2711(): Int = 2713
def foo2712(): Int = 2714
def foo2713(): Int = 2715
def foo2714(): Int = 2716
def foo2715(): Int = 2717
def foo2716(): Int = 2718
def foo2717(): Int = 2719
def foo2718(): Int = 2720
def foo2719(): Int = 2721
def foo2720(): Int = 2722
def foo2721(): Int = 2723
def foo2722(): Int = 2724
def foo2723(): Int = 2725
def foo2724(): Int = 2726
def foo2725(): Int = 2727
def foo2726(): Int = 2728
def foo2727(): Int = 2729
def foo2728(): Int = 2730
def foo2729(): Int = 2731
def foo2730(): Int = 2732
def foo2731(): Int = 2733
def foo2732(): Int = 2734
def foo2733(): Int = 2735
def foo2734(): Int = 2736
def foo2735(): Int = 2737
def foo2736(): Int = 2738
def foo2737(): Int = 2739
def foo2738(): Int = 2740
def foo2739(): Int = 2741
def foo2740(): Int = 2742
def foo2741(): Int = 2743
def foo2742(): Int = 2744
def foo2743(): Int = 2745
def foo2744(): Int = 2746
def foo2745(): Int = 2747
def foo2746(): Int = 2748
def foo2747(): Int = 2749
def foo2748(): Int = 2750
def foo2749(): Int = 2751
def foo2750(): Int = 2752
def foo2751(): Int = 2753
def foo2752(): Int = 2754
def foo2753(): Int = 2755
def foo2754(): Int = 2756
def foo2755(): Int = 2757
def foo2756(): Int = 2758
def foo2757(): Int = 2759
def foo2758(): Int = 2760
def foo2759(): Int = 2761
def foo2760(): Int = 2762
def foo2761(): Int = 2763
def foo2762(): Int = 2764
def foo2763(): Int = 2765
def foo2764(): Int = 2766
def foo2765(): Int = 2767
def foo2766(): Int = 2768
def foo2767(): Int = 2769
def foo2768(): Int = 2770
def foo2769(): Int = 2771
def foo2770(): Int = 2772
def foo2771(): Int = 2773
def foo2772(): Int = 2774
def foo2773(): Int = 2775
def foo2774(): Int = 2776
def foo2775(): Int = 2777
def foo2776(): Int = 2778
def foo2777(): Int = 2779
def foo2778(): Int = 2780
def foo2779(): Int = 2781
def foo2780(): Int = 2782
def foo2781(): Int = 2783
def foo2782(): Int = 2784
def foo2783(): Int = 2785
def foo2784(): Int = 2786
def foo2785(): Int = 2787
def foo2786(): Int = 2788
def foo2787(): Int = 2789
def foo2788(): Int = 2790
def foo2789(): Int = 2791
def foo2790(): Int = 2792
def foo2791(): Int = 2793
def foo2792(): Int = 2794
def foo2793(): Int = 2795
def foo2794(): Int = 2796
def foo2795(): Int = 2797
def foo2796(): Int = 2798
def foo2797(): Int = 2799
def foo2798(): Int = 2800
def foo2799(): Int = 2801
def foo2800(): Int = 2802
def foo2801(): Int = 2803
def foo2802(): Int = 2804
def foo2803(): Int = 2805
def foo2804(): Int = 2806
def foo2805(): Int = 2807
def foo2806(): Int = 2808
def foo2807(): Int = 2809
def foo2808(): Int = 2810
def foo2809(): Int = 2811
def foo2810(): Int = 2812
def foo2811(): Int = 2813
def foo2812(): Int = 2814
def foo2813(): Int = 2815
def foo2814(): Int = 2816
def foo2815(): Int = 2817
def foo2816(): Int = 2818
def foo2817(): Int = 2819
def foo2818(): Int = 2820
def foo2819(): Int = 2821
def foo2820(): Int = 2822
def foo2821(): Int = 2823
def foo2822(): Int = 2824
def foo2823(): Int = 2825
def foo2824(): Int = 2826
def foo2825(): Int = 2827
def foo2826(): Int = 2828
def foo2827(): Int = 2829
def foo2828(): Int = 2830
def foo2829(): Int = 2831
def foo2830(): Int = 2832
def foo2831(): Int = 2833
def foo2832(): Int = 2834
def foo2833(): Int = 2835
def foo2834(): Int = 2836
def foo2835(): Int = 2837
def foo2836(): Int = 2838
def foo2837(): Int = 2839
def foo2838(): Int = 2840
def foo2839(): Int = 2841
def foo2840(): Int = 2842
def foo2841(): Int = 2843
def foo2842(): Int = 2844
def foo2843(): Int = 2845
def foo2844(): Int = 2846
def foo2845(): Int = 2847
def foo2846(): Int = 2848
def foo2847(): Int = 2849
def foo2848(): Int = 2850
def foo2849(): Int = 2851
def foo2850(): Int = 2852
def foo2851(): Int = 2853
def foo2852(): Int = 2854
def foo2853(): Int = 2855
def foo2854(): Int = 2856
def foo2855(): Int = 2857
def foo2856(): Int = 2858
def foo2857(): Int = 2859
def foo2858(): Int = 2860
def foo2859(): Int = 2861
def foo2860(): Int = 2862
def foo2861(): Int = 2863
def foo2862(): Int = 2864
def foo2863(): Int = 2865
def foo2864(): Int = 2866
def foo2865(): Int = 2867
def foo2866(): Int = 2868
def foo2867(): Int = 2869
def foo2868(): Int = 2870
def foo2869(): Int = 2871
def foo2870(): Int = 2872
def foo2871(): Int = 2873
def foo2872(): Int = 2874
def foo2873(): Int = 2875
def foo2874(): Int = 2876
def foo2875(): Int = 2877
def foo2876(): Int = 2878
def foo2877(): Int = 2879
def foo2878(): Int = 2880
def foo2879(): Int = 2881
def foo2880(): Int = 2882
def foo2881(): Int = 2883
def foo2882(): Int = 2884
def foo2883(): Int = 2885
def foo2884(): Int = 2886
def foo2885(): Int = 2887
def foo2886(): Int = 2888
def foo2887(): Int = 2889
def foo2888(): Int = 2890
def foo2889(): Int = 2891
def foo2890(): Int = 2892
def foo2891(): Int = 2893
def foo2892(): Int = 2894
def foo2893(): Int = 2895
def foo2894(): Int = 2896
def foo2895(): Int = 2897
def foo2896(): Int = 2898
def foo2897(): Int = 2899
def foo2898(): Int = 2900
def foo2899(): Int = 2901
def foo2900(): Int = 2902
def foo2901(): Int = 2903
def foo2902(): Int = 2904
def foo2903(): Int = 2905
def foo2904(): Int = 2906
def foo2905(): Int = 2907
def foo2906(): Int = 2908
def foo2907(): Int = 2909
def foo2908(): Int = 2910
def foo2909(): Int = 2911
def foo2910(): Int = 2912
def foo2911(): Int = 2913
def foo2912(): Int = 2914
def foo2913(): Int = 2915
def foo2914(): Int = 2916
def foo2915(): Int = 2917
def foo2916(): Int = 2918
def foo2917(): Int = 2919
def foo2918(): Int = 2920
def foo2919(): Int = 2921
def foo2920(): Int = 2922
def foo2921(): Int = 2923
def foo2922(): Int = 2924
def foo2923(): Int = 2925
def foo2924(): Int = 2926
def foo2925(): Int = 2927
def foo2926(): Int = 2928
def foo2927(): Int = 2929
def foo2928(): Int = 2930
def foo2929(): Int = 2931
def foo2930(): Int = 2932
def foo2931(): Int = 2933
def foo2932(): Int = 2934
def foo2933(): Int = 2935
def foo2934(): Int = 2936
def foo2935(): Int = 2937
def foo2936(): Int = 2938
def foo2937(): Int = 2939
def foo2938(): Int = 2940
def foo2939(): Int = 2941
def foo2940(): Int = 2942
def foo2941(): Int = 2943
def foo2942(): Int = 2944
def foo2943(): Int = 2945
def foo2944(): Int = 2946
def foo2945(): Int = 2947
def foo2946(): Int = 2948
def foo2947(): Int = 2949
def foo2948(): Int = 2950
def foo2949(): Int = 2951
def foo2950(): Int = 2952
def foo2951(): Int = 2953
def foo2952(): Int = 2954
def foo2953(): Int = 2955
def foo2954(): Int = 2956
def foo2955(): Int = 2957
def foo2956(): Int = 2958
def foo2957(): Int = 2959
def foo2958(): Int = 2960
def foo2959(): Int = 2961
def foo2960(): Int = 2962
def foo2961(): Int = 2963
def foo2962(): Int = 2964
def foo2963(): Int = 2965
def foo2964(): Int = 2966
def foo2965(): Int = 2967
def foo2966(): Int = 2968
def foo2967(): Int = 2969
def foo2968(): Int = 2970
def foo2969(): Int = 2971
def foo2970(): Int = 2972
def foo2971(): Int = 2973
def foo2972(): Int = 2974
def foo2973(): Int = 2975
def foo2974(): Int = 2976
def foo2975(): Int = 2977
def foo2976(): Int = 2978
def foo2977(): Int = 2979
def foo2978(): Int = 2980
def foo2979(): Int = 2981
def foo2980(): Int = 2982
def foo2981(): Int = 2983
def foo2982(): Int = 2984
def foo2983(): Int = 2985
def foo2984(): Int = 2986
def foo2985(): Int = 2987
def foo2986(): Int = 2988
def foo2987(): Int = 2989
def foo2988(): Int = 2990
def foo2989(): Int = 2991
def foo2990(): Int = 2992
def foo2991(): Int = 2993
def foo2992(): Int = 2994
def foo2993(): Int = 2995
def foo2994(): Int = 2996
def foo2995(): Int = 2997
def foo2996(): Int = 2998
def foo2997(): Int = 2999
def foo2998(): Int = 3000
def foo2999(): Int = 3001
def foo3000(): Int = 3002
def foo3001(): Int = 3003
def foo3002(): Int = 3004
def foo3003(): Int = 3005
def foo3004(): Int = 3006
def foo3005(): Int = 3007
def foo3006(): Int = 3008
def foo3007(): Int = 3009
def foo3008(): Int = 3010
def foo3009(): Int = 3011
def foo3010(): Int = 3012
def foo3011(): Int = 3013
def foo3012(): Int = 3014
def foo3013(): Int = 3015
def foo3014(): Int = 3016
def foo3015(): Int = 3017
def foo3016(): Int = 3018
def foo3017(): Int = 3019
def foo3018(): Int = 3020
def foo3019(): Int = 3021
def foo3020(): Int = 3022
def foo3021(): Int = 3023
def foo3022(): Int = 3024
def foo3023(): Int = 3025
def foo3024(): Int = 3026
def foo3025(): Int = 3027
def foo3026(): Int = 3028
def foo3027(): Int = 3029
def foo3028(): Int = 3030
def foo3029(): Int = 3031
def foo3030(): Int = 3032
def foo3031(): Int = 3033
def foo3032(): Int = 3034
def foo3033(): Int = 3035
def foo3034(): Int = 3036
def foo3035(): Int = 3037
def foo3036(): Int = 3038
def foo3037(): Int = 3039
def foo3038(): Int = 3040
def foo3039(): Int = 3041
def foo3040(): Int = 3042
def foo3041(): Int = 3043
def foo3042(): Int = 3044
def foo3043(): Int = 3045
def foo3044(): Int = 3046
def foo3045(): Int = 3047
def foo3046(): Int = 3048
def foo3047(): Int = 3049
def foo3048(): Int = 3050
def foo3049(): Int = 3051
def foo3050(): Int = 3052
def foo3051(): Int = 3053
def foo3052(): Int = 3054
def foo3053(): Int = 3055
def foo3054(): Int = 3056
def foo3055(): Int = 3057
def foo3056(): Int = 3058
def foo3057(): Int = 3059
def foo3058(): Int = 3060
def foo3059(): Int = 3061
def foo3060(): Int = 3062
def foo3061(): Int = 3063
def foo3062(): Int = 3064
def foo3063(): Int = 3065
def foo3064(): Int = 3066
def foo3065(): Int = 3067
def foo3066(): Int = 3068
def foo3067(): Int = 3069
def foo3068(): Int = 3070
def foo3069(): Int = 3071
def foo3070(): Int = 3072
def foo3071(): Int = 3073
def foo3072(): Int = 3074
def foo3073(): Int = 3075
def foo3074(): Int = 3076
def foo3075(): Int = 3077
def foo3076(): Int = 3078
def foo3077(): Int = 3079
def foo3078(): Int = 3080
def foo3079(): Int = 3081
def foo3080(): Int = 3082
def foo3081(): Int = 3083
def foo3082(): Int = 3084
def foo3083(): Int = 3085
def foo3084(): Int = 3086
def foo3085(): Int = 3087
def foo3086(): Int = 3088
def foo3087(): Int = 3089
def foo3088(): Int = 3090
def foo3089(): Int = 3091
def foo3090(): Int = 3092
def foo3091(): Int = 3093
def foo3092(): Int = 3094
def foo3093(): Int = 3095
def foo3094(): Int = 3096
def foo3095(): Int = 3097
def foo3096(): Int = 3098
def foo3097(): Int = 3099
def foo3098(): Int = 3100
def foo3099(): Int = 3101
def foo3100(): Int = 3102
def foo3101(): Int = 3103
def foo3102(): Int = 3104
def foo3103(): Int = 3105
def foo3104(): Int = 3106
def foo3105(): Int = 3107
def foo3106(): Int = 3108
def foo3107(): Int = 3109
def foo3108(): Int = 3110
def foo3109(): Int = 3111
def foo3110(): Int = 3112
def foo3111(): Int = 3113
def foo3112(): Int = 3114
def foo3113(): Int = 3115
def foo3114(): Int = 3116
def foo3115(): Int = 3117
def foo3116(): Int = 3118
def foo3117(): Int = 3119
def foo3118(): Int = 3120
def foo3119(): Int = 3121
def foo3120(): Int = 3122
def foo3121(): Int = 3123
def foo3122(): Int = 3124
def foo3123(): Int = 3125
def foo3124(): Int = 3126
def foo3125(): Int = 3127
def foo3126(): Int = 3128
def foo3127(): Int = 3129
def foo3128(): Int = 3130
def foo3129(): Int = 3131
def foo3130(): Int = 3132
def foo3131(): Int = 3133
def foo3132(): Int = 3134
def foo3133(): Int = 3135
def foo3134(): Int = 3136
def foo3135(): Int = 3137
def foo3136(): Int = 3138
def foo3137(): Int = 3139
def foo3138(): Int = 3140
def foo3139(): Int = 3141
def foo3140(): Int = 3142
def foo3141(): Int = 3143
def foo3142(): Int = 3144
def foo3143(): Int = 3145
def foo3144(): Int = 3146
def foo3145(): Int = 3147
def foo3146(): Int = 3148
def foo3147(): Int = 3149
def foo3148(): Int = 3150
def foo3149(): Int = 3151
def foo3150(): Int = 3152
def foo3151(): Int = 3153
def foo3152(): Int = 3154
def foo3153(): Int = 3155
def foo3154(): Int = 3156
def foo3155(): Int = 3157
def foo3156(): Int = 3158
def foo3157(): Int = 3159
def foo3158(): Int = 3160
def foo3159(): Int = 3161
def foo3160(): Int = 3162
def foo3161(): Int = 3163
def foo3162(): Int = 3164
def foo3163(): Int = 3165
def foo3164(): Int = 3166
def foo3165(): Int = 3167
def foo3166(): Int = 3168
def foo3167(): Int = 3169
def foo3168(): Int = 3170
def foo3169(): Int = 3171
def foo3170(): Int = 3172
def foo3171(): Int = 3173
def foo3172(): Int = 3174
def foo3173(): Int = 3175
def foo3174(): Int = 3176
def foo3175(): Int = 3177
def foo3176(): Int = 3178
def foo3177(): Int = 3179
def foo3178(): Int = 3180
def foo3179(): Int = 3181
def foo3180(): Int = 3182
def foo3181(): Int = 3183
def foo3182(): Int = 3184
def foo3183(): Int = 3185
def foo3184(): Int = 3186
def foo3185(): Int = 3187
def foo3186(): Int = 3188
def foo3187(): Int = 3189
def foo3188(): Int = 3190
def foo3189(): Int = 3191
def foo3190(): Int = 3192
def foo3191(): Int = 3193
def foo3192(): Int = 3194
def foo3193(): Int = 3195
def foo3194(): Int = 3196
def foo3195(): Int = 3197
def foo3196(): Int = 3198
def foo3197(): Int = 3199
def foo3198(): Int = 3200
def foo3199(): Int = 3201
def foo3200(): Int = 3202
def foo3201(): Int = 3203
def foo3202(): Int = 3204
def foo3203(): Int = 3205
def foo3204(): Int = 3206
def foo3205(): Int = 3207
def foo3206(): Int = 3208
def foo3207(): Int = 3209
def foo3208(): Int = 3210
def foo3209(): Int = 3211
def foo3210(): Int = 3212
def foo3211(): Int = 3213
def foo3212(): Int = 3214
def foo3213(): Int = 3215
def foo3214(): Int = 3216
def foo3215(): Int = 3217
def foo3216(): Int = 3218
def foo3217(): Int = 3219
def foo3218(): Int = 3220
def foo3219(): Int = 3221
def foo3220(): Int = 3222
def foo3221(): Int = 3223
def foo3222(): Int = 3224
def foo3223(): Int = 3225
def foo3224(): Int = 3226
def foo3225(): Int = 3227
def foo3226(): Int = 3228
def foo3227(): Int = 3229
def foo3228(): Int = 3230
def foo3229(): Int = 3231
def foo3230(): Int = 3232
def foo3231(): Int = 3233
def foo3232(): Int = 3234
def foo3233(): Int = 3235
def foo3234(): Int = 3236
def foo3235(): Int = 3237
def foo3236(): Int = 3238
def foo3237(): Int = 3239
def foo3238(): Int = 3240
def foo3239(): Int = 3241
def foo3240(): Int = 3242
def foo3241(): Int = 3243
def foo3242(): Int = 3244
def foo3243(): Int = 3245
def foo3244(): Int = 3246
def foo3245(): Int = 3247
def foo3246(): Int = 3248
def foo3247(): Int = 3249
def foo3248(): Int = 3250
def foo3249(): Int = 3251
def foo3250(): Int = 3252
def foo3251(): Int = 3253
def foo3252(): Int = 3254
def foo3253(): Int = 3255
def foo3254(): Int = 3256
def foo3255(): Int = 3257
def foo3256(): Int = 3258
def foo3257(): Int = 3259
def foo3258(): Int = 3260
def foo3259(): Int = 3261
def foo3260(): Int = 3262
def foo3261(): Int = 3263
def foo3262(): Int = 3264
def foo3263(): Int = 3265
def foo3264(): Int = 3266
def foo3265(): Int = 3267
def foo3266(): Int = 3268
def foo3267(): Int = 3269
def foo3268(): Int = 3270
def foo3269(): Int = 3271
def foo3270(): Int = 3272
def foo3271(): Int = 3273
def foo3272(): Int = 3274
def foo3273(): Int = 3275
def foo3274(): Int = 3276
def foo3275(): Int = 3277
def foo3276(): Int = 3278
def foo3277(): Int = 3279
def foo3278(): Int = 3280
def foo3279(): Int = 3281
def foo3280(): Int = 3282
def foo3281(): Int = 3283
def foo3282(): Int = 3284
def foo3283(): Int = 3285
def foo3284(): Int = 3286
def foo3285(): Int = 3287
def foo3286(): Int = 3288
def foo3287(): Int = 3289
def foo3288(): Int = 3290
def foo3289(): Int = 3291
def foo3290(): Int = 3292
def foo3291(): Int = 3293
def foo3292(): Int = 3294
def foo3293(): Int = 3295
def foo3294(): Int = 3296
def foo3295(): Int = 3297
def foo3296(): Int = 3298
def foo3297(): Int = 3299
def foo3298(): Int = 3300
def foo3299(): Int = 3301
def foo3300(): Int = 3302
def foo3301(): Int = 3303
def foo3302(): Int = 3304
def foo3303(): Int = 3305
def foo3304(): Int = 3306
def foo3305(): Int = 3307
def foo3306(): Int = 3308
def foo3307(): Int = 3309
def foo3308(): Int = 3310
def foo3309(): Int = 3311
def foo3310(): Int = 3312
def foo3311(): Int = 3313
def foo3312(): Int = 3314
def foo3313(): Int = 3315
def foo3314(): Int = 3316
def foo3315(): Int = 3317
def foo3316(): Int = 3318
def foo3317(): Int = 3319
def foo3318(): Int = 3320
def foo3319(): Int = 3321
def foo3320(): Int = 3322
def foo3321(): Int = 3323
def foo3322(): Int = 3324
def foo3323(): Int = 3325
def foo3324(): Int = 3326
def foo3325(): Int = 3327
def foo3326(): Int = 3328
def foo3327(): Int = 3329
def foo3328(): Int = 3330
def foo3329(): Int = 3331
def foo3330(): Int = 3332
def foo3331(): Int = 3333
def foo3332(): Int = 3334
def foo3333(): Int = 3335
def foo3334(): Int = 3336
def foo3335(): Int = 3337
def foo3336(): Int = 3338
def foo3337(): Int = 3339
def foo3338(): Int = 3340
def foo3339(): Int = 3341
def foo3340(): Int = 3342
def foo3341(): Int = 3343
def foo3342(): Int = 3344
def foo3343(): Int = 3345
def foo3344(): Int = 3346
def foo3345(): Int = 3347
def foo3346(): Int = 3348
def foo3347(): Int = 3349
def foo3348(): Int = 3350
def foo3349(): Int = 3351
def foo3350(): Int = 3352
def foo3351(): Int = 3353
def foo3352(): Int = 3354
def foo3353(): Int = 3355
def foo3354(): Int = 3356
def foo3355(): Int = 3357
def foo3356(): Int = 3358
def foo3357(): Int = 3359
def foo3358(): Int = 3360
def foo3359(): Int = 3361
def foo3360(): Int = 3362
def foo3361(): Int = 3363
def foo3362(): Int = 3364
def foo3363(): Int = 3365
def foo3364(): Int = 3366
def foo3365(): Int = 3367
def foo3366(): Int = 3368
def foo3367(): Int = 3369
def foo3368(): Int = 3370
def foo3369(): Int = 3371
def foo3370(): Int = 3372
def foo3371(): Int = 3373
def foo3372(): Int = 3374
def foo3373(): Int = 3375
def foo3374(): Int = 3376
def foo3375(): Int = 3377
def foo3376(): Int = 3378
def foo3377(): Int = 3379
def foo3378(): Int = 3380
def foo3379(): Int = 3381
def foo3380(): Int = 3382
def foo3381(): Int = 3383
def foo3382(): Int = 3384
def foo3383(): Int = 3385
def foo3384(): Int = 3386
def foo3385(): Int = 3387
def foo3386(): Int = 3388
def foo3387(): Int = 3389
def foo3388(): Int = 3390
def foo3389(): Int = 3391
def foo3390(): Int = 3392
def foo3391(): Int = 3393
def foo3392(): Int = 3394
def foo3393(): Int = 3395
def foo3394(): Int = 3396
def foo3395(): Int = 3397
def foo3396(): Int = 3398
def foo3397(): Int = 3399
def foo3398(): Int = 3400
def foo3399(): Int = 3401
def foo3400(): Int = 3402
def foo3401(): Int = 3403
def foo3402(): Int = 3404
def foo3403(): Int = 3405
def foo3404(): Int = 3406
def foo3405(): Int = 3407
def foo3406(): Int = 3408
def foo3407(): Int = 3409
def foo3408(): Int = 3410
def foo3409(): Int = 3411
def foo3410(): Int = 3412
def foo3411(): Int = 3413
def foo3412(): Int = 3414
def foo3413(): Int = 3415
def foo3414(): Int = 3416
def foo3415(): Int = 3417
def foo3416(): Int = 3418
def foo3417(): Int = 3419
def foo3418(): Int = 3420
def foo3419(): Int = 3421
def foo3420(): Int = 3422
def foo3421(): Int = 3423
def foo3422(): Int = 3424
def foo3423(): Int = 3425
def foo3424(): Int = 3426
def foo3425(): Int = 3427
def foo3426(): Int = 3428
def foo3427(): Int = 3429
def foo3428(): Int = 3430
def foo3429(): Int = 3431
def foo3430(): Int = 3432
def foo3431(): Int = 3433
def foo3432(): Int = 3434
def foo3433(): Int = 3435
def foo3434(): Int = 3436
def foo3435(): Int = 3437
def foo3436(): Int = 3438
def foo3437(): Int = 3439
def foo3438(): Int = 3440
def foo3439(): Int = 3441
def foo3440(): Int = 3442
def foo3441(): Int = 3443
def foo3442(): Int = 3444
def foo3443(): Int = 3445
def foo3444(): Int = 3446
def foo3445(): Int = 3447
def foo3446(): Int = 3448
def foo3447(): Int = 3449
def foo3448(): Int = 3450
def foo3449(): Int = 3451
def foo3450(): Int = 3452
def foo3451(): Int = 3453
def foo3452(): Int = 3454
def foo3453(): Int = 3455
def foo3454(): Int = 3456
def foo3455(): Int = 3457
def foo3456(): Int = 3458
def foo3457(): Int = 3459
def foo3458(): Int = 3460
def foo3459(): Int = 3461
def foo3460(): Int = 3462
def foo3461(): Int = 3463
def foo3462(): Int = 3464
def foo3463(): Int = 3465
def foo3464(): Int = 3466
def foo3465(): Int = 3467
def foo3466(): Int = 3468
def foo3467(): Int = 3469
def foo3468(): Int = 3470
def foo3469(): Int = 3471
def foo3470(): Int = 3472
def foo3471(): Int = 3473
def foo3472(): Int = 3474
def foo3473(): Int = 3475
def foo3474(): Int = 3476
def foo3475(): Int = 3477
def foo3476(): Int = 3478
def foo3477(): Int = 3479
def foo3478(): Int = 3480
def foo3479(): Int = 3481
def foo3480(): Int = 3482
def foo3481(): Int = 3483
def foo3482(): Int = 3484
def foo3483(): Int = 3485
def foo3484(): Int = 3486
def foo3485(): Int = 3487
def foo3486(): Int = 3488
def foo3487(): Int = 3489
def foo3488(): Int = 3490
def foo3489(): Int = 3491
def foo3490(): Int = 3492
def foo3491(): Int = 3493
def foo3492(): Int = 3494
def foo3493(): Int = 3495
def foo3494(): Int = 3496
def foo3495(): Int = 3497
def foo3496(): Int = 3498
def foo3497(): Int = 3499
def foo3498(): Int = 3500
def foo3499(): Int = 3501
def foo3500(): Int = 3502
}
| felixmulder/scala | test/files/scalap/t8679.scala | Scala | bsd-3-clause | 95,808 |
package springnz.sparkplug.util
import com.typesafe.scalalogging.{ LazyLogging, Logger }
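/** Mixes a lazily initialized logger into the host class under the name `log`.
  *
  * A minimal usage sketch (the class name is illustrative, not part of sparkplug):
  * {{{
  * class Worker extends Logging {
  *   def run(): Unit = log.info("starting work")
  * }
  * }}}
  */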
private[sparkplug] trait Logging extends LazyLogging {
implicit lazy val log: Logger = logger
}
| springnz/sparkplug | sparkplug-core/src/main/scala/springnz/sparkplug/util/Logging.scala | Scala | mit | 189 |
package models
case class ScreencastResource(title: String, description: String, tags: String) {
}
| CraigGiles/scalacasts | presentation/app/models/ScreencastResource.scala | Scala | mit | 100 |
package com.programmaticallyspeaking.ncd.nashorn
import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.host.types.{ExceptionData, Undefined}
import com.programmaticallyspeaking.ncd.infra.{ErrorUtils, IdGenerator}
import com.programmaticallyspeaking.ncd.nashorn.NashornDebuggerHost.ObjectDescriptor
import com.programmaticallyspeaking.ncd.nashorn.mirrors._
import com.sun.jdi._
import jdk.nashorn.api.scripting.NashornException
import jdk.nashorn.internal.runtime.PrototypeObject
import scala.collection.concurrent.TrieMap
import scala.collection.immutable.ListMap
import scala.collection.mutable
import scala.language.implicitConversions
import scala.util.{Success, Try}
object Marshaller {
val objectIdGenerator = new IdGenerator("objid-")
case class ExceptionInfo(stack: String, lineNumber1Based: Int, fileName: String)
private[Marshaller] case class ExceptionDataWithJavaStack(data: ExceptionData, javaStack: Option[String])
private[Marshaller] case class MarshallerResult(valueNode: ValueNode, extraProperties: Map[String, ValueNode])
implicit def valueNode2MarshallerResult(valueNode: ValueNode): MarshallerResult = MarshallerResult(valueNode, Map.empty)
def isUndefined(value: Value): Boolean = value != null && "jdk.nashorn.internal.runtime.Undefined".equals(value.`type`().name())
val ConsStringClassName = "jdk.nashorn.internal.runtime.ConsString"
val ScriptObjectClassName = "jdk.nashorn.internal.runtime.ScriptObject"
val SymbolClassName = "jdk.nashorn.internal.runtime.Symbol"
}
object MappingRegistry {
/**
* A [[MappingRegistry]] that does not actually do any registration.
*/
val noop = new MappingRegistry {
override def register(value: Value, valueNode: ComplexNode, extraProperties: Map[String, ValueNode]): Unit = {}
override def clear(): Unit = {}
override def byId(id: ObjectId) = None
}
}
trait MappingRegistry {
def register(value: Value, valueNode: ComplexNode, extraProperties: Map[String, ValueNode]): Unit
def clear(): Unit
def byId(id: ObjectId): Option[ObjectDescriptor]
}
class MappingRegistryImpl extends MappingRegistry {
private val reg = TrieMap[ObjectId, ObjectDescriptor]()
override def register(value: Value, valueNode: ComplexNode, extraProperties: Map[String, ValueNode]): Unit = {
reg += valueNode.objectId -> ObjectDescriptor(Option(value), valueNode, extraProperties)
}
override def clear(): Unit = reg.clear()
override def byId(id: ObjectId): Option[ObjectDescriptor] = reg.get(id)
}
object MarshallerCache {
val global = new MarshallerCache
}
class MarshallerCache {
private val inheritorCache = mutable.Map[(String, String), Boolean]()
def inherits(obj: ObjectReference, typeName: String): Option[Boolean] = {
val key = (obj.referenceType().name(), typeName)
inheritorCache.get(key)
}
def indicateInheritance(obj: ObjectReference, typeName: String, inherits: Boolean): Unit = {
val key = (obj.referenceType().name(), typeName)
inheritorCache += key -> inherits
}
}
class Marshaller(mappingRegistry: MappingRegistry, cache: MarshallerCache = MarshallerCache.global)
(implicit val thread: ThreadReference) {
import Marshaller._
import scala.collection.JavaConverters._
import JDIExtensions._
private implicit val marshaller: Marshaller = this
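  /** Marshals a JDI [[Value]] into a [[ValueNode]], registering complex nodes with the
    * mapping registry so they can later be resolved by object ID.
    *
    * A hedged usage sketch (`suspendedThread` and `someJdiValue` stand in for values
    * obtained from an active JDI debugging session):
    * {{{
    * implicit val thread: ThreadReference = suspendedThread
    * val marshaller = new Marshaller(new MappingRegistryImpl)
    * val node = marshaller.marshal(someJdiValue)
    * }}}
    */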
def marshal(value: Value): ValueNode = {
val result = marshalInPrivate(value)
result.valueNode match {
case c: ComplexNode =>
mappingRegistry.register(value, c, result.extraProperties)
case _ =>
}
result.valueNode
}
def throwError(err: ThrownExceptionReference): Nothing = marshal(err) match {
case ev: ErrorValue =>
throw new RuntimeException("Error from object property extraction: " + ev.fullStack)
case other => throw new RuntimeException("Thrown exception, but marshalled to: " + other)
}
/**
* Generate an ID for an object. If possible, we generate an ID based on the unique ID of the object reference,
* so that two references to the same object get the same ID.
*
* @param v the value for which an object ID should be generated
*/
private def objectId(v: Value) = {
val id = v match {
case o: ObjectReference => "uid-" + o.uniqueID()
case _ => objectIdGenerator.next
}
ObjectId(id)
}
private def isConsString(str: ObjectReference) = str.`type`().name == ConsStringClassName
private def isSymbol(sym: ObjectReference) = sym.`type`().name == SymbolClassName
private def toStringOf(obj: ObjectReference) = {
val invoker = Invokers.shared.getDynamic(obj)
marshalInPrivate(invoker.applyDynamic("toString")())
}
private def marshalInPrivate(value: Value): MarshallerResult = value match {
case x if x == null => EmptyNode
case UndefinedValue(vn) => vn
case primitive: PrimitiveValue => SimpleValue(marshalPrimitive(primitive))
case s: StringReference => SimpleValue(s.value())
case arr: ArrayReference => toArray(arr)
case so if isScriptObject(so) => marshalScriptObject(so)
case so if isJSObject(so) => marshalJSObject(so)
case BoxedValue(vn) => vn
case ExceptionValue(vn) =>
val extra = vn.javaStack.map(st => "JavaStack" -> valueNodeFromString(st)).toMap + ("Message" -> valueNodeFromString(vn.data.message))
MarshallerResult(vn, extra)
case str: ObjectReference if isConsString(str) =>
toStringOf(str)
case sym: ObjectReference if isSymbol(sym) =>
marshalSymbol(sym)
case obj: ObjectReference =>
// Scala/Java object perhaps?
ObjectNode(obj.`type`().name(), objectId(obj))
case other => throw new IllegalArgumentException("Don't know how to marshal: " + other)
}
private def valueNodeFromString(s: String): ValueNode = if (s == null) EmptyNode else SimpleValue(s)
private def attemptUnboxing(value: ObjectReference): Option[ValueNode] = {
val invoker = Invokers.shared.getDynamic(value)
val v = value.referenceType().name() match {
case "java.lang.Double" => invoker.doubleValue()
case "java.lang.Float" => invoker.floatValue()
case "java.lang.Character" => invoker.charValue()
case "java.lang.Boolean" => invoker.booleanValue()
case "java.lang.Integer" => invoker.intValue()
case "java.lang.Long" => invoker.longValue()
case "java.lang.Short" => invoker.shortValue()
case "java.lang.Byte" => invoker.byteValue()
case _ => null
}
Option(v).map(marshal)
}
private def marshalSymbol(reference: ObjectReference) = SymbolNode(toStringOf(reference).valueNode.asString, objectId(reference))
private def marshalScriptObject(value: Value): MarshallerResult = {
val scriptObject = value.asInstanceOf[ObjectReference]
val mirror = new ScriptObjectMirror(scriptObject)
// Check if this is a prototype first. For example, NativeArray$Prototype has class name Array, but we don't
// want to treat it as a regular array.
if (inherits(scriptObject, classOf[PrototypeObject].getName)) {
ObjectNode(mirror.className, objectId(scriptObject))
} else {
if (mirror.isRegularOrTypedArray) toArray(mirror)
else mirror.className match {
case "Function" => toFunction(mirror.asFunction)
case "Error" => toError(mirror)
case "Date" => toDate(mirror)
case "RegExp" => toRegExp(mirror)
case "Map" => toMap(mirror, weak = false)
case "WeakMap" => toMap(mirror, weak = true)
case "Set" => toSet(mirror, weak = false)
case "WeakSet" => toSet(mirror, weak = true)
case "Object" =>
Try(mirror.getString(ScriptBasedPropertyHolderFactory.mapSetEntryMarker)) match {
case Success("Set") =>
val entryValue = mirror.get("value")
MapSetEntryNode(None, entryValue, objectId(value))
case Success("Map") =>
val key = mirror.get("key")
val entryValue = mirror.get("value")
MapSetEntryNode(Some(key), entryValue, objectId(value))
case _ =>
toObject(mirror)
}
case "global" =>
ObjectNode("global", objectId(value))
case _ =>
toObject(mirror)
}
}
}
// Less capable than marshalScriptObject because JSObject doesn't expose as many methods
private def marshalJSObject(value: Value): ValueNode = {
val jsObject = value.asInstanceOf[ObjectReference]
val mirror = new JSObjectMirror(jsObject)
if (mirror.isArray) toArray(mirror)
else if (mirror.isFunction) toFunction(mirror)
else toObject(mirror)
}
private def marshalPrimitive(value: Value): Any = value match {
case b: ByteValue => b.byteValue()
case b: BooleanValue => b.booleanValue()
case s: ShortValue => s.shortValue()
case i: IntegerValue => i.intValue()
case l: LongValue => l.longValue()
case f: FloatValue => f.floatValue()
case d: DoubleValue => d.doubleValue()
case c: CharValue => c.charValue()
case _ => throw new UnsupportedOperationException("Unhandled primitive value: " + value)
}
def isJSObject(value: Value): Boolean = value match {
case objRef: ObjectReference if inherits(objRef, "jdk.nashorn.api.scripting.JSObject") => true
case _ => false
}
def isScriptObject(value: Value): Boolean = value match {
case objRef: ObjectReference =>
val typeName = value.`type`().name()
// We only care about Nashorn classes
if (!typeName.startsWith("jdk.nashorn.internal")) return false
// JO classes are dynamically generated
if (typeName.startsWith("jdk.nashorn.internal.scripts.JO")) return true
// JD classes as well. Saw these first with JDK 9!
if (typeName.startsWith("jdk.nashorn.internal.scripts.JD")) return true
// Some shortcuts
if (typeName == "jdk.nashorn.internal.runtime.ScriptFunction" || typeName == "jdk.nashorn.internal.objects.NativeObject$Constructor") return true
// Check inheritance
inherits(objRef, ScriptObjectClassName)
case _ => false
}
def isHashtable(value: Value): Boolean = value match {
case objRef: ObjectReference if inherits(objRef, classOf[java.util.Hashtable[_, _]].getName) => true
case _ => false
}
private def toArray(ref: ArrayReference) = ArrayNode(ref.length(), None, objectId(ref))
private def toArray(mirror: ScriptObjectMirror) = {
val size = mirror.getRequiredInt("length")
val typedClassName = if (mirror.className != "Array") Some(mirror.className) else None
ArrayNode(size, typedClassName, objectId(mirror.scriptObject))
}
private def toArray(mirror: JSObjectMirror) = {
// Not required here - JSObject can have a custom implementation that is broken.
val size = mirror.getInt("length", 0)
ArrayNode(size, None, objectId(mirror.jsObject)) // TODO: Items, but we will refactor...
}
private def toDate(mirror: ScriptObjectMirror) = {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toString
// "The toString() method always returns a string representation of the date in American English."
// The Chrome debugging protocol (in particular, RemoteObject) doesn't seem to care about Date details.
DateNode(mirror.actualToString, objectId(mirror.scriptObject))
}
private def toRegExp(mirror: ScriptObjectMirror) = {
RegExpNode(mirror.actualToString, objectId(mirror.scriptObject))
}
private def toMap(mirror: ScriptObjectMirror, weak: Boolean) = {
// Size isn't used by DevTools for WeakMap, so skip the extra call in that case
val size = if (weak) -1 else mirror.getInt("size", 0)
MapNode(size, weak, objectId(mirror.scriptObject))
}
private def toSet(mirror: ScriptObjectMirror, weak: Boolean) = {
// Size isn't used by DevTools for WeakSet, so skip the extra call in that case
val size = if (weak) -1 else mirror.getInt("size", 0)
SetNode(size, weak, objectId(mirror.scriptObject))
}
private def toObject(mirror: ScriptObjectMirror) = ObjectNode(mirror.typeOfObject(), objectId(mirror.scriptObject))
private def toObject(mirror: JSObjectMirror) = ObjectNode(mirror.className, objectId(mirror.jsObject))
private def toFunction(mirror: ScriptFunctionMirror): MarshallerResult = {
val name = mirror.name
val source = mirror.source
def boundExtra = mirror.boundTargetFunction.map(f => Seq("TargetFunction" -> toFunction(f).valueNode)).getOrElse(Seq.empty)
def scopes = Seq("Scopes" -> objectFromScopes(mirror.scopes))
val extra = boundExtra ++ scopes
MarshallerResult(FunctionNode(name, source, objectId(mirror.scriptObject)), Map(extra: _*))
}
private def objectFromScopes(scopes: Seq[Value]) = {
scopeList(scopes.map(scopeObject))
}
private def scopeObject(s: Value): ScopeObject = {
marshalInPrivate(s) match {
case MarshallerResult(c: ComplexNode, _) =>
// Hm, we reuse object ID, so this becomes a temporary "wrapper" that is stored in the scope-list object.
ScopeObject(s.scopeType, "", c.objectId)
case other => throw new IllegalStateException("Unexpected marshalled scope: " + other)
}
}
private def scopeList(valueNodes: Seq[ScopeObject]) = {
// Scope list has no 'length' property
val props = valueNodes.zipWithIndex.map(e => e._2.toString -> e._1)
// ListMap preserves order
val obj = new LocalObject(ListMap(props: _*))
registeredNode(obj, ScopeList(valueNodes.size, objectId(obj)))
}
private def registeredNode(v: Value, n: ComplexNode) = {
mappingRegistry.register(v, n, Map.empty)
n
}
private def toFunction(mirror: JSObjectMirror) = {
// No way to get the source here. We try to get the name as a member.
val name = Option(mirror.getString("name")).getOrElse("")
val source = s"function $name() {}"
FunctionNode(name, source, objectId(mirror.jsObject))
}
private def toError(mirror: ScriptObjectMirror) = {
val msgValue = mirror.getString("message")
val nameValue = mirror.getString("name")
// Nashorn extensions, see https://wiki.openjdk.java.net/display/Nashorn/Nashorn+extensions
val stackValue = mirror.getString("stack")
val lineNumberValue = mirror.getInt("lineNumber", 0)
val colNumberValue = mirror.getInt("columnNumber", -1)
val fileNameValue = mirror.getString("fileName")
val exData = createExceptionData(nameValue, msgValue, lineNumberValue, colNumberValue,
Option(fileNameValue).getOrElse("<unknown>"), //TODO: To URL?
Option(stackValue)
)
ErrorValue(exData, isThrown = false, objectId(mirror.scriptObject), None)
}
class LazyMarshalledValue(v: Value) extends LazyNode {
override def resolve(): ValueNode = marshal(v)
}
object BoxedValue {
def unapply(v: Value): Option[ValueNode] = v match {
case value: ObjectReference => attemptUnboxing(value)
case _ => None
}
}
object UndefinedValue {
def unapply(v: Value): Option[ValueNode] = v match {
case value if isUndefined(value) => Some(SimpleValue(Undefined))
case _ => None
}
}
private def pretendToBeEvalIfNecessary(fileName: String): String =
if (fileName == CodeEval.EvalSourceName) "<eval>" else fileName
private def replaceEvalSourceName(str: Option[String]): Option[String] =
str.map(_.replace(CodeEval.EvalSourceName, "<eval>"))
// Creates ExceptionData but makes sure that file URL shows as <eval> for an NCDbg-evaled script.
private def createExceptionData(name: String, message: String, lineNumberBase1: Int, columnNumberBase0: Int, url: String, stackIncludingMessage: Option[String]): ExceptionData =
ExceptionData(name, message, lineNumberBase1, columnNumberBase0, pretendToBeEvalIfNecessary(url), replaceEvalSourceName(stackIncludingMessage))
object ExceptionValue {
/** Unpack an [[ErrorValue]] instance and an optional Java stack string from an exception value. The Java stack
* string is returned separately because it is added as an extra property for the exception object when it is
* registered in the mapping registry.
*
* @param v the value that may be an exception
*/
def unapply(v: Value): Option[ErrorValue] = v match {
case t: ThrownExceptionReference => unpack(t.exception, wasThrown = true)
case objRef: ObjectReference => unpack(objRef, wasThrown = false)
case _ => None
}
private def unpack(exception: ObjectReference, wasThrown: Boolean): Option[ErrorValue] = {
val types = allReachableTypesIncluding(exception.referenceType())
val isThrowable = types.exists(_.name() == classOf[Throwable].getName)
val nashornException = types.find(_.name() == classOf[NashornException].getName)
if (isThrowable) {
val data = exceptionDataOf(exception, nashornException)
Some(ErrorValue(data.data, isThrown = wasThrown, objectId(exception), data.javaStack))
} else None
}
private def extractJavaExceptionInfo(mirror: ThrowableMirror): Option[ExceptionInfo] = {
val stackTraceElements = mirror.stackTrace
val stack = stackTraceElements.map("\tat " + _.actualToString).mkString("\n")
stackTraceElements.headOption.map(ste => ExceptionInfo(stack, ste.lineNumber, ste.fileName))
}
private def exceptionDataOf(objRef: ObjectReference, nashornException: Option[ReferenceType]): ExceptionDataWithJavaStack = {
val mirror = new ThrowableMirror(objRef)
// Extract information about the Exception from a Java point of view. This is different than the Nashorn point
// of view, where only script frames are considered.
val javaExceptionInfo = extractJavaExceptionInfo(mirror)
val data: (LocationData, String) = nashornException match {
case Some(classType: ClassType) =>
// This is a NashornException instance.
// TODO: Get the actual error name. One possibility is to parse the message.
val classMirror = new NashornExceptionClassMirror(classType)
val stackWithoutMessage = classMirror.getScriptStackString(objRef)
val nashornMirror = new NashornExceptionMirror(objRef)
(LocationData(
nashornMirror.lineNumber,
nashornMirror.columnNumber,
nashornMirror.fileName //TODO: new File(_).toURI.toString??
), stackWithoutMessage)
case _ =>
javaExceptionInfo match {
case Some(info) =>
// Note that we don't include the Java stack here, because we want the stack trace to be a script stack
// trace always, for consistency. Instead we include the Java stack trace as an extra Error property.
(LocationData(info.lineNumber1Based, -1, info.fileName), null)
case None =>
(LocationData(0, -1, "<unknown>"), null)
}
}
// Use full Exception type name, e.g. java.lang.IllegalArgumentException
var name = objRef.referenceType().name()
var message = mirror.message
// Special handling of ECMAException as we want to get to the actual JS error type.
if (name == TypeConstants.NIR_ECMAException) {
// Message is on the form Type: Message
// Let name be the first part and message the second
val typeAndMessage = ErrorUtils.parseMessage(message)
name = typeAndMessage.typ
message = typeAndMessage.message
}
val fullStack = s"$name: $message" + Option(data._2).map(st => "\n" + st).getOrElse("")
// Note the slight implementation inconsistency wrt `fullStack`: we don't prefix with name and message outside
// of the Option map. The reason is that we always expect Java exception info including stack to be present
// (we get None if there are no stack frames at all, which would be odd).
val fullJavaStack = javaExceptionInfo.map(info => s"$name: $message\n${info.stack}")
ExceptionDataWithJavaStack(createExceptionData(name, message, data._1.lineNumberBase1, data._1.columnNumberBase0, data._1.url, Option(fullStack)),
fullJavaStack)
}
}
private def allReachableTypesIncluding(refType: ReferenceType): Seq[ReferenceType] = refType match {
case ct: ClassType =>
Seq(ct) ++ allReachableTypesIncluding(ct.superclass()) ++ ct.interfaces().asScala.flatMap(allReachableTypesIncluding)
case it: InterfaceType =>
Seq(it) ++ it.superinterfaces().asScala.flatMap(allReachableTypesIncluding)
case _ => Seq.empty
}
/**
* Tests if the given object inherits a type (class or interface) with the given name.
* @param obj the object
* @param typeName the full type name
*/
private def inherits(obj: ObjectReference, typeName: String) = {
cache.inherits(obj, typeName) match {
case Some(answer) => answer
case None =>
val answer = allReachableTypesIncluding(obj.referenceType()).exists(_.name() == typeName)
cache.indicateInheritance(obj, typeName, answer)
answer
}
}
private case class LocationData(lineNumberBase1: Int, columnNumberBase0: Int, url: String)
}
| provegard/ncdbg | src/main/scala/com/programmaticallyspeaking/ncd/nashorn/Marshaller.scala | Scala | bsd-3-clause | 21,181 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import scala.collection.mutable
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.scheduler._
/**
 * :: DeveloperApi ::
 * A SparkListener that maintains executor storage status.
 *
 * This class is thread-safe (unlike JobProgressListener).
 */
@DeveloperApi
class StorageStatusListener extends SparkListener {
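  // A hedged usage sketch: register the listener on a SparkContext and query it for
  // per-executor storage (addSparkListener is a @DeveloperApi method on SparkContext):
  //   val listener = new StorageStatusListener
  //   sc.addSparkListener(listener)
  //   val statuses = listener.storageStatusList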
  // This maintains only blocks that are cached (i.e. storage level is not StorageLevel.NONE)
private[storage] val executorIdToStorageStatus = mutable.Map[String, StorageStatus]()
def storageStatusList: Seq[StorageStatus] = synchronized {
executorIdToStorageStatus.values.toSeq
}
  /** Update storage status list to reflect updated block statuses. */
private def updateStorageStatus(execId: String, updatedBlocks: Seq[(BlockId, BlockStatus)]) {
executorIdToStorageStatus.get(execId).foreach { storageStatus =>
updatedBlocks.foreach { case (blockId, updatedStatus) =>
if (updatedStatus.storageLevel == StorageLevel.NONE) {
storageStatus.removeBlock(blockId)
} else {
storageStatus.updateBlock(blockId, updatedStatus)
}
}
}
}
  /** Update storage status list to reflect the removal of an RDD from the cache. */
private def updateStorageStatus(unpersistedRDDId: Int) {
storageStatusList.foreach { storageStatus =>
storageStatus.rddBlocksById(unpersistedRDDId).foreach { case (blockId, _) =>
storageStatus.removeBlock(blockId)
}
}
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
val info = taskEnd.taskInfo
val metrics = taskEnd.taskMetrics
if (info != null && metrics != null) {
val updatedBlocks = metrics.updatedBlocks.getOrElse(Seq[(BlockId, BlockStatus)]())
if (updatedBlocks.length > 0) {
updateStorageStatus(info.executorId, updatedBlocks)
}
}
}
override def onUnpersistRDD(unpersistRDD: SparkListenerUnpersistRDD): Unit = synchronized {
updateStorageStatus(unpersistRDD.rddId)
}
override def onBlockManagerAdded(blockManagerAdded: SparkListenerBlockManagerAdded) {
synchronized {
val blockManagerId = blockManagerAdded.blockManagerId
val executorId = blockManagerId.executorId
val maxMem = blockManagerAdded.maxMem
val storageStatus = new StorageStatus(blockManagerId, maxMem)
executorIdToStorageStatus(executorId) = storageStatus
}
}
override def onBlockManagerRemoved(blockManagerRemoved: SparkListenerBlockManagerRemoved) {
synchronized {
val executorId = blockManagerRemoved.blockManagerId.executorId
executorIdToStorageStatus.remove(executorId)
}
}
}
| tophua/spark1.52 | core/src/main/scala/org/apache/spark/storage/StorageStatusListener.scala | Scala | apache-2.0 | 3,803 |
// Copyright 2013 trananh
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import java.io._
import scala.Array
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/** A simple binary file reader.
* @constructor Create a binary file reader.
* @param file The binary file to be read.
*
* @author trananh
*/
class VecBinaryReader(val file: File) {
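  // A hedged usage sketch (the file name is illustrative):
  //   val reader = new VecBinaryReader("vectors.bin")
  //   val firstToken = reader.readToken()
  //   reader.close()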
/** Overloaded constructor */
def this(filename: String) = this(new File(filename))
/** ASCII values for common delimiter characters */
private val SPACE = 32
private val LF = 10
/** Open input streams */
private val fis = new FileInputStream(file)
private val bis = new BufferedInputStream(fis)
private val dis = new DataInputStream(bis)
/** Close the stream. */
def close() { dis.close(); bis.close(); fis.close() }
/** Read the next byte.
* @return The next byte from the file.
*/
def read(): Byte = dis.readByte()
/** Read the next token as a string, using the provided delimiters as breaking points.
* @param delimiters ASCII code of delimiter characters (default to SPACE and LINE-FEED).
* @return String representation of the next token.
*/
def readToken(delimiters: Set[Int] = Set(SPACE, LF)): String = {
val bytes = new ArrayBuffer[Byte]()
var byte = dis.readByte()
while (!delimiters.contains(byte)) {
bytes.append(byte)
byte = dis.readByte()
}
    new String(bytes.toArray[Byte])
}
/** Read next 4 bytes as a floating-point number.
* @return The floating-point value of the next 4 bytes.
*/
def readFloat(): Float = {
// We need to reverse the byte order here due to endian-compatibility.
java.lang.Float.intBitsToFloat(java.lang.Integer.reverseBytes(dis.readInt()))
}
}
/** A Scala port of the word2vec model. This interface allows the user to access the vector representations
* output by the word2vec tool, as well as perform some common operations on those vectors. It does NOT
* implement the actual continuous bag-of-words and skip-gram architectures for computing the vectors.
*
* More information on word2vec can be found here: https://code.google.com/p/word2vec/
*
* Example usage:
* {{{
* val model = new Word2Vec()
* model.load("vectors.bin")
* val results = model.distance(List("france"), N = 10)
*
* model.pprint(results)
* }}}
*
* @constructor Create a word2vec model.
*
* @author trananh
*/
class Word2Vec {
/** Map of words and their associated vector representations */
private val vocab = new mutable.HashMap[String, Array[Float]]()
/** Number of words */
private var numWords = 0
/** Number of floating-point values associated with each word (i.e., length of the vectors) */
private var vecSize = 0
/** Load data from a binary file.
* @param filename Path to file containing word projections in the BINARY FORMAT.
* @param limit Maximum number of words to load from file (a.k.a. max vocab size).
* @param normalize Normalize the loaded vectors if true (default to true).
*/
def load(filename: String, limit: Integer = Int.MaxValue, normalize: Boolean = true): Unit = {
// Check edge case
val file = new File(filename)
if (!file.exists()) {
throw new FileNotFoundException("Binary vector file not found <" + file.toString + ">")
}
// Create new reader to read data
val reader = new VecBinaryReader(file)
// Read header info
numWords = Integer.parseInt(reader.readToken())
vecSize = Integer.parseInt(reader.readToken())
println("\\nFile contains " + numWords + " words with vector size " + vecSize)
// Read the vocab words and their associated vector representations
var word = ""
val vector = new Array[Float](vecSize)
var normFactor = 1f
for (_ <- 0 until math.min(numWords, limit)) {
// Read the word
word = reader.readToken()
// Read the vector representation (each vector contains vecSize number of floats)
for (i <- 0 until vector.length) vector(i) = reader.readFloat()
// Store the normalized vector representation, keyed by the word
normFactor = if (normalize) magnitude(vector).toFloat else 1f
vocab.put(word, vector.map(_ / normFactor) )
// Eat up the next delimiter character
reader.read()
}
println("Loaded " + math.min(numWords, limit) + " words.\\n")
// Finally, close the reader
reader.close()
}
/** Return the number of words in the vocab.
* @return Number of words in the vocab.
*/
def wordsCount: Int = numWords
/** Size of the vectors.
* @return Size of the vectors.
*/
def vectorSize: Int = vecSize
/** Clear internal data. */
def clear() {
vocab.clear()
numWords = 0
vecSize = 0
}
/** Check if the word is present in the vocab map.
* @param word Word to be checked.
* @return True if the word is in the vocab map.
*/
def contains(word: String): Boolean = {
vocab.get(word).isDefined
}
/** Get the vector representation for the word.
* @param word Word to retrieve vector for.
* @return The vector representation of the word.
*/
def vector(word: String): Array[Float] = {
vocab.getOrElse(word, Array[Float]())
}
/** Compute the Euclidean distance between two vectors.
* @param vec1 The first vector.
* @param vec2 The other vector.
* @return The Euclidean distance between the two vectors.
*/
def euclidean(vec1: Array[Float], vec2: Array[Float]): Double = {
assert(vec1.length == vec2.length, "Uneven vectors!")
var sum = 0.0
for (i <- 0 until vec1.length) sum += math.pow(vec1(i) - vec2(i), 2)
math.sqrt(sum)
}
/** Compute the Euclidean distance between the vector representations of the words.
* @param word1 The first word.
* @param word2 The other word.
* @return The Euclidean distance between the vector representations of the words.
*/
def euclidean(word1: String, word2: String): Double = {
assert(contains(word1) && contains(word2), "Out of dictionary word! " + word1 + " or " + word2)
euclidean(vocab.get(word1).get, vocab.get(word2).get)
}
/** Compute the cosine similarity score between two vectors.
* @param vec1 The first vector.
* @param vec2 The other vector.
* @return The cosine similarity score of the two vectors.
*/
def cosine(vec1: Array[Float], vec2: Array[Float]): Double = {
assert(vec1.length == vec2.length, "Uneven vectors!")
var dot, sum1, sum2 = 0.0
for (i <- 0 until vec1.length) {
dot += (vec1(i) * vec2(i))
sum1 += (vec1(i) * vec1(i))
sum2 += (vec2(i) * vec2(i))
}
dot / (math.sqrt(sum1) * math.sqrt(sum2))
}
/** Compute the cosine similarity score between the vector representations of the words.
* @param word1 The first word.
* @param word2 The other word.
* @return The cosine similarity score between the vector representations of the words.
*/
def cosine(word1: String, word2: String): Double = {
assert(contains(word1) && contains(word2), "Out of dictionary word! " + word1 + " or " + word2)
cosine(vocab.get(word1).get, vocab.get(word2).get)
}
/** Compute the magnitude of the vector.
* @param vec The vector.
* @return The magnitude of the vector.
*/
def magnitude(vec: Array[Float]): Double = {
math.sqrt(vec.foldLeft(0.0){(sum, x) => sum + (x * x)})
}
/** Normalize the vector.
* @param vec The vector.
* @return A normalized vector.
*/
def normalize(vec: Array[Float]): Array[Float] = {
val mag = magnitude(vec).toFloat
vec.map(_ / mag)
}
/** Find the vector representation for the given list of word(s) by aggregating (summing) the
* vector for each word.
* @param input The input word(s).
* @return The sum vector (aggregated from the input vectors).
*/
def sumVector(input: List[String]): Array[Float] = {
// Find the vector representation for the input. If multiple words, then aggregate (sum) their vectors.
input.foreach(w => assert(contains(w), "Out of dictionary word! " + w))
val vector = new Array[Float](vecSize)
input.foreach(w => for (j <- 0 until vector.length) vector(j) += vocab.get(w).get(j))
vector
}
/** Find N closest terms in the vocab to the given vector, using only words from the in-set (if defined)
* and excluding all words from the out-set (if non-empty). Although you can, it doesn't make much
* sense to define both in and out sets.
* @param vector The vector.
* @param inSet Set of words to consider. Specify None to use all words in the vocab (default behavior).
* @param outSet Set of words to exclude (default to empty).
* @param N The maximum number of terms to return (default to 40).
* @return The N closest terms in the vocab to the given vector and their associated cosine similarity scores.
*/
def nearestNeighbors(vector: Array[Float], inSet: Option[Set[String]] = None,
outSet: Set[String] = Set[String](), N: Integer = 40)
: List[(String, Float)] = {
// For performance efficiency, we maintain the top/closest terms using a priority queue.
// Note: We invert the distance here because a priority queue will dequeue the highest priority element,
// but we would like it to dequeue the lowest scoring element instead.
val top = new mutable.PriorityQueue[(String, Float)]()(Ordering.by(-_._2))
// Iterate over each token in the vocab and compute its cosine score to the input.
var dist = 0f
val iterator = if (inSet.isDefined) vocab.filterKeys(k => inSet.get.contains(k)).iterator else vocab.iterator
iterator.foreach(entry => {
// Skip tokens in the out set
if (!outSet.contains(entry._1)) {
dist = cosine(vector, entry._2).toFloat
if (top.size < N || top.head._2 < dist) {
top.enqueue((entry._1, dist))
if (top.length > N) {
// If the queue contains over N elements, then dequeue the highest priority element
// (which will be the element with the lowest cosine score).
top.dequeue()
}
}
}
})
// Return the top N results as a sorted list.
assert(top.length <= N)
top.toList.sortWith(_._2 > _._2)
}
/** Find the N closest terms in the vocab to the input word(s).
* @param input The input word(s).
* @param N The maximum number of terms to return (default to 40).
* @return The N closest terms in the vocab to the input word(s) and their associated cosine similarity scores.
*/
def distance(input: List[String], N: Integer = 40): List[(String, Float)] = {
// Check for edge cases
if (input.size == 0) return List[(String, Float)]()
input.foreach(w => {
if (!contains(w)) {
println("Out of dictionary word! " + w)
return List[(String, Float)]()
}
})
// Find the vector representation for the input. If multiple words, then aggregate (sum) their vectors.
val vector = sumVector(input)
nearestNeighbors(normalize(vector), outSet = input.toSet, N = N)
}
/** Find the N closest terms in the vocab to the analogy:
* - [word1] is to [word2] as [word3] is to ???
*
   * The algorithm operates as follows:
* - Find a vector approximation of the missing word = vec([word2]) - vec([word1]) + vec([word3]).
* - Return words closest to the approximated vector.
*
* @param word1 First word in the analogy [word1] is to [word2] as [word3] is to ???.
* @param word2 Second word in the analogy [word1] is to [word2] as [word3] is to ???
* @param word3 Third word in the analogy [word1] is to [word2] as [word3] is to ???.
* @param N The maximum number of terms to return (default to 40).
*
* @return The N closest terms in the vocab to the analogy and their associated cosine similarity scores.
*/
def analogy(word1: String, word2: String, word3: String, N: Integer = 40): List[(String, Float)] = {
// Check for edge cases
if (!contains(word1) || !contains(word2) || !contains(word3)) {
println("Out of dictionary word! " + Array(word1, word2, word3).mkString(" or "))
return List[(String, Float)]()
}
// Find the vector approximation for the missing analogy.
val vector = new Array[Float](vecSize)
for (j <- 0 until vector.length)
vector(j) = vocab.get(word2).get(j) - vocab.get(word1).get(j) + vocab.get(word3).get(j)
nearestNeighbors(normalize(vector), outSet = Set(word1, word2, word3), N = N)
}
/** Rank a set of words by their respective distance to some central term.
* @param word The central word.
* @param set Set of words to rank.
* @return Ordered list of words and their associated scores.
*/
def rank(word: String, set: Set[String]): List[(String, Float)] = {
// Check for edge cases
if (set.size == 0) return List[(String, Float)]()
(set + word).foreach(w => {
if (!contains(w)) {
println("Out of dictionary word! " + w)
return List[(String, Float)]()
}
})
nearestNeighbors(vocab.get(word).get, inSet = Option(set), N = set.size)
}
/** Pretty print the list of words and their associated scores.
* @param words List of (word, score) pairs to be printed.
*/
def pprint(words: List[(String, Float)]) = {
println("\\n%50s".format("Word") + (" " * 7) + "Cosine distance\\n" + ("-" * 72))
println(words.map(s => "%50s".format(s._1) + (" " * 7) + "%15f".format(s._2)).mkString("\\n"))
}
}
/** ********************************************************************************
* Demo of the Scala ported word2vec model.
* ********************************************************************************
*/
object RunWord2Vec {
/** Demo. */
def main(args: Array[String]) {
// Load word2vec model from binary file.
val model = new Word2Vec()
model.load("../word2vec-scala/vectors.bin")
// distance: Find N closest words
model.pprint(model.distance(List("france"), N = 10))
model.pprint(model.distance(List("france", "usa")))
model.pprint(model.distance(List("france", "usa", "usa")))
// analogy: "king" is to "queen", as "man" is to ?
model.pprint(model.analogy("king", "queen", "man", N = 10))
// rank: Rank a set of words by their respective distance to the central term
model.pprint(model.rank("apple", Set("orange", "soda", "lettuce")))
}
}
| matiasmolinas/channelorganizer | src/word2vec/src/Word2Vec.scala | Scala | apache-2.0 | 15,052 |
package com.omegaup.grader.drivers
import com.omegaup._
import com.omegaup.data._
import com.omegaup.grader._
trait Driver {
def run(run: Run)(implicit ctx: RunContext): Run
def validateOutput(run: Run)(implicit ctx: RunContext): Run
def setLogs(run: Run, logs: String)(implicit ctx: RunContext): Unit
def cleanResults(run: Run)(implicit ctx: RunContext): Unit
}
/* vim: set noexpandtab: */
| omegaup/backend | grader/src/main/scala/com/omegaup/grader/drivers/Driver.scala | Scala | bsd-2-clause | 400 |
/* scala-stm - (c) 2009-2010, Stanford University, PPL */
package scala.concurrent.stm
package skel
import scala.collection.{immutable, mutable}
private[stm] object TMapViaClone {
class FrozenMutableMap[A, B](self: mutable.Map[A, B]) extends immutable.Map[A, B] {
override def isEmpty: Boolean = self.isEmpty
override def size: Int = self.size
def get(key: A): Option[B] = self.get(key)
def iterator: Iterator[(A, B)] = self.iterator
override def foreach[U](f: ((A, B)) => U) { self foreach f }
def + [B1 >: B](kv: (A, B1)): immutable.Map[A, B1] =
new FrozenMutableMap(self.clone().asInstanceOf[mutable.Map[A, B1]] += kv)
def - (k: A): immutable.Map[A, B] = new FrozenMutableMap(self.clone() -= k)
}
}
/** Provides an implementation for the bulk of the functionality of `TMap` and
* `TMap.View` by making extensive use of `clone()`. Assumes that the
* underlying implementation of `clone()` is O(1).
*
* @author Nathan Bronson
*/
private[stm] trait TMapViaClone[A, B] extends TMap.View[A, B] with TMap[A, B] {
import TMapViaClone._
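  // A hedged sketch of what this trait provides through scala-stm's public API
  // (assuming a cheap clone(), snapshot is effectively O(1)):
  //   val m = TMap.empty[String, Int]
  //   m.single.getOrElseUpdate("answer", 42) // atomic check-then-act
  //   val frozen = m.single.snapshot         // immutable view of current contents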
// Implementations may be able to do better.
override def snapshot: immutable.Map[A, B] = new FrozenMutableMap(clone())
def tmap: TMap[A, B] = this
def single: TMap.View[A, B] = this
//////////// builder functionality (from mutable.MapLike via TMap.View)
override protected[this] def newBuilder: TMap.View[A, B] = empty
override def result: TMap.View[A, B] = this
//////////// construction of new TMaps
// A cheap clone() means that mutable.MapLike's implementations of +, ++,
// -, and -- are all pretty reasonable.
override def clone(): TMap.View[A, B]
//////////// atomic compound ops
override def getOrElseUpdate(key: A, op: => B): B = {
single.get(key) getOrElse {
atomic { implicit txn =>
tmap.get(key) getOrElse { val v = op ; tmap.put(key, v) ; v }
}
}
}
override def transform(f: (A, B) => B): this.type = {
atomic { implicit txn =>
for (kv <- tmap)
tmap.update(kv._1, f(kv._1, kv._2))
}
this
}
override def retain(p: (A, B) => Boolean): this.type = {
atomic { implicit txn =>
for (kv <- tmap)
if (!p(kv._1, kv._2))
tmap -= kv._1
}
this
}
}
| djspiewak/scala-stm | src/main/scala/scala/concurrent/stm/skel/TMapViaClone.scala | Scala | bsd-3-clause | 2,277 |
package openstackApi.domain
case class Instance(name: String) {
}
| Spirals-Team/ermis | src/main/scala/openstackApi/domain/Instance.scala | Scala | agpl-3.0 | 68 |
package com.github.vooolll.client
import java.net.URL
import com.github.vooolll.base._
import com.github.vooolll.client.feed._
import cats.implicits._
import com.github.vooolll.domain.profile._
import com.github.vooolll.domain.oauth.FacebookError
import com.github.vooolll.domain.profile.FacebookUserAttribute.defaultAttributeValues
class UserSpec extends FacebookClientSupport {
import com.github.vooolll.base.TestConfiguration._
val realUserId = FacebookUserId("117656352360395")
val realUser = FacebookUser(
id = realUserId,
email = Some("[email protected]"),
name = Some("Bob Willins"),
picture = Some(
FacebookUserPicture(
50.0,
isSilhouette = false,
new URL("https://platform-lookaside.fbsbx.com/platform/profilepic/"),
50.0
)
),
firstName = Some("Bob"),
lastName = Some("Willins"),
link = Some(new URL("https://www.facebook.com")),
gender = Some(Gender.Female),
ageRange = Some(AgeRange(21, None)),
hometown = Some(FacebookTown("115486911798138", "Almaty, Kazakhstan")),
location = Some(FacebookTown("115353315143936", "Rome, Italy"))
)
"Facebook Graph Api" should {
"return user profile" in { c =>
c.userProfile(realUserId, defaultAttributeValues) map (_.withoutQueryParams shouldBe realUser)
}
"return user profile result" in { c =>
c.userProfileResult(realUserId) map (_.map(_.withoutQueryParams) shouldBe realUser.asRight)
}
"return error SpecifiedObjectNotFound" in { c =>
c.userProfileResult(realUserId.copy(value = "asd")) map {
case Right(_) => fail("bad request expected")
case Left(e) =>
e.errorType shouldBe FacebookError.SpecifiedObjectNotFound
}
}
"return error InvalidVerificationCodeFormat" in { c =>
c.userProfileResult(realUserId.copy(value = "777661112359912")) map {
case Right(_) => fail("bad request expected")
case Left(e) =>
e.errorType shouldBe FacebookError.InvalidVerificationCodeFormat
}
}
}
}
| vooolll/facebook4s | src/test/scala/com/github/vooolll/client/UserSpec.scala | Scala | apache-2.0 | 2,080 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import org.apache.accumulo.core.client.IteratorSetting
import org.locationtech.geomesa.index.filters.S2Filter
import org.locationtech.geomesa.index.index.s2.S2IndexValues
class S2Iterator extends RowFilterIterator[S2Filter](S2Filter)
object S2Iterator {
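  // A hedged usage sketch: attach the configured iterator to an Accumulo scanner.
  // `indexValues` is assumed to come from the query planner's S2 index lookup:
  //   scanner.addScanIterator(S2Iterator.configure(indexValues, prefix = 2, priority = 25))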
def configure(values: S2IndexValues, prefix: Int, priority: Int): IteratorSetting = {
val is = new IteratorSetting(priority, "s2", classOf[S2Iterator])
// index space values for comparing in the iterator
S2Filter.serializeToStrings(S2Filter(values)).foreach { case (k, v) => is.addOption(k, v) }
// account for shard and table sharing bytes
is.addOption(RowFilterIterator.RowOffsetKey, prefix.toString)
is
}
}
| aheyne/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/S2Iterator.scala | Scala | apache-2.0 | 1,212 |
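A hedged sketch of attaching the configured iterator to an Accumulo scanner; `connector`, `table`, `auths` and the index-derived `values: S2IndexValues` are assumed to exist, and priority 25 is arbitrary:

val setting = S2Iterator.configure(values, prefix = 2, priority = 25)
val scanner = connector.createScanner(table, auths)
scanner.addScanIterator(setting)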
/**
* Created by android on 22/2/15.
*/
import scala.slick.driver.DerbyDriver.simple._
object DBUtils {
def db = Database.forURL("jdbc:derby://localhost:1527/demo;create=true", driver = "org.apache.derby.jdbc.ClientDriver")
}
| pamu/Slick-Demos | src/main/scala/DBUtils.scala | Scala | apache-2.0 | 232 |
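A minimal consumer of the connection above using Slick 2.x blocking sessions; the `users` table is illustrative:

import scala.slick.jdbc.StaticQuery

val count = DBUtils.db.withSession { implicit session =>
  StaticQuery.queryNA[Int]("select count(*) from users").first
}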
package com.markfeeney
package object circlet {
type Cont = Option[Response] => Sent.type
type Handler = Request => Cont => Sent.type
type Middleware = Handler => Handler
}
| overthink/circlet | src/main/scala/com/markfeeney/circlet/package.scala | Scala | mit | 180 |
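Because the aliases are plain function types, a middleware can be written structurally without touching any other circlet API; construction of the actual Response passed to the continuation is elided here:

import com.markfeeney.circlet._

val logRequests: Middleware = handler => req => k => {
  println(s"-> $req")
  handler(req)(k)
}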
/* ----------------- sse-breaker ----------------- *\\
* Licensed under the Apache License, Version 2.0. *
* Author: Spiros Tzavellas *
\\* ----------------------------------------------- */
package com.tzavellas.sse.breaker.extras
import java.util.Properties
import javax.mail.internet.{ InternetAddress, MimeMessage }
import javax.mail.{ Message, Session }
import com.tzavellas.sse.breaker.CircuitBreaker
import com.tzavellas.sse.breaker.CircuitStateListener
import com.tzavellas.sse.breaker.extras.EmailCircuitListener._
class EmailCircuitListener(addresses: EmailConfig, config: SMTPConfig)
extends CircuitStateListener {
def onOpen(breaker: CircuitBreaker, error: Throwable): Unit = {
sendEmail(
subject = "Open circuit for " + breaker.name,
body = s"The system had lots of errors so it will stop processing temporarily. Last error was...\\n ${format(error)}")
}
def onClose(breaker: CircuitBreaker): Unit = {
sendEmail(
subject = "Closed circuit for '" + breaker.name + "'",
body = "The system is back to normal")
}
private def sendEmail(subject: String, body: String): Unit = {
val session = Session.getInstance(new Properties)
val message = new MimeMessage(session)
message.setFrom(new InternetAddress(addresses.from))
message.setRecipients(Message.RecipientType.TO, addresses.to)
message.setSubject(subject)
message.setText(body)
    message.saveChanges()
    val transport = session.getTransport("smtp")
    transport.connect(config.host, config.port, config.username, config.password)
    // send through the transport we just connected; the static Transport.send
    // would open a fresh connection using the (empty) session defaults instead
    try transport.sendMessage(message, message.getAllRecipients)
    finally transport.close()
}
private def format(error: Throwable) = s"""
|Exception: ${error.getClass}
|
|Message: ${error.getMessage}
|
|Stacktrace:
|${error.getStackTrace.mkString("\\n")}
""".stripMargin
}
object EmailCircuitListener {
case class EmailConfig(from: String, to: String)
case class SMTPConfig(host: String, port: Int, username: String, password: String)
}
| sptz45/sse-breaker | src/main/scala/com/tzavellas/sse/breaker/extras/EmailCircuitListener.scala | Scala | apache-2.0 | 1,996 |
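Constructing the listener is fully determined by the config case classes above; how it is registered with a CircuitBreaker depends on the sse-breaker API and is not shown:

import com.tzavellas.sse.breaker.extras.EmailCircuitListener.{EmailConfig, SMTPConfig}

val listener = new EmailCircuitListener(
  EmailConfig(from = "[email protected]", to = "[email protected]"),
  SMTPConfig(host = "smtp.example.com", port = 587, username = "ops", password = "secret"))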
package es.upm.oeg.epnoi.matching.metrics.similarity
import breeze.linalg.DenseVector
import es.upm.oeg.epnoi.matching.metrics.utils.SparkWrapper
import org.apache.spark.mllib.feature.Word2VecModel
import org.apache.spark.mllib.linalg.Vectors
/**
* Created by cbadenes on 20/07/15.
*/
object WordVecText8Example {
def main(args: Array[String]): Unit = {
val modelId = "word2vec/models/word2vect_text8_500"
// val input = SparkWrapper.sc.textFile("word2vec/resources/text8").map(line => line.split(" ").toSeq)
//
// val word2vec = new Word2Vec()
//
// word2vec.setVectorSize(500) // vector dimension (default 100)
//// word2vec.setLearningRate(0) // Initial learning rate (default 0)
//// word2vec.setMinCount(5) // minimum number of times a token must appear to be included in the vocabulary (default 5)
//// word2vec.setNumIterations(1)// should be smaller than or equal to number of partitions. (default 1)
//// word2vec.setNumPartitions(1)// num partitions (default 1)
//
//
// val model = word2vec.fit(input)
//
//
// val synonyms = model.findSynonyms("china", 40)
//
// for((synonym, cosineSimilarity) <- synonyms) {
// println(s"$synonym $cosineSimilarity")
// }
//
// // Save and load model
// model.save(SparkWrapper.sc, modelId)
// Use of existing model
val model = Word2VecModel.load(SparkWrapper.sc, modelId)
// val sameModel = Word2VecModel.load(SparkWrapper.sc, "word2vect_text8_500")
// TEST 1: Area result of sum of keywords
val topicwords = List(
"science",
"star",
"planet",
"galaxy",
"survey",
"study",
"distance",
"knowledge",
"scientific",
"education",
"astronomy")
val vectout = topicwords.map(t1 => DenseVector(model.transform(t1).toArray)).reduce((v1,v2) => v1 + v2)
println(s"###### Topic for keywords: $topicwords")
model.findSynonyms(Vectors.dense(vectout.toArray), 20).foreach{case (synonym, cosineSimilarity) => println(s"$synonym $cosineSimilarity")}
// TEST 2: Analogy
val king = DenseVector(model.transform("king").toArray)
val man = DenseVector(model.transform("man").toArray)
val woman = DenseVector(model.transform("woman").toArray)
val analogyVec = king - man + woman
println("###### King - Man + Woman: ")
model.findSynonyms(Vectors.dense(analogyVec.toArray), 20).foreach{case (synonym, cosineSimilarity) => println(s"$synonym $cosineSimilarity")}
}
}
| cbadenes/epnoi-matching-metrics | src/test/scala/es/upm/oeg/epnoi/matching/metrics/similarity/WordVecText8Example.scala | Scala | apache-2.0 | 2,502 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.parquet.jobs
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.RecordReader
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.locationtech.geomesa.fs.storage.api.StorageMetadata.StorageFileAction.StorageFileAction
import org.locationtech.geomesa.fs.storage.common.jobs.StorageConfiguration
import org.locationtech.geomesa.fs.storage.common.jobs.StorageConfiguration.SimpleFeatureAction
import org.locationtech.geomesa.parquet.jobs.ParquetSimpleFeatureInputFormat.{ParquetSimpleFeatureInputFormatBase, ParquetSimpleFeatureRecordReaderBase, ParquetSimpleFeatureTransformRecordReaderBase}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
/**
* Input format for parquet files that tracks the action and timestamp associated with each feature
*/
class ParquetSimpleFeatureActionInputFormat extends ParquetSimpleFeatureInputFormatBase[SimpleFeatureAction] {
override protected def createRecordReader(
delegate: RecordReader[Void, SimpleFeature],
conf: Configuration,
split: FileSplit,
sft: SimpleFeatureType,
filter: Option[Filter],
transform: Option[(String, SimpleFeatureType)]): RecordReader[SimpleFeatureAction, SimpleFeature] = {
val (timestamp, action) = StorageConfiguration.getPathAction(conf, split.getPath)
transform match {
case None => new ParquetSimpleFeatureActionRecordReader(delegate, filter, timestamp, action)
case Some((tdefs, tsft)) =>
new ParquetSimpleFeatureActionTransformRecordReader(delegate, filter, sft, tsft, tdefs, timestamp, action)
}
}
class ParquetSimpleFeatureActionRecordReader(
delegate: RecordReader[Void, SimpleFeature],
filter: Option[Filter],
timestamp: Long,
action: StorageFileAction
) extends ParquetSimpleFeatureRecordReaderBase[SimpleFeatureAction](delegate, filter) {
override def getCurrentKey: SimpleFeatureAction =
new SimpleFeatureAction(getCurrentValue.getID, timestamp, action)
}
class ParquetSimpleFeatureActionTransformRecordReader(
delegate: RecordReader[Void, SimpleFeature],
filter: Option[Filter],
sft: SimpleFeatureType,
tsft: SimpleFeatureType,
tdefs: String,
timestamp: Long,
action: StorageFileAction
) extends ParquetSimpleFeatureTransformRecordReaderBase[SimpleFeatureAction](delegate, filter, sft, tsft, tdefs) {
override def getCurrentKey: SimpleFeatureAction =
new SimpleFeatureAction(getCurrentValue.getID, timestamp, action)
}
}
| elahrvivaz/geomesa | geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-parquet/src/main/scala/org/locationtech/geomesa/parquet/jobs/ParquetSimpleFeatureActionInputFormat.scala | Scala | apache-2.0 | 3,070 |
package io.peregrine
import com.twitter.logging._
import com.twitter.logging.Logging._
import java.util.{logging => javalog}
trait LoggerColors {
val ANSI_RESET = "\u001B[0m"
val ANSI_BLACK = "\u001B[30m"
val ANSI_RED = "\u001B[31m"
val ANSI_GREEN = "\u001B[32m"
val ANSI_YELLOW = "\u001B[33m"
val ANSI_BLUE = "\u001B[34m"
val ANSI_PURPLE = "\u001B[35m"
val ANSI_CYAN = "\u001B[36m"
val ANSI_WHITE = "\u001B[37m"
}
trait PeregrineLogger extends LoggerColors {
val level = LevelFlaggable.parse(config.logLevel())
val environment = config.env()
protected val policy = if (environment != "production") Policy.SigHup else Policy.Weekly(1)
protected val consoleFormatter = new Formatter(
prefix = s"$ANSI_CYAN%.3s$ANSI_RESET <yyyy/MM/dd HH:mm:ss.SSS> ")
protected val fileFormatter = new Formatter(prefix = s"%.3s <yyyy/MM/dd HH:mm:ss.SSS> ") {
override def formatText(record: javalog.LogRecord): String = {
super.formatText(record).replaceAll("\u001B\\[[;\\d]*m", "")
}
}
def loggingFactories: List[LoggerFactory] = logger :: Nil
val logger = LoggerFactory(
node = config.logNode(),
level = Option(level),
handlers = List(
FileHandler(filename = s"log/$environment.log", rollPolicy = policy, formatter = fileFormatter),
ConsoleHandler(consoleFormatter)
)
)
}
object PeregrineLogger extends PeregrineLogger
| pairi/pairi | src/main/scala/io/peregrine/PeregrineLogger.scala | Scala | apache-2.0 | 1,427 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.status.api.v1
import java.lang.{Long => JLong}
import java.util.Date
import scala.xml.{NodeSeq, Text}
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import com.fasterxml.jackson.core.{JsonGenerator, JsonParser}
import com.fasterxml.jackson.core.`type`.TypeReference
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer, JsonSerializer, SerializerProvider}
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import org.apache.spark.JobExecutionStatus
import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.metrics.ExecutorMetricType
case class ApplicationInfo private[spark](
id: String,
name: String,
coresGranted: Option[Int],
maxCores: Option[Int],
coresPerExecutor: Option[Int],
memoryPerExecutorMB: Option[Int],
attempts: Seq[ApplicationAttemptInfo])
@JsonIgnoreProperties(
value = Array("startTimeEpoch", "endTimeEpoch", "lastUpdatedEpoch"),
allowGetters = true)
case class ApplicationAttemptInfo private[spark](
attemptId: Option[String],
startTime: Date,
endTime: Date,
lastUpdated: Date,
duration: Long,
sparkUser: String,
completed: Boolean = false,
appSparkVersion: String) {
def getStartTimeEpoch: Long = startTime.getTime
def getEndTimeEpoch: Long = endTime.getTime
def getLastUpdatedEpoch: Long = lastUpdated.getTime
}
class ExecutorStageSummary private[spark](
val taskTime : Long,
val failedTasks : Int,
val succeededTasks : Int,
val killedTasks : Int,
val inputBytes : Long,
val inputRecords : Long,
val outputBytes : Long,
val outputRecords : Long,
val shuffleRead : Long,
val shuffleReadRecords : Long,
val shuffleWrite : Long,
val shuffleWriteRecords : Long,
val memoryBytesSpilled : Long,
val diskBytesSpilled : Long,
val isBlacklistedForStage: Boolean)
class ExecutorSummary private[spark](
val id: String,
val hostPort: String,
val isActive: Boolean,
val rddBlocks: Int,
val memoryUsed: Long,
val diskUsed: Long,
val totalCores: Int,
val maxTasks: Int,
val activeTasks: Int,
val failedTasks: Int,
val completedTasks: Int,
val totalTasks: Int,
val totalDuration: Long,
val totalGCTime: Long,
val totalInputBytes: Long,
val totalShuffleRead: Long,
val totalShuffleWrite: Long,
val isBlacklisted: Boolean,
val maxMemory: Long,
val addTime: Date,
val removeTime: Option[Date],
val removeReason: Option[String],
val executorLogs: Map[String, String],
val memoryMetrics: Option[MemoryMetrics],
val blacklistedInStages: Set[Int],
@JsonSerialize(using = classOf[ExecutorMetricsJsonSerializer])
@JsonDeserialize(using = classOf[ExecutorMetricsJsonDeserializer])
val peakMemoryMetrics: Option[ExecutorMetrics],
val attributes: Map[String, String])
class MemoryMetrics private[spark](
val usedOnHeapStorageMemory: Long,
val usedOffHeapStorageMemory: Long,
val totalOnHeapStorageMemory: Long,
val totalOffHeapStorageMemory: Long)
/** deserializer for peakMemoryMetrics: convert map to ExecutorMetrics */
private[spark] class ExecutorMetricsJsonDeserializer
extends JsonDeserializer[Option[ExecutorMetrics]] {
override def deserialize(
jsonParser: JsonParser,
deserializationContext: DeserializationContext): Option[ExecutorMetrics] = {
val metricsMap = jsonParser.readValueAs[Option[Map[String, Long]]](
new TypeReference[Option[Map[String, java.lang.Long]]] {})
metricsMap.map(metrics => new ExecutorMetrics(metrics))
}
}
/** serializer for peakMemoryMetrics: convert ExecutorMetrics to map with metric name as key */
private[spark] class ExecutorMetricsJsonSerializer
extends JsonSerializer[Option[ExecutorMetrics]] {
override def serialize(
metrics: Option[ExecutorMetrics],
jsonGenerator: JsonGenerator,
serializerProvider: SerializerProvider): Unit = {
metrics.foreach { m: ExecutorMetrics =>
val metricsMap = ExecutorMetricType.metricToOffset.map { case (metric, _) =>
metric -> m.getMetricValue(metric)
}
jsonGenerator.writeObject(metricsMap)
}
}
override def isEmpty(provider: SerializerProvider, value: Option[ExecutorMetrics]): Boolean =
value.isEmpty
}
class JobData private[spark](
val jobId: Int,
val name: String,
val description: Option[String],
val submissionTime: Option[Date],
val completionTime: Option[Date],
val stageIds: Seq[Int],
val jobGroup: Option[String],
val status: JobExecutionStatus,
val numTasks: Int,
val numActiveTasks: Int,
val numCompletedTasks: Int,
val numSkippedTasks: Int,
val numFailedTasks: Int,
val numKilledTasks: Int,
val numCompletedIndices: Int,
val numActiveStages: Int,
val numCompletedStages: Int,
val numSkippedStages: Int,
val numFailedStages: Int,
val killedTasksSummary: Map[String, Int])
class RDDStorageInfo private[spark](
val id: Int,
val name: String,
val numPartitions: Int,
val numCachedPartitions: Int,
val storageLevel: String,
val memoryUsed: Long,
val diskUsed: Long,
val dataDistribution: Option[Seq[RDDDataDistribution]],
val partitions: Option[Seq[RDDPartitionInfo]])
class RDDDataDistribution private[spark](
val address: String,
val memoryUsed: Long,
val memoryRemaining: Long,
val diskUsed: Long,
@JsonDeserialize(contentAs = classOf[JLong])
val onHeapMemoryUsed: Option[Long],
@JsonDeserialize(contentAs = classOf[JLong])
val offHeapMemoryUsed: Option[Long],
@JsonDeserialize(contentAs = classOf[JLong])
val onHeapMemoryRemaining: Option[Long],
@JsonDeserialize(contentAs = classOf[JLong])
val offHeapMemoryRemaining: Option[Long])
class RDDPartitionInfo private[spark](
val blockName: String,
val storageLevel: String,
val memoryUsed: Long,
val diskUsed: Long,
val executors: Seq[String])
class StageData private[spark](
val status: StageStatus,
val stageId: Int,
val attemptId: Int,
val numTasks: Int,
val numActiveTasks: Int,
val numCompleteTasks: Int,
val numFailedTasks: Int,
val numKilledTasks: Int,
val numCompletedIndices: Int,
val submissionTime: Option[Date],
val firstTaskLaunchedTime: Option[Date],
val completionTime: Option[Date],
val failureReason: Option[String],
val executorDeserializeTime: Long,
val executorDeserializeCpuTime: Long,
val executorRunTime: Long,
val executorCpuTime: Long,
val resultSize: Long,
val jvmGcTime: Long,
val resultSerializationTime: Long,
val memoryBytesSpilled: Long,
val diskBytesSpilled: Long,
val peakExecutionMemory: Long,
val inputBytes: Long,
val inputRecords: Long,
val outputBytes: Long,
val outputRecords: Long,
val shuffleRemoteBlocksFetched: Long,
val shuffleLocalBlocksFetched: Long,
val shuffleFetchWaitTime: Long,
val shuffleRemoteBytesRead: Long,
val shuffleRemoteBytesReadToDisk: Long,
val shuffleLocalBytesRead: Long,
val shuffleReadBytes: Long,
val shuffleReadRecords: Long,
val shuffleWriteBytes: Long,
val shuffleWriteTime: Long,
val shuffleWriteRecords: Long,
val name: String,
val description: Option[String],
val details: String,
val schedulingPool: String,
val rddIds: Seq[Int],
val accumulatorUpdates: Seq[AccumulableInfo],
val tasks: Option[Map[Long, TaskData]],
val executorSummary: Option[Map[String, ExecutorStageSummary]],
val killedTasksSummary: Map[String, Int])
class TaskData private[spark](
val taskId: Long,
val index: Int,
val attempt: Int,
val launchTime: Date,
val resultFetchStart: Option[Date],
@JsonDeserialize(contentAs = classOf[JLong])
val duration: Option[Long],
val executorId: String,
val host: String,
val status: String,
val taskLocality: String,
val speculative: Boolean,
val accumulatorUpdates: Seq[AccumulableInfo],
val errorMessage: Option[String] = None,
val taskMetrics: Option[TaskMetrics] = None,
val executorLogs: Map[String, String],
val schedulerDelay: Long,
val gettingResultTime: Long)
class TaskMetrics private[spark](
val executorDeserializeTime: Long,
val executorDeserializeCpuTime: Long,
val executorRunTime: Long,
val executorCpuTime: Long,
val resultSize: Long,
val jvmGcTime: Long,
val resultSerializationTime: Long,
val memoryBytesSpilled: Long,
val diskBytesSpilled: Long,
val peakExecutionMemory: Long,
val inputMetrics: InputMetrics,
val outputMetrics: OutputMetrics,
val shuffleReadMetrics: ShuffleReadMetrics,
val shuffleWriteMetrics: ShuffleWriteMetrics)
class InputMetrics private[spark](
val bytesRead: Long,
val recordsRead: Long)
class OutputMetrics private[spark](
val bytesWritten: Long,
val recordsWritten: Long)
class ShuffleReadMetrics private[spark](
val remoteBlocksFetched: Long,
val localBlocksFetched: Long,
val fetchWaitTime: Long,
val remoteBytesRead: Long,
val remoteBytesReadToDisk: Long,
val localBytesRead: Long,
val recordsRead: Long)
class ShuffleWriteMetrics private[spark](
val bytesWritten: Long,
val writeTime: Long,
val recordsWritten: Long)
class TaskMetricDistributions private[spark](
val quantiles: IndexedSeq[Double],
val executorDeserializeTime: IndexedSeq[Double],
val executorDeserializeCpuTime: IndexedSeq[Double],
val executorRunTime: IndexedSeq[Double],
val executorCpuTime: IndexedSeq[Double],
val resultSize: IndexedSeq[Double],
val jvmGcTime: IndexedSeq[Double],
val resultSerializationTime: IndexedSeq[Double],
val gettingResultTime: IndexedSeq[Double],
val schedulerDelay: IndexedSeq[Double],
val peakExecutionMemory: IndexedSeq[Double],
val memoryBytesSpilled: IndexedSeq[Double],
val diskBytesSpilled: IndexedSeq[Double],
val inputMetrics: InputMetricDistributions,
val outputMetrics: OutputMetricDistributions,
val shuffleReadMetrics: ShuffleReadMetricDistributions,
val shuffleWriteMetrics: ShuffleWriteMetricDistributions)
class InputMetricDistributions private[spark](
val bytesRead: IndexedSeq[Double],
val recordsRead: IndexedSeq[Double])
class OutputMetricDistributions private[spark](
val bytesWritten: IndexedSeq[Double],
val recordsWritten: IndexedSeq[Double])
class ShuffleReadMetricDistributions private[spark](
val readBytes: IndexedSeq[Double],
val readRecords: IndexedSeq[Double],
val remoteBlocksFetched: IndexedSeq[Double],
val localBlocksFetched: IndexedSeq[Double],
val fetchWaitTime: IndexedSeq[Double],
val remoteBytesRead: IndexedSeq[Double],
val remoteBytesReadToDisk: IndexedSeq[Double],
val totalBlocksFetched: IndexedSeq[Double])
class ShuffleWriteMetricDistributions private[spark](
val writeBytes: IndexedSeq[Double],
val writeRecords: IndexedSeq[Double],
val writeTime: IndexedSeq[Double])
class AccumulableInfo private[spark](
val id: Long,
val name: String,
val update: Option[String],
val value: String)
class VersionInfo private[spark](
val spark: String)
class ApplicationEnvironmentInfo private[spark] (
val runtime: RuntimeInfo,
val sparkProperties: Seq[(String, String)],
val hadoopProperties: Seq[(String, String)],
val systemProperties: Seq[(String, String)],
val classpathEntries: Seq[(String, String)])
class RuntimeInfo private[spark](
val javaVersion: String,
val javaHome: String,
val scalaVersion: String)
case class StackTrace(elems: Seq[String]) {
override def toString: String = elems.mkString
def html: NodeSeq = {
val withNewLine = elems.foldLeft(NodeSeq.Empty) { (acc, elem) =>
if (acc.isEmpty) {
acc :+ Text(elem)
} else {
acc :+ <br /> :+ Text(elem)
}
}
withNewLine
}
def mkString(start: String, sep: String, end: String): String = {
elems.mkString(start, sep, end)
}
}
case class ThreadStackTrace(
    threadId: Long,
    threadName: String,
    threadState: Thread.State,
    stackTrace: StackTrace,
    blockedByThreadId: Option[Long],
    blockedByLock: String,
    holdingLocks: Seq[String])
| aosagie/spark | core/src/main/scala/org/apache/spark/status/api/v1/api.scala | Scala | apache-2.0 | 13,248 |
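A small check of the StackTrace helpers defined above; note that toString joins the frames with no separator, so frames are expected to carry their own terminators:

val st = StackTrace(Seq("frameA", "frameB"))
st.toString                 // "frameAframeB"
st.mkString("[", ", ", "]") // "[frameA, frameB]"
// st.html interleaves <br/> between frames for rendering in the UI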
package dotty.tools
package dottydoc
package core
import dotc.core.Contexts.Context
import transform.DocMiniPhase
import model._
import model.internal._
import model.comment._
import model.references._
import HtmlParsers._
import util.MemberLookup
import util.syntax._
class LinkReturnTypes extends DocMiniPhase with TypeLinker {
override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
val returnValue = linkReference(df, df.returnValue, ctx.docbase.packages)
df.copy(returnValue = returnValue) :: Nil
}
override def transformVal(implicit ctx: Context) = { case vl: ValImpl =>
val returnValue = linkReference(vl, vl.returnValue, ctx.docbase.packages)
vl.copy(returnValue = returnValue) :: Nil
}
override def transformTypeAlias(implicit ctx: Context) = { case ta: TypeAliasImpl =>
ta.alias.map { alias =>
val linkedAlias = linkReference(ta, alias, ctx.docbase.packages)
ta.copy(alias = Some(linkedAlias)) :: Nil
}
.getOrElse(ta :: Nil)
}
}
class LinkParamListTypes extends DocMiniPhase with TypeLinker {
override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
val newParamLists = for {
ParamListImpl(list, isImplicit) <- df.paramLists
newList = list.map(linkReference(df, _, ctx.docbase.packages))
} yield ParamListImpl(newList.asInstanceOf[List[NamedReference]], isImplicit)
df.copy(paramLists = newParamLists) :: Nil
}
}
class LinkSuperTypes extends DocMiniPhase with TypeLinker {
def linkSuperTypes(ent: Entity with SuperTypes)(implicit ctx: Context): List[MaterializableLink] =
ent.superTypes.collect {
case UnsetLink(title, query) =>
handleEntityLink(title, lookup(Some(ent), ctx.docbase.packages, query), ent)
}
override def transformClass(implicit ctx: Context) = { case cls: ClassImpl =>
cls.copy(superTypes = linkSuperTypes(cls)) :: Nil
}
override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
cc.copy(superTypes = linkSuperTypes(cc)) :: Nil
}
override def transformTrait(implicit ctx: Context) = { case trt: TraitImpl =>
trt.copy(superTypes = linkSuperTypes(trt)) :: Nil
}
override def transformObject(implicit ctx: Context) = { case obj: ObjectImpl =>
obj.copy(superTypes = linkSuperTypes(obj)) :: Nil
}
}
class LinkImplicitlyAddedTypes extends DocMiniPhase with TypeLinker {
override def transformDef(implicit ctx: Context) = {
case df: DefImpl if df.implicitlyAddedFrom.isDefined =>
val implicitlyAddedFrom = linkReference(df, df.implicitlyAddedFrom.get, ctx.docbase.packages)
df.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom)) :: Nil
}
override def transformVal(implicit ctx: Context) = {
case vl: ValImpl if vl.implicitlyAddedFrom.isDefined =>
val implicitlyAddedFrom = linkReference(vl, vl.implicitlyAddedFrom.get, ctx.docbase.packages)
vl.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom)) :: Nil
}
}
trait TypeLinker extends MemberLookup {
def handleEntityLink(title: String, target: Option[Entity], ent: Entity, query: String = ""): MaterializableLink =
target match {
case Some(target) => new MaterializedLink(title, target)
      case None => NoLink(title, query)
}
def linkReference(ent: Entity, ref: Reference, packs: Map[String, Package]): Reference = {
def linkRef(ref: Reference) = linkReference(ent, ref, packs)
ref match {
case ref @ TypeReference(_, UnsetLink(t, query), tps) =>
val inlineToHtml = InlineToHtml(ent)
val title = t
val target = handleEntityLink(title, lookup(Some(ent), packs, query), ent, query)
val tpTargets = tps.map(linkReference(ent, _, packs))
ref.copy(tpeLink = target, paramLinks = tpTargets)
case ref @ OrTypeReference(left, right) =>
ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs))
case ref @ AndTypeReference(left, right) =>
ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs))
case ref @ NamedReference(_, rf, _, _) =>
ref.copy(ref = linkRef(rf))
case ref @ FunctionReference(args, rv, _) =>
ref.copy(args = args.map(linkReference(ent, _, packs)), returnValue = linkReference(ent, rv, packs))
case ref @ TupleReference(args) =>
ref.copy(args = args.map(linkRef))
case ref @ BoundsReference(low, high) =>
ref.copy(low = linkRef(low), high = linkRef(high))
case _ =>
ref
}
}
}
| som-snytt/dotty | doc-tool/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala | Scala | apache-2.0 | 4,555 |
package commons.mapper
import java.lang.reflect.Method
import java.util.UUID
import commons.mapper.utils.TypeUtils
import javassist.{ ClassClassPath, ClassPool, CtClass, CtConstructor, CtMethod, Modifier }
/**
* @author Kai Han
*/
private[mapper] trait MapToBeanMapper extends Mapper[java.util.Map[String, Any], AnyRef] {
override def map(m : java.util.Map[String, Any]) : AnyRef
}
private[mapper] abstract class AbstractMapToBeanMapper extends MapToBeanMapper {
private var defaultValues : Array[Object] = null
def getDefaultValue(index : Int) : Object = defaultValues(index)
def setDefaultValues(values : java.util.ArrayList[Object]) : Unit = {
if (values != null) this.defaultValues = values.toArray()
}
override def map(m : java.util.Map[String, Any]) : AnyRef
}
object MapToBeanMapper extends MapperFactory[java.util.Map[String, Any], AnyRef] {
private val classPool = ClassPool.getDefault()
classPool.insertClassPath(new ClassClassPath(this.getClass()))
override def createMapper(clazz : Class[_]) : MapToBeanMapper = {
createMapper(clazz, false)
}
def createMapper(clazz : Class[_], autoConvertType : Boolean) : MapToBeanMapper = {
val typeInfo = TypeUtils.extractTypeInfo(clazz)
doCreateMapper(clazz, typeInfo._1, typeInfo._2, autoConvertType)
}
private[mapper] def doCreateMapper(clazz : Class[_], args : List[ArgWithDefault], setMethods : List[Method], autoConvertType : Boolean = false) : MapToBeanMapper = {
val ctClass = classPool.makeClass(s"commons.mapper.MapToBeanMapper_${UUID.randomUUID().toString().replace("-", "")}")
ctClass.setInterfaces(Array[CtClass](classPool.get(classOf[Mapper[java.util.Map[String, Any], AnyRef]].getName()), classPool.get(classOf[MapToBeanMapper].getName())))
ctClass.setSuperclass(classPool.get(classOf[AbstractMapToBeanMapper].getName()))
		val constructor = new CtConstructor(null, ctClass)
constructor.setModifiers(Modifier.PUBLIC)
constructor.setBody("{}")
ctClass.addConstructor(constructor)
//private Type newInstance(java.util.Map[String, Any] map)
val defaultValues = addNewInstanceMethod(ctClass, clazz, args, autoConvertType)
//private void modifyValues(Type bean, java.util.Map map)
addModifyValuesMethod(ctClass, clazz, setMethods, autoConvertType)
//public Object map(java.util.Map map)
addMapToBeanMethod(ctClass, clazz)
val childClazz = ctClass.toClass()
		val obj = childClazz.newInstance()
val mapper = obj.asInstanceOf[AbstractMapToBeanMapper]
mapper.setDefaultValues(defaultValues)
mapper
}
private def fixArgs(args : List[ArgWithDefault]) : List[ArgWithDefault] = {
args.zipWithIndex.map(argWithIndex => {
val arg = argWithIndex._1
val index = argWithIndex._2
if (arg.paramName != null) arg
else arg.copy(paramName = s"_arg_${index}")
})
}
/**
* private Type newInstance(java.util.Map map)
*/
private def addNewInstanceMethod(ctClass : CtClass, clazz : Class[_], args : List[ArgWithDefault], autoConvertType : Boolean) : java.util.ArrayList[Object] = {
val Type = clazz.getName
val defaultValues = new java.util.ArrayList[Object]()
val fixedArgs = fixArgs(args)
val declaration = fixedArgs
.map(arg => declareField(arg, defaultValues))
.mkString("\\r\\n ").trim()
def convertType(clazz : Class[_]) =
if (autoConvertType) {
s"obj = commons.mapper.utils.TypeUtils.convertType(${clazz.getName}.class, obj);"
} else {
""
}
val setValues = args.filter(_.paramName != null).map(arg => {
s"""
obj = map.get("${arg.paramName}");
if(obj != null){
${convertType(arg.paramType)}
${arg.paramName} = ${unboxValue(arg.paramType, "obj")};
}
"""
}).mkString("").trim()
val argValues = fixedArgs.map(arg => arg.paramName).mkString(", ")
val newInstanceSrc =
s"""
private ${Type} newInstance(java.util.Map map){
${declaration}
if(map != null || map.size() == 0){
Object obj = null;
${setValues}
}
${Type} instance = new ${Type}(${argValues});
return instance;
}
"""
val newInstanceMethod = CtMethod.make(newInstanceSrc, ctClass)
ctClass.addMethod(newInstanceMethod)
defaultValues
}
/**
* private void modifyValues(Type bean, java.util.Map map)
*/
private def addModifyValuesMethod(ctClass : CtClass, clazz : Class[_], setMethods : List[Method], autoConvertType : Boolean) : Unit = {
val Type = clazz.getName
def convertType(clazz : Class[_]) =
if (autoConvertType) {
s"obj = commons.mapper.utils.TypeUtils.convertType(${clazz.getName}.class, obj);"
} else {
""
}
val setValues = setMethods.map(method => {
val key = TypeUtils.fieldName(method.getName)
val clazz = method.getParameterTypes.head
s"""
obj = map.get("${key}");
if(obj != null){
${convertType(clazz)}
bean.${method.getName}(${unboxValue(clazz, "obj")});
}
"""
}).mkString("").trim()
val modifyValuesSrc =
s"""
private void modifyValues(${Type} bean, java.util.Map map){
if(map == null || map.size() == 0){
return;
}
Object obj = null;
${setValues}
}
"""
val modifyValuesMethod = CtMethod.make(modifyValuesSrc, ctClass)
ctClass.addMethod(modifyValuesMethod)
}
/**
* public Object map(java.util.Map map)
*/
private def addMapToBeanMethod(ctClass : CtClass, clazz : Class[_]) : Unit = {
		val Type = clazz.getName // bare comment terminates the statement so the blocks below are not parsed as arguments
{
val toBeanSrc =
s"""
public Object map(java.util.Map m){
${Type} bean = newInstance(m);
modifyValues(bean, m);
return bean;
}
"""
val toBeanMethod = CtMethod.make(toBeanSrc, ctClass)
ctClass.addMethod(toBeanMethod)
}
{
val toBeanSrc =
s"""
public Object map(Object obj){
return map((java.util.Map) obj);
}
"""
val toBeanMethod = CtMethod.make(toBeanSrc, ctClass)
ctClass.addMethod(toBeanMethod)
}
}
private def unboxValue(paramType : Class[_], value : String) : String = {
paramType match {
case t if t == classOf[Boolean] => s"((Boolean)${value}).booleanValue()"
case t if t == classOf[Byte] => s"((Byte)${value}).byteValue()"
case t if t == classOf[Short] => s"((Short)${value}).shortValue()"
case t if t == classOf[Char] => s"((Character)${value}).charValue()"
case t if t == classOf[Int] => s"((Integer)${value}).intValue()"
case t if t == classOf[Long] => s"((Long)${value}).longValue()"
case t if t == classOf[Float] => s"((Float)${value}).floatValue()"
case t if t == classOf[Double] => s"((Double)${value}).doubleValue()"
case _ => s"(${paramType.getName})${value}"
}
}
private def declareField(argWithDefault : ArgWithDefault, defaultValues : java.util.ArrayList[AnyRef]) : String = {
val paramName = argWithDefault.paramName
val paramType = argWithDefault.paramType
val defaultValue = argWithDefault.defaultValue
if (defaultValue == null) return s"${paramType.getName} ${paramName} = null;"
val value = paramType match {
case t if t == classOf[Boolean] => String.valueOf(defaultValue)
case t if t == classOf[Byte] => s"(byte)${defaultValue}"
case t if t == classOf[Short] => s"(short)${defaultValue}"
case t if t == classOf[Char] => s"(char)${defaultValue}"
case t if t == classOf[Int] => String.valueOf(defaultValue)
case t if t == classOf[Long] => s"${defaultValue}L"
case t if t == classOf[Float] => s"${defaultValue}F"
case t if t == classOf[Double] => s"${defaultValue}D"
case _ => {
defaultValues.add(defaultValue.asInstanceOf[AnyRef])
s"(${paramType.getName})getDefaultValue(${defaultValues.size() - 1})"
}
}
s"${paramType.getName} ${paramName} = ${value};"
}
} | hank-whu/common4s | src/main/scala/commons/mapper/MapToBeanMapper.scala | Scala | apache-2.0 | 7,637 |
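A hypothetical round trip through the generated mapper; `Person` stands in for a bean-style class whose primary constructor and setters TypeUtils can introspect:

class Person(val name: String) {
  var age: Int = 0
  def setAge(a: Int): Unit = age = a
}

val row = new java.util.HashMap[String, Any]()
row.put("name", "Ada")
row.put("age", "36") // a String; autoConvertType = true coerces it to Int

val mapper = MapToBeanMapper.createMapper(classOf[Person], autoConvertType = true)
val person = mapper.map(row).asInstanceOf[Person]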
package extruder.metrics.spectator
import com.netflix.spectator.api.{Registry, Spectator}
import extruder.metrics.dimensional.DimensionalMetricSettings
trait SpectatorMetricSettings extends DimensionalMetricSettings {
val registry: Registry = Spectator.globalRegistry()
}
| janstenpickle/extruder | metrics/spectator/src/main/scala/extruder/metrics/spectator/SpectatorMetricSettings.scala | Scala | mit | 276 |
package fpinscala.gettingstarted.samples
/**
* Created by younggi on 4/26/17.
*/
object GroupByCheck {
def main(args: Array[String]): Unit = {
val a = List("A", "B", "C", "D", "E", "F", "A")
println(a.groupBy(identity))
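    // prints e.g. Map(A -> List(A, A), B -> List(B), C -> List(C), ...); hash-map entry order is unspecified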
}
}
| younggi/books | functional_programming_in_scala/funpro_scala/src/main/scala/fpinscala/gettingstarted/samples/GroupByCheck.scala | Scala | mit | 242 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.types.result._
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
class ScTypedStmtImpl(node: ASTNode) extends ScExpressionImplBase(node) with ScTypedStmt {
protected override def innerType: TypeResult = {
typeElement match {
case Some(te) => te.`type`()
case None if !expr.isInstanceOf[ScUnderscoreSection] => expr.`type`()
case _ => Failure("Typed statement is not complete for underscore section")
}
}
override def toString: String = "TypedStatement"
} | triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScTypedStmtImpl.scala | Scala | apache-2.0 | 722 |
/*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.crossdata.driver.querybuilder
/**
* Selection builder
*/
class Selection(){
/**
* String builder with the selected columns.
*/
val selected : StringBuilder = new StringBuilder
/**
* Class constructor.
* @param columnName The first column to be selected.
*/
def this(columnName : String){
this()
selected.append(columnName)
}
/**
* Class constructor.
* @param selectionList The list of columns to be selected.
*/
def this(selectionList : List[String]){
this()
selected.append(selectionList.mkString(", "))
}
/**
* Add a new column.
* @param columnName The column name.
* @return The resulting Selection.
*/
def and(columnName : String) : Selection = {
addSeparator()
selected.append(columnName)
this
}
/**
* Add a new column with an alias.
* @param columnName The column name.
* @param alias The alias.
* @return The resulting Selection.
*/
def and(columnName : String, alias : String) : Selection = {
addSeparator()
selected.append(columnName + " AS " + alias)
this
}
/**
* Get the string representation of the selected columns.
* @return A String.
*/
def asString() : String = {
selected.toString
}
  private def addSeparator() : Unit = {
    if (selected.nonEmpty) {
      selected.append(", ")
    }
  }
} | ccaballe/crossdata | crossdata-driver/src/main/scala/com/stratio/crossdata/driver/querybuilder/Selection.scala | Scala | apache-2.0 | 2,239 |
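The builder is purely string-based, so its behaviour is easy to verify:

new Selection("id").and("name").and("age", "years").asString()
// => "id, name, age AS years"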
package code.facade
import code.model.UserSignUpVo
import code.model.OmegaDAO
import com.mongodb.casbah.Imports._
object ApiUserFacade {
def signUp(userSignUpVo: UserSignUpVo): Boolean = {
val userVoByEmail = OmegaDAO.findOne(MongoDBObject("email" -> userSignUpVo.email))
userVoByEmail match {
case Some(userVo) => false
case _ =>
        OmegaDAO.insert(userSignUpVo)
true
}
}
  def login(email: String, password: String): Boolean = {
val userVoByEmail = OmegaDAO.findOne(MongoDBObject("email" -> email))
userVoByEmail match {
      case Some(userVo) => userVo.password == password
      case _ => false
}
}
} | f3r10/3dsquito | lift_basic/src/main/scala/code/facade/ApiUserFacade.scala | Scala | apache-2.0 | 667 |
package scavlink.connection
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.util.ByteString
import scavlink.ScavlinkContext
import scavlink.connection.frame.{FrameError, FrameReceiver}
import scavlink.link._
import scavlink.log.{LinkLoggingActor, LogSettings}
import scavlink.message.common.{AuthKey, Heartbeat}
import scavlink.message.enums.{MavAutopilot, MavType}
import scavlink.message.{Packet, SystemId, VehicleId}
import scala.util.Try
/**
* Parses received data into packets and publishes them on the event bus for any type of link.
* Vehicle lifecycle is handled here as well, when a packet from a new vehicle are detected.
* @author Nick Rossi
*/
trait PacketReceiver extends Actor with ActorLogging {
type PacketHandler = Either[FrameError, Packet] => Unit
import context.dispatcher
def address: String
def sctx: ScavlinkContext
val linkEvents = new LinkEventBus
val heartbeat = sctx.config.heartbeat
// lazy val subscriberMetric = s"$address subscribers"
private var rx: Option[FrameReceiver] = None
private var link: Option[Link] = None
private var logger: Option[ActorRef] = None
// current packet handler - updated as link is established, then authorized
private var packetHandler: PacketHandler = emptyHandler
private var vehicles: Map[SystemId, (ActorRef, VehicleInfo)] = Map.empty
private var vehicleKeys: Map[SystemId, String] = Map.empty
private var vehicleHeartbeats: Map[SystemId, Long] = Map.empty
override def preStart() = context.system.scheduler.schedule(heartbeat.timeout, heartbeat.timeout)(reapVehicles())
override def postStop() = stop()
/**
* Start packet receiver when link is established.
*/
def start(writeData: ByteString => Unit, fallback: Receive): Unit = {
// sctx.metrics.register(subscriberMetric, linkEvents.gauge)
val senderProps = PacketSender.props(
address, VehicleId.GroundControl, heartbeat, linkEvents, sctx.marshallerFactory, writeData, fallback, Some(sctx.metrics))
rx = Some(new FrameReceiver(address, sctx.marshallerFactory, Some(sctx.metrics)))
val logSettings = LogSettings(sctx.config.root)
if (logSettings.isEnabled) {
logger = Try(context.actorOf(LinkLoggingActor.props(address, logSettings.path), "logger")).toOption
}
// if link authorization is required, don't create the link until a valid AuthKey is received
packetHandler = sctx.linkAuthorizer match {
case Some(authorizer) => waitForAuthKey(senderProps)
case None => linkUp(senderProps, None)
}
}
/**
* Signal that the link is ready to send and receive packets.
*/
def linkUp(props: Props, authKey: Option[String]): PacketHandler = {
// start packet sender now
val packetSender = context.actorOf(props, "sender")
val _link = new Link(address, linkEvents, sctx.config, packetSender, authKey)
link = Some(_link)
sctx.events.publish(LinkUp(_link))
linkAuthorized
}
/**
* Process received data.
*/
def receiveData(data: ByteString): Unit = {
rx.get.receivedData(data) foreach packetHandler
logger.foreach(_ ! data)
}
/**
* Signal that a new vehicle is detected and ready to receive packets.
* Starts supervisor actor for the vehicle, which initializes all business logic actors.
*/
def startVehicle(vehicleInfo: VehicleInfo) = link foreach { _link =>
val systemId = vehicleInfo.systemId
val props = VehicleSupervisor.props(sctx.events, _link, vehicleInfo, sctx.vehicleInitializers)
val actor = context.actorOf(props, "vehicle_" + systemId + "_" + System.currentTimeMillis())
vehicles += systemId ->(actor, vehicleInfo)
}
/**
* Signal that a vehicle has disappeared from the link.
*/
def stopVehicle(systemId: SystemId): Unit = {
vehicles.get(systemId) foreach { case (actor, vehicleInfo) =>
context.stop(actor)
vehicles -= systemId
vehicleKeys -= systemId
vehicleHeartbeats -= systemId
VehicleNumberPool.free(vehicleInfo.number.number)
}
}
/**
* Shut down the link and empty all state data.
*/
def stop() = link foreach { _link =>
// sctx.metrics.remove(subscriberMetric)
context.stop(_link.packetSender)
logger.foreach(context.stop)
logger = None
vehicles.keySet.foreach(stopVehicle)
vehicles = Map.empty
vehicleKeys = Map.empty
vehicleHeartbeats = Map.empty
packetHandler = emptyHandler
rx = None
sctx.events.publish(LinkDown(_link))
link = None
}
/**
* Stop any vehicle whose last heartbeat is older than the heartbeat-timeout setting.
*/
def reapVehicles(): Unit = {
log.debug("Checking heartbeats")
val now = System.currentTimeMillis()
vehicleHeartbeats foreach { case (systemId, lastHeartbeat) =>
if (now - lastHeartbeat > heartbeat.timeout.toMillis) {
val id = vehicles(systemId)._2.id
log.warning(s"Vehicle $id went silent")
stopVehicle(systemId)
}
}
}
private def emptyHandler: PacketHandler = x => {}
/**
* Handles packets when the link is waiting for authorization.
* No packets are published until a valid AuthKey is received.
*/
private def waitForAuthKey(props: Props): PacketHandler = {
case Right(Packet(from, AuthKey(key))) if from.systemId == heartbeat.thisSystemId =>
sctx.linkAuthorizer foreach { authorizer =>
if (authorizer(key)) {
log.debug(s"Authorized key from $from")
linkUp(props, Some(key))
} else {
log.debug(s"Invalid key from $from")
}
}
case Left(error) =>
linkEvents.publish(ReceiveError(error))
    case _ => // drop everything else until the link is authorized
}
/**
* Handles packets when the link is authorized.
*/
private def linkAuthorized: PacketHandler = {
case Right(packet@Packet(from, AuthKey(key))) if sctx.vehicleAuthorizer.isDefined && !vehicleKeys.contains(from.systemId) =>
sctx.vehicleAuthorizer foreach { authorizer =>
if (authorizer(key)) {
log.debug(s"Authorized key from $from")
vehicleKeys += from.systemId -> key
linkEvents.publish(packet)
} else {
log.debug(s"Invalid key from $from")
}
}
// this is where a new Vehicle is initialized when its heartbeat is first detected
case Right(packet@Packet(from, Heartbeat(vehicleType, autopilot, _, _, _, _))) if !vehicles.contains(from.systemId) =>
val key = vehicleKeys.get(from.systemId)
if (sctx.vehicleAuthorizer.isEmpty || key.isDefined) {
val vehicleNumber = VehicleNumber(VehicleNumberPool.next())
val vehicleInfo = VehicleInfo(from.id, vehicleNumber, from.systemId, from.componentId, MavType(vehicleType), MavAutopilot(autopilot), key)
startVehicle(vehicleInfo)
linkEvents.publish(packet)
}
case Right(packet@Packet(from, _: Heartbeat)) =>
vehicleHeartbeats += from.systemId -> System.currentTimeMillis()
linkEvents.publish(packet)
case Right(packet) =>
linkEvents.publish(packet)
case Left(error) =>
linkEvents.publish(ReceiveError(error))
}
}
| nickolasrossi/scavlink | src/main/scala/scavlink/connection/PacketReceiver.scala | Scala | mit | 7,131 |
package dotGenerator
import java.io.File
import sys.process._
/**
* Created by snipy on 07.10.16.
*/
case class Dot2PdfGenerator(filepath: String)
{
def generatePdfFile(): Dot2PdfGenerator =
{
val file = new File(filepath)
if (file.exists())
{
val output = filepath.replace(".dot",".pdf")
// val result = s"dot -Tpdf $filepath -o $output " !!
val result = s"circo -Tpdf $filepath -o $output " !!
if (!result.isEmpty)
{
System.err.println(result)
}
}
        else
            System.err.println(s"No dot file named $filepath found \\n call generateDotFile()")
this
}
}
| SnipyJulmy/MSE_1617_PA | AstGenerator/src/main/scala/dotGenerator/Dot2PdfGenerator.scala | Scala | gpl-2.0 | 757 |
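Usage is a single chained call; it assumes the Graphviz `circo` binary is on the PATH, and the path below is illustrative:

Dot2PdfGenerator("target/graph.dot").generatePdfFile()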
// Copyright © 2011-2012, Jeremy Heiner (github.com/JHeiner).
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of any
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package protractor.gui
import protractor._
import java.awt.{Color,Dimension,Graphics2D}
import java.awt.event.{ActionEvent,ActionListener,MouseEvent}
import java.awt.geom.{Arc2D,Ellipse2D,Path2D}
import scala.collection.mutable.{ArrayBuffer,ListBuffer}
import scala.annotation.tailrec
import Common._
import javax.swing.Timer
import java.awt.event.{MouseListener,MouseMotionListener}
import javax.swing.event.MouseInputAdapter
import javax.swing._
abstract class RangeDoodle(
val range:Range = new Range,
val doodle:Doodle = new Doodle )
extends MouserDispatcherJ2D( range, doodle )
{
def paintComponent2D( g:Graphics2D ) {
range.paint(g)
doodle.paint(g) }
override def mouserPressed( m:Mouser, e:MouseEvent ) {
super.mouserPressed(m,e)
if ( m eq range ) update() }
override def mouserDragged( m:Mouser, e:MouseEvent ) {
super.mouserDragged(m,e)
if ( m eq range ) update() }
override def mouserReleased( m:Mouser, e:MouseEvent ) {
super.mouserReleased(m,e)
update() }
def update() {
val g = doodle.gesture
if ( null != g ) {
doodle.gesture = g.limit(range.angle)
updated() } }
def updated()
}
| JHeiner/Protractor | src/test/scala/protractor/gui/RangeDoodle.scala | Scala | bsd-3-clause | 2,758 |
package ghost.patmat
import stainless.lang._
import stainless.annotation.ghost
object GhostPatmat {
sealed trait GhostList
case class GhostCons(@ghost val head: BigInt, val tail: GhostList) extends GhostList
case class GhostNil() extends GhostList
def patmatch(): Unit = {
val x = GhostCons(BigInt(10), GhostCons(BigInt(2), GhostNil()))
x match {
case GhostCons(x, GhostCons(y, t)) =>
val foo = x // error: x is ghost
val bar = y // error: y is ghost
()
case _ =>
()
}
}
}
| epfl-lara/stainless | frontends/benchmarks/extraction/invalid/GhostPatmat.scala | Scala | apache-2.0 | 545 |
package core.schema
import akka.actor._
import scala.concurrent.duration._
import play.api.libs.concurrent.Execution.Implicits._
import akka.pattern.ask
import scala.concurrent.Await
import core.schema.SchemaProvider._
import core.SchemaService
import play.api.Play.current
import play.api.libs.concurrent._
import eu.delving.schema._
import play.api.libs.ws.WS
import java.util.concurrent.TimeUnit
import play.api.{ Play, Logger }
import scala.collection.JavaConverters._
import akka.util.Timeout
import models.OrganizationConfiguration
import play.api.libs.ws.Response
import util.FileSystemFetcher
/**
* This component provides schemas and schema versions through a SchemaRepository that is updated every 5 minutes.
*
* It is wrapped inside of an actor to keep refresh operations safe
*
* TODO better error handling
*
* @author Manuel Bernhardt <[email protected]>
*/
class SchemaProvider extends SchemaService {
private val log = Logger("CultureHub")
private def repository = Akka.system.actorFor("akka://application/user/schemaRepository")
private implicit val timeout = Timeout(2000 milliseconds)
private val refreshTimeout = Timeout(1 minute) // at this point in March 2013, one minute is perfectly reasonable
override def refresh = {
val eventuallyRefreshed = (repository ? Refresh)(refreshTimeout)
try {
Await.result(eventuallyRefreshed, refreshTimeout.duration) match {
case true => true
case false => false
}
} catch {
case t: Throwable =>
log.error("Problem refreshing the SchemaRepository", t)
false
}
}
override def getSchemas(implicit configuration: OrganizationConfiguration): Seq[eu.delving.schema.xml.Schema] = {
getAllSchemas.filter(s => configuration.schemas.contains(s.prefix))
}
override def getAllSchemas: Seq[eu.delving.schema.xml.Schema] = {
try {
val future = (repository ? GetSchemas)
Await.result(future, timeout.duration).asInstanceOf[Schemas].schemas.filterNot(s => s.versions.isEmpty)
} catch {
case t: Throwable =>
log.error("Error while retrieving all schemas", t)
Seq.empty
}
}
override def getSchema(prefix: String, version: String, schemaType: SchemaType): Option[String] = {
val future = repository ? GetSchema(new SchemaVersion(prefix, version), schemaType)
try {
Await.result(future, timeout.duration) match {
case SchemaContent(schemaContent) =>
log.trace(s"Retrieved schema $prefix $version $schemaType: $schemaContent")
Option(schemaContent)
case SchemaError(error: Throwable) =>
log.error("Error while trying to retrieve schema %s of type %s: %s".format(
version, schemaType.fileName, error.getMessage
), error)
None
}
} catch {
case t: Throwable =>
log.error(s"Error while retrieving schema $prefix:$version from repository", t)
None
}
}
}
object SchemaProvider {
// ~~~ questions
case object Refresh
case object GetSchemas
case class GetSchema(schemaVersion: SchemaVersion, schemaType: SchemaType)
// ~~~ answers
case class Schemas(schemas: Seq[eu.delving.schema.xml.Schema])
case class SchemaContent(schemaContent: String)
case class SchemaError(t: Throwable)
}
class SchemaRepositoryWrapper extends Actor {
private val log = Logger("CultureHub")
private var scheduler: Cancellable = null
private var schemaRepository: SchemaRepository = null
private lazy val fetcher = if (Play.isDev || Play.isTest) new FileSystemFetcher(false) else new RemoteFetcher
override def preStart() {
scheduler = Akka.system.scheduler.schedule(3 minutes, 5 minutes, self, SchemaProvider.Refresh)
}
override def postStop() {
scheduler.cancel()
}
def receive = {
case SchemaProvider.Refresh =>
sender ! refresh
case GetSchemas =>
if (schemaRepository == null) {
log.warn("Schema repository was null?! Refreshing...")
refresh
}
sender ! Schemas(schemaRepository.getSchemas.asScala)
case GetSchema(version, schemaType) =>
if (schemaRepository == null) {
log.warn("Schema repository was null?! Refreshing...")
refresh
}
try {
val response = schemaRepository.getSchema(version, schemaType)
sender ! SchemaContent(response.getSchemaText)
} catch {
case t: Throwable =>
sender ! SchemaError(t)
}
}
private def refresh = {
try {
val newSchemaRepository = new SchemaRepository(fetcher)
newSchemaRepository.prefetchAllSchemas()
schemaRepository = newSchemaRepository
log.info("Refreshed all schemas")
true
} catch {
case t: Throwable =>
log.error("Could not prefetch schema repository", t)
false
}
}
private def prefixes(schemas: Seq[eu.delving.schema.xml.Schema]) = schemas.map(_.prefix).mkString(", ")
}
class RemoteFetcher extends Fetcher {
val log = Logger("CultureHub")
private val SCHEMA_REPO = "http://schemas.delving.eu"
def isValidating: java.lang.Boolean = true
def fetchList(): String = WS.url(SCHEMA_REPO + "/schema-repository.xml").get().await(5, TimeUnit.SECONDS).fold(
{ t: Throwable => log.error("RemoteFetcher: could not retrieve schema list", t); "" },
{ r: Response => r.getAHCResponse.getResponseBody("UTF-8") }
)
def fetchSchema(version: SchemaVersion, schemaType: SchemaType): String = WS.url(SCHEMA_REPO + version.getPath(schemaType)).get().await(5, TimeUnit.SECONDS).fold(
{ t: Throwable => log.error("RemoteFetcher: could not retrieve schema", t); "" },
{ r: Response => r.getAHCResponse.getResponseBody("UTF-8") }
)
} | delving/culture-hub | web-core/app/core/schema/SchemaProvider.scala | Scala | apache-2.0 | 5,755 |
package spatutorial.client.services
import spatutorial.client.ukko.Dispatcher
object MainDispatcher extends Dispatcher {
// no need to define anything specific here, just basic Dispatcher functionality
}
| zoosky/eispoc | js/src/main/scala/spatutorial/client/services/MainDispatcher.scala | Scala | agpl-3.0 | 208 |
package gitbucket.core.service
import fr.brouillard.oss.security.xhub.XHub
import fr.brouillard.oss.security.xhub.XHub.{XHubConverter, XHubDigest}
import gitbucket.core.api._
import gitbucket.core.model.{Account, CommitComment, Issue, IssueComment, PullRequest, WebHook, WebHookEvent}
import gitbucket.core.model.Profile._
import gitbucket.core.model.Profile.profile.blockingApi._
import org.apache.http.client.utils.URLEncodedUtils
import gitbucket.core.util.JGitUtil.CommitInfo
import gitbucket.core.util.RepositoryName
import gitbucket.core.service.RepositoryService.RepositoryInfo
import org.apache.http.NameValuePair
import org.apache.http.client.entity.UrlEncodedFormEntity
import org.apache.http.message.BasicNameValuePair
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.lib.ObjectId
import org.slf4j.LoggerFactory
import scala.concurrent._
import scala.util.{Success, Failure}
import org.apache.http.HttpRequest
import org.apache.http.HttpResponse
import gitbucket.core.model.WebHookContentType
import org.apache.http.client.entity.EntityBuilder
import org.apache.http.entity.ContentType
trait WebHookService {
import WebHookService._
private val logger = LoggerFactory.getLogger(classOf[WebHookService])
/** get All WebHook informations of repository */
def getWebHooks(owner: String, repository: String)(implicit s: Session): List[(WebHook, Set[WebHook.Event])] =
WebHooks.filter(_.byRepository(owner, repository))
.join(WebHookEvents).on { (w, t) => t.byWebHook(w) }
.map { case (w, t) => w -> t.event }
.list.groupBy(_._1).mapValues(_.map(_._2).toSet).toList.sortBy(_._1.url)
/** get All WebHook informations of repository event */
def getWebHooksByEvent(owner: String, repository: String, event: WebHook.Event)(implicit s: Session): List[WebHook] =
WebHooks.filter(_.byRepository(owner, repository))
.join(WebHookEvents).on { (wh, whe) => whe.byWebHook(wh) }
.filter { case (wh, whe) => whe.event === event.bind}
.map{ case (wh, whe) => wh }
.list.distinct
/** get All WebHook information from repository to url */
def getWebHook(owner: String, repository: String, url: String)(implicit s: Session): Option[(WebHook, Set[WebHook.Event])] =
WebHooks
.filter(_.byPrimaryKey(owner, repository, url))
.join(WebHookEvents).on { (w, t) => t.byWebHook(w) }
.map { case (w, t) => w -> t.event }
.list.groupBy(_._1).mapValues(_.map(_._2).toSet).headOption
def addWebHook(owner: String, repository: String, url :String, events: Set[WebHook.Event], ctype: WebHookContentType, token: Option[String])(implicit s: Session): Unit = {
WebHooks insert WebHook(owner, repository, url, ctype, token)
events.map { event: WebHook.Event =>
WebHookEvents insert WebHookEvent(owner, repository, url, event)
}
}
def updateWebHook(owner: String, repository: String, url :String, events: Set[WebHook.Event], ctype: WebHookContentType, token: Option[String])(implicit s: Session): Unit = {
WebHooks.filter(_.byPrimaryKey(owner, repository, url)).map(w => (w.ctype, w.token)).update((ctype, token))
WebHookEvents.filter(_.byWebHook(owner, repository, url)).delete
events.map { event: WebHook.Event =>
WebHookEvents insert WebHookEvent(owner, repository, url, event)
}
}
def deleteWebHook(owner: String, repository: String, url :String)(implicit s: Session): Unit =
WebHooks.filter(_.byPrimaryKey(owner, repository, url)).delete
def callWebHookOf(owner: String, repository: String, event: WebHook.Event)(makePayload: => Option[WebHookPayload])
(implicit s: Session, c: JsonFormat.Context): Unit = {
val webHooks = getWebHooksByEvent(owner, repository, event)
if(webHooks.nonEmpty){
      makePayload.foreach(callWebHook(event, webHooks, _))
}
}
def callWebHook(event: WebHook.Event, webHooks: List[WebHook], payload: WebHookPayload)
(implicit c: JsonFormat.Context): List[(WebHook, String, Future[HttpRequest], Future[HttpResponse])] = {
import org.apache.http.impl.client.HttpClientBuilder
import ExecutionContext.Implicits.global // TODO Shouldn't use the default execution context
import org.apache.http.protocol.HttpContext
import org.apache.http.client.methods.HttpPost
if(webHooks.nonEmpty){
val json = JsonFormat(payload)
webHooks.map { webHook =>
val reqPromise = Promise[HttpRequest]
val f = Future {
val itcp = new org.apache.http.HttpRequestInterceptor {
def process(res: HttpRequest, ctx: HttpContext): Unit = {
reqPromise.success(res)
}
}
try{
val httpClient = HttpClientBuilder.create.useSystemProperties.addInterceptorLast(itcp).build
logger.debug(s"start web hook invocation for ${webHook.url}")
val httpPost = new HttpPost(webHook.url)
logger.info(s"Content-Type: ${webHook.ctype.ctype}")
httpPost.addHeader("Content-Type", webHook.ctype.ctype)
httpPost.addHeader("X-Github-Event", event.name)
httpPost.addHeader("X-Github-Delivery", java.util.UUID.randomUUID().toString)
webHook.ctype match {
case WebHookContentType.FORM => {
val params: java.util.List[NameValuePair] = new java.util.ArrayList()
params.add(new BasicNameValuePair("payload", json))
def postContent = new UrlEncodedFormEntity(params, "UTF-8")
httpPost.setEntity(postContent)
if (webHook.token.exists(_.trim.nonEmpty)) {
// TODO find a better way and see how to extract content from postContent
val contentAsBytes = URLEncodedUtils.format(params, "UTF-8").getBytes("UTF-8")
httpPost.addHeader("X-Hub-Signature", XHub.generateHeaderXHubToken(XHubConverter.HEXA_LOWERCASE, XHubDigest.SHA1, webHook.token.get, contentAsBytes))
}
}
case WebHookContentType.JSON => {
httpPost.setEntity(EntityBuilder.create().setContentType(ContentType.APPLICATION_JSON).setText(json).build())
if (webHook.token.exists(_.trim.nonEmpty)) {
httpPost.addHeader("X-Hub-Signature", XHub.generateHeaderXHubToken(XHubConverter.HEXA_LOWERCASE, XHubDigest.SHA1, webHook.token.orNull, json.getBytes("UTF-8")))
}
}
}
val res = httpClient.execute(httpPost)
httpPost.releaseConnection()
logger.debug(s"end web hook invocation for ${webHook}")
res
} catch {
case e: Throwable => {
if(!reqPromise.isCompleted){
reqPromise.failure(e)
}
throw e
}
}
}
f.onComplete {
case Success(_) => logger.debug(s"Success: web hook request to ${webHook.url}")
case Failure(t) => logger.error(s"Failed: web hook request to ${webHook.url}", t)
}
(webHook, json, reqPromise.future, f)
}
} else {
Nil
}
// logger.debug("end callWebHook")
}
}
trait WebHookPullRequestService extends WebHookService {
self: AccountService with RepositoryService with PullRequestService with IssuesService =>
import WebHookService._
// https://developer.github.com/v3/activity/events/types/#issuesevent
def callIssuesWebHook(action: String, repository: RepositoryService.RepositoryInfo, issue: Issue, baseUrl: String, sender: Account)
(implicit s: Session, context:JsonFormat.Context): Unit = {
callWebHookOf(repository.owner, repository.name, WebHook.Issues){
val users = getAccountsByUserNames(Set(repository.owner, issue.openedUserName), Set(sender))
for{
repoOwner <- users.get(repository.owner)
issueUser <- users.get(issue.openedUserName)
} yield {
WebHookIssuesPayload(
action = action,
number = issue.issueId,
repository = ApiRepository(repository, ApiUser(repoOwner)),
issue = ApiIssue(issue, RepositoryName(repository), ApiUser(issueUser)),
sender = ApiUser(sender))
}
}
}
def callPullRequestWebHook(action: String, repository: RepositoryService.RepositoryInfo, issueId: Int, baseUrl: String, sender: Account)
(implicit s: Session, context:JsonFormat.Context): Unit = {
import WebHookService._
callWebHookOf(repository.owner, repository.name, WebHook.PullRequest){
for{
(issue, pullRequest) <- getPullRequest(repository.owner, repository.name, issueId)
users = getAccountsByUserNames(Set(repository.owner, pullRequest.requestUserName, issue.openedUserName), Set(sender))
baseOwner <- users.get(repository.owner)
headOwner <- users.get(pullRequest.requestUserName)
issueUser <- users.get(issue.openedUserName)
headRepo <- getRepository(pullRequest.requestUserName, pullRequest.requestRepositoryName)
} yield {
WebHookPullRequestPayload(
action = action,
issue = issue,
issueUser = issueUser,
pullRequest = pullRequest,
headRepository = headRepo,
headOwner = headOwner,
baseRepository = repository,
baseOwner = baseOwner,
sender = sender,
mergedComment = getMergedComment(repository.owner, repository.name, issueId)
)
}
}
}
/** @return Map[(issue, issueUser, pullRequest, baseOwner, headOwner), webHooks] */
def getPullRequestsByRequestForWebhook(userName:String, repositoryName:String, branch:String)
(implicit s: Session): Map[(Issue, Account, PullRequest, Account, Account), List[WebHook]] =
(for{
is <- Issues if is.closed === false.bind
pr <- PullRequests if pr.byPrimaryKey(is.userName, is.repositoryName, is.issueId)
if pr.requestUserName === userName.bind
if pr.requestRepositoryName === repositoryName.bind
if pr.requestBranch === branch.bind
bu <- Accounts if bu.userName === pr.userName
ru <- Accounts if ru.userName === pr.requestUserName
iu <- Accounts if iu.userName === is.openedUserName
wh <- WebHooks if wh.byRepository(is.userName , is.repositoryName)
wht <- WebHookEvents if wht.event === WebHook.PullRequest.asInstanceOf[WebHook.Event].bind && wht.byWebHook(wh)
} yield {
((is, iu, pr, bu, ru), wh)
}).list.groupBy(_._1).mapValues(_.map(_._2))
def callPullRequestWebHookByRequestBranch(action: String, requestRepository: RepositoryService.RepositoryInfo, requestBranch: String, baseUrl: String, sender: Account)
(implicit s: Session, context:JsonFormat.Context): Unit = {
import WebHookService._
for{
((issue, issueUser, pullRequest, baseOwner, headOwner), webHooks) <- getPullRequestsByRequestForWebhook(requestRepository.owner, requestRepository.name, requestBranch)
baseRepo <- getRepository(pullRequest.userName, pullRequest.repositoryName)
} yield {
val payload = WebHookPullRequestPayload(
action = action,
issue = issue,
issueUser = issueUser,
pullRequest = pullRequest,
headRepository = requestRepository,
headOwner = headOwner,
baseRepository = baseRepo,
baseOwner = baseOwner,
sender = sender,
mergedComment = getMergedComment(baseRepo.owner, baseRepo.name, issue.issueId)
)
callWebHook(WebHook.PullRequest, webHooks, payload)
}
}
}
trait WebHookPullRequestReviewCommentService extends WebHookService {
self: AccountService with RepositoryService with PullRequestService with IssuesService with CommitsService =>
def callPullRequestReviewCommentWebHook(action: String, comment: CommitComment, repository: RepositoryService.RepositoryInfo, issueId: Int, baseUrl: String, sender: Account)
(implicit s: Session, context:JsonFormat.Context): Unit = {
import WebHookService._
callWebHookOf(repository.owner, repository.name, WebHook.PullRequestReviewComment){
for{
(issue, pullRequest) <- getPullRequest(repository.owner, repository.name, issueId)
users = getAccountsByUserNames(Set(repository.owner, pullRequest.requestUserName, issue.openedUserName), Set(sender))
baseOwner <- users.get(repository.owner)
headOwner <- users.get(pullRequest.requestUserName)
issueUser <- users.get(issue.openedUserName)
headRepo <- getRepository(pullRequest.requestUserName, pullRequest.requestRepositoryName)
} yield {
WebHookPullRequestReviewCommentPayload(
action = action,
comment = comment,
issue = issue,
issueUser = issueUser,
pullRequest = pullRequest,
headRepository = headRepo,
headOwner = headOwner,
baseRepository = repository,
baseOwner = baseOwner,
sender = sender,
mergedComment = getMergedComment(repository.owner, repository.name, issue.issueId)
)
}
}
}
}
trait WebHookIssueCommentService extends WebHookPullRequestService {
self: AccountService with RepositoryService with PullRequestService with IssuesService =>
import WebHookService._
def callIssueCommentWebHook(repository: RepositoryService.RepositoryInfo, issue: Issue, issueCommentId: Int, sender: Account)
(implicit s: Session, context:JsonFormat.Context): Unit = {
callWebHookOf(repository.owner, repository.name, WebHook.IssueComment){
for{
issueComment <- getComment(repository.owner, repository.name, issueCommentId.toString())
users = getAccountsByUserNames(Set(issue.openedUserName, repository.owner, issueComment.commentedUserName), Set(sender))
issueUser <- users.get(issue.openedUserName)
repoOwner <- users.get(repository.owner)
commenter <- users.get(issueComment.commentedUserName)
} yield {
WebHookIssueCommentPayload(
issue = issue,
issueUser = issueUser,
comment = issueComment,
commentUser = commenter,
repository = repository,
repositoryUser = repoOwner,
sender = sender)
}
}
}
}
object WebHookService {
trait WebHookPayload
// https://developer.github.com/v3/activity/events/types/#pushevent
case class WebHookPushPayload(
pusher: ApiPusher,
sender: ApiUser,
ref: String,
before: String,
after: String,
commits: List[ApiCommit],
repository: ApiRepository
) extends FieldSerializable with WebHookPayload {
val compare = commits.size match {
      case 0 => ApiPath(s"/${repository.full_name}") // may be a test hook on an uninitialized repository
case 1 => ApiPath(s"/${repository.full_name}/commit/${after}")
case _ if before.filterNot(_=='0').isEmpty => ApiPath(s"/${repository.full_name}/compare/${commits.head.id}^...${after}")
case _ => ApiPath(s"/${repository.full_name}/compare/${before}...${after}")
}
val head_commit = commits.lastOption
}
object WebHookPushPayload {
def apply(git: Git, sender: Account, refName: String, repositoryInfo: RepositoryInfo,
commits: List[CommitInfo], repositoryOwner: Account,
newId: ObjectId, oldId: ObjectId): WebHookPushPayload =
WebHookPushPayload(
pusher = ApiPusher(sender),
sender = ApiUser(sender),
ref = refName,
before = ObjectId.toString(oldId),
after = ObjectId.toString(newId),
commits = commits.map{ commit => ApiCommit.forPushPayload(git, RepositoryName(repositoryInfo), commit) },
repository = ApiRepository.forPushPayload(
repositoryInfo,
owner= ApiUser(repositoryOwner))
)
}
// https://developer.github.com/v3/activity/events/types/#issuesevent
case class WebHookIssuesPayload(
action: String,
number: Int,
repository: ApiRepository,
issue: ApiIssue,
sender: ApiUser) extends WebHookPayload
// https://developer.github.com/v3/activity/events/types/#pullrequestevent
case class WebHookPullRequestPayload(
action: String,
number: Int,
repository: ApiRepository,
pull_request: ApiPullRequest,
sender: ApiUser
) extends WebHookPayload
object WebHookPullRequestPayload{
def apply(action: String,
issue: Issue,
issueUser: Account,
pullRequest: PullRequest,
headRepository: RepositoryInfo,
headOwner: Account,
baseRepository: RepositoryInfo,
baseOwner: Account,
sender: Account,
mergedComment: Option[(IssueComment, Account)]): WebHookPullRequestPayload = {
val headRepoPayload = ApiRepository(headRepository, headOwner)
val baseRepoPayload = ApiRepository(baseRepository, baseOwner)
val senderPayload = ApiUser(sender)
val pr = ApiPullRequest(
issue = issue,
pullRequest = pullRequest,
headRepo = headRepoPayload,
baseRepo = baseRepoPayload,
user = ApiUser(issueUser),
mergedComment = mergedComment
)
WebHookPullRequestPayload(
action = action,
number = issue.issueId,
repository = pr.base.repo,
pull_request = pr,
sender = senderPayload
)
}
}
// https://developer.github.com/v3/activity/events/types/#issuecommentevent
case class WebHookIssueCommentPayload(
action: String,
repository: ApiRepository,
issue: ApiIssue,
comment: ApiComment,
sender: ApiUser
) extends WebHookPayload
object WebHookIssueCommentPayload {
def apply(
issue: Issue,
issueUser: Account,
comment: IssueComment,
commentUser: Account,
repository: RepositoryInfo,
repositoryUser: Account,
sender: Account): WebHookIssueCommentPayload =
WebHookIssueCommentPayload(
action = "created",
repository = ApiRepository(repository, repositoryUser),
issue = ApiIssue(issue, RepositoryName(repository), ApiUser(issueUser)),
comment = ApiComment(comment, RepositoryName(repository), issue.issueId, ApiUser(commentUser), issue.isPullRequest),
sender = ApiUser(sender))
}
// https://developer.github.com/v3/activity/events/types/#pullrequestreviewcommentevent
case class WebHookPullRequestReviewCommentPayload(
action: String,
comment: ApiPullRequestReviewComment,
pull_request: ApiPullRequest,
repository: ApiRepository,
sender: ApiUser
) extends WebHookPayload
object WebHookPullRequestReviewCommentPayload {
def apply(
action: String,
comment: CommitComment,
issue: Issue,
issueUser: Account,
pullRequest: PullRequest,
headRepository: RepositoryInfo,
headOwner: Account,
baseRepository: RepositoryInfo,
baseOwner: Account,
sender: Account,
mergedComment: Option[(IssueComment, Account)]
) : WebHookPullRequestReviewCommentPayload = {
val headRepoPayload = ApiRepository(headRepository, headOwner)
val baseRepoPayload = ApiRepository(baseRepository, baseOwner)
val senderPayload = ApiUser(sender)
WebHookPullRequestReviewCommentPayload(
action = action,
comment = ApiPullRequestReviewComment(
comment = comment,
commentedUser = senderPayload,
repositoryName = RepositoryName(baseRepository),
issueId = issue.issueId
),
pull_request = ApiPullRequest(
issue = issue,
pullRequest = pullRequest,
headRepo = headRepoPayload,
baseRepo = baseRepoPayload,
user = ApiUser(issueUser),
mergedComment = mergedComment
),
repository = baseRepoPayload,
sender = senderPayload)
}
}
}
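// Editor's sketch (assumption-laden, not part of gitbucket): how a receiving endpoint
// could verify the X-Hub-Signature header produced above. It recomputes HMAC-SHA1 over
// the raw request body with the shared token and compares lowercase hex digests. The
// "sha1=" prefix follows GitHub's convention; whether the XHub library emits it too is
// an assumption here.
object XHubSignatureSketch extends App {
  import javax.crypto.Mac
  import javax.crypto.spec.SecretKeySpec

  def signature(token: String, body: Array[Byte]): String = {
    val mac = Mac.getInstance("HmacSHA1")
    mac.init(new SecretKeySpec(token.getBytes("UTF-8"), "HmacSHA1"))
    "sha1=" + mac.doFinal(body).map("%02x".format(_)).mkString
  }

  val body   = """{"action":"opened"}""".getBytes("UTF-8")
  val header = signature("secret-token", body)
  println(header)                                    // sha1=<40 hex chars>
  println(header == signature("secret-token", body)) // true: digests match
}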
| nobusugi246/gitbucket | src/main/scala/gitbucket/core/service/WebHookService.scala | Scala | apache-2.0 | 20,393 |
package io.mpjsons.impl.special
import io.mpjsons.impl.util.{Context, TypesUtil}
import io.mpjsons.impl.{DeserializerFactory, SerializerFactory, StringIterator}
import io.mpjsons.{JsonTypeDeserializer, JsonTypeSerializer}
import scala.reflect.runtime.universe._
object TypedConverter {
class TypedSerializer[T <: AnyRef](packageName: String, serializerFactory: SerializerFactory)
(implicit tag: TypeTag[T]) extends JsonTypeSerializer[T] {
private var serializers: Map[String, JsonTypeSerializer[AnyRef]] = Map.empty
override def serialize(obj: T, jsonBuilder: StringBuilder): Unit = {
val typeName = obj.getClass.getSimpleName
val innerElementSerializer = serializers.getOrElse(typeName, {
val s = serializerFactory.getSerializer(TypesUtil.getTypeFromClass(obj.getClass), Context(List.empty, Map.empty), allowSuperType = false).asInstanceOf[JsonTypeSerializer[AnyRef]]
serializers += typeName -> s
s
})
val simpleName: String = obj.getClass.getSimpleName
jsonBuilder.append("{\\"" + simpleName + "\\":")
innerElementSerializer.serialize(obj, jsonBuilder)
jsonBuilder.append('}')
}
}
class TypedDeserializer[T <: AnyRef](packageName: String, deserializerFactory: DeserializerFactory)
extends JsonTypeDeserializer[T] {
private var deserializers: Map[String, JsonTypeDeserializer[AnyRef]] = Map.empty
override def deserialize(jsonIterator: StringIterator): T = {
jsonIterator.consumeObjectStart()
val typeName = extractTypeName(jsonIterator)
val innerElementDeserializer = deserializers.getOrElse(typeName, {
val elementType = TypesUtil.getTypeFromClass(Class.forName(packageName + "." + typeName))
val d = deserializerFactory.getDeserializer(elementType, Context(List.empty, Map.empty), allowSuperType = false).asInstanceOf[JsonTypeDeserializer[AnyRef]]
deserializers += typeName -> d
d
})
jsonIterator.skipWhitespaceChars()
jsonIterator.nextChar()
jsonIterator.skipWhitespaceChars()
val value = innerElementDeserializer.deserialize(jsonIterator).asInstanceOf[T]
jsonIterator.skipWhitespaceChars()
jsonIterator.nextChar()
value
}
def extractTypeName(jsonIterator: StringIterator): String = {
val deserializer = deserializerFactory.getDeserializer[String](typeOf[String], Context(List.empty, Map.empty))
jsonIterator.skipWhitespaceChars()
deserializer.deserialize(jsonIterator)
}
}
}
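// Editor's sketch (hypothetical types, not from the source): TypedSerializer wraps the
// payload in a one-field object keyed by the runtime class's simple name, and
// TypedDeserializer resolves that key back to `packageName + "." + typeName`. So a
// value of class `Dog` in package `zoo` round-trips through JSON shaped like this:
object TypedFormatSketch extends App {
  val wrapped = """{"Dog":{"name":"Rex"}}""" // key = simple class name, value = payload
  println(wrapped)
  // Deserialization would load Class.forName("zoo.Dog") and parse the inner object.
}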
| marpiec/mpjsons | src/main/scala/io/mpjsons/impl/special/TypedConverter.scala | Scala | apache-2.0 | 2,568 |
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
package scala
package collection.parallel
import java.util.concurrent.ThreadPoolExecutor
import java.util.concurrent.ForkJoinPool
import scala.concurrent.ExecutionContext
/** A trait implementing the scheduling of a parallel collection operation.
*
* Parallel collections are modular in the way operations are scheduled. Each
* parallel collection is parameterized with a task support object which is
* responsible for scheduling and load-balancing tasks to processors.
*
* A task support object can be changed in a parallel collection after it has
* been created, but only during a quiescent period, i.e. while there are no
* concurrent invocations to parallel collection methods.
*
* There are currently a few task support implementations available for
* parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]]
* uses a fork-join pool internally.
*
* The [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
* default execution context implementation found in scala.concurrent, and it
* reuses the thread pool used in scala.concurrent.
*
* The execution context task support is set to each parallel collection by
* default, so parallel collections reuse the same fork-join pool as the
* future API.
*
* Here is a way to change the task support of a parallel collection:
*
* {{{
* import scala.collection.parallel._
* val pc = mutable.ParArray(1, 2, 3)
* pc.tasksupport = new ForkJoinTaskSupport(
* new java.util.concurrent.ForkJoinPool(2))
* }}}
*
  * @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section
  * of the parallel collections guide for more information.
*/
trait TaskSupport extends Tasks
/** A task support that uses a fork join pool to schedule tasks.
*
* @see [[scala.collection.parallel.TaskSupport]] for more information.
*/
class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool)
extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
/** A task support that uses a thread pool executor to schedule tasks.
*
* @see [[scala.collection.parallel.TaskSupport]] for more information.
*/
@deprecated("use `ForkJoinTaskSupport` instead", "2.11.0")
class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
/** A task support that uses an execution context to schedule tasks.
*
* It can be used with the default execution context implementation in the
* `scala.concurrent` package. It internally forwards the call to either a
* forkjoin based task support or a thread pool executor one, depending on
* what the execution context uses.
*
* By default, parallel collections are parameterized with this task support
* object, so parallel collections share the same execution context backend
* as the rest of the `scala.concurrent` package.
*
* @see [[scala.collection.parallel.TaskSupport]] for more information.
*/
class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global)
extends TaskSupport with ExecutionContextTasks
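// Editor's sketch: configuring the task support of a parallel collection, following
// the scaladoc above (standard-library parallel collections of this era; the pool
// size is an arbitrary example value).
object TaskSupportSketch extends App {
  import scala.collection.parallel._
  import java.util.concurrent.ForkJoinPool

  val pc = mutable.ParArray.tabulate(1000)(identity)
  pc.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(2)) // dedicated 2-worker pool
  println(pc.map(_ * 2).sum)
  pc.tasksupport = new ExecutionContextTaskSupport() // back to the shared global context
  println(pc.count(_ % 2 == 0))
}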
| felixmulder/scala | src/library/scala/collection/parallel/TaskSupport.scala | Scala | bsd-3-clause | 3,753 |
package aecor.tests.e2e
import aecor.tests.e2e.TestCounterViewRepository.State
import cats.mtl.MonadState
import monocle.Lens
import aecor.testkit._
trait CounterViewRepository[F[_]] {
def getCounterState(id: CounterId): F[Option[Long]]
def setCounterState(id: CounterId, value: Long): F[Unit]
}
object TestCounterViewRepository {
case class State(values: Map[CounterId, Long]) {
def getCounterState(id: CounterId): Option[Long] =
values.get(id)
def setCounterState(id: CounterId, value: Long): State =
State(values.updated(id, value))
}
object State {
def init: State = State(Map.empty)
}
final class Builder[F[_]] {
def apply[S: MonadState[F, *]](lens: Lens[S, State]): TestCounterViewRepository[F, S] =
new TestCounterViewRepository(lens)
}
def apply[F[_]]: Builder[F] = new Builder[F]
}
class TestCounterViewRepository[F[_]: MonadState[*[_], S], S](lens: Lens[S, State])
extends CounterViewRepository[F] {
private val F = lens.transformMonadState(MonadState[F, S])
def getCounterState(id: CounterId): F[Option[Long]] =
F.inspect(_.getCounterState(id))
def setCounterState(id: CounterId, value: Long): F[Unit] =
F.modify(_.setCounterState(id, value))
}
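// Editor's sketch: the pure state transitions behind the repository, exercised without
// the MonadState/lens plumbing. CounterId is assumed to be a simple case class wrapping
// a String (only its use as a map key is visible from this file).
object CounterViewStateSketch extends App {
  import TestCounterViewRepository.State
  val id = CounterId("counter-1") // assumed constructor
  val s  = State.init.setCounterState(id, 41L).setCounterState(id, 42L)
  println(s.getCounterState(id)) // Some(42): the second write overwrites the first
}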
| notxcain/aecor | modules/tests/src/main/scala/aecor/tests/e2e/CounterViewRepository.scala | Scala | mit | 1,230 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Explicitly
import collection.GenTraversable
import SharedHelpers._
import Matchers._
class InOrderElementsOfContainMatcherEqualitySpec extends Spec with Explicitly {
class CustomEquality extends Equality[String] {
def areEqual(left: String, right: Any) =
left.trim == (right match {
case s: String => s.trim
case other => other
})
}
object `inOrderElementsOf ` {
implicit val equality = new CustomEquality
def checkShouldContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int) {
val leftText = FailureMessages.decorateToStringValue(left)
val rightText = FailureMessages.decorateToStringValue(right)
e.message should be (Some(leftText + " did not contain all elements of " + rightText + " in order"))
e.failedCodeFileName should be (Some("InOrderElementsOfContainMatcherEqualitySpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
def checkShouldNotContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int) {
val leftText = FailureMessages.decorateToStringValue(left)
val rightText = FailureMessages.decorateToStringValue(right)
e.message should be (Some(leftText + " contained all elements of " + rightText + " in order"))
e.failedCodeFileName should be (Some("InOrderElementsOfContainMatcherEqualitySpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
def `should take custom implicit equality in scope when 'should contain' is used` {
List("1 ", "2", "3 ") should contain inOrderElementsOf Seq("1", "2 ", "3")
Array("1 ", "2", "3 ") should contain inOrderElementsOf Seq("1", "2 ", "3")
javaList("1", "2 ", "3") should contain inOrderElementsOf Seq("1", "2 ", "3")
}
def `should take custom implicit equality in scope when 'should not contain' is used` {
List("1 ", "2", "3 ") should not contain inOrderElementsOf (Seq("3", "2 ", "1"))
Array("1 ", "2", "3 ") should not contain inOrderElementsOf (Seq("3", "2 ", "1"))
javaList("1 ", "2", "3 ") should not contain inOrderElementsOf (Seq("3", "2 ", "1"))
}
def `should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with custom implicit equality in scope` {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
left1 should contain inOrderElementsOf Seq("3", "2 ", "1")
}
checkShouldContainStackDepth(e1, left1, Seq("3", "2 ", "1"), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
left2 should contain inOrderElementsOf Seq("3", "2 ", "1")
}
checkShouldContainStackDepth(e2, left2, Seq("3", "2 ", "1"), thisLineNumber - 2)
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
left3 should contain inOrderElementsOf Seq("3", "2 ", "1")
}
checkShouldContainStackDepth(e3, left3, Seq("3", "2 ", "1"), thisLineNumber - 2)
}
def `should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with custom implicit equality in scope` {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
left1 should not contain inOrderElementsOf (Seq("1", "2 ", "3"))
}
checkShouldNotContainStackDepth(e1, left1, Seq("1", "2 ", "3"), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
left2 should not contain inOrderElementsOf (Seq("1", "2 ", "3"))
}
checkShouldNotContainStackDepth(e2, left2, Seq("1", "2 ", "3"), thisLineNumber - 2)
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
left3 should not contain inOrderElementsOf (Seq("1", "2 ", "3"))
}
checkShouldNotContainStackDepth(e3, left3, Seq("1", "2 ", "3"), thisLineNumber - 2)
}
def `should take passed in custom explicit equality when 'should contain' is used` {
(List("1 ", "2", "3 ") should contain inOrderElementsOf Seq("1", "2 ", "3")) (equality)
(Array("1 ", "2", "3 ") should contain inOrderElementsOf Seq("1", "2 ", "3")) (equality)
(javaList("1 ", "2", "3 ") should contain inOrderElementsOf Seq("1", "2 ", "3")) (equality)
}
def `should take passed in custom explicit equality when 'should not contain' is used` {
(List("1 ", "2", "3 ") should not contain inOrderElementsOf (Seq("3", "2 ", "1"))) (equality)
(Array("1 ", "2", "3 ") should not contain inOrderElementsOf (Seq("3", "2 ", "1"))) (equality)
(javaList("1 ", "2", "3 ") should not contain inOrderElementsOf (Seq("3", "2 ", "1"))) (equality)
}
def `should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with custom explicit equality` {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should contain inOrderElementsOf Seq("3", "2 ", "1")) (equality)
}
checkShouldContainStackDepth(e1, left1, Seq("3", "2 ", "1"), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should contain inOrderElementsOf Seq("3", "2 ", "1")) (equality)
}
checkShouldContainStackDepth(e2, left2, Seq("3", "2 ", "1"), thisLineNumber - 2)
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should contain inOrderElementsOf Seq("3", "2 ", "1")) (equality)
}
checkShouldContainStackDepth(e3, left3, Seq("3", "2 ", "1"), thisLineNumber - 2)
}
def `should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with custom explicit equality` {
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should not contain inOrderElementsOf (Seq("1", "2 ", "3"))) (equality)
}
checkShouldNotContainStackDepth(e1, left1, Seq("1", "2 ", "3"), thisLineNumber - 2)
val left2 = Array("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should not contain inOrderElementsOf (Seq("1", "2 ", "3"))) (equality)
}
checkShouldNotContainStackDepth(e2, left2, Seq("1", "2 ", "3"), thisLineNumber - 2)
val left3 = javaList("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should not contain inOrderElementsOf (Seq("1", "2 ", "3"))) (equality)
}
checkShouldNotContainStackDepth(e3, left3, Seq("1", "2 ", "3"), thisLineNumber - 2)
}
}
}
| cheeseng/scalatest | scalatest-test/src/test/scala/org/scalatest/InOrderElementsOfContainMatcherEqualitySpec.scala | Scala | apache-2.0 | 7,661 |
/*
* Copyright (c) 2012, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.queue.engine.gridengine
import org.broadinstitute.sting.queue.util.Logging
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.engine.drmaa.DrmaaJobRunner
import org.ggf.drmaa.Session
/**
* Runs jobs on a Grid Engine compute cluster.
*/
class GridEngineJobRunner(session: Session, function: CommandLineFunction) extends DrmaaJobRunner(session, function) with Logging {
// Grid Engine disallows certain characters from being in job names.
// This replaces all illegal characters with underscores
  protected override val jobNameFilter = """[\n\t\r/:@\\*?]"""
protected override val minRunnerPriority = -1023
protected override val maxRunnerPriority = 0
override protected def functionNativeSpec = {
// Force the remote environment to inherit local environment settings
var nativeSpec: String = "-V"
// If a project name is set specify the project name
if (function.jobProject != null)
nativeSpec += " -P " + function.jobProject
// If the job queue is set specify the job queue
if (function.jobQueue != null)
nativeSpec += " -q " + function.jobQueue
// If the resident set size is requested pass on the memory request
// NOTE: 12/20/11: depristo commented this out because mem_free isn't
// such a standard feature in SGE (gsa-engineering queue doesn't support it)
// requiring it can make SGE not so usable. It's dangerous to not enforce
// that we have enough memory to run our jobs, but I'd rather be dangerous
// than not be able to run my jobs at all.
// if (function.residentRequest.isDefined)
// nativeSpec += " -l mem_free=%dM".format(function.residentRequest.map(_ * 1024).get.ceil.toInt)
// If the resident set size limit is defined specify the memory limit
if (function.residentLimit.isDefined)
nativeSpec += " -l h_rss=%dM".format(function.residentLimit.map(_ * 1024).get.ceil.toInt)
// If more than 1 core is requested, set the proper request
// if we aren't being jerks and just stealing cores (previous behavior)
if ( function.nCoresRequest.getOrElse(1) > 1 ) {
if ( function.qSettings.dontRequestMultipleCores )
logger.warn("Sending multicore job %s to farm without requesting appropriate number of cores (%d)".format(
function.shortDescription, function.nCoresRequest.get))
else
nativeSpec += " -pe %s %d".format(function.qSettings.parallelEnvironmentName, function.nCoresRequest.get)
}
// Pass on any job resource requests
nativeSpec += function.jobResourceRequests.map(" -l " + _).mkString
// Pass on any job environment names
nativeSpec += function.jobEnvironmentNames.map(" -pe " + _).mkString
// If the priority is set specify the priority
val priority = functionPriority
if (priority.isDefined)
nativeSpec += " -p " + priority.get
logger.debug("Native spec is: %s".format(nativeSpec))
(nativeSpec + " " + super.functionNativeSpec).trim()
}
}
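// Editor's illustration (assumed request values, not from the source): for a function
// asking for queue "long.q", a 4 GB resident limit, 4 cores in parallel environment
// "smp", and priority -10, the builder above would produce roughly this spec.
object NativeSpecSketch extends App {
  val residentLimitGb = 4.0
  val spec = "-V -q long.q -l h_rss=%dM -pe smp 4 -p -10".format((residentLimitGb * 1024).ceil.toInt)
  println(spec) // -V -q long.q -l h_rss=4096M -pe smp 4 -p -10
}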
| iontorrent/Torrent-Variant-Caller-stable | public/scala/src/org/broadinstitute/sting/queue/engine/gridengine/GridEngineJobRunner.scala | Scala | mit | 4,186 |
/*******************************************************************************
* Copyright (c) 2014 Łukasz Szpakowski.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
******************************************************************************/
package pl.luckboy.purfuncor.frontend.typer
trait TypeEnvironmentState[E, L, V]
{
def typeParamCountFromEnvironmentS(env: E): (E, Int)
def withTypeParamsS[T](paramCount: Int)(f: (Int, Int, E) => (E, T))(env: E): (E, T)
def currentTypeParamAppIdxFromEnvironmentS(env: E): (E, Int)
def globalTypeVarValueFromEnvironmentS(loc: L)(env: E): (E, V)
  // This method exists to avoid leaks of type closures.
def withClearS[T](f: E => (E, T))(env: E): (E, T)
}
| luckboy/Purfuncor | src/main/scala/pl/luckboy/purfuncor/frontend/typer/TypeEnvironmentState.scala | Scala | mpl-2.0 | 873 |
package com.orendainx.trucking.storm.schemes
import java.nio.ByteBuffer
import org.apache.storm.tuple.{Fields, Values}
/**
* Scheme for parsing speed events.
*
* @author Edgar Orendain <[email protected]>
*/
class BufferToStringScheme(dataType: String) extends DelimitedScheme("\\|") {
override def deserialize(buffer: ByteBuffer): Values = new Values(dataType, deserializeAsString(buffer))
override def getOutputFields: Fields = new Fields("dataType", "data")
}
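// Editor's sketch: what a Storm topology sees for one speed event. The payload string
// is a made-up pipe-delimited example; only the field names come from getOutputFields.
object SchemeSketch extends App {
  import java.nio.ByteBuffer
  import java.nio.charset.StandardCharsets

  val payload = "1488767711734|truck_speed_event|10|driver-42|65" // hypothetical event
  val buffer  = ByteBuffer.wrap(payload.getBytes(StandardCharsets.UTF_8))
  // new BufferToStringScheme("TruckSpeed").deserialize(buffer) would yield the tuple
  // Values("TruckSpeed", payload), addressable by the fields ("dataType", "data").
  println(StandardCharsets.UTF_8.decode(buffer).toString)
}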
| orendain/trucking-iot | storm-topology/src/main/scala/com/orendainx/trucking/storm/schemes/BufferToStringScheme.scala | Scala | apache-2.0 | 482 |
package com.folio_sec.example.domain.issue003.scala_api
object ParentObjectService extends ParentObjectService
trait ParentObjectService extends ParentObjectServiceAbstract {}
| folio-sec/reladomo-scala | sample/src/main/scala/com/folio_sec/example/domain/issue003/scala_api/ParentObjectService.scala | Scala | apache-2.0 | 178 |
package charactor.core.model.objects.charactor.attributes
import charactor.core.model.objects.charactor.{ReversedLinearSpeed, Speed}
object PersistentAttribute
{
  val MaximumSpeedModifier = 1.6
  val MinimumSpeedModifier = 1.4
}
class PersistentAttribute extends Attribute(List(new ReversedLinearSpeed(Speed.DefaultMaximumSpeed * PersistentAttribute.MaximumSpeedModifier, Speed.DefaultMinimumSpeed * PersistentAttribute.MinimumSpeedModifier)))
{
}
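// Editor's illustration (default speeds are assumed values; only the 1.6/1.4 modifiers
// come from this file): the attribute widens the speed band relative to the defaults.
object PersistentBoundsSketch extends App {
  val defaultMax = 10.0 // assumed stand-in for Speed.DefaultMaximumSpeed
  val defaultMin = 2.0  // assumed stand-in for Speed.DefaultMinimumSpeed
  println(defaultMax * 1.6) // 16.0: upper bound passed to ReversedLinearSpeed
  println(defaultMin * 1.4) // 2.8: lower bound
}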
| PiotrTrzpil/charactor | src/charactor/core/model/objects/charactor/attributes/PersistentAttribute.scala | Scala | apache-2.0 | 448 |
package cgta.oscala
package util
import cgta.otest.FunSuite
//////////////////////////////////////////////////////////////
// Copyright (c) 2013 Ben Jackman
// All Rights Reserved
// please contact [email protected]
// for licensing inquiries
// Created by pacifique @ 9/24/13 12:55 PM
//////////////////////////////////////////////////////////////
object TestArrayHelp extends FunSuite {
val A = "a"
val B = "b"
val C = "c"
val D = "d"
val E = "e"
val F = "f"
val G = "g"
val H = "h"
val X = "x"
def evens = Array(2, 4, 6, 8, 10)
def odds = Array(1, 3, 5, 7, 9)
def abcd = Array(A, B, C, D)
def abcde = Array(A, B, C, D, E)
def efgh = Array(E, F, G, H)
def emptyIntArr = Array[Int]()
def emptyStringArr = Array[Symbol]()
test("") {
val ys = ArrayHelp.setAtGrow(abcd, 4, E)
Assert.isEquals(abcde.toList, ys.take(5).toList)
}
test("RemoveAddOnLeft") {
val arr = evens
ArrayHelp.removeAddOnLeft(arr, 2, Int.MinValue)
Assert.isEquals(Array(Int.MinValue, 2, 4, 8, 10).toList, arr.toList)
Assert.isTrue(arr.indexOf(Int.MinValue) == 0 && !arr.contains(767))
}
test("RemoveAddOnRight") {
val arr = abcd
val removed = ArrayHelp.removeAddOnRight(arr, 1, X)
Assert.isEquals(B, removed)
Assert.isEquals(List(A, C, D, X), arr.toList)
}
test("MoveToEnd") {
val arr = abcd
val removed = ArrayHelp.moveToEnd(arr, 1)
Assert.isEquals(B, removed)
Assert.isEquals(List(A, C, D, B), arr.toList)
}
test("ShiftRight") {
def validate(amount: Int, newVal: Int, result: Array[Int]) {
val arr = evens
ArrayHelp.shiftRight(arr, amount, newVal)
Assert.isEquals(result.toList, arr.toList)
}
validate(0, 17, Array(2, 4, 6, 8, 10))
validate(2, 17, Array(17, 17, 2, 4, 6))
validate(-10, 0, Array(0, 0, 0, 0, 0))
validate(5, 0, Array(0, 0, 0, 0, 0))
}
test("ShiftLeft") {
val arr = (1 to 10).toArray
ArrayHelp.shiftLeft(arr, 0, 19)
Assert.isTrue(!arr.contains(19))
ArrayHelp.shiftLeft(arr, 2, 15)
Assert.isTrue(arr.filter(_ == 15).length == 2)
ArrayHelp.shiftLeft(arr, -30, 19)
Assert.isTrue(arr.sum == 19 * arr.length)
}
test("InsertAt") {
def validate(idx: Int, popped: String, result: List[String]) {
val xs = abcd
Assert.isEquals(popped, ArrayHelp.insertAt(xs, idx, X))
Assert.isEquals(result, xs.toList)
}
validate(0, D, List(X, A, B, C))
validate(1, D, List(A, X, B, C))
validate(2, D, List(A, B, X, C))
validate(3, D, List(A, B, C, X))
Assert.intercepts[IndexOutOfBoundsException](ArrayHelp.insertAt(abcd, 4, X))
Assert.intercepts[IndexOutOfBoundsException](ArrayHelp.insertAt(abcd, -1, X))
}
test("InsertAtShiftLeft") {
def validate(idx: Int, popped: String, result: List[String]) {
val xs = abcd
Assert.isEquals(popped, ArrayHelp.insertAtShiftLeft(xs, idx, X))
Assert.isEquals(result, xs.toList)
}
validate(0, A, List(X, B, C, D))
validate(1, A, List(B, X, C, D))
validate(2, A, List(B, C, X, D))
validate(3, A, List(B, C, D, X))
Assert.intercepts[IndexOutOfBoundsException](ArrayHelp.insertAt(abcd, 4, X))
Assert.intercepts[IndexOutOfBoundsException](ArrayHelp.insertAt(abcd, -1, X))
}
test("append") {
Assert.isEquals(List(1), ArrayHelp.append(Array(), 1).toList)
Assert.isEquals(List(1,2,3,4), ArrayHelp.append(Array(1,2,3), 4).toList)
}
test("GrowWithFill") {
val arr = abcd
Assert.isEquals(List(A, B, C, D, X, X, X, X), ArrayHelp.growWithFill(arr, arr.length * 2)(X).toList)
}
test("Grow") {
val arr = (1 to 5).toArray
Assert.isEquals(10, ArrayHelp.grow(arr, 10).length)
}
test("Swap") {
val arr = abcd
ArrayHelp.swap(arr, 0, 3)
Assert.isEquals(List(D, B, C, A), arr.toList)
}
test("CopyNew") {
val arr = evens
Assert.isEquals(arr.toList, ArrayHelp.copyNew(arr).toList)
}
} | cgta/open | oscala/shared/src/test/scala/cgta/oscala/util/TestArrayHelp.scala | Scala | mit | 3,936 |
package one.murch.bitcoin.coinselection
class Transaction(val walletName: String, val target: Long, val change: Long, val fee: Long, val inputSet: Set[Utxo], val block: Int, val duration: Long) {
  val inputsValue: Long = inputSet.foldLeft(0L)(_ + _.value)
  val outputsValue: Long = target + change + fee
if (inputsValue != outputsValue) {
println("ERROR: one.murch.bitcoin.coinselection.Transaction inputs and outputs don't match in value for " + walletName)
}
def createTransactionReport() {
println("To spend " + target + ", " + walletName + " selected " + inputSet.size + " inputs, with a total.value of " + inputsValue + " satoshi. The change was " + change + " It took " + duration + " ms to calculate.")
}
}
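// Editor's sketch: the balance invariant the constructor checks, as plain arithmetic
// (values are hypothetical; Utxo construction is avoided since its signature is not
// visible in this file).
object BalanceInvariantSketch extends App {
  val inputs = 150000L               // sum of the selected UTXOs' values
  val target = 100000L
  val fee    = 2000L
  val change = inputs - target - fee // 48000 satoshi flow back to the wallet
  assert(inputs == target + change + fee)
  println(change)
}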
| Xekyo/CoinSelectionSimulator | src/main/scala/one/murch/bitcoin/coinselection/Transaction.scala | Scala | mit | 826 |
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package databases.mysql
import java.math.BigDecimal
import java.net.URI
import java.sql.SQLIntegrityConstraintViolationException
import akka.testkit.{ EventFilter, TestFSMRef }
import com.wegtam.scalatest.tags.{ DbTest, DbTestMysql }
import com.wegtam.tensei.adt.{ ConnectionInformation, DFASDL, DFASDLReference }
import com.wegtam.tensei.agent.ActorSpecWithDebugLog
import com.wegtam.tensei.agent.writers.BaseWriter.BaseWriterMessages._
import com.wegtam.tensei.agent.writers.BaseWriter._
import com.wegtam.tensei.agent.writers.DatabaseWriterActor.DatabaseWriterData
import com.wegtam.tensei.agent.writers.{ BaseWriter, DatabaseWriterActor }
import org.scalatest.BeforeAndAfterEach
import scalaz.Scalaz._
class DatabaseWriterActorTest extends ActorSpecWithDebugLog with BeforeAndAfterEach {
val databaseHost = testConfig.getString("mysql.host")
val databasePort = testConfig.getInt("mysql.port")
val databaseName = testConfig.getString("mysql.target-db.name")
val databaseUser = testConfig.getString("mysql.target-db.user")
val databasePass = testConfig.getString("mysql.target-db.pass")
/**
* Initialise the test database.
*/
override protected def beforeEach(): Unit = {
// The database connection.
val connection = java.sql.DriverManager
.getConnection(s"jdbc:mysql://$databaseHost:$databasePort/", databaseUser, databasePass)
val s = connection.createStatement()
s.execute(s"DROP DATABASE IF EXISTS $databaseName")
s.execute(s"CREATE DATABASE $databaseName")
s.close()
connection.close()
super.beforeEach()
}
/**
* Remove the test database.
*/
override protected def afterEach(): Unit = {
// The database connection.
val connection = java.sql.DriverManager
.getConnection(s"jdbc:mysql://$databaseHost:$databasePort/", databaseUser, databasePass)
val s = connection.createStatement()
s.execute(s"DROP DATABASE IF EXISTS $databaseName")
s.close()
connection.close()
super.afterEach()
}
/**
* Initialise the database writer actor.
*
* @param con The connection information for the target database.
* @param dfasdl The target DFASDL.
* @return The test actor ref of the writer actor.
*/
private def initializeWriter(
con: ConnectionInformation,
dfasdl: DFASDL
): TestFSMRef[BaseWriter.State, DatabaseWriterData, DatabaseWriterActor] = {
val writer = TestFSMRef(
new DatabaseWriterActor(con, dfasdl, Option("DatabaseWriterActorTest"))
)
writer.stateName should be(BaseWriter.State.Initializing)
writer ! BaseWriterMessages.InitializeTarget
writer ! AreYouReady
val expectedMsg = ReadyToWork
expectMsg(expectedMsg)
writer
}
describe("DatabaseWriterActor") {
describe("using mysql") {
describe("initialise") {
it("should create the tables", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val writer = initializeWriter(target, dfasdl)
writer ! CloseWriter
expectMsgType[WriterClosed]
val statement = connection.createStatement()
val results = statement.executeQuery(
s"SELECT UPPER(TABLE_NAME) AS TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE UPPER(TABLE_SCHEMA) = UPPER('$databaseName') AND UPPER(TABLE_NAME) = 'ACCOUNTS' ORDER BY TABLE_NAME ASC"
)
withClue("Database table 'accounts' should be created!'") {
results.getFetchSize
results.next() should be(true)
results.getString("TABLE_NAME") shouldEqual "ACCOUNTS"
}
connection.close()
}
it("should not create tables that are already existing", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val statement = connection.createStatement()
statement.execute("CREATE TABLE accounts (id NUMERIC)")
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
EventFilter.warning(occurrences = 1, start = "Table") intercept {
val writer = initializeWriter(target, dfasdl)
writer ! CloseWriter
expectMsgType[WriterClosed]
}
connection.close()
}
it("should create primary keys if defined", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01-with-primary-key.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val writer = initializeWriter(target, dfasdl)
writer ! CloseWriter
expectMsgType[WriterClosed]
val statement = connection.createStatement()
val results = statement.executeQuery(
s"SELECT UPPER(TABLE_NAME) AS TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE UPPER(TABLE_SCHEMA) = UPPER('$databaseName') AND UPPER(TABLE_NAME) = 'ACCOUNTS' ORDER BY TABLE_NAME ASC"
)
withClue("Database table 'accounts' should be created!'") {
results.next() should be(true)
results.getString("TABLE_NAME") shouldEqual "ACCOUNTS"
}
statement.execute("INSERT INTO accounts VALUES(1, 'John Doe', NULL, '2001-01-01', 3.14)")
an[SQLIntegrityConstraintViolationException] should be thrownBy statement.execute(
"INSERT INTO accounts VALUES(1, 'Jane Doe', NULL, '2001-01-02', 2.76)"
)
connection.close()
}
it("should create auto-increment columns if defined", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01-with-pk-and-auto-inc.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val writer = initializeWriter(target, dfasdl)
writer ! CloseWriter
expectMsgType[WriterClosed]
val statement = connection.createStatement()
val results = statement.executeQuery(
s"SELECT UPPER(TABLE_NAME) AS TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE UPPER(TABLE_SCHEMA) = UPPER('$databaseName') AND UPPER(TABLE_NAME) = 'ACCOUNTS' ORDER BY TABLE_NAME ASC"
)
withClue("Database table 'accounts' should be created!'") {
results.next() should be(true)
results.getString("TABLE_NAME") shouldEqual "ACCOUNTS"
}
statement.execute(
"INSERT INTO accounts (name, description, birthday, salary) VALUES('John Doe', NULL, '2001-01-01', 3.14)"
)
val entries = statement.executeQuery("SELECT * FROM accounts WHERE name = 'John Doe'")
withClue("Column should be incremented automatically.") {
entries.next() should be(true)
entries.getInt("id") should be(1)
}
an[SQLIntegrityConstraintViolationException] should be thrownBy statement.execute(
"INSERT INTO accounts VALUES(1, 'Jane Doe', NULL, '2001-01-02', 2.76)"
)
connection.close()
}
it("should create the unique columns if defined", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01-with-unique.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val writer = initializeWriter(target, dfasdl)
writer ! CloseWriter
expectMsgType[WriterClosed]
val statement = connection.createStatement()
statement.execute("INSERT INTO accounts VALUES(1, 'John Doe', NULL, '2001-01-01', 3.14)")
val entries = statement.executeQuery("SELECT * FROM accounts WHERE name = 'John Doe'")
withClue("Unique should work.") {
entries.next() should be(true)
entries.getString("name") should be("John Doe")
an[SQLIntegrityConstraintViolationException] should be thrownBy statement.execute(
"INSERT INTO accounts VALUES(2, 'John Doe', NULL, '2001-01-02', 2.76)"
)
}
connection.close()
}
it("should create primary keys, foreign keys and auto increments", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile =
"/databases/generic/DatabaseWriter/simple-02-with-pk-and-fk-and-auto-inc.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val writer = initializeWriter(target, dfasdl)
writer ! CloseWriter
expectMsgType[WriterClosed]
val statement = connection.createStatement()
val results = statement.executeQuery(
s"SELECT UPPER(TABLE_NAME) AS TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE UPPER(TABLE_SCHEMA) = UPPER('$databaseName') AND (UPPER(TABLE_NAME) = 'ACCOUNTS' OR UPPER(TABLE_NAME) = 'COMPANIES') ORDER BY TABLE_NAME ASC"
)
withClue("Database tables should be created!'") {
results.next() should be(true)
results.getString("TABLE_NAME") shouldEqual "ACCOUNTS"
results.next() should be(true)
results.getString("TABLE_NAME") shouldEqual "COMPANIES"
}
statement.execute("INSERT INTO companies VALUES(1, 'Letterbox Inc.', NULL)")
statement.execute(
"INSERT INTO accounts (name, description, birthday, salary, company_id) VALUES('John Doe', NULL, '2001-01-01', 3.14, 1)"
)
val entries = statement.executeQuery(
"SELECT accounts.id AS account_id, companies.name AS company_name FROM accounts JOIN companies ON accounts.company_id = companies.id WHERE accounts.name = 'John Doe'"
)
withClue("Foreign keys should work.") {
entries.next() should be(true)
withClue("Column id should be auto-incremented.")(
entries.getInt("account_id") should be(1)
)
entries.getString("company_name") should be("Letterbox Inc.")
an[SQLIntegrityConstraintViolationException] should be thrownBy statement.execute(
"INSERT INTO accounts (name, description, birthday, salary, company_id) VALUES('Jane Doe', NULL, '2001-01-02', 2.76, -1)"
)
}
connection.close()
}
}
describe("writing data") {
describe("using a single sequence") {
describe("when given data for a single row") {
it("should write a sequence row", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2,
"Max Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-03"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new BigDecimal("1500.23"),
List(),
Option(new WriterMessageMetaData("salary")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
val results = statement.executeQuery("SELECT * FROM accounts")
withClue("Data should have been written to the database!") {
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Max Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-03"))
results.getDouble("salary") should be(1500.23)
}
connection.close()
}
}
describe("when given data for multiple rows") {
describe("without primary key") {
it("should write all possible sequence rows", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml = scala.io.Source
.fromInputStream(getClass.getResourceAsStream(dfasdlFile))
.mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2,
"Max Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-03"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new BigDecimal("1500.23"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(6, 2L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(7,
"Eva Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(8,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(9,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(10,
new BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(11, 3L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(12,
"Dr. Evil",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(13,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(14,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(15,
new BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
val results = statement.executeQuery("SELECT * FROM accounts ORDER BY id")
withClue("Data should have been written to the database!") {
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Max Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-03"))
results.getDouble("salary") should be(1500.23)
results.next() should be(true)
results.getLong("id") should be(2)
results.getString("name") should be("Eva Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results.getDouble("salary") should be(1500.00)
results.next() should be(true)
results.getLong("id") should be(3)
results.getString("name") should be("Dr. Evil")
results.getString("description") should be("Beware of Austin Powers!")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results.getDouble("salary") should be(1500000.00)
}
connection.close()
}
}
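      // With a primary key declared in the DFASDL the writer must upsert: insert rows with unknown keys and update rows whose key already exists.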
describe("with primary key") {
it("should write new and update existing rows", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01-with-primary-key.xml"
val xml = scala.io.Source
.fromInputStream(getClass.getResourceAsStream(dfasdlFile))
.mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val statement = connection.createStatement()
statement.execute(
"INSERT INTO accounts (id, name, description, birthday, salary) VALUES(1, 'Max Mustermann', 'Some fancy text...', '1968-01-03', 1500.23)"
)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 2L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2,
"Eva Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(6, 3L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(7, "Dr. Evil", List(), Option(new WriterMessageMetaData("name"))),
new WriteData(8,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(9,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(10,
new BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(11, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(12,
"Lord Fancy Pants",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(13,
"An updated description text.",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(14,
java.sql.Date.valueOf("1968-04-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(15,
new BigDecimal("999.97"),
List(),
Option(new WriterMessageMetaData("salary")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
withClue("The exact number of rows should have been written!") {
val count = statement.executeQuery("SELECT COUNT(*) FROM accounts")
count.next() should be(true)
count.getInt(1) shouldEqual 3
}
withClue("Data should have been written to the database!") {
val results = statement.executeQuery("SELECT * FROM accounts ORDER BY id")
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Lord Fancy Pants")
results.getString("description") should be("An updated description text.")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-04-01"))
results.getDouble("salary") should be(999.97)
results.next() should be(true)
results.getLong("id") should be(2)
results.getString("name") should be("Eva Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results.getDouble("salary") should be(1500.00)
results.next() should be(true)
results.getLong("id") should be(3)
results.getString("name") should be("Dr. Evil")
results.getString("description") should be("Beware of Austin Powers!")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results.getDouble("salary") should be(1500000.00)
}
connection.close()
}
}
}
}
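    // Two sequences in one DFASDL map to two target tables (accounts and accounts2); the metadata names (id vs. id2) route each datum to its table.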
describe("using multiple sequences") {
describe("when given data for multiple rows") {
it("should write all possible sequence rows", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-02.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2,
"Max Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-03"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new BigDecimal("1500.23"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(6, 2L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(7,
"Eva Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(8,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(9,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(10,
new BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(11, 1L, List(), Option(new WriterMessageMetaData("id2"))),
new WriteData(12, "Dr. Evil", List(), Option(new WriterMessageMetaData("name2"))),
new WriteData(13,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description2"))),
new WriteData(14,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday2"))),
new WriteData(15,
new BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary2"))),
new WriteData(16, 2L, List(), Option(new WriterMessageMetaData("id2"))),
new WriteData(17,
"Eva Mustermann",
List(),
Option(new WriterMessageMetaData("name2"))),
new WriteData(18,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description2"))),
new WriteData(19,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday2"))),
new WriteData(20,
new BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary2"))),
new WriteData(21, 3L, List(), Option(new WriterMessageMetaData("id2"))),
new WriteData(22, "Dr. Evil", List(), Option(new WriterMessageMetaData("name2"))),
new WriteData(23,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description2"))),
new WriteData(24,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday2"))),
new WriteData(25,
new BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary2")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
val results = statement.executeQuery("SELECT * FROM accounts ORDER BY id")
withClue("Data should have been written to the database!") {
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Max Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-03"))
results.getDouble("salary") should be(1500.23)
results.next() should be(true)
results.getLong("id") should be(2)
results.getString("name") should be("Eva Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results.getDouble("salary") should be(1500.00)
results.next() should be(false)
}
val results2 = statement.executeQuery("SELECT * FROM accounts2 ORDER BY id")
withClue("Data should have been written to the database!") {
results2.next() should be(true)
results2.getLong("id") should be(1)
results2.getString("name") should be("Dr. Evil")
results2.getString("description") should be("Beware of Austin Powers!")
results2.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results2.getDouble("salary") should be(1500000.00)
results2.next() should be(true)
results2.getLong("id") should be(2)
results2.getString("name") should be("Eva Mustermann")
results2.getString("description") should be("Some fancy text...")
results2.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results2.getDouble("salary") should be(1500.00)
results2.next() should be(true)
results2.getLong("id") should be(3)
results2.getString("name") should be("Dr. Evil")
results2.getString("description") should be("Beware of Austin Powers!")
results2.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results2.getDouble("salary") should be(1500000.00)
results2.next() should be(false)
}
connection.close()
}
}
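      // The batch below interleaves columns out of order; the writer has to regroup them into complete rows via the writer message metadata.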
describe("when given data for multiple rows in random order") {
it("should write all possible sequence rows", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-02.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(5,
new BigDecimal("1500.23"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(2,
"Max Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(6, 2L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(7,
"Eva Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-03"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(8,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(11, 1L, List(), Option(new WriterMessageMetaData("id2"))),
new WriteData(12, "Dr. Evil", List(), Option(new WriterMessageMetaData("name2"))),
new WriteData(13,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description2"))),
new WriteData(9,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(10,
new BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(14,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday2"))),
new WriteData(15,
new BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary2"))),
new WriteData(23,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description2"))),
new WriteData(24,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday2"))),
new WriteData(16, 2L, List(), Option(new WriterMessageMetaData("id2"))),
new WriteData(18,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description2"))),
new WriteData(17,
"Eva Mustermann",
List(),
Option(new WriterMessageMetaData("name2"))),
new WriteData(19,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday2"))),
new WriteData(20,
new BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary2"))),
new WriteData(22, "Dr. Evil", List(), Option(new WriterMessageMetaData("name2"))),
new WriteData(25,
new BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary2"))),
new WriteData(21, 3L, List(), Option(new WriterMessageMetaData("id2")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
val results = statement.executeQuery("SELECT * FROM accounts ORDER BY id")
withClue("Data should have been written to the database!") {
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Max Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-03"))
results.getDouble("salary") should be(1500.23)
results.next() should be(true)
results.getLong("id") should be(2)
results.getString("name") should be("Eva Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results.getDouble("salary") should be(1500.00)
results.next() should be(false)
}
val results2 = statement.executeQuery("SELECT * FROM accounts2 ORDER BY id")
withClue("Data should have been written to the database!") {
results2.next() should be(true)
results2.getLong("id") should be(1)
results2.getString("name") should be("Dr. Evil")
results2.getString("description") should be("Beware of Austin Powers!")
results2.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results2.getDouble("salary") should be(1500000.00)
results2.next() should be(true)
results2.getLong("id") should be(2)
results2.getString("name") should be("Eva Mustermann")
results2.getString("description") should be("Some fancy text...")
results2.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results2.getDouble("salary") should be(1500.00)
results2.next() should be(true)
results2.getLong("id") should be(3)
results2.getString("name") should be("Dr. Evil")
results2.getString("description") should be("Beware of Austin Powers!")
results2.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results2.getDouble("salary") should be(1500000.00)
results2.next() should be(false)
}
connection.close()
}
}
}
describe("when retrieving ordered column data") {
it("should write the columns in correct order", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2,
"Max Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-03"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new BigDecimal("1500.23"),
List(),
Option(new WriterMessageMetaData("salary")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
val results = statement.executeQuery("SELECT * FROM accounts")
withClue("Data should have been written to the database!") {
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Max Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-03"))
results.getDouble("salary") should be(1500.23)
}
connection.close()
}
}
describe("when retrieving unordered column data") {
it("should write the columns in correct order", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1,
java.sql.Date.valueOf("1968-01-03"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(2,
"Max Mustermann",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(3, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(4,
new BigDecimal("1500.23"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(5,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
val results = statement.executeQuery("SELECT * FROM accounts")
withClue("Data should have been written to the database!") {
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Max Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-03"))
results.getDouble("salary") should be(1500.23)
}
connection.close()
}
}
}
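    // Auto-increment columns: a value of None lets the database assign the id, and the writer must collect the generated keys.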
describe("using auto increment columns") {
it("should collect the written auto increment values", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile =
"/databases/generic/DatabaseWriter/simple-01-with-pk-and-auto-inc-not-first-column.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val statement = connection.createStatement()
statement.execute(
"INSERT INTO accounts (name, id, description, birthday, salary) VALUES('Max Mustermann', 1, 'Some fancy text...', '1968-01-03', 1500.23)"
)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, None, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2, "Eva Mustermann", List(), Option(new WriterMessageMetaData("name"))),
new WriteData(3,
"Some fancy text...",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new java.math.BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(6, None, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(7, "Dr. Evil", List(), Option(new WriterMessageMetaData("name"))),
new WriteData(8,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(9,
java.sql.Date.valueOf("1968-08-08"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(10,
new java.math.BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(11, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(12,
"Lord Fancy Pants",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(13,
"An updated description text.",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(14,
java.sql.Date.valueOf("1968-04-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(15,
new java.math.BigDecimal("999.97"),
List(),
Option(new WriterMessageMetaData("salary")))
)
)
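        // The writer logs each generated key; intercepting the debug message asserts that the auto-assigned id 3 was picked up.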
EventFilter.debug(occurrences = 1, message = "GENERATED INSERT KEY: 3") intercept {
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
}
withClue("The exact number of rows should have been written!") {
val count = statement.executeQuery("SELECT COUNT(*) FROM accounts")
count.next() should be(true)
count.getInt(1) shouldEqual 3
}
withClue("Data should have been written to the database!") {
val results = statement.executeQuery("SELECT * FROM accounts ORDER BY id")
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Lord Fancy Pants")
results.getString("description") should be("An updated description text.")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-04-01"))
results.getDouble("salary") should be(999.97)
results.next() should be(true)
results.getLong("id") should be(2)
results.getString("name") should be("Eva Mustermann")
results.getString("description") should be("Some fancy text...")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results.getDouble("salary") should be(1500.00)
results.next() should be(true)
results.getLong("id") should be(3)
results.getString("name") should be("Dr. Evil")
results.getString("description") should be("Beware of Austin Powers!")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-08-08"))
results.getDouble("salary") should be(1500000.00)
}
connection.close()
}
}
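    // NULL handling: None values must be bound as SQL NULL on the matching parameter columns.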
describe("using NULL values") {
it("should set the correct parameter columns to null", DbTest, DbTestMysql) {
val connection = java.sql.DriverManager.getConnection(
s"jdbc:mysql://$databaseHost:$databasePort/$databaseName",
databaseUser,
databasePass
)
val dfasdlFile = "/databases/generic/DatabaseWriter/simple-01.xml"
val xml =
scala.io.Source.fromInputStream(getClass.getResourceAsStream(dfasdlFile)).mkString
val dfasdl = new DFASDL("SIMPLE-01", xml)
val target = new ConnectionInformation(
uri = new URI(connection.getMetaData.getURL),
dfasdlRef = Option(DFASDLReference("TEST", "SIMPLE-01")),
username = Option(databaseUser),
password = Option(databasePass)
)
val databaseWriter = initializeWriter(target, dfasdl)
val msg = new WriteBatchData(
batch = List(
new WriteData(1, 2L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(2, "Eva Mustermann", List(), Option(new WriterMessageMetaData("name"))),
new WriteData(3, None, List(), Option(new WriterMessageMetaData("description"))),
new WriteData(4,
java.sql.Date.valueOf("1968-01-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(5,
new java.math.BigDecimal("1500.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(6, 3L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(7, "Dr. Evil", List(), Option(new WriterMessageMetaData("name"))),
new WriteData(8,
"Beware of Austin Powers!",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(9, None, List(), Option(new WriterMessageMetaData("birthday"))),
new WriteData(10,
new java.math.BigDecimal("1500000.00"),
List(),
Option(new WriterMessageMetaData("salary"))),
new WriteData(11, 1L, List(), Option(new WriterMessageMetaData("id"))),
new WriteData(12,
"Lord Fancy Pants",
List(),
Option(new WriterMessageMetaData("name"))),
new WriteData(13,
"An updated description text.",
List(),
Option(new WriterMessageMetaData("description"))),
new WriteData(14,
java.sql.Date.valueOf("1968-04-01"),
List(),
Option(new WriterMessageMetaData("birthday"))),
new WriteData(15, None, List(), Option(new WriterMessageMetaData("salary")))
)
)
databaseWriter ! msg
databaseWriter ! BaseWriterMessages.CloseWriter
val expectedMessage = BaseWriterMessages.WriterClosed("".right[String])
expectMsg(expectedMessage)
val statement = connection.createStatement()
withClue("The exact number of rows should have been written!") {
val count = statement.executeQuery("SELECT COUNT(*) FROM accounts")
count.next() should be(true)
count.getInt(1) shouldEqual 3
}
withClue("Data should have been written to the database!") {
val results = statement.executeQuery("SELECT * FROM accounts ORDER BY id")
results.next() should be(true)
results.getLong("id") should be(1)
results.getString("name") should be("Lord Fancy Pants")
results.getString("description") should be("An updated description text.")
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-04-01"))
results.getDouble("salary") should be(0.0)
results.next() should be(true)
results.getLong("id") should be(2)
results.getString("name") should be("Eva Mustermann")
results.getString("description") should be(null)
results.getDate("birthday") should be(java.sql.Date.valueOf("1968-01-01"))
results.getDouble("salary") should be(1500.00)
results.next() should be(true)
results.getLong("id") should be(3)
results.getString("name") should be("Dr. Evil")
results.getString("description") should be("Beware of Austin Powers!")
results.getDate("birthday") should be(null)
results.getDouble("salary") should be(1500000.00)
}
connection.close()
}
}
}
}
}
| Tensei-Data/tensei-agent | src/it/scala/databases/mysql/DatabaseWriterActorTest.scala | Scala | agpl-3.0 | 63,232 |
// allow supplementary chars in identifiers
class 𐐀 {
def 𐐀 = 42
// regression check: anything goes in strings
def x = "𐐀"
def y = s"$𐐀"
def w = s" 𐐀"
}
case class 𐐀𐐀(n: Int) {
def 𐐀𐐀 = n
def `𐐀𐐀1` = n + n
}
// uncontroversially, orphan surrogates may be introduced
// via unicode escape.
class Construction {
  def hi = '\ud801'
  def lo = '\udc00'
  def endhi = "abc\ud801"
  def startlo = "\udc00xyz"
  def reversed = "xyz\udc00\ud801abc"
}
// was: error: illegal character '\ud801', '\udc00'
| scala/scala | test/files/pos/surrogates.scala | Scala | apache-2.0 | 549 |
package org.jobimtext.coref.berkeley.bansalklein
import java.io.{BufferedInputStream, FileInputStream}
import java.util.zip.GZIPInputStream
import edu.berkeley.nlp.coref.Mention
import edu.berkeley.nlp.coref.config.CorefSystemConfiguration
import edu.berkeley.nlp.futile.util.Logger
import scala.io.Source
/**
 * Pronoun context feature from section 3.5 of the Bansal & Klein paper: for a pronoun
 * mention and a candidate antecedent head it looks up precomputed, binned co-occurrence
 * values keyed by the pronoun head, the antecedent head, the two tokens following the
 * pronoun and whether the pronoun is possessive.
 */
object PronounContext {
val Placeholder = "<NONE>"
val punctuationPos = Set(".", ",", "''")
val FeatureFileOption = "featureFile"
val R1BinSizeOption = "R1binSize"
val R2BinSizeOption = "R2binSize"
val R1GapBinSizeOption = "R1GapBinSize"
var features: Option[Map[(String, String, String, String, Boolean), Array[String]]] = None
/**
* Returns true if the feature is defined for the given pair of mentions. This is the case if the current mention
* is from a closed class, but the antecedent mention is not.
*
* @param current the current mention
* @param antecedent a possible antecedent mention
*/
def isFeatureApplicable(current: Mention, antecedent: Mention): Boolean = current.mentionType.isClosedClass && !antecedent.mentionType.isClosedClass
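  // Possessive pronouns carry a part-of-speech tag ending in "$" (e.g. PRP$).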
def isPossessive(pronounMention: Mention): Boolean = pronounMention.headPos.endsWith("$")
def getR1(pronounMention: Mention): String = getR(pronounMention, pronounMention.headIdx + 1)
def getR2(pronounMention: Mention): String = getR(pronounMention, pronounMention.headIdx + 2)
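  // Token at the given offset, with punctuation (and out-of-range positions) mapped to the placeholder.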
private def getR(mention: Mention, index: Int): String = {
if (punctuationPos.contains(mention.accessPosOrPlaceholder(index))) Placeholder else mention.accessWordOrPlaceholder(index, Placeholder)
}
private def bin(d: Double, binSize: Double) = Math.round((d / binSize).toFloat)
def loadFeatures(config: CorefSystemConfiguration): Unit = {
val path = config.getAdditionalProperty(getClass, FeatureFileOption, "").asInstanceOf[String]
    if (path.isEmpty) throw new IllegalStateException("PronounContext feature used without a precomputed " +
      "feature file")
    val source =
      if (path.endsWith(".gz"))
        Source.fromInputStream(new BufferedInputStream(new GZIPInputStream(new FileInputStream(path))))
      else
        Source.fromFile(path)
Logger.logss(s"[B&K] Loading pronoun context features from $path into memory...")
val featureMap = source.getLines().map(_.split(' ')).map(parts => {
assert(parts.length == 8)
(parts(0).intern(), parts(1).intern(), parts(2).intern(), parts(3).intern(), parts(4).toBoolean) -> Array(parts(5), parts(6), parts(7))
}).toMap
source.close()
features = Some(featureMap)
}
def getR1Feature(config: CorefSystemConfiguration, pronounMention: Mention, replacementMention: Mention) =
getFeature(config, pronounMention, replacementMention, 0, config.getAdditionalProperty(getClass, R1BinSizeOption).asInstanceOf[Double])
def getR2Feature(config: CorefSystemConfiguration, pronounMention: Mention, replacementMention: Mention) =
getFeature(config, pronounMention, replacementMention, 1, config.getAdditionalProperty(getClass, R2BinSizeOption).asInstanceOf[Double])
def getR1GapFeature(config: CorefSystemConfiguration, pronounMention: Mention, replacementMention: Mention) =
getFeature(config, pronounMention, replacementMention, 2, config.getAdditionalProperty(getClass, R1GapBinSizeOption).asInstanceOf[Double])
private def getFeature(config: CorefSystemConfiguration, pronounMention: Mention, replacementMention: Mention, featureIdx: Int, binSize: Double): String = {
assert(isFeatureApplicable(pronounMention, replacementMention))
    if (features.isEmpty) loadFeatures(config)
val tuple = (pronounMention.headString, replacementMention.headString, getR1(pronounMention), getR2(pronounMention), isPossessive(pronounMention))
val feature = features.get.get(tuple)
feature match {
case None => throw new IllegalStateException("Feature missing for tuple " + tuple)
case Some(featureValues) =>
val featureValue = featureValues(featureIdx)
if (featureValue.startsWith("M")) featureValue else bin(featureValue.toDouble, binSize).toString
}
}
}
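// A minimal, self-contained sketch (not part of the original sources; the object name
// and sample values are made up) illustrating how the bin() rounding above collapses
// raw feature values into coarse buckets before they are emitted as feature strings.
object PronounContextBinningExample {
  // Same rounding as PronounContext.bin: divide by the bin size, then round.
  private def bin(d: Double, binSize: Double): Int = Math.round((d / binSize).toFloat)

  def main(args: Array[String]): Unit = {
    // With a bin size of 0.5, nearby raw values land in the same bucket...
    println(bin(1.20, 0.5)) // 2
    // ...while slightly larger ones round up to the next bucket.
    println(bin(1.30, 0.5)) // 3
  }
}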
| timfeu/berkeleycoref-thesaurus | src/main/java/org/jobimtext/coref/berkeley/bansalklein/PronounContext.scala | Scala | gpl-3.0 | 4,188 |
package fpinscala.datastructures
sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]
object Tree {
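  // Total number of nodes (branches and leaves) in the tree.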
def size[A](tree: Tree[A]): Int = tree match {
case Leaf(_) => 1
case Branch(l, r) => 1 + size(l) + size(r)
}
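  // Largest value held in a tree of integers.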
  def maximum(tree: Tree[Int]): Int = tree match {
case Leaf(x) => x
case Branch(l, r) => maximum(l) max maximum(r)
}
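  // Length in edges of the longest path from the root down to a leaf.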
def depth[A](tree: Tree[A]): Int = tree match {
case Leaf(_) => 0
case Branch(l, r) => 1 + (depth(l) max depth(r))
}
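  // Transforms every leaf value with f while preserving the tree's shape.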
def map[A, B](tree: Tree[A])(f: A => B): Tree[B] = tree match {
case Leaf(x) => Leaf(f(x))
case Branch(l, r) => Branch(map(l)(f), map(r)(f))
}
}
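// A minimal usage sketch (not part of the original exercises; the object name and
// sample values are made up) exercising the combinators above on a small tree.
object TreeExample {
  import Tree._

  def main(args: Array[String]): Unit = {
    val t: Tree[Int] = Branch(Branch(Leaf(1), Leaf(2)), Leaf(3))
    println(size(t))        // 5: two branches plus three leaves
    println(maximum(t))     // 3
    println(depth(t))       // 2: longest root-to-leaf path in edges
    println(map(t)(_ * 10)) // Branch(Branch(Leaf(10),Leaf(20)),Leaf(30))
  }
}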
| demonyangyue/fpinscala | exercises/src/main/scala/fpinscala/datastructures/Tree.scala | Scala | mit | 728 |