column     type           values
code       stringlengths  5 .. 1M
repo_name  stringlengths  5 .. 109
path       stringlengths  6 .. 208
language   stringclasses  1 value (Scala)
license    stringclasses  15 values
size       int64          5 .. 1M
package ml.combust.mleap.executor.error

class ExecutorException(message: String, cause: Throwable) extends RuntimeException(message, cause) {
  def this(message: String) = this(message, null)
  def this(err: Throwable) = this(err.getMessage, err)
}
combust/mleap
mleap-executor/src/main/scala/ml/combust/mleap/executor/error/ExecutorException.scala
Scala
apache-2.0
274
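A short usage sketch of the three construction forms above (the wrapped exceptions are illustrative, not from the source):

  // Wrap a lower-level failure, keeping it as the cause.
  val wrapped = new ExecutorException("model loading failed", new java.io.IOException("file missing"))

  // Message-only and cause-only auxiliary constructors.
  val messageOnly = new ExecutorException("transformer not found")
  val fromCause   = new ExecutorException(new IllegalArgumentException("bad frame"))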
package reftree.geometry

import monocle.{Lens, Optional}

import scala.collection.immutable.ListMap

/**
 * Semi-interpolation varies a single value over time.
 * It can be seen as an [[Interpolation]] with one end fixed (left or right).
 */
trait SemiInterpolation[A] { self ⇒
  /** Produce the value at time `t` (from 0 to 1) */
  def apply(value: A, t: Double): A

  /**
   * Map the time interval with the given function
   *
   * For example, the function (_ * 2) will make the interpolation twice as fast,
   * and it will happen in the first half of the full time interval.
   */
  def mapTime(f: Double ⇒ Double) = SemiInterpolation[A] { (value, t) ⇒
    self(value, f(t) min 1 max 0)
  }

  /**
   * Make the interpolation happen in the given timespan, instead of [0; 1]
   *
   * This is essentially a more convenient alternative to `mapTime`.
   * For example, `timespan(0, 1.0/2)` is equivalent to `mapTime(_ * 2)`.
   */
  def timespan(from: Double, to: Double) = mapTime(t ⇒ (t - from) / (to - from))

  /**
   * Obtain a semi-interpolation of type `B` that varies a value of type `A`
   * “inside” it using the current semi-interpolation
   */
  def lens[B](l: Lens[B, A]) = SemiInterpolation[B] { (value, t) ⇒
    l.modify(self(_, t))(value)
  }

  /**
   * Obtain a semi-interpolation of type `B` that varies a value of type `A`
   * “inside” it using the current semi-interpolation
   */
  def optional[B](o: Optional[B, A]) = SemiInterpolation[B] { (value, t) ⇒
    o.modify(self(_, t))(value)
  }
}

object SemiInterpolation {
  def apply[A](f: (A, Double) ⇒ A): SemiInterpolation[A] = new SemiInterpolation[A] {
    def apply(value: A, t: Double) = f(value, t)
  }
}

/**
 * A trait for interpolating between two values of a certain type over time (from 0 to 1)
 */
trait Interpolation[A] { self ⇒
  def apply(left: A, right: A, t: Double): A

  /**
   * Sample the range between `left` and `right` using `n` values
   */
  def sample(left: A, right: A, n: Int, inclusive: Boolean = true): Stream[A] = {
    val range = if (inclusive) {
      Seq.tabulate(n)(i ⇒ (i + 0.0) / (n - 1))
    } else {
      Seq.tabulate(n)(i ⇒ (i + 1.0) / (n + 1))
    }
    range.toStream.map(self(left, right, _))
  }

  /**
   * Map the time interval with the given function
   *
   * For example, the function (_ * 2) will make the interpolation twice as fast,
   * and it will happen in the first half of the full time interval.
   */
  def mapTime(f: Double ⇒ Double) = Interpolation[A] { (left, right, t) ⇒
    self(left, right, f(t) min 1 max 0)
  }

  /**
   * Make the interpolation happen in the given timespan, instead of [0; 1]
   *
   * This is essentially a more convenient alternative to `mapTime`.
   * For example, `timespan(0, 1.0/2)` is equivalent to `mapTime(_ * 2)`.
   */
  def timespan(from: Double, to: Double) = mapTime(t ⇒ (t - from) / (to - from))

  /**
   * Combine with another interpolation of the same type
   *
   * Note that this operation is not commutative. It has a left bias,
   * i.e. the combined interpolations are applied from left to right,
   * always modifying the `left` argument.
   */
  def +(that: Interpolation[A]) = Interpolation[A] { (left, right, t) ⇒
    that(self(left, right, t), right, t)
  }

  /**
   * Obtain an interpolation of type `B` that varies a value of type `A`
   * “inside” it using the current interpolation
   *
   * Note that this operation is left-biased,
   * i.e. it applies the `set` function of the lens to the `left` argument.
   */
  def lens[B](l: Lens[B, A]): Interpolation[B] = Interpolation[B] { (left, right, t) ⇒
    l.set(self(l.get(left), l.get(right), t))(left)
  }

  /**
   * Obtain an interpolation of type `B` that varies a value of type `A`
   * “inside” it using the current interpolation
   *
   * Note that this operation is left-biased,
   * i.e. it applies the `set` function of the optional to the `left` argument.
   */
  def optional[B](o: Optional[B, A]): Interpolation[B] = Interpolation[B] { (left, right, t) ⇒
    (o.getOption(left), o.getOption(right)) match {
      case (Some(l), Some(r)) ⇒ o.set(self(l, r, t))(left)
      case _ ⇒ left
    }
  }

  /**
   * Derive a semi-interpolation by providing a function to calculate
   * the `left` argument from the `right` one
   */
  def withLeft(rightToLeft: A ⇒ A) = SemiInterpolation[A] { (value, t) ⇒
    self(rightToLeft(value), value, t)
  }

  /** Derive a semi-interpolation by using a fixed `left` argument */
  def withLeft(left: A) = SemiInterpolation[A]((value, t) ⇒ self(left, value, t))

  /**
   * Derive a semi-interpolation by providing a function to calculate
   * the `right` argument from the `left` one
   */
  def withRight(leftToRight: A ⇒ A) = SemiInterpolation[A] { (value, t) ⇒
    self(value, leftToRight(value), t)
  }

  /** Derive a semi-interpolation by using a fixed `right` argument */
  def withRight(right: A) = SemiInterpolation[A]((value, t) ⇒ self(value, right, t))

  /**
   * Derive an interpolation for `Option[A]` by providing semi-interpolations
   * for the remaining arguments when either `left` or `right` arguments are `None`
   */
  def option(
    leftOnly: SemiInterpolation[A],
    rightOnly: SemiInterpolation[A]
  ): Interpolation[Option[A]] = Interpolation[Option[A]] {
    case (Some(l), Some(r), t) ⇒ Some(self(l, r, t))
    case (Some(l), None, t) ⇒ Some(leftOnly(l, t))
    case (None, Some(r), t) ⇒ Some(rightOnly(r, t))
    case (None, None, t) ⇒ None
  }

  /**
   * Derive an interpolation for `Option[A]` by providing a function to calculate
   * the missing argument (i.e. None) from the remaining one (i.e. Some)
   */
  def option(default: A ⇒ A): Interpolation[Option[A]] =
    option(self.withRight(default), self.withLeft(default))

  /**
   * Derive an interpolation for `Option[A]` by providing a default
   * to use in place of a missing argument (i.e. None)
   */
  def option(default: A): Interpolation[Option[A]] =
    option(self.withRight(default), self.withLeft(default))

  /**
   * Derive an interpolation for a sequence (that assumes sequences of the same length)
   */
  def seq = Interpolation[Seq[A]] { (left, right, t) ⇒
    (left zip right) map { case (l, r) ⇒ self(l, r, t) }
  }

  /**
   * Derive an interpolation for a [[List]] (that assumes lists of the same length)
   */
  def list = Interpolation[List[A]] { (left, right, t) ⇒
    (left zip right) map { case (l, r) ⇒ self(l, r, t) }
  }

  /**
   * Derive an interpolation for a [[ListMap]]
   *
   * This method will compare the keys in both maps and use `Option[B]`
   * for `left` and `right` arguments associated with each key,
   * based on whether the key is only in the left map (Some, None),
   * only in the right one (None, Some), or in both (Some, Some).
   */
  def listMap[B](implicit evidence1: A =:= Option[B], evidence2: Option[B] =:= A) =
    Interpolation[ListMap[String, B]] { (left, right, t) ⇒
      val ids = (left.keysIterator ++ right.keysIterator).toSeq.distinct
      ListMap(ids flatMap { id ⇒
        self(left.get(id), right.get(id), t).map(id → _)
      }: _*)
    }
}

object Interpolation {
  /** A shorthand for constructing interpolations */
  def apply[A](f: (A, A, Double) ⇒ A): Interpolation[A] = new Interpolation[A] {
    def apply(left: A, right: A, t: Double): A = f(left, right, t)
  }

  /** A basic linear interpolation for doubles */
  val double = Interpolation[Double]((l, r, t) ⇒ l * (1 - t) + r * t)
}

/** Assorted syntax sugar */
trait InterpolationSyntax {
  implicit class LensInterpolation[A, B](l: Lens[B, A]) {
    def interpolateWith(interpolation: Interpolation[A]) = interpolation.lens(l)
    def semiInterpolateWith(semiInterpolation: SemiInterpolation[A]) = semiInterpolation.lens(l)
  }

  implicit class LensSeqInterpolation[A, B](l: Lens[B, Seq[A]]) {
    def interpolateEachWith(interpolation: Interpolation[A]) = interpolation.seq.lens(l)
  }

  implicit class LensListInterpolation[A, B](l: Lens[B, List[A]]) {
    def interpolateEachWith(interpolation: Interpolation[A]) = interpolation.list.lens(l)
  }

  implicit class LensListMapInterpolation[A, B](l: Lens[B, ListMap[String, A]]) {
    def interpolateEachWith(interpolation: Interpolation[Option[A]]) = interpolation.listMap[A].lens(l)
  }

  implicit class OptionalInterpolation[A, B](o: Optional[B, A]) {
    def interpolateWith(interpolation: Interpolation[A]) = interpolation.optional(o)
    def semiInterpolateWith(semiInterpolation: SemiInterpolation[A]) = semiInterpolation.optional(o)
  }

  implicit class OptionalListInterpolation[A, B](o: Optional[B, List[A]]) {
    def interpolateEachWith(interpolation: Interpolation[A]) = interpolation.list.optional(o)
  }

  implicit class OptionalListMapInterpolation[A, B](o: Optional[B, ListMap[String, A]]) {
    def interpolateEachWith(interpolation: Interpolation[Option[A]]) = interpolation.listMap[A].optional(o)
  }
}
stanch/reftree
core/shared/src/main/scala/reftree/geometry/Interpolation.scala
Scala
gpl-3.0
9,036
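A quick usage sketch of the API above (values chosen for illustration):

  import reftree.geometry.Interpolation

  // Five evenly spaced samples between 0 and 10, endpoints included.
  val samples = Interpolation.double.sample(0.0, 10.0, 5).toList
  // List(0.0, 2.5, 5.0, 7.5, 10.0)

  // A semi-interpolation that fades a value in from a fixed left end of 0.
  val fadeIn = Interpolation.double.withLeft(0.0)
  val half = fadeIn(10.0, 0.5) // 5.0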
package com.sksamuel.scapegoat

import java.io.File
import java.util.concurrent.atomic.AtomicInteger

import com.sksamuel.scapegoat.io.IOUtils

import scala.tools.nsc._
import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
import scala.tools.nsc.transform.{ Transform, TypingTransformers }

class ScapegoatPlugin(val global: Global) extends Plugin {

  override val name: String = "scapegoat"
  override val description: String = "scapegoat compiler plugin"
  val component = new ScapegoatComponent(global, ScapegoatConfig.inspections)
  override val components: List[PluginComponent] = List(component)

  override def init(options: List[String], error: String => Unit): Boolean = {
    options.find(_.startsWith("disabledInspections:")) match {
      case Some(option) => component.disabled = option.drop("disabledInspections:".length).split(':').toList
      case _ =>
    }
    options.find(_.startsWith("consoleOutput:")) match {
      case Some(option) => component.consoleOutput = option.drop("consoleOutput:".length).toBoolean
      case _ =>
    }
    options.find(_.startsWith("ignoredFiles:")) match {
      case Some(option) => component.ignoredFiles = option.drop("ignoredFiles:".length).split(':').toList
      case _ =>
    }
    for (verbose <- options.find(_.startsWith("verbose:"))) {
      component.verbose = verbose.drop("verbose:".length).toBoolean
    }
    options.find(_.startsWith("customInspectors:")) match {
      case Some(option) =>
        component.customInspections = option.drop("customInspectors:".length)
          .split(':')
          .toSeq
          .map(inspection => Class.forName(inspection).newInstance.asInstanceOf[Inspection])
      case _ =>
    }
    options.find(_.startsWith("reports:")) match {
      case Some(option) =>
        option.drop("reports:".length)
          .split(':')
          .toSeq
          .foreach {
            case "xml" => component.disableXML = false
            case "html" => component.disableHTML = false
            case "scalastyle" => component.disableScalastyleXML = false
            case "all" =>
              component.disableXML = false
              component.disableHTML = false
              component.disableScalastyleXML = false
            case _ =>
          }
      case None =>
        component.disableXML = false
        component.disableHTML = false
        component.disableScalastyleXML = false
    }
    options.find(_.startsWith("overrideLevels:")) foreach {
      case option =>
        component.feedback.levelOverridesByInspectionSimpleName =
          option.drop("overrideLevels:".length).split(":").map {
            case nameLevel =>
              nameLevel.split("=") match {
                case Array(insp, level) => insp -> Levels.fromName(level)
                case _ => throw new IllegalArgumentException(
                  s"Malformed argument to 'overrideLevels': '$nameLevel'. " +
                    "Expecting 'name=level' where 'name' is the simple name of " +
                    "an inspection and 'level' is the simple name of a " +
                    "com.sksamuel.scapegoat.Level constant, e.g. 'Warning'.")
              }
          }.toMap
    }
    options.find(_.startsWith("dataDir:")) match {
      case Some(option) =>
        component.dataDir = new File(option.drop("dataDir:".length))
        true
      case None =>
        error("-P:scapegoat:dataDir not specified")
        false
    }
  }

  override val optionsHelp: Option[String] = Some(Seq(
    "-P:scapegoat:dataDir:<pathtodatadir>                where the report should be written",
    "-P:scapegoat:disabledInspections:<listofinspections> colon separated list of disabled inspections",
    "-P:scapegoat:customInspectors:<listofinspections>   colon separated list of custom inspections",
    "-P:scapegoat:ignoredFiles:<patterns>                colon separated list of regexes to match ",
    "                                                    files to ignore.",
    "-P:scapegoat:verbose:<boolean>                      enable/disable verbose console messages",
    "-P:scapegoat:consoleOutput:<boolean>                enable/disable console report output",
    "-P:scapegoat:reports:<reports>                      colon separated list of reports to generate.",
    "                                                    Valid options are `xml', `html', `scalastyle',",
    "                                                    or `all'.",
    "-P:scapegoat:overrideLevels:<levels>                override the built-in warning levels, e.g. to",
    "                                                    downgrade an Error to a Warning.",
    "                                                    <levels> should be a colon separated list of name=level",
    "                                                    settings, where 'name' is the simple name of an inspection",
    "                                                    and 'level' is the simple name of a",
    "                                                    com.sksamuel.scapegoat.Level constant, e.g. 'Warning'.")
    .mkString("\n"))
}

class ScapegoatComponent(val global: Global, inspections: Seq[Inspection])
    extends PluginComponent with TypingTransformers with Transform {

  require(inspections != null)

  import global._

  var dataDir: File = new File(".")
  var disabled: List[String] = Nil
  var ignoredFiles: List[String] = Nil
  var consoleOutput: Boolean = false
  var verbose: Boolean = false
  var debug: Boolean = false
  var summary: Boolean = true
  var disableXML = true
  var disableHTML = true
  var disableScalastyleXML = true
  var customInspections: Seq[Inspection] = Nil

  private val count = new AtomicInteger(0)

  override val phaseName: String = "scapegoat"
  override val runsAfter: List[String] = List("typer")
  override val runsBefore = List[String]("patmat")

  def disableAll: Boolean = disabled.exists(_.compareToIgnoreCase("all") == 0)

  def activeInspections: Seq[Inspection] =
    (inspections ++ customInspections)
      .filterNot(inspection => disabled.contains(inspection.getClass.getSimpleName))

  lazy val feedback = new Feedback(consoleOutput, global.reporter)

  override def newPhase(prev: scala.tools.nsc.Phase): Phase = new Phase(prev) {
    override def run(): Unit = {
      if (disableAll) {
        reporter.echo("[info] [scapegoat] All inspections disabled")
      } else {
        reporter.echo(s"[info] [scapegoat] ${activeInspections.size} activated inspections")
        if (verbose) {
          if (ignoredFiles.nonEmpty)
            reporter.echo(s"[info] [scapegoat] $ignoredFiles ignored file patterns")
        }
        super.run()

        if (summary) {
          val errors = feedback.errors.size
          val warns = feedback.warns.size
          val infos = feedback.infos.size
          val level = if (errors > 0) "error" else if (warns > 0) "warn" else "info"
          reporter.echo(s"[$level] [scapegoat] Analysis complete: ${count.get} files - $errors errors $warns warns $infos infos")
        }

        if (!disableHTML) {
          val html = IOUtils.writeHTMLReport(dataDir, feedback)
          if (verbose) reporter.echo(s"[info] [scapegoat] Written HTML report [$html]")
        }
        if (!disableXML) {
          val xml = IOUtils.writeXMLReport(dataDir, feedback)
          if (verbose) reporter.echo(s"[info] [scapegoat] Written XML report [$xml]")
        }
        if (!disableScalastyleXML) {
          val xml = IOUtils.writeScalastyleReport(dataDir, feedback)
          if (verbose) reporter.echo(s"[info] [scapegoat] Written Scalastyle XML report [$xml]")
        }
      }
    }
  }

  protected def newTransformer(unit: CompilationUnit): Transformer = {
    count.incrementAndGet()
    new Transformer(unit)
  }

  class Transformer(unit: global.CompilationUnit) extends TypingTransformer(unit) {
    override def transform(tree: global.Tree): global.Tree = {
      if (ignoredFiles.exists(unit.source.path.matches)) {
        if (debug) {
          reporter.echo(s"[debug] Skipping scapegoat [$unit]")
        }
      } else {
        if (debug) {
          reporter.echo(s"[debug] Scapegoat analysis [$unit] .....")
        }
        val context = new InspectionContext(global, feedback)
        activeInspections.foreach(inspection => {
          val inspector = inspection.inspector(context)
          for (traverser <- inspector.postTyperTraverser)
            traverser.traverse(tree.asInstanceOf[inspector.context.global.Tree])
          inspector.postInspection()
        })
      }
      tree
    }
  }
}
pwwpche/scalac-scapegoat-plugin
src/main/scala/com/sksamuel/scapegoat/plugin.scala
Scala
apache-2.0
8,705
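Since init() fails without dataDir, wiring the plugin up from sbt looks roughly like this (a sketch; the option names come straight from init above, but the plugin artifact coordinates are not shown and would come from addCompilerPlugin):

  // build.sbt (sketch)
  scalacOptions ++= Seq(
    "-P:scapegoat:dataDir:" + (target.value / "scapegoat").getAbsolutePath, // required
    "-P:scapegoat:reports:html:xml",
    "-P:scapegoat:verbose:true"
  )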
package boatcraft.compatibility.industrialcraft2.modifiers.materials

import boatcraft.api.modifiers.Material
import net.minecraft.util.ResourceLocation
import ic2.api.item.IC2Items

object Carbon extends Material {
  override def getTexture = new ResourceLocation("ic2", "textures/models/boatCarbon.png")

  override def getUnlocalizedName = "Carbon Fibre"

  override def getLocalizedName = "industrialcraft2.materials.carbon.name"

  override def getItem = IC2Items getItem "carbonFiber"

  override def getStick = IC2Items getItem "coalDust"

  //TODO Balance! These are completely meaningless numbers, someone PLEASE balance it!
  override def getCrashResistance = 2
}
Open-Code-Developers/BoatCraft
src/main/scala/boatcraft/compatibility/industrialcraft2/modifiers/materials/Carbon.scala
Scala
mit
666
package com.typesafe.akka.http.benchmark.datastore

import scala.collection.immutable

import com.typesafe.akka.http.benchmark.entity.{ Fortune, World }

import scala.concurrent.Future

trait DataStore {
  def findWorldById(id: Int): Future[Option[World]]
  def requireWorldById(id: Int): Future[World]
  def updateWorld(world: World): Future[Boolean]
  def getFortunes: Future[immutable.Seq[Fortune]]
}
saturday06/FrameworkBenchmarks
frameworks/Scala/akka-http/src/main/scala/com/typesafe/akka/http/benchmark/datastore/DataStore.scala
Scala
bsd-3-clause
404
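A minimal in-memory implementation sketch of the trait, e.g. for tests (it assumes World exposes an id: Int field, which this snippet does not verify against the benchmark's real entity):

  import scala.collection.immutable
  import scala.concurrent.Future
  import com.typesafe.akka.http.benchmark.entity.{ Fortune, World }

  class InMemoryDataStore(initial: Map[Int, World]) extends DataStore {
    @volatile private var worlds = initial

    def findWorldById(id: Int): Future[Option[World]] =
      Future.successful(worlds.get(id))

    def requireWorldById(id: Int): Future[World] =
      worlds.get(id)
        .map(Future.successful)
        .getOrElse(Future.failed(new NoSuchElementException(s"no world $id")))

    def updateWorld(world: World): Future[Boolean] = {
      worlds = worlds.updated(world.id, world) // assumes an `id` field
      Future.successful(true)
    }

    def getFortunes: Future[immutable.Seq[Fortune]] =
      Future.successful(immutable.Seq.empty)
  }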
/*
 * This file is part of pelam-scala-csv
 *
 * Copyright © Peter Lamberg 2015 (pelam-scala-csv@pelam.fi)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package fi.pelam.csv.cell

// @formatter:off IntelliJ 14.1 (Scala plugin) formatter messes up Scaladoc
/**
 * Basically this class is a sample implementation of a more specialised subtype of
 * [[fi.pelam.csv.cell.Cell]].
 *
 * It is expected that any nontrivial client will want to specify its own subtypes
 * of [[fi.pelam.csv.cell.Cell]].
 *
 * This class is quite simple, but the companion object is more
 * interesting as it provides [[Cell.Parser]] functions
 * which produce IntegerCell instances (or errors if parsing fails) from String data.
 *
 * @param cellKey the location of this cell in a CSV table.
 * @param formatter a function used to convert the integer held by this cell into a `String`
 *                  to be stored in CSV text data.
 * @param value the integer stored in CSV.
 */
// @formatter:on IntelliJ 14.1 (Scala plugin) formatter messes up ScalaDoc
final case class IntegerCell(override val cellKey: CellKey, override val value: Int)
  (implicit override val formatter: IntegerCell.NumberFormatter = IntegerCell.defaultFormatter)
  extends Cell with NumberCell[Int] {

  override def updatedCellKey(cellKey: CellKey) = {
    if (this.cellKey == cellKey) {
      this
    } else {
      copy(cellKey = cellKey)
    }
  }

  /**
   * Shorter version of `toString` to be used in debug table outputs.
   * Should identify cell type and value in a small amount of text.
   */
  override def shortString(): String = "i " + value
}

/**
 * The IntegerCell class itself is quite simple, but this companion object is more
 * interesting as it provides [[Cell.Parser]] functions. These functions in turn
 * produce IntegerCell instances (or errors if parsing fails) from String data.
 *
 * [[Cell.Parser]] functions can be used to upgrade cells in [[fi.pelam.csv.table.TableReader]]
 * in an easy way by using them in a map passed to [[fi.pelam.csv.table.TableReaderConfig.makeCellUpgrader]].
 * The map specifies which cells should be interpreted as containing integers.
 */
object IntegerCell extends PrimitiveCellObject[Int] {
  override val primitiveDescription: String = "integer"

  override def numberToCell(cellKey: CellKey, input: String, number: Number, formatter: NumberFormatter) = {
    val intValue = number.intValue()
    if (intValue == number) {
      Right(IntegerCell(cellKey, intValue)(formatter))
    } else {
      Left(CellParsingError(s"Expected $primitiveDescription, but value '$input' is decimal."))
    }
  }
}
pelamfi/pelam-scala-csv
src/main/scala/fi/pelam/csv/cell/IntegerCell.scala
Scala
apache-2.0
3,152
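A small construction sketch (it assumes CellKey is the (row, column) coordinate type from the same package, taking two Int arguments; check the actual signature in fi.pelam.csv.cell):

  import fi.pelam.csv.cell.{ CellKey, IntegerCell }

  // A cell holding 42 at row 0, column 1; uses the default formatter.
  val cell = IntegerCell(CellKey(0, 1), 42)
  println(cell.shortString()) // "i 42"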
package scala.scalajs.js.typedarray

import scala.scalajs.js

/** <span class="badge badge-ecma6" style="float: right;">ECMAScript 6</span>
 *  A [[TypedArray]] of signed 32-bit integers
 */
@js.native
class Int32Array private extends TypedArray[Int, Int32Array] {

  /** Constructs an Int32Array with the given length. Initialized to all 0 */
  def this(length: Int) = this()

  /** Creates a new Int32Array with the same elements as the given TypedArray
   *
   *  The elements are converted before being stored in the new Int32Array.
   */
  def this(typedArray: TypedArray[_, _]) = this()

  /** Creates a new Int32Array with the elements in the given array */
  def this(array: js.Array[_]) = this()

  /** Creates an Int32Array view on the given ArrayBuffer */
  def this(buffer: ArrayBuffer, byteOffset: Int = 0, length: Int = ???) = this()
}

/** <span class="badge badge-ecma6" style="float: right;">ECMAScript 6</span>
 *  [[Int32Array]] companion
 */
@js.native
object Int32Array extends TypedArrayStatic
lrytz/scala-js
library/src/main/scala/scala/scalajs/js/typedarray/Int32Array.scala
Scala
bsd-3-clause
1,016
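Typical use from Scala.js code (runs only in a JavaScript environment):

  import scala.scalajs.js.typedarray.Int32Array

  val buf = new Int32Array(4) // [0, 0, 0, 0]
  buf(0) = 42
  val sum = (0 until buf.length).map(buf(_)).sum // 42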
package com.overviewdocs.background.filegroupcleanup

import scala.concurrent.Future
import akka.actor.{ Actor, ActorRef, Props }
import akka.pattern.pipe

import com.overviewdocs.background.filegroupcleanup.FileGroupRemovalRequestQueueProtocol._

/**
 * Looks for deleted [[FileGroup]]s on start up, and sends removal requests to
 * the [[FileGroupRemovalRequestQueue]].
 *
 * When requests have been sent, the actor terminates. This functionality probably
 * doesn't need to be an actor.
 */
trait DeletedFileGroupCleaner extends Actor {
  import context._

  protected case object RemoveDeletedFileGroups
  protected case object RequestsSent

  override def preStart = self ! RemoveDeletedFileGroups

  override def receive = {
    case RemoveDeletedFileGroups => requestRemovals pipeTo self
    case RequestsSent => context.stop(self)
  }

  private def requestRemovals = {
    deletedFileGroupFinder.indexIds.map { ids =>
      ids.foreach(fileGroupRemovalRequestQueue ! RemoveFileGroup(_))
      RequestsSent
    }
  }

  protected val deletedFileGroupFinder: DeletedFileGroupFinder
  protected val fileGroupRemovalRequestQueue: ActorRef
}

object DeletedFileGroupCleaner {
  def apply(fileGroupRemovalRequestQueue: ActorRef): Props =
    Props(new DeletedFileGroupCleanerImpl(fileGroupRemovalRequestQueue))

  private class DeletedFileGroupCleanerImpl(fileGroupRemovalRequestQueueActor: ActorRef)
      extends DeletedFileGroupCleaner {
    override protected val deletedFileGroupFinder = DeletedFileGroupFinder
    override protected val fileGroupRemovalRequestQueue = fileGroupRemovalRequestQueueActor
  }
}
overview/overview-server
worker/src/main/scala/com/overviewdocs/background/filegroupcleanup/DeletedFileGroupCleaner.scala
Scala
agpl-3.0
1,617
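Spawning the cleaner from an actor system (a sketch; queueRef stands in for the real FileGroupRemovalRequestQueue actor):

  import akka.actor.{ ActorRef, ActorSystem }

  val system = ActorSystem("worker")
  val queueRef: ActorRef = ??? // the FileGroupRemovalRequestQueue actor
  // The cleaner enqueues one RemoveFileGroup per deleted FileGroup, then stops itself.
  system.actorOf(DeletedFileGroupCleaner(queueRef), "deleted-file-group-cleaner")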
/**
 * (C) Copyright IBM Corp. 2015 - 2017
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.ibm.sparktc.sparkbench.datageneration

import java.io.File

import com.ibm.sparktc.sparkbench.datageneration.mlgenerator.KMeansDataGen
import com.ibm.sparktc.sparkbench.testfixtures.{BuildAndTeardownData, SparkSessionProvider}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.io.Source

class KMeansDataGenTest extends FlatSpec with Matchers with BeforeAndAfterAll {
  val cool = new BuildAndTeardownData("kmeans-data-gen")

  val fileName = s"${cool.sparkBenchTestFolder}/${java.util.UUID.randomUUID.toString}"

  var file: File = _

  override def beforeAll() {
    cool.createFolders()
  }

  override def afterAll() {
    cool.deleteFolders()
  }

  "KMeansDataGeneration" should "generate a csv correctly" in {
    val csvFile = s"$fileName.csv"

    val m = Map(
      "name" -> "kmeans",
      "rows" -> 10,
      "cols" -> 10,
      "output" -> csvFile
    )

    val generator = KMeansDataGen(m)

    generator.doWorkload(spark = SparkSessionProvider.spark)

    file = new File(csvFile)

    val fileList = file.listFiles().toList.filter(_.getName.startsWith("part"))

    val fileContents: List[String] = fileList
      .flatMap(
        Source.fromFile(_)
          .getLines()
          .toList
      )

    val length: Int = fileContents.length

    /*
     * Okay, some explanation here. I made headers default for csv, so there's going to be
     * one extra header line per partition file. If the csv header option ever changes, this
     * test will break, but now you know what's going on so you can fix it :)
     */
    length shouldBe generator.numRows + fileList.length
  }

  it should "generate an ORC file correctly" in {
    val spark = SparkSessionProvider.spark
    val orcFile = s"$fileName.orc"

    val m = Map(
      "name" -> "kmeans",
      "rows" -> 10,
      "cols" -> 10,
      "output" -> orcFile
    )

    val generator = KMeansDataGen(m)

    generator.doWorkload(spark = spark)

    file = new File(orcFile)
    val list = file.listFiles().toList
    val fileList = list.filter(_.getName.startsWith("part"))
    fileList.length should be > 0

    println(s"reading file $orcFile")

    val fromDisk = spark.read.orc(orcFile)
    val rows = fromDisk.count()
    rows shouldBe 10
  }
}
SparkTC/spark-bench
cli/src/test/scala/com/ibm/sparktc/sparkbench/datageneration/KMeansDataGenTest.scala
Scala
apache-2.0
2,891
package picasso.frontend.compilerPlugin

import scala.tools.nsc
import scala.tools.nsc.{Global, Phase}
import scala.tools.nsc.plugins.{Plugin, PluginComponent}
import picasso.utils.{LogCritical, LogError, LogWarning, LogNotice, LogInfo, LogDebug, Logger}
import picasso.frontend.compilerPlugin.transform._

/** This class is the entry point for the plugin. */
class PicassoPlugin(val global: Global) extends Plugin {
  import global._

  val name = "picasso"
  val description = "Software model checker for programs using actors."

  var stopBeforeAnalysis = false
  var experimental = false

  sealed abstract class OptKind(val option: String, val description: String)
  case class NoArgOpt(opt: String, descr: String, action: () => Unit) extends OptKind(opt, descr)
  case class OneArgOpt(opt: String, descr: String, action: String => Unit) extends OptKind(opt, descr)
  case class ManyArgOpt(opt: String, descr: String, action: Seq[String] => Unit) extends OptKind(opt, descr)

  private val oneArg = "smth"
  private val manyArgs = "a,b,..."
  private val optPrefix = " "
  private val optSpaces = 28

  def opts = List(
    NoArgOpt("parse", "Checks only whether the program is something we can handle (stop after parsing)",
      (() => stopBeforeAnalysis = true)),
    NoArgOpt("error", "Only error messages", (() => Logger.setMinPriority(LogError))),
    NoArgOpt("quiet", "Warning and error messages", (() => Logger.setMinPriority(LogWarning))),
    NoArgOpt("info", "More verbose", (() => Logger.setMinPriority(LogInfo))),
    NoArgOpt("debug", "Most verbose", (() => Logger.setMinPriority(LogDebug))),
    ManyArgOpt("hide", "Removes the output coming from " + manyArgs, (args => args foreach (Logger.disallow))),
    NoArgOpt("XP", "Enable experimental features", (() => experimental = true))
  )

  def fullOption(opt: String) = "-P:" + name + ":" + opt

  private def printOpt(o: OptKind) = {
    val start = o match {
      case NoArgOpt(o, _, _) => fullOption(o)
      case OneArgOpt(o, _, _) => fullOption(o) + "=" + oneArg
      case ManyArgOpt(o, _, _) => fullOption(o) + "=" + manyArgs
    }
    val middle = (0 until (optSpaces - start.length)).map(_ => ' ').mkString
    val end = o.description
    optPrefix + start + middle + end
  }

  /** The help message displaying the options for that plugin. */
  override val optionsHelp: Option[String] = {
    val help = opts.map(printOpt).mkString("", "\n", "")
    Some(help)
  }

  /** Processes the command-line options. */
  private def splitList(lst: String): Seq[String] = lst.split(',').map(_.trim).filter(!_.isEmpty)

  private def processOption(option: String) {
    opts.find(o => option startsWith o.option) match {
      case Some(NoArgOpt(_, _, act)) => act()
      case Some(OneArgOpt(o, _, act)) => act(option.substring(o.length + 1))
      case Some(ManyArgOpt(o, _, act)) => act(splitList(option.substring(o.length + 1)))
      case None => Logger("Plugin", LogWarning, "Invalid option (ignoring it): " + option)
    }
  }

  override def processOptions(options: List[String], error: String => Unit) {
    options foreach processOption
  }

  val analyzer = new Analysis(global, this)

  val pluginPhases = List(
    new Unfolder(global, this) -> "flatten nested expressions",
    new LLifter(global, this) -> "move nested functions/classes to package level",
    new Constructor(global, this) -> "move initialisation code from definitions to constructor",
    new Link(global, this) -> "performs some kind of linking to get rid of the inheritance layer",
    new GettersSetters(global, this) -> "within one class replaces the getters and setters by the appropriate variable access",
    analyzer -> "the analysis"
  )

  val components = pluginPhases map (_._1)

  def computeCover = analyzer.computeCover
  def testForTermination = analyzer.testForPossibleTerminationUsingFullCover
  def testForError = analyzer.testForError
}
dzufferey/picasso
frontend/compilerPlugin/src/main/scala/picasso/frontend/compilerPlugin/PicassoPlugin.scala
Scala
bsd-2-clause
3,981
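scalac strips the -P:picasso: prefix before handing options to processOptions, so e.g. -P:picasso:hide=graphviz,dbp arrives as "hide=graphviz,dbp" (the argument values here are illustrative). A sketch mirroring what splitList then does with the argument:

  // Command line:  scalac -Xplugin:picasso.jar -P:picasso:debug -P:picasso:hide=graphviz,dbp Foo.scala
  // The plugin receives: processOptions(List("debug", "hide=graphviz,dbp"), error)
  val parts = "graphviz, dbp,".split(',').map(_.trim).filter(!_.isEmpty).toSeq
  // Seq("graphviz", "dbp") — same logic as PicassoPlugin.splitList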
package com.eevolution.context.dictionary.infrastructure.repository

import java.util.UUID

import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.OrganizationInfo
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession

import scala.concurrent.{ExecutionContext, Future}

/**
 * Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 * Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
 * Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 07/11/17.
 */

/**
 * Organization Info Repository
 * @param session
 * @param executionContext
 */
class OrganizationInfoRepository(session: JdbcSession)(implicit executionContext: ExecutionContext)
  extends api.repository.OrganizationInfoRepository[OrganizationInfo, Int]
  with OrganizationInfoMapping {

  def getById(id: Int): Future[OrganizationInfo] = {
    Future(run(queryOrganizationInfo.filter(_.organizationId == lift(id))).headOption.get)
  }

  def getByUUID(uuid: UUID): Future[OrganizationInfo] = {
    Future(run(queryOrganizationInfo.filter(_.uuid == lift(uuid.toString))).headOption.get)
  }

  def getByOrganizationInfoId(id: Int): Future[List[OrganizationInfo]] = {
    // Filter by the id parameter (it was previously ignored, returning every row).
    Future(run(queryOrganizationInfo.filter(_.organizationId == lift(id))))
  }

  def getAll(): Future[List[OrganizationInfo]] = {
    Future(run(queryOrganizationInfo))
  }

  def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[OrganizationInfo]] = {
    val offset = page * pageSize
    val limit = (page + 1) * pageSize // exclusive end index of the requested page
    for {
      count <- countOrganizationInfo()
      elements <- if (offset > count) Future.successful(Nil)
      else selectOrganizationInfo(offset, limit)
    } yield {
      PaginatedSequence(elements, page, pageSize, count)
    }
  }

  private def countOrganizationInfo() = {
    Future(run(queryOrganizationInfo.size).toInt)
  }

  private def selectOrganizationInfo(offset: Int, limit: Int): Future[Seq[OrganizationInfo]] = {
    // `limit` is an exclusive end index, so slice rather than take(limit),
    // which would have returned up to `limit` elements instead of one page.
    Future(run(queryOrganizationInfo).slice(offset, limit).toSeq)
  }
}
adempiere/ADReactiveSystem
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/OrganizationInfoRepository.scala
Scala
gpl-3.0
2,882
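The paging arithmetic in getAllByPage, concretely: for page = 2 and pageSize = 25 the requested slice is rows 50 until 75:

  val page = 2
  val pageSize = 25
  val offset = page * pageSize       // 50, first row of the page (inclusive)
  val limit  = (page + 1) * pageSize // 75, end of the page (exclusive)
  // selectOrganizationInfo then returns rows 50..74 via slice(offset, limit)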
package at.logic.gapt.formats.shlk_parsing

import at.logic.gapt.formats.simple.TypeParsers
import at.logic.gapt.proofs.HOLSequent
import at.logic.gapt.proofs.lk.solve

import scala.util.parsing.combinator._
import scala.util.matching.Regex
import java.io.InputStreamReader
import at.logic.gapt.expr.schema._
import at.logic.gapt.proofs.lk.base._
import collection.mutable.{ Map => MMap }
import at.logic.gapt.proofs.shlk._
import scala.Tuple4
import at.logic.gapt.expr.schema.IntZero
import scala.Tuple2
import at.logic.gapt.expr.StringSymbol
import at.logic.gapt.expr._
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.occurrences.FormulaOccurrence

object sFOParserCNT {
  val nLine = sys.props( "line.separator" )

  def parseProofs( input: InputStreamReader ): List[( String, LKProof )] = {
    // ("p",parseProof(input, "root"))::Nil
    val m = sFOParserCNT.parseProof( input )
    m.foldLeft( List.empty[( String, LKProof )] )( ( res, pair ) =>
      ( pair._1, pair._2._1.get( "root" ).get ) :: ( pair._1, pair._2._2.get( "root" ).get ) :: res )
  }

  //--------------------------------- parse SLK proof -----------------------

  def parseProofFlat( txt: InputStreamReader ): MMap[String, Tuple2[LKProof, LKProof]] = {
    val map = parseProof( txt )
    map.map( pp => {
      val name = pp._1
      val pair = pp._2
      ( name, Tuple2( pair._1.get( "root" ).get, pair._2.get( "root" ).get ) )
    } )
  }

  //plabel should return the proof corresponding to this label
  def parseProof( txt: InputStreamReader ): MMap[String, Tuple2[MMap[String, LKProof], MMap[String, LKProof]]] = {
    var mapBase = MMap.empty[String, LKProof]
    var mapStep = MMap.empty[String, LKProof]
    var map = MMap.empty[String, LKProof]
    var baseORstep: Int = 1
    SchemaProofDB.clear
    var defMMap = MMap.empty[Const, Tuple2[List[IntegerTerm], SchemaFormula]]
    var list = List[String]()
    var error_buffer = ""
    // lazy val sp2 = new ParserTxt
    // sp2.parseAll(sp2.line, txt)
    val bigMMap = MMap.empty[String, Tuple2[MMap[String, LKProof], MMap[String, LKProof]]]
    val mapPredicateToArity = MMap.empty[String, Int]
    dbTRS.clear
    lazy val sp = new SimpleSLKParser

    sp.parseAll( sp.slkProofs, txt ) match {
      case sp.Success( result, input ) => // println( nLine + nLine + "SUCCESS parse :) " + nLine )
      case x: AnyRef => // { println( nLine + nLine + "FAIL parse : " + nLine + error_buffer); throw new Exception( nLine + nLine + "FAIL parse :( " + nLine ); }
        throw new Exception( x.toString )
    }

    class SimpleSLKParser extends JavaTokenParsers with TypeParsers {
      def line: Parser[List[Unit]] = rep( cmappingBase )

      def cmappingBase: Parser[Unit] = ( "comment" ~ "\"[\"]*\"" ) ^^ { x => () } | mappingBase

      def mappingBase: Parser[Unit] = label.r ~ ":" ~ proof ^^ {
        case l ~ ":" ~ p => {
          error_buffer = l
          if ( baseORstep == 2 ) {
            map = MMap.empty[String, LKProof]
            baseORstep = 1
          }
          map.put( l, p )
          mapBase = map
        }
      }

      def mappingStep: Parser[Unit] = label.r ~ ":" ~ proof ^^ {
        case l ~ ":" ~ p => {
          error_buffer = l
          // mapStep.put(l,p)
          if ( baseORstep == 1 ) {
            map = MMap.empty[String, LKProof]
            baseORstep = 2
          }
          map.put( l, p )
          mapStep = map
        }
      }

      def name = """[\\]*[a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,_,0,1,2,3,4,5,6,7,8,9]*""".r

      //~ "(" ~ repsep(term,",") ~ ")"
      def slkProof: Parser[Any] = "proof" ~ name ~ "proves" ~ sequent ~ "base" ~ "{" ~ line ~ "}" ~ "step" ~ "{" ~ rep( mappingStep ) ~ "}" ~ rep( """-""".r ) ^^ {
        case "proof" ~ str ~ str1 ~ seq ~ "base" ~ "{" ~ line1 ~ "}" ~ "step" ~ "{" ~ line2 ~ "}" ~ procents => {
          // proofName = str
          bigMMap.put( str, Tuple2( mapBase, mapStep ) )
          SchemaProofDB.put( new SchemaProof( str, IntVar( "k" ) :: Nil, seq.toHOLSequent, mapBase.get( "root" ).get, mapStep.get( "root" ).get ) )
          mapBase = MMap.empty[String, LKProof]
          mapStep = MMap.empty[String, LKProof]
          // println( nLine + nLine + "Parsing is SUCCESSFUL : "+str)
        }
      }

      def slkProofs: Parser[List[Unit]] = rep( trs ) ~ rep( define ) ~ rep( slkProof ) ^^ {
        case a ~ s => {
          List.empty[Unit]
        }
      }

      def trs: Parser[Unit] = s_term ~ "->" ~ term ~ s_term ~ "->" ~ term ^^ {
        case t1 ~ "->" ~ base ~ t2 ~ "->" ~ step => {
          t1 match {
            case sTerm( func1, i1, arg1 ) =>
              t2 match {
                case sTerm( func2, i2, arg2 ) => {
                  // if(func1 == func2) {
                  dbTRS.add( func1.asInstanceOf[Const], Tuple2( t1, base ), Tuple2( t2, step ) )
                  // }
                }
              }
          }
        }
      }

      def proof: Parser[LKProof] = ax | orL | orR1 | orR | orR2 | negL | negR | cut | pFOLink | andL | andR | andL1 | andL2 | weakL | weakR | contrL | contrR | andEqR1 | andEqR2 | andEqR3 | orEqR1 | orEqR2 | orEqR3 | andEqL1 | andEqL2 | andEqL3 | orEqL1 | orEqL2 | orEqL3 | allL | exR | exL | exLHyper | allR | allRHyper | allLHyper | exRHyper | impL | impR | termDefL1 | termDefR1 | arrowL | foldL | arrowR | autoprop

      def label: String = """[0-9]*[root]*"""

      def formula: Parser[SchemaFormula] = ( atom | neg | big | and | or | indPred | imp | forall | exists | variable | constant | forall_hyper | exists_hyper ) ^? { case trm: SchemaFormula => trm }

      def intTerm: Parser[SchemaExpression] = index //| schemaFormula

      def index: Parser[IntegerTerm] = ( sum | intConst | intVar | succ )

      def intConst: Parser[IntegerTerm] = ( intZero | intOne | intTwo | intThree )

      def intOne: Parser[IntegerTerm] = "1".r ^^ { case x => { Succ( IntZero() ) } }

      def intTwo: Parser[IntegerTerm] = "2".r ^^ { case x => { Succ( Succ( IntZero() ) ) } }

      def intThree: Parser[IntegerTerm] = "3".r ^^ { case x => { Succ( Succ( Succ( IntZero() ) ) ) } }

      def intZero: Parser[IntegerTerm] = "0".r ^^ { case x => { IntZero() } }

      def PLUSterm: Parser[SchemaExpression] = "(" ~ term ~ "+" ~ term ~ ")" ^^ {
        case "(" ~ t1 ~ "+" ~ t2 ~ ")" => {
          val func = Const( "+", Tindex -> ( Tindex -> Tindex ) )
          App( App( func, t1 ), t2 )
        }
      }

      def MINUSterm: Parser[SchemaExpression] = "(" ~ term ~ "-" ~ term ~ ")" ^^ {
        case "(" ~ t1 ~ "-" ~ t2 ~ ")" => {
          val func = Const( "-", Tindex -> ( Tindex -> Tindex ) )
          App( App( func, t1 ), t2 )
        }
      }

      def MULTterm: Parser[SchemaExpression] = "(" ~ term ~ "*" ~ term ~ ")" ^^ {
        case "(" ~ t1 ~ "*" ~ t2 ~ ")" => {
          val func = Const( "*", Tindex -> ( Tindex -> Tindex ) )
          App( App( func, t1 ), t2 )
        }
      }

      def POWterm: Parser[SchemaExpression] = "EXP(" ~ index ~ "," ~ term ~ ")" ^^ {
        case "EXP(" ~ t1 ~ "," ~ t2 ~ ")" => {
          val func = Const( "EXP", Tindex -> ( Tindex -> Tindex ) )
          App( App( func, t1 ), t2 )
        }
      }

      def sum: Parser[IntegerTerm] = intVar ~ "+" ~ intConst ^^ {
        case indV ~ "+" ~ indC => {
          // println( nLine + nLine + "sum")
          indC match {
            case Succ( IntZero() ) => Succ( indV )
            case Succ( Succ( IntZero() ) ) => Succ( Succ( indV ) )
            case Succ( Succ( Succ( IntZero() ) ) ) => Succ( Succ( Succ( indV ) ) )
          }
        }
      }

      def intVar: Parser[IntVar] = "[i,j,k,p,u,q]".r ^^ { case x => { /*println( nLine + nLine + "intVar");*/ IntVar( x ) } }

      def succ: Parser[IntegerTerm] = "s(" ~ intTerm ~ ")" ^^ {
        case "s(" ~ intTerm ~ ")" => Succ( intTerm.asInstanceOf[IntegerTerm] )
      }

      def schemaFormula = formula

      def indPred: Parser[SchemaFormula] = """[A-Z]*[a-z]*[0-9]*""".r ~ "(" ~ repsep( index, "," ) ~ ")" ^^ {
        case x ~ "(" ~ l ~ ")" => {
          if ( !mapPredicateToArity.isDefinedAt( x.toString ) )
            mapPredicateToArity.put( x.toString, l.size )
          else if ( mapPredicateToArity.get( x.toString ).get != l.size ) {
            println( nLine + "Input ERROR : Indexed Predicate '" + x.toString + "' should have arity " + mapPredicateToArity.get( x.toString ).get + ", but not " + l.size + " !" + nLine + nLine )
            throw new Exception( nLine + "Input ERROR : Indexed Predicate '" + x.toString + "' should have arity " + mapPredicateToArity.get( x.toString ).get + ", but not " + l.size + " !" + nLine )
          }
          // println( nLine + nLine + "IndexedPredicate");
          // val map: MMap[Var, T])
          // val subst: SchemaSubstitution1[SchemaExpression] = new SchemaSubstitution1[SchemaExpression]()
          // val new_ind = subst(ind)
          // val new_map = (subst.map - subst.map.head._1.asInstanceOf[Var]) + Tuple2(subst.map.head._1.asInstanceOf[Var], Pred(new_ind.asInstanceOf[IntegerTerm]) )
          // val new_subst = new SchemaSubstitution1(new_map)
          IndexedPredicate( x, l )
        }
      }

      def define: Parser[Any] = indPred ~ ":=" ~ schemaFormula ^^ {
        case indpred ~ ":=" ~ sf => {
          indpred match {
            case IndexedPredicate( f, ls ) => {
              defMMap.put( f, Tuple2( ls.asInstanceOf[List[IntegerTerm]], sf.asInstanceOf[SchemaFormula] ) )
            }
          }
        }
      }

      // nested bigAnd bigOr.... ("""BigAnd""".r | """BigOr""".r)
      def prefix: Parser[Tuple4[Boolean, IntVar, IntegerTerm, IntegerTerm]] =
        """[BigAnd]*[BigOr]*""".r ~ "(" ~ intVar ~ "=" ~ index ~ ".." ~ index ~ ")" ^^ {
          case "BigAnd" ~ "(" ~ intVar1 ~ "=" ~ ind1 ~ ".." ~ ind2 ~ ")" => {
            // println( nLine + nLine + "prefix" + nLine + nLine )
            Tuple4( true, intVar1, ind1, ind2 )
          }
          case "BigOr" ~ "(" ~ intVar1 ~ "=" ~ ind1 ~ ".." ~ ind2 ~ ")" => {
            // println( nLine + nLine + "prefix" + nLine + nLine )
            Tuple4( false, intVar1, ind1, ind2 )
          }
        }

      def big: Parser[SchemaFormula] = rep1( prefix ) ~ schemaFormula ^^ {
        case l ~ schemaFormula => {
          // println("Works?")
          l.reverse.foldLeft( schemaFormula.asInstanceOf[SchemaFormula] )( ( res, triple ) => {
            if ( triple._1 )
              BigAnd( triple._2, res, triple._3, triple._4 )
            else
              BigOr( triple._2, res, triple._3, triple._4 )
          } )
        }
      }

      def term: Parser[SchemaExpression] = ( lambdaTerm | PLUSterm | MINUSterm | MULTterm | POWterm | index | fo_term | s_term | abs | variable | constant | var_func | const_func | SOindVar )

      def lambdaTerm: Parser[SchemaExpression] = "(" ~ "λ" ~ FOVariable ~ "." ~ intZero ~ ")" ^^ {
        case "(" ~ "λ" ~ x ~ "." ~ zero ~ ")" => Abs( x, zero )
      }

      def s_term: Parser[SchemaExpression] = "[g,h]".r ~ "(" ~ intTerm ~ "," ~ term ~ ")" ^^ {
        case name ~ "(" ~ i ~ "," ~ args ~ ")" => {
          // println( nLine + "sTerm : "+name+"("+i+","+args+")")
          // println("args = "+args)
          // println("args.extype = "+args.exptype)
          sTerm( name, i, args :: Nil )
        }
      }

      def fo_term: Parser[SchemaExpression] = "[f]".r ~ "(" ~ term ~ ")" ^^ {
        case name ~ "(" ~ arg ~ ")" => {
          // println( nLine + nLine + "foTerm" + nLine + " arg.extype = "+arg.exptype)
          foTerm( name, arg :: Nil )
        }
      }

      def indexedVar: Parser[Var] = regex( new Regex( "[zzz]" ) ) ~ "(" ~ intTerm ~ ")" ^^ {
        case x ~ "(" ~ index ~ ")" => {
          indexedFOVar( x, index.asInstanceOf[IntegerTerm] )
        }
      }

      //indexed variable of type ω->ω
      def indexedwVar: Parser[Var] = regex( new Regex( "[α,c,b,y,a,x,z,s,w,h,m,n,l]" ) ) ~ "(" ~ intTerm ~ ")" ^^ {
        case x ~ "(" ~ index ~ ")" => {
          indexedOmegaVar( x, index.asInstanceOf[IntegerTerm] )
        }
      }

      // TODO: a should be a FOConstant
      def FOVariable: Parser[Var] = regex( new Regex( "[x,v,g,u,q]" + word ) ) ^^ { case x => fowVar( x ) } //foVar(x)}

      def variable: Parser[Var] = ( indexedwVar | indexedVar | FOVariable ) //regex(new Regex("[u-z]" + word)) ^^ {case x => Var(new VariableStringSymbol(x), i->i).asInstanceOf[Var]}

      def constant: Parser[Const] = regex( new Regex( "[t]" + word ) ) ^^ {
        case x => { Const( StringSymbol( x ), Tindex -> Tindex ) }
      }

      def and: Parser[SchemaFormula] = "(" ~ repsep( formula, "/\\" ) ~ ")" ^^ {
        case "(" ~ formulas ~ ")" => {
          formulas.tail.foldLeft( formulas.head )( ( f, res ) => And( f, res ) )
        }
      }

      def or: Parser[SchemaFormula] = "(" ~ repsep( formula, """\/""" ) ~ ")" ^^ {
        case "(" ~ formulas ~ ")" => {
          formulas.tail.foldLeft( formulas.head )( ( f, res ) => Or( f, res ) )
        }
      }

      def imp: Parser[SchemaFormula] = "(" ~ formula ~ "->" ~ formula ~ ")" ^^ {
        case "(" ~ x ~ "->" ~ y ~ ")" => Imp( x, y )
      }

      def abs: Parser[SchemaExpression] = "Abs" ~ variable ~ term ^^ {
        case "Abs" ~ v ~ x => Abs( v, x ).asInstanceOf[SchemaExpression]
      }

      def neg: Parser[SchemaFormula] = "~" ~ formula ^^ { case "~" ~ x => Neg( x ) }

      def atom: Parser[SchemaFormula] = ( inequality | equality | less | lessOrEqual | s_atom | var_atom | const_atom )

      def forall: Parser[SchemaFormula] = "Forall" ~ variable ~ formula ^^ { case "Forall" ~ v ~ x => All( v, x ) }

      def forall_hyper: Parser[SchemaFormula] = "ForallHyper" ~ SOindVar ~ formula ^^ { case "ForallHyper" ~ v ~ x => All( v, x ) }

      def exists: Parser[SchemaFormula] = "Exists" ~ variable ~ formula ^^ { case "Exists" ~ v ~ x => Ex( v, x ) }

      def exists_hyper: Parser[SchemaFormula] = "ExistsHyper" ~ SOindVar ~ formula ^^ { case "ExistsHyper" ~ v ~ x => Ex( v, x ) }

      def var_atom: Parser[SchemaFormula] = regex( new Regex( "[u-z]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ {
        case x ~ "(" ~ params ~ ")" =>
          SchemaAtom( Var( x, FunctionType( To, params map ( _.exptype ) ) ), params )
      }

      // def const_atom: Parser[SchemaFormula] = regex(new Regex("["+symbols+"a-tA-Z0-9]" + word)) ~ "(" ~ repsep(term,",") ~ ")" ^^ {case x ~ "(" ~ params ~ ")" => {
      def const_atom: Parser[SchemaFormula] = regex( new Regex( "[P]" ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ {
        case x ~ "(" ~ params ~ ")" =>
          SchemaAtom( Const( x, FunctionType( To, params map ( _.exptype ) ) ), params )
      }

      def s_atom: Parser[SchemaFormula] = """[BEΣCO]*""".r ~ "(" ~ repsep( term, "," ) ~ ")" ^^ {
        case x ~ "(" ~ params ~ ")" =>
          //TODO: refactor rule to parse only the correct terms
          require( params.size > 0, "A schematic atom needs at least one parameter (of type omega)!" ) //TODO: requirement added later, might break some test cases
          require( params( 0 ).exptype == Tindex, "The first parameter of a schematic formula needs to be of type omega!" ) //TODO: requirement added later, might break some test cases
          SchemaAtom( Const( x, FunctionType( To, params map ( _.exptype ) ) ), params )
      }

      def equality: Parser[SchemaFormula] = eq_infix | eq_prefix // infix is problematic in higher order

      def eq_infix: Parser[SchemaFormula] = term ~ "=" ~ term ^^ { case x ~ "=" ~ y => Eq( x, y ) }

      def inequality: Parser[SchemaFormula] = term ~ "\\=" ~ term ^^ { case x ~ "\\=" ~ y => Neg( Eq( x, y ) ) }

      def eq_prefix: Parser[SchemaFormula] = "=" ~ "(" ~ term ~ "," ~ term ~ ")" ^^ { case "=" ~ "(" ~ x ~ "," ~ y ~ ")" => Eq( x, y ) }

      def less: Parser[SchemaFormula] = term ~ "<" ~ term ^^ { case x ~ "<" ~ y => lessThan( x, y ) }

      def lessOrEqual: Parser[SchemaFormula] = term ~ "<=" ~ term ^^ { case x ~ "<=" ~ y => leq( x, y ) }

      def var_func: Parser[SchemaExpression] = regex( new Regex( "[u-z]" + word ) ) ~ "(" ~ repsep( term, "," ) ~ ")" ^^ {
        case x ~ "(" ~ params ~ ")" =>
          SchemaFunction( Var( x, FunctionType( Tindex -> Tindex, params map ( _.exptype ) ) ), params )
      }

      def SOindVar: Parser[Var] = regex( new Regex( "[x,c,w,h,a,z,b,b',l,f,r,m,y,A,B]" ) ) ^^ { case x => Var( x, Tindex -> Tindex ) }

      /*def var_func: Parser[SchemaExpression] = (var_func1 | var_funcn)
      def var_func1: Parser[SchemaExpression] = regex(new Regex("[u-z]" + word)) ~ "(" ~ repsep(term,",") ~ ")" ~ ":" ~ Type ^^ {case x ~ "(" ~ params ~ ")" ~ ":" ~ tp => Function(new VariableStringSymbol(x), params, tp)}
      def var_funcn: Parser[SchemaExpression] = regex(new Regex("[u-z]" + word)) ~ "^" ~ decimalNumber ~ "(" ~ repsep(term,",") ~ ")" ~ ":" ~ Type ^^ {case x ~ "^" ~ n ~ "(" ~ params ~ ")" ~ ":" ~ tp => genF(n.toInt, Var(new VariableStringSymbol(x)), params)}
      */

      def const_func: Parser[SchemaExpression] = "[v]" ~ "(" ~ repsep( term, "," ) ~ ")" ^^ {
        case x ~ "(" ~ params ~ ")" =>
          SchemaFunction( Const( x, FunctionType( Tindex -> Tindex, params map ( _.exptype ) ) ), params )
      }

      protected def word: String = """[a-zA-Z0-9$_{}]*"""

      protected def symbol: Parser[String] = symbols.r

      def symbols: String = """[\053\055\052\057\0134\0136\074\076\075\0140\0176\077\0100\046\0174\041\043\047\073\0173\0175]+""" // +-*/\^<>=`~?@&|!#{}';

      // def sequent: Parser[Sequent] = formula ~ "|-" ~ formula ^^ { case lf ~ "|-" ~ rf => {
      def sequent: Parser[OccSequent] = repsep( formula, "," ) ~ "|-" ~ repsep( formula, "," ) ^^ {
        case lfs ~ "|-" ~ rfs => {
          // println( nLine + nLine + "SEQUENT")
          Axiom( lfs, rfs ).root
        }
      }

      def ax: Parser[LKProof] = "ax(" ~ sequent ~ ")" ^^ {
        case "ax(" ~ sequent ~ ")" => {
          // println( nLine + nLine + "AXIOM")
          Axiom( sequent )
        }
        case _ => { println( "ERROR" ); Axiom( List(), List() ) }
      }

      def proof_name: Parser[String] = """[\\]*[a-z]*[0-9]*""".r

      // def pLink: Parser[LKProof] = "pLink(" ~ "(" ~ proof_name ~ "," ~ index ~ ")" ~ sequent ~ ")" ^^ {
      //   case "pLink(" ~ "(" ~ name ~ "," ~ v ~ ")" ~ sequent ~ ")" => {
      //     // println( nLine + nLine + "pLink")
      //     SchemaProofLinkRule(sequent.toHOLSequent, name, v::Nil)
      //   }
      // }

      def pFOLink: Parser[LKProof] = "pLink(" ~ "(" ~ proof_name ~ "," ~ repsep( term, "," ) ~ ")" ~ sequent ~ ")" ^^ {
        case "pLink(" ~ "(" ~ name ~ "," ~ l ~ ")" ~ sequent ~ ")" => {
          // println( nLine + nLine + "pLink")
          FOSchemaProofLinkRule( sequent.toHOLSequent, name, l )
        }
      }

      def orR1: Parser[LKProof] = "orR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "orR1")
          OrRight1Rule( map.get( l ).get, f1, f2 )
        }
      }

      def orR2: Parser[LKProof] = "orR2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orR2(" ~ label ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "orR2")
          OrRight2Rule( map.get( label ).get, f1, f2 )
        }
      }

      def orR: Parser[LKProof] = "orR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orR(" ~ label ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "orR")
          OrRightRule( map.get( label ).get, f1, f2 )
        }
      }

      def orL: Parser[LKProof] = "orL(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orL(" ~ l1 ~ "," ~ l2 ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "orL")
          OrLeftRule( map.get( l1 ).get, map.get( l2 ).get, f1, f2 )
        }
      }

      def andR: Parser[LKProof] = "andR(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andR(" ~ l1 ~ "," ~ l2 ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "andR")
          // println( nLine + "error_buffer = "+error_buffer)
          // println( nLine + map.get(l).get.root.toString)
          // println(map.get(l1).get.root)
          // println( nLine + nLine )
          // println(map.get(l2).get.root)
          // println( nLine + nLine )
          val p = AndRightRule( map.get( l1 ).get, map.get( l2 ).get, f1, f2 )
          // println(p.root)
          p
        }
      }

      def cut: Parser[LKProof] = "cut(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "cut(" ~ l1 ~ "," ~ l2 ~ "," ~ f ~ ")" => {
          // println( nLine + nLine + "cut")
          // println( nLine + "error_buffer = "+error_buffer)
          CutRule( map.get( l1 ).get, map.get( l2 ).get, f )
        }
      }

      def negL: Parser[LKProof] = "negL(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "negL(" ~ label ~ "," ~ formula ~ ")" => {
          // println( nLine + nLine + "negL")
          NegLeftRule( map.get( label ).get, formula )
        }
        case _ => {
          println( nLine + nLine + "Error!" )
          sys.exit( 10 )
        }
      }

      def negR: Parser[LKProof] = "negR(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "negR(" ~ label ~ "," ~ formula ~ ")" => {
          // println( nLine + nLine + map.get(label).get.root.toString)
          // println( nLine + nLine + "negR")
          NegRightRule( map.get( label ).get, formula )
        }
      }

      def weakR: Parser[LKProof] = "weakR(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "weakR(" ~ label ~ "," ~ formula ~ ")" => {
          // println( nLine + nLine + "weakR")
          WeakeningRightRule( map.get( label ).get, formula )
        }
      }

      def weakL: Parser[LKProof] = "weakL(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "weakL(" ~ label ~ "," ~ formula ~ ")" => {
          // println( nLine + nLine + "weakL")
          WeakeningLeftRule( map.get( label ).get, formula )
        }
      }

      // def eqAnd1: Parser[LKProof] = "eqAnd1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
      //   case "eqAnd1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
      //     AndEquivalenceRule1(map.get(l).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula])
      //   }
      // }

      def andL1: Parser[LKProof] = "andL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "andL1")
          AndLeft1Rule( map.get( l ).get, f1, f2 )
        }
      }

      def andL2: Parser[LKProof] = "andL2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andL2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "andL2")
          AndLeft2Rule( map.get( l ).get, f1, f2 )
        }
      }

      def andL: Parser[LKProof] = "andL(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andL(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          // println( nLine + nLine + "andL")
          // println( nLine + "error_buffer = "+error_buffer)
          // println( nLine + map.get(l).get.root.toString)
          val p = AndLeftRule( map.get( l ).get, f1, f2 )
          p
          // val and = And(f1,f2)
          // val aux = p.root.antecedent.tail.head.formula
          // println( nLine + "p = "+aux)
          // println( nLine + "and = "+and)
          // println( nLine + nLine + aux.syntaxEquals(and))
          // println( nLine + "f1 = "+f1)
          // var res = p
          // f1 match {
          //   case BigAnd(ind,f,lb,ub) => {
          //     println("ERROR 5")
          //     // sys.exit(1)
          //     res = AndEquivalenceRule1(p, and.asInstanceOf[SchemaFormula], BigAnd(ind,f,lb,Succ(ub)).asInstanceOf[SchemaFormula])
          //     println( nLine + nLine + "res = "+res.root.antecedent.head.formula)
          //     // return res
          //     res
          //   }
          //   case _ => {
          //     println("ERROR 3")
          //     // sys.exit(1)
          //     res
          //   }
          // }
          // println("ERROR 2")
          // res
          // sys.exit(1)
        }
      }

      def andEqR1: Parser[LKProof] = "andEqR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andEqR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          AndRightEquivalenceRule1( map.get( l ).get, f1, f2 )
        }
      }

      def andEqR2: Parser[LKProof] = "andEqR2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andEqR2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          AndRightEquivalenceRule2( map.get( l ).get, f1, f2 )
        }
      }

      def andEqR3: Parser[LKProof] = "andEqR3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andEqR3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          AndRightEquivalenceRule3( map.get( l ).get, f1, f2 )
        }
      }

      def andEqL1: Parser[LKProof] = "andEqL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andEqL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          AndLeftEquivalenceRule1( map.get( l ).get, f1, f2 )
        }
      }

      def andEqL2: Parser[LKProof] = "andEqL2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andEqL2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          AndLeftEquivalenceRule2( map.get( l ).get, f1, f2 )
        }
      }

      def andEqL3: Parser[LKProof] = "andEqL3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "andEqL3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          AndLeftEquivalenceRule3( map.get( l ).get, f1, f2 )
        }
      }

      def orEqR1: Parser[LKProof] = "orEqR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orEqR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          OrRightEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def orEqR2: Parser[LKProof] = "orEqR2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orEqR2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          OrRightEquivalenceRule2( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def orEqR3: Parser[LKProof] = "orEqR3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orEqR3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          OrRightEquivalenceRule3( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def orEqL1: Parser[LKProof] = "orEqL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orEqL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          OrLeftEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def orEqL2: Parser[LKProof] = "orEqL2(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orEqL2(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          OrLeftEquivalenceRule2( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def orEqL3: Parser[LKProof] = "orEqL3(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "orEqL3(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          OrLeftEquivalenceRule3( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def contrL: Parser[LKProof] = "contrL(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "contrL(" ~ l ~ "," ~ f ~ ")" => {
          // println( nLine + nLine + "contrL")
          ContractionLeftRule( map.get( l ).get, f )
        }
      }

      def contrR: Parser[LKProof] = "contrR(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "contrR(" ~ l ~ "," ~ f ~ ")" => {
          // println( nLine + nLine + "contrR")
          ContractionRightRule( map.get( l ).get, f )
        }
      }

      def exR: Parser[LKProof] = "exR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ term ~ ")" ^^ {
        case "exR(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ term ~ ")" => {
          ExistsRightRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], term.asInstanceOf[SchemaExpression] )
        }
      }

      def allL: Parser[LKProof] = "allL(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ term ~ ")" ^^ {
        case "allL(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ term ~ ")" => {
          ForallLeftRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], term.asInstanceOf[SchemaExpression] )
        }
      }

      def allR: Parser[LKProof] = "allR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ ( indexedwVar | FOVariable ) ~ ")" ^^ {
        case "allR(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ v ~ ")" => {
          ForallRightRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], v.asInstanceOf[Var] )
        }
      }

      def exL: Parser[LKProof] = "exL(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ ( indexedwVar | FOVariable ) ~ ")" ^^ {
        case "exL(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ v ~ ")" => {
          ExistsLeftRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], v.asInstanceOf[Var] )
        }
      }

      def exLHyper: Parser[LKProof] = "exLHyper(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ SOindVar ~ ")" ^^ {
        case "exLHyper(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ v ~ ")" => {
          ExistsHyperLeftRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], v.asInstanceOf[Var] )
        }
      }

      def allRHyper: Parser[LKProof] = "allRHyper(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ SOindVar ~ ")" ^^ {
        case "allRHyper(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ v ~ ")" => {
          ForallHyperRightRule( map.get( l ).get, aux.asInstanceOf[SchemaFormula], main.asInstanceOf[SchemaFormula], v.asInstanceOf[Var] )
        }
      }

      def exRHyper: Parser[LKProof] = "exRHyper(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ term ~ ")" ^^ {
        case "exRHyper(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ t ~ ")" => {
          ExistsHyperRightRule( map.get( l ).get, aux, main, t )
        }
      }

      def allLHyper: Parser[LKProof] = "allLHyper(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ "," ~ term ~ ")" ^^ {
        case "allLHyper(" ~ l ~ "," ~ aux ~ "," ~ main ~ "," ~ t ~ ")" => {
          ForallHyperLeftRule( map.get( l ).get, aux, main, t )
        }
      }

      def impL: Parser[LKProof] = "impL(" ~ label.r ~ "," ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "impL(" ~ l1 ~ "," ~ l2 ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          ImpLeftRule( map.get( l1 ).get, map.get( l2 ).get, f1, f2 )
        }
      }

      def impR: Parser[LKProof] = "impR(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "impR(" ~ label ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          ImpRightRule( map.get( label ).get, f1, f2 )
        }
      }

      def foldL: Parser[LKProof] = "foldL(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "foldL(" ~ label ~ "," ~ aux ~ "," ~ main ~ ")" => {
          foldLeftRule( map.get( label ).get, aux, main )
        }
      }

      def arrowL: Parser[LKProof] = "arrowL(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "arrowL(" ~ label ~ "," ~ f1 ~ ")" => {
          trsArrowLeftRule( map.get( label ).get, f1 )
        }
      }

      def arrowR: Parser[LKProof] = "arrowR(" ~ label.r ~ "," ~ formula ~ ")" ^^ {
        case "arrowR(" ~ label ~ "," ~ f1 ~ ")" => {
          trsArrowRightRule( map.get( label ).get, f1 )
        }
      }

      def autoprop: Parser[LKProof] = "autoprop(" ~ sequent ~ ")" ^^ {
        case "autoprop(" ~ seq ~ ")" =>
          solve.solvePropositional( seq.toHOLSequent, throwOnError = true ).get
      }

      def termDefL1: Parser[LKProof] = "termDefL1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "termDefL1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          TermLeftEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }

      def termDefR1: Parser[LKProof] = "termDefR1(" ~ label.r ~ "," ~ formula ~ "," ~ formula ~ ")" ^^ {
        case "termDefR1(" ~ l ~ "," ~ f1 ~ "," ~ f2 ~ ")" => {
          TermRightEquivalenceRule1( map.get( l ).get, f1.asInstanceOf[SchemaFormula], f2.asInstanceOf[SchemaFormula] )
        }
      }
    }

    // println( nLine + nLine + "number of SLK-proofs = "+bigMMap.size)
    // println( nLine + "defMMapr size = "+defMMap.size)
    // println( nLine + nLine + nLine + "list = "+list)
    // if (!bigMMap.get("chi").get._2.isDefinedAt(plabel)) println( nLine + nLine + nLine + "Syntax ERROR after ID : " + error_buffer + nLine + nLine)
    // val m = bigMMap.get("chi").get._2.get(plabel).get
    // // println(m.root.antecedent.head+" |- "+m.root.succedent.head)
    // m
    // println( nLine + "SchemaProofDB.size = "+SchemaProofDB.size + nLine )
    bigMMap
  }
}

object getPLinks {
  val nLine = sys.props( "line.separator" )

  def apply( p: LKProof ): List[OccSequent] = p match {
    case Axiom( so ) => Nil
    case UnaryLKProof( _, upperProof, _, _, _ ) => apply( upperProof )
    case BinaryLKProof( _, upperProofLeft, upperProofRight, _, aux1, aux2, _ ) => apply( upperProofLeft ) ::: apply( upperProofRight )
    case UnarySchemaProof( _, upperProof, _, _, _ ) => apply( upperProof )
    case SchemaProofLinkRule( so, name, indices ) => so :: Nil
    case TermEquivalenceRule1( upperProof, _, _, _ ) => apply( upperProof )
    case ForallHyperLeftRule( upperProof, r, a, p, _ ) => apply( upperProof )
    case ExistsHyperRightRule( upperProof, r, a, p, _ ) => apply( upperProof )
    case ForallHyperRightRule( upperProof, r, a, p, _ ) => apply( upperProof )
    case ExistsHyperLeftRule( upperProof, r, a, p, _ ) => apply( upperProof )
    case _ => throw new Exception( nLine + "Missing rule in getPLinks.apply" + nLine )
  }
}

//turns clauses CL,A|-C,D and CL|-E,F into CL |- (~A\/C\/D) /\ (E\/F)
object ClauseSetToCNF {
  //returns: CL |- formulaList
  def apply( seq: HOLSequent ): HOLSequent = {
    val headCLsym = seq.antecedent.head
    if ( seq.antecedent.size == 1 && seq.succedent.size <= 1 ) return seq
    else if ( seq.antecedent.size == 1 )
      return HOLSequent( headCLsym :: Nil, Or( seq.succedent.toList.asInstanceOf[List[SchemaFormula]] ) :: Nil )
    val succ = Or( seq.antecedent.tail.toList.map( f => Neg( f.asInstanceOf[SchemaFormula] ) ) ++ seq.succedent.asInstanceOf[List[SchemaFormula]] )
    HOLSequent( headCLsym :: Nil, succ :: Nil )
  }

  var mapCLsym: MMap[SchemaFormula, List[SchemaFormula]] = MMap.empty[SchemaFormula, List[SchemaFormula]]

  def combiningCLsymbols( ccs: List[HOLSequent] ): MMap[SchemaFormula, List[SchemaFormula]] = {
    ccs.map( fseq => {
      // println( nLine + "combining: "+mapCLsym)
      val seq: HOLSequent = ClauseSetToCNF( fseq )
      // println( nLine + nLine + "seq: "+seq)
      val f = seq.antecedent.head
      if ( !mapCLsym.contains( f.asInstanceOf[SchemaFormula] ) )
        if ( seq.succedent.isEmpty )
          mapCLsym = mapCLsym.updated( f.asInstanceOf[SchemaFormula], List.empty[SchemaFormula] )
        else
          mapCLsym = mapCLsym.updated( f.asInstanceOf[SchemaFormula], seq.succedent.head.asInstanceOf[SchemaFormula] :: Nil )
      else {
        val l = mapCLsym.get( f.asInstanceOf[SchemaFormula] ).get
        if ( seq.succedent.isEmpty )
          mapCLsym = mapCLsym.updated( f.asInstanceOf[SchemaFormula], l )
        else
          mapCLsym = mapCLsym.updated( f.asInstanceOf[SchemaFormula], seq.succedent.head.asInstanceOf[SchemaFormula] :: l )
      }
    } )
    mapCLsym
  }

  def apply( ccs: List[HOLSequent] ): List[HOLSequent] = {
    combiningCLsymbols( ccs )
    mapCLsym.toList.map( pair => HOLSequent( pair._1 :: Nil, And( pair._2.asInstanceOf[List[SchemaFormula]] ) :: Nil ) )
  }
}

object RW {
  //non-grounded map : CL_k -> Schemaformula
  def createMMap( ccs: List[HOLSequent] ): MMap[SchemaFormula, SchemaFormula] = {
    var map = MMap.empty[SchemaFormula, SchemaFormula]
    ccs.foreach( fseq => {
      if ( fseq.antecedent.size > 0 )
        map = map.updated( fseq.antecedent.head.asInstanceOf[SchemaFormula], fseq.succedent.head.asInstanceOf[SchemaFormula] )
    } )
    map
  }

  def rewriteGroundFla( f: SchemaFormula, map: MMap[SchemaFormula, SchemaFormula] ): SchemaFormula = {
    f match {
      case IndexedPredicate( ipred, l ) => {
        if ( l.head == IntZero() )
          return map.get( f.asInstanceOf[SchemaFormula] ).get
        else {
          val k = IntVar( "k" )
          val from = IndexedPredicate( ipred.name, Succ( k ) )
          val to = map.get( from ).get
          val new_map = Map.empty[Var, IntegerTerm] + Tuple2( IntVar( "k" ), Pred( l.head.asInstanceOf[IntegerTerm] ) )
          val subst = SchemaSubstitution( new_map ) //this was once a SchemaSubstitutionCNF, the normal substitution could make trouble here
          return rewriteGroundFla( subst( to ), map )
        }
      }
      case Or( l, r ) => Or( rewriteGroundFla( l, map ), rewriteGroundFla( r, map ) )
      case And( l, r ) => And( rewriteGroundFla( l, map ), rewriteGroundFla( r, map ) )
      case Neg( l ) => Neg( rewriteGroundFla( l, map ) )
      case _ => f
    }
  }
}

object CNFtoSet {
  //f should be in CNF
  def apply( f: SchemaFormula ): List[SchemaFormula] = {
    f match {
      case And( f1,
f2 ) => apply( f1 ) ::: apply( f2 ) case _ => f :: Nil } } }
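// Editor's note: a small worked example (with hypothetical schema formulas
// CL, A, C, D, E, F) of the transformation implemented by ClauseSetToCNF and
// CNFtoSet above:
//
//   ClauseSetToCNF( CL, A |- C, D )  gives  CL |- ~A \/ C \/ D
//   ClauseSetToCNF( CL |- E, F )     gives  CL |- E \/ F
//
// combiningCLsymbols then groups both succedents under the clause-set symbol
// CL, so apply(List(...)) yields the single sequent
//   CL |- (~A \/ C \/ D) /\ (E \/ F),
// and CNFtoSet splits such a conjunction back into the list of its conjuncts.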
loewenheim/gapt
src/main/scala/at/logic/gapt/formats/shlk/sFOparserCNT.scala
Scala
gpl-3.0
38743
package skinny

/**
 * Model interface for SkinnyResource.
 *
 * @tparam Id id
 * @tparam Model model
 */
trait SkinnyModel[Id, Model] {

  /**
   * Extracts raw value from Identity.
   *
   * @param id id
   * @return raw value
   */
  def idToRawValue(id: Id): Any

  /**
   * Converts raw value to Identity.
   *
   * @param value raw value
   * @return id
   */
  def rawValueToId(value: Any): Id

  /**
   * Creates new entity with parameters.
   *
   * @param parameters parameters
   * @return generated id
   */
  def createNewModel(parameters: PermittedStrongParameters): Id

  /**
   * Returns the count of all models.
   *
   * @return the count of all models
   */
  def countAllModels(): Long

  /**
   * Returns all models.
   *
   * @return all models
   */
  def findAllModels(): List[Model]

  /**
   * Returns models by paging.
   *
   * @param pageSize page size
   * @param pageNo page no
   * @return models
   */
  def findModels(pageSize: Int, pageNo: Int): List[Model]

  /**
   * Returns the specified entity if exists.
   * @param id id
   * @return entity if exists
   */
  def findModel(id: Id): Option[Model]

  /**
   * Updates the specified entity with parameters if exists.
   *
   * @param id id
   * @param parameters parameters
   * @return updated count
   */
  def updateModelById(id: Id, parameters: PermittedStrongParameters): Int

  /**
   * Deletes the specified entity if exists.
   * @param id id
   * @return deleted count
   */
  def deleteModelById(id: Id): Int
}
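// Editor's note: a minimal, hypothetical in-memory implementation sketch (not
// part of the original file) illustrating the contract above with Long ids and
// Map-backed storage. PermittedStrongParameters is kept opaque by delegating
// entity construction and update to caller-supplied functions, since its shape
// is not shown here.
class InMemorySkinnyModel[Model](
  create: PermittedStrongParameters => Model,
  update: (Model, PermittedStrongParameters) => Model) extends SkinnyModel[Long, Model] {

  private[this] var autoId: Long = 0L
  private[this] var storage: Map[Long, Model] = Map.empty

  override def idToRawValue(id: Long): Any = id
  override def rawValueToId(value: Any): Long = value.toString.toLong

  override def createNewModel(parameters: PermittedStrongParameters): Long = {
    autoId += 1
    storage += (autoId -> create(parameters))
    autoId
  }

  override def countAllModels(): Long = storage.size.toLong
  override def findAllModels(): List[Model] = storage.values.toList

  // 1-based page numbering, matching the "pageNo" naming in the trait
  override def findModels(pageSize: Int, pageNo: Int): List[Model] =
    findAllModels().slice((pageNo - 1) * pageSize, (pageNo - 1) * pageSize + pageSize)

  override def findModel(id: Long): Option[Model] = storage.get(id)

  override def updateModelById(id: Long, parameters: PermittedStrongParameters): Int =
    storage.get(id) match {
      case Some(m) => storage += (id -> update(m, parameters)); 1
      case None    => 0
    }

  override def deleteModelById(id: Long): Int =
    if (storage.contains(id)) { storage -= id; 1 } else 0
}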
BlackPrincess/skinny-framework
common/src/main/scala/skinny/SkinnyModel.scala
Scala
mit
1512
package org.scalatest.prop

/*
 * Copyright 2001-2022 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Seed data class holding seed value used for generating random value.
 */
final case class Seed(value: Long)

/**
 * Companion object for Seed class.
 */
object Seed {

  import java.util.concurrent.atomic.AtomicReference

  /**
   * This seed is empty under ordinary circumstances. It is here so that the test
   * Runner can poke in a seed value to be used during a test run. If set, it will be used
   * as the seed for all calls to [[Seed.default]].
   */
  private[scalatest] val configuredRef: AtomicReference[Option[Long]] = new AtomicReference(None)

  /**
   * Creates a new Seed using default approach, which is initialized based on the current time.
   *
   * This should not be considered a strong source of seed for randomness -- in cases where high entropy really
   * matters, it's a bit mediocre -- but for general purposes it's typically good enough.
   */
  def default: Seed =
    Seed(
      configuredRef.get() match {
        case Some(value) => value
        case None        => System.currentTimeMillis()
      }
    )

  def configured: Option[Seed] = configuredRef.get().map(Seed(_))
}
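// Editor's note: a small illustrative sketch (not part of the original source)
// of the behaviour described above. With nothing poked into configuredRef,
// `default` falls back to the current time on every call; once the Runner sets
// a value, every call returns that fixed seed. The printed values are made up.
object SeedDemo {
  def main(args: Array[String]): Unit = {
    println(Seed.configured) // None unless a seed was configured by the Runner
    println(Seed.default)    // e.g. Seed(1700000000000) -- time-based fallback
  }
}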
scalatest/scalatest
jvm/core/src/main/scala/org/scalatest/prop/Seed.scala
Scala
apache-2.0
1747
package edu.utdallas

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.util.Vector
import org.apache.spark.rdd.RDD

import scala.io.Source
import java.util.Properties

object Resystem {

  // Keeps only the movie id from each "id:score" pair in the first five recommendations.
  def parse(a: Array[String]): Array[String] = {
    val z = new Array[String](5)
    for (i <- 0 until 5) {
      z(i) = a(i).split(":")(0)
    }
    z
  }

  def find_name1(n: String, m: scala.collection.Map[String, String]): Unit = {
    println(n + ":" + m.get(n).mkString)
  }

  def find_name2(n: Array[String], m: scala.collection.Map[String, String]): Unit = {
    for (i <- 0 until n.length) {
      print(n(i) + ":" + m.get(n(i)).mkString + ",")
    }
  }

  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("recommend system")
    conf.setMaster("local[2]")
    val sc = new SparkContext(conf)

    var lines = sc.textFile("/Users/cy/Desktop/part-00000").cache()
    val rating = sc.textFile("/Users/cy/Desktop/ratings.dat")
    val movie = sc.textFile("/Users/cy/Desktop/movies.dat")

    // movie id -> movie title
    val m = movie.map(x => (x.split("::")(0), x.split("::")(1))).collectAsMap

    val key = args(0)
    // ids of the movies the given user rated with "3"
    val r = rating.map(x => (x.split("::")(0), x.split("::")(1), x.split("::")(2)))
      .filter(_._1 == key)
      .filter(_._3 == "3")
      .map(p => p._2)
      .toArray()

    lines = lines.filter { x =>
      var key = x.split("\\t")(0)
      r.contains(key)
    }

    val data = lines.map(x => (x.split("\\t")(0), x.split("\\t")(1)))
      .mapValues(b => b.split(" "))
      .mapValues(c => parse(c))

    data.foreach { x =>
      find_name1(x._1, m)
      print("here is what we recommend:")
      find_name2(x._2, m)
      println()
    }
  }
}
cyjj/Big-data
Resystem.scala
Scala
gpl-2.0
2315
/*^
  ===========================================================================
  Helios - FX
  ===========================================================================
  Copyright (C) 2013-2016 Gianluca Costa
  ===========================================================================
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
  ===========================================================================
*/

package info.gianlucacosta.helios.fx.application

import javafx.stage.Stage

import info.gianlucacosta.helios.apps.AppInfo
import info.gianlucacosta.helios.fx.dialogs.Alerts

import scalafx.application.Platform

private class StartupThread(
  appInfo: AppInfo,
  splashStage: SplashStage,
  primaryStage: Stage,
  startupCallback: AppStartupCallback
) extends Thread {

  setDaemon(true)

  override def run(): Unit = {
    try {
      startupCallback(appInfo, splashStage, primaryStage)

      Platform.runLater {
        primaryStage.show()
        splashStage.close()
      }
    } catch {
      case ex: Exception =>
        Platform.runLater {
          Alerts.showException(ex, "Startup error")
          System.exit(1)
        }
    }
  }
}
giancosta86/Helios-fx
src/main/scala/info/gianlucacosta/helios/fx/application/StartupThread.scala
Scala
apache-2.0
1801
/**
 * Copyright (C) 2010-2011 LShift Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.lshift.diffa.kernel.notifications

import org.joda.time.DateTime
import net.lshift.diffa.kernel.events.VersionID
import reflect.BeanProperty

case class NotificationEvent(
  @BeanProperty var id: VersionID,
  @BeanProperty var lastUpdated: DateTime,
  @BeanProperty var upstreamVsn: String,
  @BeanProperty var downstreamVsn: String
) {
  def getPairKey = id.pair.key

  // TODO What about the domain ?
  def getDomain = id.pair.domain

  def getEntityId = id.id
  def getTimestamp = lastUpdated.toString()
  def getUpstream = upstreamVsn
  def getDownstream = downstreamVsn
}
aprescott/diffa
kernel/src/main/scala/net/lshift/diffa/kernel/notifications/NotificationEvent.scala
Scala
apache-2.0
1233
/*
 * Copyright 2016 Nicolas Rinaudo
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kantan.regex.java8

import java.time.LocalTime
import kantan.regex.{GroupDecoder, MatchDecoder}
import kantan.regex.java8.arbitrary._
import kantan.regex.laws.discipline.{DisciplineSuite, GroupDecoderTests, MatchDecoderTests, SerializableTests}

class LocalTimeDecoderTests extends DisciplineSuite {

  checkAll("GroupDecoder[LocalTime]", GroupDecoderTests[LocalTime].decoder[Int, Int])
  checkAll("GroupDecoder[LocalTime]", SerializableTests[GroupDecoder[LocalTime]].serializable)

  checkAll("MatchDecoder[LocalTime]", MatchDecoderTests[LocalTime].decoder[Int, Int])
  checkAll("MatchDecoder[LocalTime]", SerializableTests[MatchDecoder[LocalTime]].serializable)
}
nrinaudo/kantan.regex
java8/src/test/scala/kantan/regex/java8/LocalTimeDecoderTests.scala
Scala
apache-2.0
1275
/* * Copyright (C) 2011-2017 Interfaculty Department of Geoinformatics, University of * Salzburg (Z_GIS) & Institute of Geological and Nuclear Sciences Limited (GNS Science) * in the SMART Aquifer Characterisation (SAC) programme funded by the New Zealand * Ministry of Business, Innovation and Employment (MBIE) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package info.smart.models.owc100 import java.net.URL import java.time.{OffsetDateTime, ZoneOffset} import com.typesafe.scalalogging.LazyLogging import org.locationtech.spatial4j.context.jts.JtsSpatialContext import org.scalatest.{MustMatchers, WordSpec} import play.api.libs.json._ class OwcResourceSpec extends WordSpec with MustMatchers with LazyLogging { private lazy val owcResource1: URL = this.getClass().getResource("/owc100/owc1.geojson") private lazy val owcResource2: URL = this.getClass().getResource("/owc100/owc2.geojson") private lazy val owcResource3: URL = this.getClass().getResource("/owc100/owc3.geojson") private val jsonTestCollection1: JsValue = Json.parse(scala.io.Source.fromURL(owcResource1).getLines.mkString) private val jsonTestCollection2: JsValue = Json.parse(scala.io.Source.fromURL(owcResource2).getLines.mkString) private val jsonTestCollection3: JsValue = Json.parse(scala.io.Source.fromURL(owcResource3).getLines.mkString) "Class OWC:Resource GeoJson Section 7.1.2" should { "<xz>.features[i].type String type that SHALL have the value 'Feature'" in { val jsVal = (jsonTestCollection3 \ "features") (0).get val fromJson: JsResult[OwcResource] = Json.fromJson[OwcResource](jsVal) fromJson match { case JsSuccess(r: OwcResource, path: JsPath) => println("id: " + r.id) case e: JsError => println("Errors: " + JsError.toJson(e).toString()) } val result: JsResult[OwcResource] = jsVal.validate[OwcResource] result match { case s: JsSuccess[OwcResource] => println("title: " + s.get.title) case e: JsError => println("Errors: " + JsError.toJson(e).toString()) } (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.isInstanceOf[OwcResource] mustBe true (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.isInstanceOf[OwcResource] mustBe true (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.isInstanceOf[OwcResource] mustBe true } "<xz>.features[i].id SHALL contain a URI value as Unambiguous reference to the identification of the Context resource (IRI)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.id.isInstanceOf[URL] mustBe true (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.id mustEqual new URL("http://ows-9.compusult.net/wes/serviceManagerCSW/csw/http://ows-9.compusult.net/wes/serviceManagerCSW/csw/9496276a-4f6e-47c1-94bb-f604245fac57/") (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.id mustEqual new URL("http://portal.smart-project.info/context/smart-sac_add-nz-dtm-100x100/") } "<xz>.features[i].properties.title SHALL contain Title given to the Context resource" in { (jsonTestCollection1 \ "features") 
(0).get.validate[OwcResource].get.title mustEqual "WPS 52 north" (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.title mustEqual "gml:AbstractFeature" (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.title mustEqual "NZ DTM 100x100" } "<xz>.features[i].properties.abstract MAY contain Account of the content of the Context resource. (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.subtitle mustEqual Some("abstract about data") (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.subtitle mustEqual None (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.subtitle mustEqual Some("New Zealand Digital Terrain Model 100m by 100m resolution") } "<xz>.features[i].properties.updated SHALL contain Date of the last update of the Context resource RFC-3339 date format" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.updateDate mustEqual OffsetDateTime.of(2013, 5, 19, 0, 0, 0, 0, ZoneOffset.ofHours(0)) (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.updateDate mustEqual OffsetDateTime.of(2013, 1, 2, 15, 24, 24, 446 * 1000000, ZoneOffset.ofHoursMinutes(-3, -30)) (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.updateDate mustEqual OffsetDateTime.of(2016, 2, 21, 11, 58, 23, 0, ZoneOffset.ofHours(0)) } "<xz>.features[i].properties.authors.name MAY contain Entity primarily responsible for making the content of the Context resource (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.author mustEqual List() (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.author.head.name mustEqual Some("interactive-instruments") (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.author.isEmpty mustBe true } "<xz>.features[i].properties.publisher MAY contain Entity responsible for making the Context resource available (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.publisher mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.publisher mustEqual None (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.publisher mustEqual Some("GNS Science and Salzburg Uni ZGIS Dept") } "<xz>.features[i].properties.rights MAY contain rights held in and over the Context resource (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.rights mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.rights mustEqual Some("Copyright (c) 2012. Some rights reserved. 
This feed licensed under a Creative Commons Attribution 3.0 License.") (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.rights mustEqual None } "<xz>.features[i].geometry MAY contain Spatial extent or scope of the content of the Context resource, GeoJSON Geometry type (0..1)" in { lazy val jtsCtx = JtsSpatialContext.GEO (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.geospatialExtent mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.geospatialExtent mustEqual Some(jtsCtx.getShapeFactory.rect(-180.0, 180.0, -90.0, 90.0)) (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.geospatialExtent mustEqual Some(jtsCtx.getShapeFactory.rect(164.0, 180.0, -50.0, -31.0)) } "<xz>.features[i].properties.date MAY contain date or an interval for the Context resource, " + "String according to the ISO-8601 format (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.temporalExtent mustEqual Some(List( OffsetDateTime.of(2013, 11, 2, 15, 24, 24, 446 * 1000000, ZoneOffset.ofHours(12)))) (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.temporalExtent mustEqual None (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.temporalExtent mustEqual Some(List( OffsetDateTime.of(2011, 11, 4, 0, 1, 23, 0, ZoneOffset.ofHours(0)), OffsetDateTime.of(2017, 12, 5, 17, 28, 56, 0, ZoneOffset.ofHours(0)) )) } "<xz>.features[i].properties.links.alternates MAY contain Reference to a description of the Context resource " + "in alternative format, Array of link objects (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.contentDescription mustEqual List() (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.contentDescription.head.mimeType mustEqual Some("text/html") (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.contentDescription.head.href.isInstanceOf[URL] mustBe true (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.contentDescription.isEmpty mustBe true } "<xz>.features[i].properties.links.previews MAY contain Reference to a quick-look or browse image representing " + "the resource, Array of link objects, 'length' SHOULD be provided (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.preview mustEqual List() (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.preview.isEmpty mustBe true (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.preview.head.mimeType mustEqual Some("image/png") (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.preview.head.href.isInstanceOf[URL] mustBe true } "<xz>.features[i].properties.links.data MAY contain Reference to the location of the data resource described in the Context resource, " + "Array of link objects (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.contentByRef.length mustEqual 1 (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.contentByRef.head.mimeType mustEqual Some("application/x-hdf5") (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.contentByRef.head.length mustEqual Some(453123432) (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.contentByRef.isEmpty mustBe true (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.contentByRef.isEmpty mustBe true } "<xz>.features[i].properties.offering MAY contain Service or inline content offering for the resource " + 
"targeted at OGC compliant clients, owc:OfferingType (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.offering.length mustEqual 1 (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.offering.length mustEqual 2 (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.offering.length mustEqual 2 } "<xz>.features[i].properties.active MAY contain Flag value indicating if the Context resource should be displayed by default, 'true' or 'false' (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.active mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.active mustEqual Some(false) (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.active mustEqual Some(true) } "<xz>.features[i].properties.links.via MAY contain Reference to a resource from which the Context resource is derived, " + "(e.g. source of the information) Link object (0..*)" in { logger.warn("the spec says, 'link object' not 'Array of link objects', example doesn NOT show an array, but spec defines multiplicity as 'Zero or more(optional)'") (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.resourceMetadata.length mustEqual 1 (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.resourceMetadata.head.mimeType mustEqual Some("application/xml") (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.resourceMetadata.head.length mustEqual Some(435) (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.resourceMetadata.head.lang mustEqual Some("es") (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.resourceMetadata.isEmpty mustBe true } "<xz>.features[i].properties.categories.term MAY contain Category related to this resource. 
" + "It MAY have a related code-list that is identified by the 'scheme' attribute (0..*)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.keyword.isEmpty mustBe true (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.keyword.length mustEqual 1 (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.keyword.head.term mustEqual "GEOSSDataCore" (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.keyword.head.scheme mustEqual Some("view-groups") (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.keyword.head.label mustEqual Some("Informative Layers") } "<xz>.features[i].properties.minscaledenominator MAY contain Minimum scale for the display of the Context resource Double (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.minScaleDenominator mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.minScaleDenominator.get mustEqual 100.0 (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.minScaleDenominator mustEqual None } "<xz>.features[i].properties.maxscaledenominator MAY contain Maximum scale for the display of the Context resource Double (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.maxScaleDenominator mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.maxScaleDenominator.get mustEqual 1000000.0 (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.maxScaleDenominator mustEqual None } "<xz>.features[i].properties.folder MAY contain Definition string of a folder name in which the resource is placed (0..1)" in { (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get.folder mustEqual None (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get.folder mustEqual None (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get.folder mustEqual Some("/view-groups/sac_add") } "Not used: <xz>.features[i].properties.* MAY contain Any other element Extension outside of the scope of OWS Context (0..*)" in { logger.info("Not used: MAY contain <xz>.features[i].properties.uuid, but in Ows:Resource we use <xz>.features[i].id as unique identifier") } } "OwcResource Writes" should { val res1 = (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get val res2 = (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get val res3 = (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get "write OwcResource GeoJSON" in { res1.toJson.validate[OwcResource].get mustEqual res1 res2.toJson.validate[OwcResource].get mustEqual res2 res3.toJson.validate[OwcResource].get mustEqual res3 } } "OwcResource Custom" should { "Copy and Compare" in { val res1 = (jsonTestCollection1 \ "features") (0).get.validate[OwcResource].get val res2 = (jsonTestCollection2 \ "features") (0).get.validate[OwcResource].get val res3 = (jsonTestCollection3 \ "features") (0).get.validate[OwcResource].get val res1Clone = res1.newOf() res1Clone must not equal res1 res1Clone.sameAs(res1) mustBe true res1.id.toString.contains("_copy") mustBe false res1Clone.id.toString.contains("_copy") mustBe true val resCloneClone = res1Clone.newOf() resCloneClone must not equal res1Clone resCloneClone must not equal res1 resCloneClone.sameAs(res1) mustBe true resCloneClone.sameAs(res1Clone) mustBe true val res2Clone = res2.newOf() res2Clone must not equal res2 res2Clone.sameAs(res2) mustBe true val res3Clone = res3.newOf() res3Clone must not equal 
res3 res3Clone.sameAs(res3) mustBe true val resClone2 = OwcResource.newOf(res1) resClone2.id.toString.contains("_copy") mustBe true resClone2 must not equal res1 resClone2.sameAs(res1) mustBe true val resCaseCopy = res1.copy() resCaseCopy mustEqual res1 resCaseCopy must not equal res1Clone } } }
ZGIS/smart-owc-geojson
src/test/scala/info/smart/models/owc100/OwcResourceSpec.scala
Scala
apache-2.0
16447
package com.joshcough.minecraft

import org.bukkit.block.Block
import org.bukkit.entity.{Arrow, Entity, Player}
import org.bukkit.event.{EventHandler => EH, Listener}
import org.bukkit.event.block.{BlockBreakEvent, BlockDamageEvent}
import org.bukkit.event.block.Action._
import org.bukkit.event.entity.{ProjectileHitEvent, EntityDamageEvent, PlayerDeathEvent, EntityDamageByEntityEvent}
import org.bukkit.event.weather.WeatherChangeEvent
import org.bukkit.event.player.{PlayerQuitEvent, PlayerInteractEvent, PlayerMoveEvent, PlayerChatEvent, PlayerJoinEvent, PlayerKickEvent, PlayerLoginEvent}

/**
 * A trait that supports exactly one listener.
 * This is really just a tiny convenience wrapper over ListenersPlugin,
 * so that you can say:
 *
 *   val listener = createMyListener
 *
 * instead of:
 *
 *   val listeners = List(createMyListener)
 */
trait ListenerPlugin extends ListenersPlugin {
  def listener: Listener
  def listeners = List(listener)
  override def onEnable() { super.onEnable(); registerListener(listener) }
}

/**
 * A trait that can have many Listeners.
 *
 * All clients need to do is specify the listeners val, like so:
 *
 *   val listeners = List(
 *     createListener1,
 *     createListener2,
 *     ...
 *   )
 *
 * Convenience functions for creating Listeners are provided in the Listeners trait.
 */
trait ListenersPlugin extends ScalaPlugin with Listeners {
  def listeners: List[Listener]
  override def onEnable { super.onEnable(); listeners.foreach(registerListener) }
}

object Listeners extends Listeners
object ListenersObject extends Listeners

/**
 * This trait supports many convenience wrappers for creating Listeners with
 * higher order functions. Creating Listeners in Bukkit is fairly awkward.
 * You have to create a Listener instance with an annotated method,
 * that method can have any name, and it must take some Event as an argument, like so:
 *
 *   new Listener {
 *     @EventHandler def on(e: PlayerMoveEvent): Unit = doSomething(e)
 *   }
 *
 * This is all abstracted away from the user here. A user now just says:
 *
 *   OnPlayerMove(doSomething)
 *
 * (where doSomething is just a function that takes a PlayerMoveEvent, same as above)
 *
 * There are piles of examples of this in the examples code.
 **/
trait Listeners extends BukkitEnrichment {

  abstract case class ListeningFor(listener: Listener) extends ListenerPlugin

  def OnPlayerMove(f: PlayerMoveEvent => Unit) = new Listener {
    @EH def on(e: PlayerMoveEvent): Unit = f(e)
  }
  def OnEntityDamageByEntity(f: EntityDamageByEntityEvent => Unit) = new Listener {
    @EH def on(e: EntityDamageByEntityEvent): Unit = f(e)
  }
  def OnPlayerDamageByEntity(f: (Player, EntityDamageByEntityEvent) => Unit) = new Listener {
    @EH def on(e: EntityDamageByEntityEvent): Unit = e.getEntity.whenPlayer(f(_, e))
  }
  def OnEntityDamageByPlayer(f: (Entity, Player, EntityDamageByEntityEvent) => Unit) = new Listener {
    @EH def on(e: EntityDamageByEntityEvent): Unit = e.getDamager match {
      case p: Player => f(e.getEntity, p, e)
      case _         =>
    }
  }
  def OnPlayerDamage(f: (Player, EntityDamageEvent) => Unit) = new Listener {
    @EH def on(e: EntityDamageEvent): Unit = e.getEntity.whenPlayer(f(_, e))
  }
  def OnPlayerDeath(f: (Player, PlayerDeathEvent) => Unit) = new Listener {
    @EH def on(e: PlayerDeathEvent): Unit = f(e.getEntity, e)
  }
  def OnPlayerChat(f: (Player, PlayerChatEvent) => Unit) = new Listener {
    @EH def on(e: PlayerChatEvent): Unit = f(e.getPlayer, e)
  }
  def OnBlockBreak(f: (Block, Player, BlockBreakEvent) => Unit) = new Listener {
    @EH def on(e: BlockBreakEvent): Unit = f(e.getBlock, e.getPlayer, e)
  }
  def OnBlockDamage(f: (Block, BlockDamageEvent) => Unit) = new Listener {
    @EH def on(e: BlockDamageEvent): Unit = f(e.getBlock, e)
  }
  def OnWeatherChange(f: WeatherChangeEvent => Unit) = new Listener {
    @EH def on(e: WeatherChangeEvent): Unit = f(e)
  }
  def OnPlayerInteract(f: (Player, PlayerInteractEvent) => Unit) = new Listener {
    @EH def on(e: PlayerInteractEvent): Unit = f(e.getPlayer, e)
  }
  def OnRightClickBlock(f: (Player, PlayerInteractEvent) => Unit) = new Listener {
    @EH def on(e: PlayerInteractEvent): Unit = if (e.getAction == RIGHT_CLICK_BLOCK) f(e.getPlayer, e)
  }
  def OnLeftClickBlock(f: (Player, PlayerInteractEvent) => Unit) = new Listener {
    @EH def on(e: PlayerInteractEvent): Unit = if (e.getAction == LEFT_CLICK_BLOCK) f(e.getPlayer, e)
  }
  def OnPlayerRightClickAir(f: (Player, PlayerInteractEvent) => Unit) = new Listener {
    @EH def on(e: PlayerInteractEvent): Unit = if (e.getAction == RIGHT_CLICK_AIR) f(e.getPlayer, e)
  }
  def OnPlayerLeftClickAir(f: (Player, PlayerInteractEvent) => Unit) = new Listener {
    @EH def on(e: PlayerInteractEvent): Unit = if (e.getAction == LEFT_CLICK_AIR) f(e.getPlayer, e)
  }
  def OnPlayerMove(f: (Player, PlayerMoveEvent) => Unit) = new Listener {
    @EH def on(e: PlayerMoveEvent): Unit = f(e.getPlayer, e)
  }
  def OnPlayerQuit(f: (Player, PlayerQuitEvent) => Unit) = new Listener {
    @EH def on(e: PlayerQuitEvent): Unit = f(e.getPlayer, e)
  }
  def OnPlayerKick(f: (Player, PlayerKickEvent) => Unit) = new Listener {
    @EH def on(e: PlayerKickEvent): Unit = f(e.getPlayer, e)
  }
  def OnPlayerLogin(f: (Player, PlayerLoginEvent) => Unit) = new Listener {
    @EH def on(e: PlayerLoginEvent): Unit = f(e.getPlayer, e)
  }
  def OnPlayerJoin(f: (Player, PlayerJoinEvent) => Unit) = new Listener {
    @EH def on(e: PlayerJoinEvent): Unit = f(e.getPlayer, e)
  }
  def OnArrowHitEvent(f: (Arrow, ProjectileHitEvent) => Unit) = new Listener {
    @EH def on(e: ProjectileHitEvent): Unit = e.getEntity match {
      case a: Arrow => f(a, e)
      case _        =>
    }
  }
}
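// Editor's note: a minimal, hypothetical plugin sketch (not part of the
// original file) showing ListenerPlugin and the OnBlockBreak wrapper above
// used together; the class name and message are illustrative only.
class ExampleBlockLogger extends ListenerPlugin {
  val listener = OnBlockBreak((block, player, _) =>
    player.sendMessage("You broke a " + block.getType)
  )
}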
joshcough/MinecraftPlugins
core/src/main/scala/com/joshcough/minecraft/Listeners.scala
Scala
mit
5780
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package org.apache.toree.kernel.protocol.v5.interpreter.tasks

import akka.actor.{Actor, Props}
import org.apache.toree.interpreter.Interpreter
import org.apache.toree.kernel.protocol.v5.content.CompleteRequest
import org.apache.toree.utils.LogLike

object CodeCompleteTaskActor {
  def props(interpreter: Interpreter): Props =
    Props(classOf[CodeCompleteTaskActor], interpreter)
}

class CodeCompleteTaskActor(interpreter: Interpreter)
  extends Actor with LogLike {
  require(interpreter != null)

  override def receive: Receive = {
    case completeRequest: CompleteRequest =>
      logger.debug("Invoking the interpreter completion")
      sender ! interpreter.completion(completeRequest.code, completeRequest.cursor_pos)
    case _ =>
      sender ! "Unknown message" // TODO: Provide a failure message type to be passed around?
  }
}
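// Editor's note: a hypothetical caller sketch (not part of the original file)
// showing how this actor's request/response protocol is typically driven with
// akka's ask pattern; the `system` and `interpreter` values are assumed to be
// wired up elsewhere.
//
//   import akka.pattern.ask
//   import akka.util.Timeout
//   import scala.concurrent.duration._
//
//   implicit val timeout: Timeout = Timeout(5.seconds)
//   val completer = system.actorOf(CodeCompleteTaskActor.props(interpreter))
//   // Completes with whatever interpreter.completion(...) returns
//   val reply = completer ? CompleteRequest("1 + 1.to", cursor_pos = 8)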
lresende/incubator-toree
kernel/src/main/scala/org/apache/toree/kernel/protocol/v5/interpreter/tasks/CodeCompleteTaskActor.scala
Scala
apache-2.0
1658
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2010-2013, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */

package scala
package collection
package generic

import scala.collection.parallel.Combiner

/**
 * @since 2.8
 */
trait HasNewCombiner[+T, +Repr] {
  protected[this] def newCombiner: Combiner[T, Repr]
}
felixmulder/scala
src/library/scala/collection/generic/HasNewCombiner.scala
Scala
bsd-3-clause
732
package grasshopper.test.streams

import akka.stream.ActorAttributes._
import akka.stream.Supervision._
import akka.stream.scaladsl.Flow
import akka.util.ByteString
import feature.Feature
import grasshopper.test.model.TestGeocodeModel.{ PointInputAddress, PointInputAddressTract }

trait FlowUtils {

  val numProcessors = Runtime.getRuntime.availableProcessors()

  def byte2StringFlow: Flow[ByteString, String, Unit] =
    Flow[ByteString].map(bs => bs.utf8String)

  def string2ByteStringFlow: Flow[String, ByteString, Unit] =
    Flow[String].map(str => ByteString(str))
}
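// Editor's note: a hypothetical usage sketch (not part of the original file)
// for the two flows above, kept in comment form because it assumes an
// ActorSystem and materializer from the same akka-streams generation as the
// Flow[..., ..., Unit] signatures used in this trait:
//
//   val roundTrip: Flow[ByteString, ByteString, Unit] =
//     byte2StringFlow.map(_.toUpperCase).via(string2ByteStringFlow)
//
//   Source(List(ByteString("hello")))
//     .via(roundTrip)
//     .runForeach(bs => println(bs.utf8String))  // prints HELLO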
awolfe76/grasshopper
test-harness/src/main/scala/grasshopper/test/streams/FlowUtils.scala
Scala
cc0-1.0
578
package com.thetestpeople.trt.importer.jenkins

import org.joda.time.Duration
import org.joda.time.DateTime
import java.net.URI

case class JenkinsBuildSummary(
  url: URI,
  durationOpt: Option[Duration],
  nameOpt: Option[String],
  timestampOpt: Option[DateTime],
  resultOpt: Option[String],
  hasTestReport: Boolean,
  isBuilding: Boolean)
thetestpeople/trt
app/com/thetestpeople/trt/importer/jenkins/JenkinsBuildSummary.scala
Scala
mit
345
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest import SharedHelpers._ import org.scalatest.events._ import org.scalactic.exceptions.NullArgumentException import org.scalatest.exceptions.DuplicateTestNameException import org.scalatest.exceptions.TestFailedException import org.scalatest.exceptions.TestRegistrationClosedException class PropSpecSpec extends FunSpec { describe("A PropSpec") { it("should return the test names in registration order from testNames") { val a = new PropSpec { property("test this") {/* ASSERTION_SUCCEED */} property("test that") {/* ASSERTION_SUCCEED */} } assertResult(List("test this", "test that")) { a.testNames.iterator.toList } val b = new PropSpec {} assertResult(List[String]()) { b.testNames.iterator.toList } val c = new PropSpec { property("test that") {/* ASSERTION_SUCCEED */} property("test this") {/* ASSERTION_SUCCEED */} } assertResult(List("test that", "test this")) { c.testNames.iterator.toList } } it("should throw NotAllowedException if a duplicate test name registration is attempted") { intercept[DuplicateTestNameException] { new PropSpec { property("test this") {/* ASSERTION_SUCCEED */} property("test this") {/* ASSERTION_SUCCEED */} } } intercept[DuplicateTestNameException] { new PropSpec { property("test this") {/* ASSERTION_SUCCEED */} ignore("test this") {/* ASSERTION_SUCCEED */} } } intercept[DuplicateTestNameException] { new PropSpec { ignore("test this") {/* ASSERTION_SUCCEED */} ignore("test this") {/* ASSERTION_SUCCEED */} } } intercept[DuplicateTestNameException] { new PropSpec { ignore("test this") {/* ASSERTION_SUCCEED */} property("test this") {/* ASSERTION_SUCCEED */} } } } it("should throw NotAllowedException if test registration is attempted after run has been invoked on a suite") { class InvokedWhenNotRunningSuite extends PropSpec { var fromMethodTestExecuted = false var fromConstructorTestExecuted = false property("from constructor") { fromConstructorTestExecuted = true /* ASSERTION_SUCCEED */ } def tryToRegisterATest(): Unit = { property("from method") { fromMethodTestExecuted = true /* ASSERTION_SUCCEED */ } } } val suite = new InvokedWhenNotRunningSuite suite.run(None, Args(SilentReporter)) assert(suite.fromConstructorTestExecuted) assert(!suite.fromMethodTestExecuted) intercept[TestRegistrationClosedException] { suite.tryToRegisterATest() } suite.run(None, Args(SilentReporter)) assert(!suite.fromMethodTestExecuted) /* class InvokedWhenRunningSuite extends PropSpec { var fromMethodTestExecuted = false var fromConstructorTestExecuted = false property("from constructor") { tryToRegisterATest() fromConstructorTestExecuted = true } def tryToRegisterATest() { property("from method") { fromMethodTestExecuted = true } } } val a = new InvokedWhenNotRunningSuite a.run() intercept[TestFailedException] { new InvokedWhenRunningSuite } */ } it("should invoke withFixture from runTest") { val a = new PropSpec { var withFixtureWasInvoked = false var testWasInvoked = false override def 
withFixture(test: NoArgTest): Outcome = { withFixtureWasInvoked = true super.withFixture(test) } property("something") { testWasInvoked = true /* ASSERTION_SUCCEED */ } } import scala.language.reflectiveCalls a.run(None, Args(SilentReporter)) assert(a.withFixtureWasInvoked) assert(a.testWasInvoked) } it("should pass the correct test name in the NoArgTest passed to withFixture") { val a = new PropSpec { var correctTestNameWasPassed = false override def withFixture(test: NoArgTest): Outcome = { correctTestNameWasPassed = test.name == "something" super.withFixture(test) } property("something") {/* ASSERTION_SUCCEED */} } import scala.language.reflectiveCalls a.run(None, Args(SilentReporter)) assert(a.correctTestNameWasPassed) } it("should pass the correct config map in the NoArgTest passed to withFixture") { val a = new PropSpec { var correctConfigMapWasPassed = false override def withFixture(test: NoArgTest): Outcome = { correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7)) super.withFixture(test) } property("something") {/* ASSERTION_SUCCEED */} } import scala.language.reflectiveCalls a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty)) assert(a.correctConfigMapWasPassed) } describe("(with info calls)") { it("should, when the info appears in the body before a test, report the info before the test") { val msg = "hi there, dude" val testName = "test name" class MySuite extends PropSpec { info(msg) property(testName) {/* ASSERTION_SUCCEED */} } val (infoProvidedIndex, testStartingIndex, testSucceededIndex) = getIndexesForInformerEventOrderTests(new MySuite, testName, msg) assert(infoProvidedIndex < testStartingIndex) assert(testStartingIndex < testSucceededIndex) } it("should, when the info appears in the body after a test, report the info after the test runs") { val msg = "hi there, dude" val testName = "test name" class MySuite extends PropSpec { property(testName) {/* ASSERTION_SUCCEED */} info(msg) } val (infoProvidedIndex, testStartingIndex, testSucceededIndex) = getIndexesForInformerEventOrderTests(new MySuite, testName, msg) assert(testStartingIndex < testSucceededIndex) assert(testSucceededIndex < infoProvidedIndex) } it("should print to stdout when info is called by a method invoked after the suite has been executed") { class MySuite extends PropSpec { callInfo() // This should work fine def callInfo(): Unit = { info("howdy") } property("howdy also") { callInfo() // This should work fine /* ASSERTION_SUCCEED */ } } val suite = new MySuite val myRep = new EventRecordingReporter suite.run(None, Args(myRep)) suite.callInfo() // TODO: Actually test that it prints to stdout } } it("should run tests registered via the propertiesFor syntax") { trait SharedPropSpecTests { this: PropSpec => def nonEmptyStack(s: String)(i: Int): Unit = { property("I am shared") {/* ASSERTION_SUCCEED */} } } class MySuite extends PropSpec with SharedPropSpecTests { propertiesFor(nonEmptyStack("hi")(1)) } val suite = new MySuite val reporter = new EventRecordingReporter suite.run(None, Args(reporter)) val indexedList = reporter.eventsReceived val testStartingOption = indexedList.find(_.isInstanceOf[TestStarting]) assert(testStartingOption.isDefined) assert(testStartingOption.get.asInstanceOf[TestStarting].testName === "I am shared") } it("should throw NullArgumentException if a null test tag is provided") { // test intercept[NullArgumentException] { new PropSpec { property("hi", null) {/* ASSERTION_SUCCEED */} } } val caught = 
intercept[NullArgumentException] { new PropSpec { property("hi", mytags.SlowAsMolasses, null) {/* ASSERTION_SUCCEED */} } } assert(caught.getMessage == "a test tag was null") intercept[NullArgumentException] { new PropSpec { property("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {/* ASSERTION_SUCCEED */} } } // ignore intercept[NullArgumentException] { new PropSpec { ignore("hi", null) {/* ASSERTION_SUCCEED */} } } val caught2 = intercept[NullArgumentException] { new PropSpec { ignore("hi", mytags.SlowAsMolasses, null) {/* ASSERTION_SUCCEED */} } } assert(caught2.getMessage == "a test tag was null") intercept[NullArgumentException] { new PropSpec { ignore("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {/* ASSERTION_SUCCEED */} } } // registerTest intercept[NullArgumentException] { new PropSpec { registerTest("hi", null) {/* ASSERTION_SUCCEED */} } } val caught3 = intercept[NullArgumentException] { new PropSpec { registerTest("hi", mytags.SlowAsMolasses, null) {/* ASSERTION_SUCCEED */} } } assert(caught3.getMessage == "a test tag was null") intercept[NullArgumentException] { new PropSpec { property("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {/* ASSERTION_SUCCEED */} } } // registerIgnoredTest intercept[NullArgumentException] { new PropSpec { registerIgnoredTest("hi", null) {/* ASSERTION_SUCCEED */} } } val caught4 = intercept[NullArgumentException] { new PropSpec { registerIgnoredTest("hi", mytags.SlowAsMolasses, null) {/* ASSERTION_SUCCEED */} } } assert(caught4.getMessage == "a test tag was null") intercept[NullArgumentException] { new PropSpec { registerIgnoredTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {/* ASSERTION_SUCCEED */} } } } class TestWasCalledSuite extends PropSpec { var theTestThisCalled = false var theTestThatCalled = false property("this") { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } it("should execute all tests when run is called with testName None") { val b = new TestWasCalledSuite b.run(None, Args(SilentReporter)) assert(b.theTestThisCalled) assert(b.theTestThatCalled) } it("should execute one test when run is called with a defined testName") { val a = new TestWasCalledSuite a.run(Some("this"), Args(SilentReporter)) assert(a.theTestThisCalled) assert(!a.theTestThatCalled) } it("should report as ignored, and not run, tests marked ignored") { val a = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false property("test this") { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } import scala.language.reflectiveCalls val repA = new TestIgnoredTrackingReporter a.run(None, Args(repA)) assert(!repA.testIgnoredReceived) assert(a.theTestThisCalled) assert(a.theTestThatCalled) val b = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false ignore("test this") { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repB = new TestIgnoredTrackingReporter b.run(None, Args(repB)) assert(repB.testIgnoredReceived) assert(repB.lastEvent.isDefined) assert(repB.lastEvent.get.testName endsWith "test this") assert(!b.theTestThisCalled) assert(b.theTestThatCalled) val c = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false property("test this") { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } ignore("test that") { theTestThatCalled = 
true; /* ASSERTION_SUCCEED */ } } val repC = new TestIgnoredTrackingReporter c.run(None, Args(repC)) assert(repC.testIgnoredReceived) assert(repC.lastEvent.isDefined) assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName) assert(c.theTestThisCalled) assert(!c.theTestThatCalled) // The order I want is order of appearance in the file. // Will try and implement that tomorrow. Subtypes will be able to change the order. val d = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false ignore("test this") { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } ignore("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repD = new TestIgnoredTrackingReporter d.run(None, Args(repD)) assert(repD.testIgnoredReceived) assert(repD.lastEvent.isDefined) assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance assert(!d.theTestThisCalled) assert(!d.theTestThatCalled) } it("should ignore a test marked as ignored if run is invoked with that testName") { // If I provide a specific testName to run, then it should ignore an Ignore on that test // method and actually invoke it. val e = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false ignore("test this") { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } import scala.language.reflectiveCalls val repE = new TestIgnoredTrackingReporter e.run(Some("test this"), Args(repE)) assert(repE.testIgnoredReceived) assert(!e.theTestThisCalled) assert(!e.theTestThatCalled) } it("should run only those tests selected by the tags to include and exclude sets") { // Nothing is excluded val a = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false property("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } import scala.language.reflectiveCalls val repA = new TestIgnoredTrackingReporter a.run(None, Args(repA)) assert(!repA.testIgnoredReceived) assert(a.theTestThisCalled) assert(a.theTestThatCalled) // SlowAsMolasses is included, one test should be excluded val b = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false property("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repB = new TestIgnoredTrackingReporter b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repB.testIgnoredReceived) assert(b.theTestThisCalled) assert(!b.theTestThatCalled) // SlowAsMolasses is included, and both tests should be included val c = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false property("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repC = new TestIgnoredTrackingReporter c.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repC.testIgnoredReceived) assert(c.theTestThisCalled) assert(c.theTestThatCalled) // SlowAsMolasses is included. 
both tests should be included but one ignored val d = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false ignore("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repD = new TestIgnoredTrackingReporter d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(repD.testIgnoredReceived) assert(!d.theTestThisCalled) assert(d.theTestThatCalled) // SlowAsMolasses included, FastAsLight excluded val e = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } property("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repE = new TestIgnoredTrackingReporter e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repE.testIgnoredReceived) assert(!e.theTestThisCalled) assert(e.theTestThatCalled) assert(!e.theTestTheOtherCalled) // An Ignored test that was both included and excluded should not generate a TestIgnored event val f = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } property("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repF = new TestIgnoredTrackingReporter f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repF.testIgnoredReceived) assert(!f.theTestThisCalled) assert(f.theTestThatCalled) assert(!f.theTestTheOtherCalled) // An Ignored test that was not included should not generate a TestIgnored event val g = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } ignore("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repG = new TestIgnoredTrackingReporter g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repG.testIgnoredReceived) assert(!g.theTestThisCalled) assert(g.theTestThatCalled) assert(!g.theTestTheOtherCalled) // No tagsToInclude set, FastAsLight excluded val h = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } property("test the other") { 
theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repH = new TestIgnoredTrackingReporter h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repH.testIgnoredReceived) assert(!h.theTestThisCalled) assert(h.theTestThatCalled) assert(h.theTestTheOtherCalled) // No tagsToInclude set, SlowAsMolasses excluded val i = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false property("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } property("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } property("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repI = new TestIgnoredTrackingReporter i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repI.testIgnoredReceived) assert(!i.theTestThisCalled) assert(!i.theTestThatCalled) assert(i.theTestTheOtherCalled) // No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones val j = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } ignore("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } property("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repJ = new TestIgnoredTrackingReporter j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repI.testIgnoredReceived) assert(!j.theTestThisCalled) assert(!j.theTestThatCalled) assert(j.theTestTheOtherCalled) // Same as previous, except Ignore specifically mentioned in excludes set val k = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false ignore("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } ignore("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } ignore("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repK = new TestIgnoredTrackingReporter k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(repK.testIgnoredReceived) assert(!k.theTestThisCalled) assert(!k.theTestThatCalled) assert(!k.theTestTheOtherCalled) } it("should run only those registered tests selected by the tags to include and exclude sets") { // Nothing is excluded val a = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false registerTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } import scala.language.reflectiveCalls val repA = new TestIgnoredTrackingReporter a.run(None, Args(repA)) assert(!repA.testIgnoredReceived) assert(a.theTestThisCalled) assert(a.theTestThatCalled) // SlowAsMolasses is included, one test should be excluded val b = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false registerTest("test this", 
mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repB = new TestIgnoredTrackingReporter b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repB.testIgnoredReceived) assert(b.theTestThisCalled) assert(!b.theTestThatCalled) // SlowAsMolasses is included, and both tests should be included val c = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false registerTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repC = new TestIgnoredTrackingReporter c.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repC.testIgnoredReceived) assert(c.theTestThisCalled) assert(c.theTestThatCalled) // SlowAsMolasses is included. both tests should be included but one ignored val d = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false registerIgnoredTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } } val repD = new TestIgnoredTrackingReporter d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(repD.testIgnoredReceived) assert(!d.theTestThisCalled) assert(d.theTestThatCalled) // SlowAsMolasses included, FastAsLight excluded val e = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repE = new TestIgnoredTrackingReporter e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repE.testIgnoredReceived) assert(!e.theTestThisCalled) assert(e.theTestThatCalled) assert(!e.theTestTheOtherCalled) // An Ignored test that was both included and excluded should not generate a TestIgnored event val f = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repF = new TestIgnoredTrackingReporter f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repF.testIgnoredReceived) assert(!f.theTestThisCalled) assert(f.theTestThatCalled) assert(!f.theTestTheOtherCalled) // An Ignored test that was not included should not generate a TestIgnored event val g = new PropSpec { 
var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerIgnoredTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repG = new TestIgnoredTrackingReporter g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repG.testIgnoredReceived) assert(!g.theTestThisCalled) assert(g.theTestThatCalled) assert(!g.theTestTheOtherCalled) // No tagsToInclude set, FastAsLight excluded val h = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repH = new TestIgnoredTrackingReporter h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repH.testIgnoredReceived) assert(!h.theTestThisCalled) assert(h.theTestThatCalled) assert(h.theTestTheOtherCalled) // No tagsToInclude set, SlowAsMolasses excluded val i = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repI = new TestIgnoredTrackingReporter i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repI.testIgnoredReceived) assert(!i.theTestThisCalled) assert(!i.theTestThatCalled) assert(i.theTestTheOtherCalled) // No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones val j = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerIgnoredTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repJ = new TestIgnoredTrackingReporter j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(!repI.testIgnoredReceived) assert(!j.theTestThisCalled) assert(!j.theTestThatCalled) assert(j.theTestTheOtherCalled) // Same as previous, except Ignore specifically mentioned in excludes set val k = new PropSpec { var theTestThisCalled = false var theTestThatCalled = false var theTestTheOtherCalled = false registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ } registerIgnoredTest("test that", 
mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ } registerIgnoredTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ } } val repK = new TestIgnoredTrackingReporter k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty)) assert(repK.testIgnoredReceived) assert(!k.theTestThisCalled) assert(!k.theTestThatCalled) assert(!k.theTestTheOtherCalled) } it("should return the correct test count from its expectedTestCount method") { val a = new PropSpec { property("test this") {/* ASSERTION_SUCCEED */} property("test that") {/* ASSERTION_SUCCEED */} } assert(a.expectedTestCount(Filter()) == 2) val b = new PropSpec { ignore("test this") {/* ASSERTION_SUCCEED */} property("test that") {/* ASSERTION_SUCCEED */} } assert(b.expectedTestCount(Filter()) == 1) val c = new PropSpec { property("test this", mytags.FastAsLight) {/* ASSERTION_SUCCEED */} property("test that") {/* ASSERTION_SUCCEED */} } assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1) val d = new PropSpec { property("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} property("test that", mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} property("test the other thing") {/* ASSERTION_SUCCEED */} } assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1) assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1) assert(d.expectedTestCount(Filter()) === 3) val e = new PropSpec { property("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} property("test that", mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} ignore("test the other thing") {/* ASSERTION_SUCCEED */} } assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1) assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0) assert(e.expectedTestCount(Filter()) == 2) val f = new Suites(a, b, c, d, e) assert(f.expectedTestCount(Filter()) === 10) } it("should return the correct test count from its expectedTestCount method when uses registerTest and registerIgnoredTest to register tests") { val a = new PropSpec { registerTest("test this") {/* ASSERTION_SUCCEED */} registerTest("test that") {/* ASSERTION_SUCCEED */} } assert(a.expectedTestCount(Filter()) == 2) val b = new PropSpec { registerIgnoredTest("test this") {/* ASSERTION_SUCCEED */} registerTest("test that") {/* ASSERTION_SUCCEED */} } assert(b.expectedTestCount(Filter()) == 1) val c = new PropSpec { registerTest("test this", mytags.FastAsLight) {/* ASSERTION_SUCCEED */} registerTest("test that") {/* ASSERTION_SUCCEED */} } assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1) val d = new PropSpec { registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} registerTest("test that", mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} registerTest("test the other thing") {/* ASSERTION_SUCCEED */} } 
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1) assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1) assert(d.expectedTestCount(Filter()) == 3) val e = new PropSpec { registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} registerTest("test that", mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */} registerIgnoredTest("test the other thing") {/* ASSERTION_SUCCEED */} } assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1) assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1) assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0) assert(e.expectedTestCount(Filter()) == 2) val f = new Suites(a, b, c, d, e) assert(f.expectedTestCount(Filter()) == 10) } it("should generate a TestPending message when the test body is (pending)") { val a = new PropSpec { property("should do this") (pending) property("should do that") { assert(2 + 2 === 4) } property("should do something else") { assert(2 + 2 === 4) pending } } val rep = new EventRecordingReporter a.run(None, Args(rep)) val tp = rep.testPendingEventsReceived assert(tp.size === 2) } it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " + "known in JDK 1.5, excluding AssertionError") { val a = new PropSpec { property("throws AssertionError") { throw new AssertionError } property("throws plain old Error") { throw new Error } property("throws Throwable") { throw new Throwable } } val rep = new EventRecordingReporter a.run(None, Args(rep)) val tf = rep.testFailedEventsReceived assert(tf.size === 3) } // SKIP-SCALATESTJS,NATIVE-START it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " + "AssertionError, causing Suites and Runs to abort.") { val a = new PropSpec { property("throws AssertionError") { throw new OutOfMemoryError } } intercept[OutOfMemoryError] { a.run(None, Args(SilentReporter)) } } // SKIP-SCALATESTJS,NATIVE-END describe("(when a nesting rule has been violated)") { it("should, if they call a nested it from within an it clause, result in a TestFailedException when running the test") { class MySuite extends PropSpec { property("should blow up") { property("should never run") { assert(1 === 1) } /* ASSERTION_SUCCEED */ } } val spec = new MySuite ensureTestFailedEventReceived(spec, "should blow up") } it("should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test") { class MySuite extends PropSpec { property("should blow up") { property("should never run", mytags.SlowAsMolasses) { assert(1 == 1) } /* ASSERTION_SUCCEED */ } } val spec = new MySuite ensureTestFailedEventReceived(spec, "should blow up") } it("should, if they call a nested registerTest with tags from within a registerTest clause, result in a TestFailedException when running the test") { class MySuite extends PropSpec { registerTest("should blow up") { registerTest("should never run", mytags.SlowAsMolasses) { assert(1 == 1) } /* ASSERTION_SUCCEED */ } } val spec = new MySuite ensureTestFailedEventReceived(spec, "should blow up") } it("should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test") { class MySuite extends PropSpec { 
property("should blow up") { ignore("should never run") { assert(1 === 1) } /* ASSERTION_SUCCEED */ } } val spec = new MySuite ensureTestFailedEventReceived(spec, "should blow up") } it("should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test") { class MySuite extends PropSpec { property("should blow up") { ignore("should never run", mytags.SlowAsMolasses) { assert(1 === 1) } /* ASSERTION_SUCCEED */ } } val spec = new MySuite ensureTestFailedEventReceived(spec, "should blow up") } it("should, if they call a nested registerIgnoredTest with tags from within a registerTest clause, result in a TestFailedException when running the test") { class MySuite extends PropSpec { registerTest("should blow up") { registerIgnoredTest("should never run", mytags.SlowAsMolasses) { assert(1 === 1) } /* ASSERTION_SUCCEED */ } } val spec = new MySuite ensureTestFailedEventReceived(spec, "should blow up") } } it("should throw IllegalArgumentException if passed a testName that doesn't exist") { class MySuite extends PropSpec { property("one") {/* ASSERTION_SUCCEED */} property("two") {/* ASSERTION_SUCCEED */} } val suite = new MySuite intercept[IllegalArgumentException] { suite.run(Some("three"), Args(SilentReporter)) } } it("should allow test registration with registerTest and registerIgnoredTest") { class TestSpec extends PropSpec { val a = 1 registerTest("test 1") { val e = intercept[TestFailedException] { assert(a == 2) } assert(e.message == Some("1 did not equal 2")) assert(e.failedCodeFileName == Some("PropSpecSpec.scala")) assert(e.failedCodeLineNumber == Some(thisLineNumber - 4)) } registerTest("test 2") { assert(a == 2) } registerTest("test 3") { pending } registerTest("test 4") { cancel } registerIgnoredTest("test 5") { assert(a == 2) } } val rep = new EventRecordingReporter val s = new TestSpec s.run(None, Args(rep)) assert(rep.testStartingEventsReceived.length == 4) assert(rep.testSucceededEventsReceived.length == 1) assert(rep.testSucceededEventsReceived(0).testName == "test 1") assert(rep.testFailedEventsReceived.length == 1) assert(rep.testFailedEventsReceived(0).testName == "test 2") assert(rep.testPendingEventsReceived.length == 1) assert(rep.testPendingEventsReceived(0).testName == "test 3") assert(rep.testCanceledEventsReceived.length == 1) assert(rep.testCanceledEventsReceived(0).testName == "test 4") assert(rep.testIgnoredEventsReceived.length == 1) assert(rep.testIgnoredEventsReceived(0).testName == "test 5") } ignore("should support expectations") { // Unignore after we uncomment the expectation implicits in RegistrationPolicy class TestSpec extends PropSpec with expectations.Expectations { property("fail scenario") { expect(1 === 2); /* ASSERTION_SUCCEED */ } property("nested fail scenario") { expect(1 === 2); /* ASSERTION_SUCCEED */ } } val rep = new EventRecordingReporter val s1 = new TestSpec s1.run(None, Args(rep)) assert(rep.testFailedEventsReceived.size === 2) assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "PropSpecSpec.scala") assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 11) assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "PropSpecSpec.scala") assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 10) } } 
describe("when failure happens") { it("should fire TestFailed event with correct stack depth info when test failed") { class TestSpec extends PropSpec { property("fail scenario") { assert(1 === 2) } } val rep = new EventRecordingReporter val s1 = new TestSpec s1.run(None, Args(rep)) assert(rep.testFailedEventsReceived.size === 1) assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "PropSpecSpec.scala") assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 8) } it("should generate TestRegistrationClosedException with correct stack depth info when has a property nested inside a property") { class TestSpec extends PropSpec { var registrationClosedThrown = false property("a scenario") { property("nested scenario") { assert(1 == 2) }; /* ASSERTION_SUCCEED */ } override def withFixture(test: NoArgTest): Outcome = { val outcome = test.apply() outcome match { case Exceptional(ex: TestRegistrationClosedException) => registrationClosedThrown = true case _ => } outcome } } val rep = new EventRecordingReporter val s = new TestSpec s.run(None, Args(rep)) assert(s.registrationClosedThrown == true) val testFailedEvents = rep.testFailedEventsReceived assert(testFailedEvents.size === 1) assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException]) val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException] assert("PropSpecSpec.scala" === trce.failedCodeFileName.get) assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) assert(trce.message == Some("A property clause may not appear inside another property clause.")) } it("should generate TestRegistrationClosedException with correct stack depth info when has an ignore nested inside a property") { class TestSpec extends PropSpec { var registrationClosedThrown = false property("a scenario") { ignore("nested scenario") { assert(1 == 2) }; /* ASSERTION_SUCCEED */ } override def withFixture(test: NoArgTest): Outcome = { val outcome = test.apply() outcome match { case Exceptional(ex: TestRegistrationClosedException) => registrationClosedThrown = true case _ => } outcome } } val rep = new EventRecordingReporter val s = new TestSpec s.run(None, Args(rep)) assert(s.registrationClosedThrown == true) val testFailedEvents = rep.testFailedEventsReceived assert(testFailedEvents.size === 1) assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException]) val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException] assert("PropSpecSpec.scala" === trce.failedCodeFileName.get) assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) assert(trce.message == Some("An ignore clause may not appear inside a property clause.")) } it("should generate TestRegistrationClosedException with correct stack depth info when has a registerTest nested inside a registerTest") { class TestSpec extends PropSpec { var registrationClosedThrown = false registerTest("a scenario") { registerTest("nested scenario") { assert(1 == 2) }; /* ASSERTION_SUCCEED */ } override def withFixture(test: NoArgTest): Outcome = { val outcome = test.apply() outcome match { case Exceptional(ex: TestRegistrationClosedException) => registrationClosedThrown = true case _ => } outcome } } val rep = new EventRecordingReporter val s = new TestSpec s.run(None, Args(rep)) assert(s.registrationClosedThrown == true) val testFailedEvents = 
rep.testFailedEventsReceived assert(testFailedEvents.size === 1) assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException]) val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException] assert("PropSpecSpec.scala" === trce.failedCodeFileName.get) assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) assert(trce.message == Some("Test cannot be nested inside another test.")) } it("should generate TestRegistrationClosedException with correct stack depth info when has a registerIgnoredTest nested inside a registerTest") { class TestSpec extends PropSpec { var registrationClosedThrown = false registerTest("a scenario") { registerIgnoredTest("nested scenario") { assert(1 == 2) }; /* ASSERTION_SUCCEED */ } override def withFixture(test: NoArgTest): Outcome = { val outcome = test.apply() outcome match { case Exceptional(ex: TestRegistrationClosedException) => registrationClosedThrown = true case _ => } outcome } } val rep = new EventRecordingReporter val s = new TestSpec s.run(None, Args(rep)) assert(s.registrationClosedThrown == true) val testFailedEvents = rep.testFailedEventsReceived assert(testFailedEvents.size === 1) assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException]) val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException] assert("PropSpecSpec.scala" === trce.failedCodeFileName.get) assert(trce.failedCodeLineNumber.get === thisLineNumber - 23) assert(trce.message == Some("Test cannot be nested inside another test.")) } it("should generate a DuplicateTestNameException when duplicate test name is detected") { class TestSpec extends PropSpec { property("test 1") {} property("test 1") {} } val e = intercept[DuplicateTestNameException] { new TestSpec } assert("PropSpecSpec.scala" == e.failedCodeFileName.get) assert(e.failedCodeLineNumber.get == thisLineNumber - 6) assert(!e.cause.isDefined) } it("should generate a DuplicateTestNameException when duplicate test name is detected using ignore") { class TestSpec extends PropSpec { property("test 1") {} ignore("test 1") {} } val e = intercept[DuplicateTestNameException] { new TestSpec } assert("PropSpecSpec.scala" == e.failedCodeFileName.get) assert(e.failedCodeLineNumber.get == thisLineNumber - 6) assert(!e.cause.isDefined) } } }
dotty-staging/scalatest
scalatest-test/src/test/scala/org/scalatest/PropSpecSpec.scala
Scala
apache-2.0
52716
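The filtering behaviour exercised at such length in the suite above reduces to one idea: a test runs only if its tags survive the Filter's include/exclude sets, and `Ignore` is itself just a tag that can be excluded like any other. A minimal sketch of the same run call used throughout the suite, reusing the `mytags.SlowAsMolasses` tag object and the `Args` shape from the tests above:

import org.scalatest._

// Only "test this" survives: SlowAsMolasses is in the include set,
// so the untagged "test that" is filtered out entirely.
class TaggedSpec extends PropSpec {
  property("test this", mytags.SlowAsMolasses) { assert(1 == 1) }
  property("test that") { assert(2 == 2) }
}

new TaggedSpec().run(None, Args(
  SilentReporter,
  Stopper.default,
  Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()),
  ConfigMap.empty, None, new Tracker, Set.empty))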
package com.thangiee.lolhangouts.data.usecases.entities

import java.util.Date

case class Message(
  friendName: String,
  text: String,
  isSentByUser: Boolean,
  isRead: Boolean,
  date: Date
)
Thangiee/LoL-Hangouts
src/com/thangiee/lolhangouts/data/usecases/entities/Message.scala
Scala
apache-2.0
200
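A one-line usage sketch of the case class above (field values hypothetical):

import java.util.Date

val msg = Message("summonerFriend", "gg wp", isSentByUser = true, isRead = false, new Date())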
class Foo:
  private var blah: Double = 0L
lampepfl/dotty
tests/pos-special/i7296.scala
Scala
apache-2.0
42
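The one-liner above is a positive compiler test: a `Long` literal in `Double` position. The widening it relies on is ordinary numeric conversion, e.g. (assuming standard Scala numeric widening rules):

val d: Double = 0L  // the Long value is widened to Double, so this compiles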
package scoverage.report import java.io.File import scala.xml.Node import scala.xml.PrettyPrinter import scoverage._ /** @author Stephen Samuel */ class ScoverageXmlWriter( sourceDirectories: Seq[File], outputDir: File, debug: Boolean ) extends BaseReportWriter(sourceDirectories, outputDir) { def this(sourceDir: File, outputDir: File, debug: Boolean) = { this(Seq(sourceDir), outputDir, debug) } def write(coverage: Coverage): Unit = { val file = IOUtils.reportFile(outputDir, debug) IOUtils.writeToFile(file, new PrettyPrinter(120, 4).format(xml(coverage))) } private def xml(coverage: Coverage): Node = { <scoverage statement-count={coverage.statementCount.toString} statements-invoked={coverage.invokedStatementCount.toString} statement-rate={coverage.statementCoverageFormatted} branch-rate={coverage.branchCoverageFormatted} version="1.0" timestamp={System.currentTimeMillis.toString}> <packages> {coverage.packages.map(pack)} </packages> </scoverage> } private def statement(stmt: Statement): Node = { debug match { case true => <statement package={stmt.location.packageName} class={stmt.location.className} class-type={stmt.location.classType.toString} full-class-name={stmt.location.fullClassName} source={stmt.source} method={stmt.location.method} start={stmt.start.toString} end={stmt.end.toString} line={stmt.line.toString} symbol={escape(stmt.symbolName)} tree={escape(stmt.treeName)} branch={stmt.branch.toString} invocation-count={stmt.count.toString} ignored={stmt.ignored.toString}> {escape(stmt.desc)} </statement> case false => <statement package={stmt.location.packageName} class={stmt.location.className} class-type={stmt.location.classType.toString} full-class-name={stmt.location.fullClassName} source={stmt.source} method={stmt.location.method} start={stmt.start.toString} end={stmt.end.toString} line={stmt.line.toString} branch={stmt.branch.toString} invocation-count={stmt.count.toString} ignored={stmt.ignored.toString}/> } } private def method(method: MeasuredMethod): Node = { <method name={method.name} statement-count={method.statementCount.toString} statements-invoked={method.invokedStatementCount.toString} statement-rate={method.statementCoverageFormatted} branch-rate={method.branchCoverageFormatted}> <statements> {method.statements.map(statement)} </statements> </method> } private def klass(klass: MeasuredClass): Node = { <class name={klass.fullClassName} filename={relativeSource(klass.source)} statement-count={klass.statementCount.toString} statements-invoked={klass.invokedStatementCount.toString} statement-rate={klass.statementCoverageFormatted} branch-rate={klass.branchCoverageFormatted}> <methods> {klass.methods.map(method)} </methods> </class> } private def pack(pack: MeasuredPackage): Node = { <package name={pack.name} statement-count={pack.statementCount.toString} statements-invoked={pack.invokedStatementCount.toString} statement-rate={pack.statementCoverageFormatted}> <classes> {pack.classes.map(klass)} </classes> </package> } /** This method ensures that the output String has only * valid XML unicode characters as specified by the * XML 1.0 standard. For reference, please see * <a href="http://www.w3.org/TR/2000/REC-xml-20001006#NT-Char">the * standard</a>. This method will return an empty * String if the input is null or empty. * * @param in The String whose non-valid characters we want to remove. * @return The in String, stripped of non-valid characters. 
* @see http://blog.mark-mclaren.info/2007/02/invalid-xml-characters-when-valid-utf8_5873.html */ def escape(in: String): String = { val out = new StringBuilder() for (current <- Option(in).getOrElse("").toCharArray) { if ( (current == 0x9) || (current == 0xa) || (current == 0xd) || ((current >= 0x20) && (current <= 0xd7ff)) || ((current >= 0xe000) && (current <= 0xfffd)) || ((current >= 0x10000) && (current <= 0x10ffff)) ) out.append(current) } out.mkString } }
scoverage/scalac-scoverage-plugin
scalac-scoverage-plugin/src/main/scala/scoverage/report/ScoverageXmlWriter.scala
Scala
apache-2.0
4934
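The `escape` helper above keeps only code points allowed by the XML 1.0 `Char` production. Note that because it walks `Char` values, the supplementary-plane branch (`>= 0x10000`) can never match a single `Char`, so astral characters arrive as surrogate pairs (0xD800–0xDFFF) and are silently dropped. A hedged sketch of the observable behaviour (file paths hypothetical):

import java.io.File

val writer = new ScoverageXmlWriter(new File("src"), new File("target/scoverage"), debug = false)
writer.escape("ok\u0000bad")  // "okbad": U+0000 is not a valid XML 1.0 character
writer.escape(null)           // "": null input yields an empty string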
package com.github.ponkin.bloom.spark import com.github.ponkin.bloom.driver.Client import com.twitter.util.{ Await, Future } import org.apache.spark.{ TaskContext, SparkContext } import org.apache.spark.rdd.RDD /** * Enrich standart RDD with additional * functions to put rdd in remote bloom filter */ class BloomFunctions[T](rdd: RDD[(String, T)]) { private val sparkContext: SparkContext = rdd.sparkContext /** * Put all keys from RDD to * remote bloom filter with `name` */ def putInBloomFilter( name: String )( implicit conn: BloomConnector = new BloomConnector(BloomConnectorConf(sparkContext.getConf)) ): Unit = { sparkContext.runJob(rdd, put(name)) } private[spark] def put( filter: String )( implicit conn: BloomConnector ): (TaskContext, Iterator[(String, T)]) => Unit = { (ctx, partition) => conn.withClientDo { client => Await.result( conn.conf.putBatchSize match { case 0 | 1 => Future.collect( partition.map(row => client.put(filter, Set(row._1))) .toSeq ).unit case n if n > 1 => Future.collect( partition.map(_._1) .sliding(n, n) .map(seq => client.put(filter, seq.toSet)) .toSeq ).unit case _ => client.put(filter, partition.map(_._1).toSet) } ) } } /** * Create RDD with flag - whether * key is inside bloom filter */ def mightContain( filterName: String )( implicit conn: BloomConnector = new BloomConnector(BloomConnectorConf(sparkContext.getConf)) ): BloomFilterRDD[T] = { new BloomFilterRDD(rdd, filterName, conn) } } object BloomFunctions { implicit def addBloomFucntions[T](rdd: RDD[(String, T)]) = new BloomFunctions(rdd) }
ponkin/bloom
spark-connector/src/main/scala/com/github/ponkin/bloom/spark/BloomFunctions.scala
Scala
apache-2.0
1865
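The `putBatchSize` dispatch above turns a partition's keys into non-overlapping batches with `sliding(n, n)`; the grouping is plain Scala and easy to sanity-check in isolation:

// sliding(n, n) over an iterator yields non-overlapping groups of up to n elements;
// the trailing group may be smaller.
val keys = Iterator("a", "b", "c", "d", "e")
keys.sliding(3, 3).map(_.toSet).toList
// => List(Set("a", "b", "c"), Set("d", "e"))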
/* * Scala.js (https://www.scala-js.org/) * * Copyright EPFL. * * Licensed under Apache License 2.0 * (https://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package org.scalajs.testsuite.javalib.lang import org.junit.Test import org.junit.Assert._ import org.junit.Assume._ import org.scalajs.testsuite.utils.AssertThrows.assertThrows import org.scalajs.testsuite.utils.Platform import scala.scalajs.js // scalastyle:off disallow.space.before.token class ObjectJSTest { @Test def everythingButNullIsAnObject(): Unit = { assertTrue((new js.Object: Any).isInstanceOf[Object]) assertTrue((js.Array(5) : Any).isInstanceOf[Object]) } @Test def everythingCanCastToObjectSuccessfullyIncludingNull(): Unit = { (new js.Object: Any).asInstanceOf[Object] (js.Array(5) : Any).asInstanceOf[Object] } @Test def cloneOnNonScalaObject(): Unit = { class CloneOnNonScalaObject extends js.Object { def boom(): Any = this.clone() } val obj = new CloneOnNonScalaObject() assertThrows(classOf[CloneNotSupportedException], obj.boom()) } @Test def hashCodeOfSymbols(): Unit = { /* None of the specific values here are by-spec. This test is highly * implementation-dependent. It is written like this to make sure that we * are returning different values for different arguments, but the specific * values are irrelevant and could be changed at any time. * * By-spec, however, hashCode() delegates to System.identityHashCode() for * symbols, since they are not Scala objects nor primitives that correspond * to a hijacked class. So the values here must be equal to those in * `SystemJSTest.identityHashCodeOfSymbols()`. */ assumeTrue("requires JS symbols", Platform.jsSymbols) @noinline def test(hash: Int, x: js.Symbol): Unit = assertEquals(hash, x.hashCode()) // unfortunately, all symbols without description hash to the same value test(0, js.Symbol()) test(0, js.Symbol("")) test(-1268878963, js.Symbol("foobar")) test(93492084, js.Symbol("babar")) test(3392903, js.Symbol(null)) test(-1268878963, js.Symbol.forKey("foobar")) test(93492084, js.Symbol.forKey("babar")) test(3392903, js.Symbol.forKey(null)) } @Test def hashCodeOfBigInts(): Unit = { /* None of the specific values here are by-spec. This test is highly * implementation-dependent. It is written like this to make sure that we * are returning different values for different arguments, but the specific * values are irrelevant and could be changed at any time. * * By-spec, however, hashCode() delegates to System.identityHashCode() for * bigints, since they are not Scala objects nor primitives that correspond * to a hijacked class (except for those that fit in a Long when we * implement Longs as bigints). So the values here must be equal to those * in `SystemJSTest.identityHashCodeOfBigInts()`. 
*/ assumeTrue("requires JS bigints", Platform.jsBigInts) @noinline def test(hash: Int, x: js.BigInt): Unit = assertEquals(hash, System.identityHashCode(x)) test(0, js.BigInt("0")) test(1, js.BigInt("1")) test(0, js.BigInt("-1")) test(-1746700373, js.BigInt("4203407456681260900")) test(1834237377, js.BigInt("-4533628472446063315")) test(1917535332, js.BigInt("-8078028383605336161")) test(1962981592, js.BigInt("-1395767907951999837")) test(1771769687, js.BigInt("4226100786750107409")) test(-1655946833, js.BigInt("8283069451989884520")) test(969818862, js.BigInt("-4956907030691723841")) test(-614637591, js.BigInt("7053247622210876606")) test(1345794172, js.BigInt("4113526825251053222")) test(-575359500, js.BigInt("7285869072471305893")) test(-413046144, js.BigInt("52943860994923075240706774564564704640410650435892")) test(-726153056, js.BigInt("-89593710930720640163135273078359588137037151908747")) } }
scala-js/scala-js
test-suite/js/src/test/scala/org/scalajs/testsuite/javalib/lang/ObjectJSTest.scala
Scala
apache-2.0
4079
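The comments in the test above pin down the by-spec relationship rather than the concrete values: for JS symbols and bigints, `hashCode()` must agree with `System.identityHashCode`. A minimal hedged check along the same lines:

import scala.scalajs.js

val sym = js.Symbol("foobar")
assert(sym.hashCode() == System.identityHashCode(sym))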
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.csv import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions._ import org.apache.spark.sql.types._ object CSVUtils { /** * Filter ignorable rows for CSV dataset (lines empty and starting with `comment`). * This is currently being used in CSV schema inference. */ def filterCommentAndEmpty(lines: Dataset[String], options: CSVOptions): Dataset[String] = { // Note that this was separately made by SPARK-18362. Logically, this should be the same // with the one below, `filterCommentAndEmpty` but execution path is different. One of them // might have to be removed in the near future if possible. import lines.sqlContext.implicits._ val nonEmptyLines = lines.filter(length(trim($"value")) > 0) if (options.isCommentSet) { nonEmptyLines.filter(!$"value".startsWith(options.comment.toString)) } else { nonEmptyLines } } /** * Filter ignorable rows for CSV iterator (lines empty and starting with `comment`). * This is currently being used in CSV reading path and CSV schema inference. */ def filterCommentAndEmpty(iter: Iterator[String], options: CSVOptions): Iterator[String] = { iter.filter { line => line.trim.nonEmpty && !line.startsWith(options.comment.toString) } } /** * Skip the given first line so that only data can remain in a dataset. * This is similar with `dropHeaderLine` below and currently being used in CSV schema inference. */ def filterHeaderLine( iter: Iterator[String], firstLine: String, options: CSVOptions): Iterator[String] = { // Note that unlike actual CSV reading path, it simply filters the given first line. Therefore, // this skips the line same with the header if exists. One of them might have to be removed // in the near future if possible. if (options.headerFlag) { iter.filterNot(_ == firstLine) } else { iter } } /** * Drop header line so that only data can remain. * This is similar with `filterHeaderLine` above and currently being used in CSV reading path. */ def dropHeaderLine(iter: Iterator[String], options: CSVOptions): Iterator[String] = { val nonEmptyLines = if (options.isCommentSet) { val commentPrefix = options.comment.toString iter.dropWhile { line => line.trim.isEmpty || line.trim.startsWith(commentPrefix) } } else { iter.dropWhile(_.trim.isEmpty) } if (nonEmptyLines.hasNext) nonEmptyLines.drop(1) iter } /** * Helper method that converts string representation of a character to actual character. * It handles some Java escaped strings and throws exception if given string is longer than one * character. 
*/ @throws[IllegalArgumentException] def toChar(str: String): Char = { if (str.charAt(0) == '\\') { str.charAt(1) match { case 't' => '\t' case 'r' => '\r' case 'b' => '\b' case 'f' => '\f' case '\"' => '\"' // In case user changes quote char and uses \" as delimiter in options case '\'' => '\'' case 'u' if str == """\u0000""" => '\u0000' case _ => throw new IllegalArgumentException(s"Unsupported special character for delimiter: $str") } } else if (str.length == 1) { str.charAt(0) } else { throw new IllegalArgumentException(s"Delimiter cannot be more than one character: $str") } } /** * Verify if the schema is supported in CSV datasource. */ def verifySchema(schema: StructType): Unit = { def verifyType(dataType: DataType): Unit = dataType match { case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType | BooleanType | _: DecimalType | TimestampType | DateType | StringType => case udt: UserDefinedType[_] => verifyType(udt.sqlType) case _ => throw new UnsupportedOperationException( s"CSV data source does not support ${dataType.simpleString} data type.") } schema.foreach(field => verifyType(field.dataType)) } }
wangyixiaohuihui/spark2-annotation
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVUtils.scala
Scala
apache-2.0
5070
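`toChar` above accepts a single character or a short list of Java-style escapes; anything longer is rejected. Note that it assumes a non-empty input: an empty string (or a lone backslash) would throw a `StringIndexOutOfBoundsException` before the `IllegalArgumentException` checks apply. A hedged usage sketch:

CSVUtils.toChar(",")     // ',' : a single character passes through
CSVUtils.toChar("\\t")   // '\t': the two-character string backslash-t is unescaped
// CSVUtils.toChar(";;") // would throw IllegalArgumentException: more than one character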
class Person(name: String) {
  val Array(firstName, lastName) = name.split(' ')
}

val person = new Person("Samurai Jack")

println("First name: " + person.firstName)
println("Last name: " + person.lastName)
demiazz/scala-impatient
chapter-05/exercise-07/main.scala
Scala
unlicense
208
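The `val Array(firstName, lastName) = ...` line above destructures eagerly in the constructor, so any name that does not split into exactly two tokens fails at construction time:

new Person("Samurai Jack")  // ok: firstName = "Samurai", lastName = "Jack"
new Person("Madonna")       // throws scala.MatchError: the pattern needs exactly two tokens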
/* * Copyright 2020 ACINQ SAS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.acinq.eclair.payment.relay import akka.actor.testkit.typed.scaladsl.{ScalaTestWithActorTestKit, TestProbe} import akka.actor.typed import akka.actor.typed.eventstream.EventStream import akka.actor.typed.scaladsl.adapter.TypedActorRefOps import com.typesafe.config.ConfigFactory import fr.acinq.bitcoin.Crypto.PublicKey import fr.acinq.bitcoin.{Block, ByteVector32, ByteVector64, Crypto, Satoshi, SatoshiLong} import fr.acinq.eclair.TestConstants.emptyOnionPacket import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ import fr.acinq.eclair.payment.IncomingPaymentPacket.ChannelRelayPacket import fr.acinq.eclair.payment.relay.ChannelRelayer._ import fr.acinq.eclair.payment.{ChannelPaymentRelayed, IncomingPaymentPacket, PaymentPacketSpec} import fr.acinq.eclair.router.Announcements import fr.acinq.eclair.wire.protocol.PaymentOnion.{ChannelRelayPayload, ChannelRelayTlvPayload, RelayLegacyPayload} import fr.acinq.eclair.wire.protocol._ import fr.acinq.eclair.{CltvExpiry, NodeParams, TestConstants, randomBytes32, _} import org.scalatest.Outcome import org.scalatest.funsuite.FixtureAnyFunSuiteLike import scodec.bits.HexStringSyntax class ChannelRelayerSpec extends ScalaTestWithActorTestKit(ConfigFactory.load("application")) with FixtureAnyFunSuiteLike { import ChannelRelayerSpec._ case class FixtureParam(nodeParams: NodeParams, channelRelayer: typed.ActorRef[ChannelRelayer.Command], register: TestProbe[Any]) override def withFixture(test: OneArgTest): Outcome = { // we are node B in the route A -> B -> C -> .... val nodeParams = TestConstants.Bob.nodeParams val register = TestProbe[Any]("register") val channelRelayer = testKit.spawn(ChannelRelayer.apply(nodeParams, register.ref.toClassic)) try { withFixture(test.toNoArgTest(FixtureParam(nodeParams, channelRelayer, register))) } finally { testKit.stop(channelRelayer) } } def expectFwdFail(register: TestProbe[Any], channelId: ByteVector32, cmd: channel.Command): Register.Forward[channel.Command] = { val fwd = register.expectMessageType[Register.Forward[channel.Command]] assert(fwd.channelId === channelId) assert(fwd.message === cmd) fwd } def expectFwdAdd(register: TestProbe[Any], shortChannelId: ShortChannelId, outAmount: MilliSatoshi, outExpiry: CltvExpiry): Register.ForwardShortId[CMD_ADD_HTLC] = { val fwd = register.expectMessageType[Register.ForwardShortId[CMD_ADD_HTLC]] assert(fwd.shortChannelId === shortChannelId) assert(fwd.message.amount === outAmount) assert(fwd.message.cltvExpiry === outExpiry) assert(fwd.message.origin.isInstanceOf[Origin.ChannelRelayedHot]) val o = fwd.message.origin.asInstanceOf[Origin.ChannelRelayedHot] assert(o.amountOut === outAmount) fwd } test("relay htlc-add") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) channelRelayer ! 
WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) } test("relay an htlc-add with onion tlv payload") { f => import f._ import fr.acinq.eclair.wire.protocol.OnionPaymentPayloadTlv._ val payload = ChannelRelayTlvPayload(TlvStream[OnionPaymentPayloadTlv](AmountToForward(outgoingAmount), OutgoingCltv(outgoingExpiry), OutgoingChannelId(shortId1))) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) } test("relay an htlc-add with retries") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) // we tell the relayer about the first channel val u1 = createLocalUpdate(shortId1) channelRelayer ! WrappedLocalChannelUpdate(u1) // this is another channel, with less balance (it will be preferred) val u2 = createLocalUpdate(shortId2, 8000000 msat) channelRelayer ! WrappedLocalChannelUpdate(u2) channelRelayer ! Relay(r) // first try val fwd1 = expectFwdAdd(register, shortId2, outgoingAmount, outgoingExpiry) // channel returns an error fwd1.message.replyTo ! RES_ADD_FAILED(fwd1.message, HtlcValueTooHighInFlight(channelIds(shortId2), UInt64(1000000000L), 1516977616L msat), Some(u2.channelUpdate)) // second try val fwd2 = expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) // failure again fwd1.message.replyTo ! RES_ADD_FAILED(fwd2.message, HtlcValueTooHighInFlight(channelIds(shortId1), UInt64(1000000000L), 1516977616L msat), Some(u1.channelUpdate)) // the relayer should give up expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(TemporaryNodeFailure), commit = true)) } test("fail to relay an htlc-add when we have no channel_update for the next channel") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) channelRelayer ! Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(UnknownNextPeer), commit = true)) } test("fail to relay an htlc-add when register returns an error") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) val fwd = expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) fwd.replyTo ! Register.ForwardShortIdFailure(fwd) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(UnknownNextPeer), commit = true)) } test("fail to relay an htlc-add when the channel is advertised as unusable (down)") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) val d = LocalChannelDown(null, channelId = channelIds(shortId1), shortId1, outgoingNodeId) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! WrappedLocalChannelDown(d) channelRelayer ! 
Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(UnknownNextPeer), commit = true)) } test("fail to relay an htlc-add (channel disabled)") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1, enabled = false) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(ChannelDisabled(u.channelUpdate.messageFlags, u.channelUpdate.channelFlags, u.channelUpdate)), commit = true)) } test("fail to relay an htlc-add (amount below minimum)") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1, htlcMinimum = outgoingAmount + 1.msat) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(AmountBelowMinimum(outgoingAmount, u.channelUpdate)), commit = true)) } test("relay an htlc-add (expiry larger than our requirements)") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val u = createLocalUpdate(shortId1) val r = createValidIncomingPacket(1100000 msat, outgoingExpiry + u.channelUpdate.cltvExpiryDelta + CltvExpiryDelta(1), payload) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdAdd(register, shortId1, payload.amountToForward, payload.outgoingCltv).message } test("fail to relay an htlc-add (expiry too small)") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val u = createLocalUpdate(shortId1) val r = createValidIncomingPacket(1100000 msat, outgoingExpiry + u.channelUpdate.cltvExpiryDelta - CltvExpiryDelta(1), payload) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(IncorrectCltvExpiry(payload.outgoingCltv, u.channelUpdate)), commit = true)) } test("fail to relay an htlc-add (fee insufficient)") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(outgoingAmount + 1.msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(FeeInsufficient(r.add.amountMsat, u.channelUpdate)), commit = true)) } test("relay an htlc-add that would fail (fee insufficient) with a recent channel update but succeed with the previous update") { f => import f._ val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(outgoingAmount + 1.msat, CltvExpiry(400100), payload) val u1 = createLocalUpdate(shortId1, timestamp = TimestampSecond.now(), feeBaseMsat = 1 msat, feeProportionalMillionths = 0) channelRelayer ! WrappedLocalChannelUpdate(u1) channelRelayer ! Relay(r) // relay succeeds with current channel update (u1) with lower fees expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) val u2 = createLocalUpdate(shortId1, timestamp = TimestampSecond.now() - 530) channelRelayer ! WrappedLocalChannelUpdate(u2) channelRelayer ! 
Relay(r) // relay succeeds because the current update (u2) with higher fees occurred less than 10 minutes ago expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) val u3 = createLocalUpdate(shortId1, timestamp = TimestampSecond.now() - 601) channelRelayer ! WrappedLocalChannelUpdate(u1) channelRelayer ! WrappedLocalChannelUpdate(u3) channelRelayer ! Relay(r) // relay fails because the current update (u3) with higher fees occurred more than 10 minutes ago expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(FeeInsufficient(r.add.amountMsat, u3.channelUpdate)), commit = true)) } test("fail to relay an htlc-add (local error)") { f => import f._ val channelId1 = channelIds(shortId1) val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) val u_disabled = createLocalUpdate(shortId1, enabled = false) case class TestCase(exc: ChannelException, update: ChannelUpdate, failure: FailureMessage) val testCases = Seq( TestCase(ExpiryTooSmall(channelId1, CltvExpiry(100), CltvExpiry(0), BlockHeight(0)), u.channelUpdate, ExpiryTooSoon(u.channelUpdate)), TestCase(ExpiryTooBig(channelId1, CltvExpiry(100), CltvExpiry(200), BlockHeight(0)), u.channelUpdate, ExpiryTooFar), TestCase(InsufficientFunds(channelId1, payload.amountToForward, 100 sat, 0 sat, 0 sat), u.channelUpdate, TemporaryChannelFailure(u.channelUpdate)), TestCase(FeerateTooDifferent(channelId1, FeeratePerKw(1000 sat), FeeratePerKw(300 sat)), u.channelUpdate, TemporaryChannelFailure(u.channelUpdate)), TestCase(ChannelUnavailable(channelId1), u_disabled.channelUpdate, ChannelDisabled(u_disabled.channelUpdate.messageFlags, u_disabled.channelUpdate.channelFlags, u_disabled.channelUpdate)) ) testCases.foreach { testCase => channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) val fwd = expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) fwd.message.replyTo ! 
RES_ADD_FAILED(fwd.message, testCase.exc, Some(testCase.update)) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(testCase.failure), commit = true)) } } test("select preferred channels") { f => import f._ /** This is just a simplified helper function with random values for fields we are not using here */ def dummyLocalUpdate(shortChannelId: ShortChannelId, remoteNodeId: PublicKey, availableBalanceForSend: MilliSatoshi, capacity: Satoshi) = { val channelId = randomBytes32() val update = Announcements.makeChannelUpdate(Block.RegtestGenesisBlock.hash, randomKey(), remoteNodeId, shortChannelId, CltvExpiryDelta(10), 100 msat, 1000 msat, 100, capacity.toMilliSatoshi) val commitments = PaymentPacketSpec.makeCommitments(ByteVector32.Zeroes, availableBalanceForSend, testCapacity = capacity) LocalChannelUpdate(null, channelId, shortChannelId, remoteNodeId, None, update, commitments) } val (a, b) = (randomKey().publicKey, randomKey().publicKey) val channelUpdates = Map( ShortChannelId(11111) -> dummyLocalUpdate(ShortChannelId(11111), a, 100000000 msat, 200000 sat), ShortChannelId(12345) -> dummyLocalUpdate(ShortChannelId(12345), a, 10000000 msat, 200000 sat), ShortChannelId(22222) -> dummyLocalUpdate(ShortChannelId(22222), a, 10000000 msat, 100000 sat), ShortChannelId(22223) -> dummyLocalUpdate(ShortChannelId(22223), a, 9000000 msat, 50000 sat), ShortChannelId(33333) -> dummyLocalUpdate(ShortChannelId(33333), a, 100000 msat, 50000 sat), ShortChannelId(44444) -> dummyLocalUpdate(ShortChannelId(44444), b, 1000000 msat, 10000 sat), ) channelUpdates.values.foreach(u => channelRelayer ! WrappedLocalChannelUpdate(u)) { val payload = RelayLegacyPayload(ShortChannelId(12345), 998900 msat, CltvExpiry(60)) val r = createValidIncomingPacket(1000000 msat, CltvExpiry(70), payload) channelRelayer ! Relay(r) // select the channel to the same node, with the lowest capacity and balance but still high enough to handle the payment val cmd1 = expectFwdAdd(register, ShortChannelId(22223), payload.amountToForward, payload.outgoingCltv).message cmd1.replyTo ! RES_ADD_FAILED(cmd1, ChannelUnavailable(randomBytes32()), None) // select 2nd-to-best channel: higher capacity and balance val cmd2 = expectFwdAdd(register, ShortChannelId(22222), payload.amountToForward, payload.outgoingCltv).message cmd2.replyTo ! RES_ADD_FAILED(cmd2, TooManyAcceptedHtlcs(randomBytes32(), 42), Some(channelUpdates(ShortChannelId(22222)).channelUpdate)) // select 3rd-to-best channel: same balance but higher capacity val cmd3 = expectFwdAdd(register, ShortChannelId(12345), payload.amountToForward, payload.outgoingCltv).message cmd3.replyTo ! RES_ADD_FAILED(cmd3, TooManyAcceptedHtlcs(randomBytes32(), 42), Some(channelUpdates(ShortChannelId(12345)).channelUpdate)) // select 4th-to-best channel: same capacity but higher balance val cmd4 = expectFwdAdd(register, ShortChannelId(11111), payload.amountToForward, payload.outgoingCltv).message cmd4.replyTo ! 
RES_ADD_FAILED(cmd4, HtlcValueTooHighInFlight(randomBytes32(), UInt64(100000000), 100000000 msat), Some(channelUpdates(ShortChannelId(11111)).channelUpdate))
      // all the suitable channels have been tried
      expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(TemporaryChannelFailure(channelUpdates(ShortChannelId(12345)).channelUpdate)), commit = true))
    }
    {
      // higher amount payment (we have to increase the incoming htlc amount for the fees to be sufficient)
      val payload = RelayLegacyPayload(ShortChannelId(12345), 50000000 msat, CltvExpiry(60))
      val r = createValidIncomingPacket(60000000 msat, CltvExpiry(70), payload)
      channelRelayer ! Relay(r)
      expectFwdAdd(register, ShortChannelId(11111), payload.amountToForward, payload.outgoingCltv).message
    }
    {
      // lower amount payment
      val payload = RelayLegacyPayload(ShortChannelId(12345), 1000 msat, CltvExpiry(60))
      val r = createValidIncomingPacket(60000000 msat, CltvExpiry(70), payload)
      channelRelayer ! Relay(r)
      expectFwdAdd(register, ShortChannelId(33333), payload.amountToForward, payload.outgoingCltv).message
    }
    {
      // payment too high, no suitable channel found, we keep the requested one
      val payload = RelayLegacyPayload(ShortChannelId(12345), 1000000000 msat, CltvExpiry(60))
      val r = createValidIncomingPacket(1010000000 msat, CltvExpiry(70), payload)
      channelRelayer ! Relay(r)
      expectFwdAdd(register, ShortChannelId(12345), payload.amountToForward, payload.outgoingCltv).message
    }
    {
      // cltv expiry larger than our requirements
      val payload = RelayLegacyPayload(ShortChannelId(12345), 998900 msat, CltvExpiry(50))
      val r = createValidIncomingPacket(1000000 msat, CltvExpiry(70), payload)
      channelRelayer ! Relay(r)
      expectFwdAdd(register, ShortChannelId(22223), payload.amountToForward, payload.outgoingCltv).message
    }
    {
      // cltv expiry too small, no suitable channel found
      val payload = RelayLegacyPayload(ShortChannelId(12345), 998900 msat, CltvExpiry(61))
      val r = createValidIncomingPacket(1000000 msat, CltvExpiry(70), payload)
      channelRelayer !
Relay(r) expectFwdFail(register, r.add.channelId, CMD_FAIL_HTLC(r.add.id, Right(IncorrectCltvExpiry(CltvExpiry(61), channelUpdates(ShortChannelId(12345)).channelUpdate)), commit = true)) } } test("settlement failure") { f => import f._ val channelId1 = channelIds(shortId1) val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) val u_disabled = createLocalUpdate(shortId1, enabled = false) val downstream_htlc = UpdateAddHtlc(channelId1, 7, outgoingAmount, paymentHash, outgoingExpiry, emptyOnionPacket) case class TestCase(result: HtlcResult, cmd: channel.HtlcSettlementCommand) val testCases = Seq( TestCase(HtlcResult.RemoteFail(UpdateFailHtlc(channelId1, downstream_htlc.id, hex"deadbeef")), CMD_FAIL_HTLC(r.add.id, Left(hex"deadbeef"), commit = true)), TestCase(HtlcResult.RemoteFailMalformed(UpdateFailMalformedHtlc(channelId1, downstream_htlc.id, ByteVector32.One, FailureMessageCodecs.BADONION)), CMD_FAIL_MALFORMED_HTLC(r.add.id, ByteVector32.One, FailureMessageCodecs.BADONION, commit = true)), TestCase(HtlcResult.OnChainFail(HtlcOverriddenByLocalCommit(channelId1, downstream_htlc)), CMD_FAIL_HTLC(r.add.id, Right(PermanentChannelFailure), commit = true)), TestCase(HtlcResult.DisconnectedBeforeSigned(u_disabled.channelUpdate), CMD_FAIL_HTLC(r.add.id, Right(TemporaryChannelFailure(u_disabled.channelUpdate)), commit = true)), TestCase(HtlcResult.ChannelFailureBeforeSigned, CMD_FAIL_HTLC(r.add.id, Right(PermanentChannelFailure), commit = true)) ) testCases.foreach { testCase => channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) val fwd = expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) fwd.message.replyTo ! RES_SUCCESS(fwd.message, channelId1) fwd.message.origin.replyTo ! RES_ADD_SETTLED(fwd.message.origin, downstream_htlc, testCase.result) expectFwdFail(register, r.add.channelId, testCase.cmd) } } test("settlement success") { f => import f._ val eventListener = TestProbe[ChannelPaymentRelayed]() system.eventStream ! EventStream.Subscribe(eventListener.ref) val channelId1 = channelIds(shortId1) val payload = RelayLegacyPayload(shortId1, outgoingAmount, outgoingExpiry) val r = createValidIncomingPacket(1100000 msat, CltvExpiry(400100), payload) val u = createLocalUpdate(shortId1) val downstream_htlc = UpdateAddHtlc(channelId1, 7, outgoingAmount, paymentHash, outgoingExpiry, emptyOnionPacket) case class TestCase(result: HtlcResult) val testCases = Seq( TestCase(HtlcResult.RemoteFulfill(UpdateFulfillHtlc(channelId1, downstream_htlc.id, paymentPreimage))), TestCase(HtlcResult.OnChainFulfill(paymentPreimage)) ) testCases.foreach { testCase => channelRelayer ! WrappedLocalChannelUpdate(u) channelRelayer ! Relay(r) val fwd1 = expectFwdAdd(register, shortId1, outgoingAmount, outgoingExpiry) fwd1.message.replyTo ! RES_SUCCESS(fwd1.message, channelId1) fwd1.message.origin.replyTo ! 
RES_ADD_SETTLED(fwd1.message.origin, downstream_htlc, testCase.result) val fwd2 = register.expectMessageType[Register.Forward[CMD_FULFILL_HTLC]] assert(fwd2.channelId === r.add.channelId) assert(fwd2.message.id === r.add.id) assert(fwd2.message.r === paymentPreimage) val paymentRelayed = eventListener.expectMessageType[ChannelPaymentRelayed] assert(paymentRelayed.copy(timestamp = 0 unixms) === ChannelPaymentRelayed(r.add.amountMsat, r.payload.amountToForward, r.add.paymentHash, r.add.channelId, channelId1, timestamp = 0 unixms)) } } test("get outgoing channels") { f => import PaymentPacketSpec._ import f._ val channelId_ab = randomBytes32() val channelId_bc = randomBytes32() val a = PaymentPacketSpec.a val sender = TestProbe[Relayer.OutgoingChannels]() def getOutgoingChannels(enabledOnly: Boolean): Seq[Relayer.OutgoingChannel] = { channelRelayer ! GetOutgoingChannels(sender.ref.toClassic, Relayer.GetOutgoingChannels(enabledOnly)) val Relayer.OutgoingChannels(channels) = sender.expectMessageType[Relayer.OutgoingChannels] channels } channelRelayer ! WrappedLocalChannelUpdate(LocalChannelUpdate(null, channelId_ab, channelUpdate_ab.shortChannelId, a, None, channelUpdate_ab, makeCommitments(channelId_ab, -2000 msat, 300000 msat))) channelRelayer ! WrappedLocalChannelUpdate(LocalChannelUpdate(null, channelId_bc, channelUpdate_bc.shortChannelId, c, None, channelUpdate_bc, makeCommitments(channelId_bc, 400000 msat, -5000 msat))) val channels1 = getOutgoingChannels(true) assert(channels1.size === 2) assert(channels1.head.channelUpdate === channelUpdate_ab) assert(channels1.head.toUsableBalance === Relayer.UsableBalance(a, channelUpdate_ab.shortChannelId, 0 msat, 300000 msat, isPublic = false)) assert(channels1.last.channelUpdate === channelUpdate_bc) assert(channels1.last.toUsableBalance === Relayer.UsableBalance(c, channelUpdate_bc.shortChannelId, 400000 msat, 0 msat, isPublic = false)) channelRelayer ! WrappedAvailableBalanceChanged(AvailableBalanceChanged(null, channelId_bc, channelUpdate_bc.shortChannelId, makeCommitments(channelId_bc, 200000 msat, 500000 msat))) val channels2 = getOutgoingChannels(true) assert(channels2.last.commitments.availableBalanceForReceive === 500000.msat && channels2.last.commitments.availableBalanceForSend === 200000.msat) channelRelayer ! WrappedAvailableBalanceChanged(AvailableBalanceChanged(null, channelId_ab, channelUpdate_ab.shortChannelId, makeCommitments(channelId_ab, 100000 msat, 200000 msat))) channelRelayer ! WrappedLocalChannelDown(LocalChannelDown(null, channelId_bc, channelUpdate_bc.shortChannelId, c)) val channels3 = getOutgoingChannels(true) assert(channels3.size === 1 && channels3.head.commitments.availableBalanceForSend === 100000.msat) channelRelayer ! WrappedLocalChannelUpdate(LocalChannelUpdate(null, channelId_ab, channelUpdate_ab.shortChannelId, a, None, channelUpdate_ab.copy(channelFlags = ChannelUpdate.ChannelFlags(isEnabled = false, isNode1 = true)), makeCommitments(channelId_ab, 100000 msat, 200000 msat))) val channels4 = getOutgoingChannels(true) assert(channels4.isEmpty) val channels5 = getOutgoingChannels(false) assert(channels5.size === 1) channelRelayer ! WrappedLocalChannelUpdate(LocalChannelUpdate(null, channelId_ab, channelUpdate_ab.shortChannelId, a, None, channelUpdate_ab, makeCommitments(channelId_ab, 100000 msat, 200000 msat))) val channels6 = getOutgoingChannels(true) assert(channels6.size === 1) // Simulate a chain re-org that changes the shortChannelId: channelRelayer ! 
WrappedShortChannelIdAssigned(ShortChannelIdAssigned(null, channelId_ab, ShortChannelId(42), Some(channelUpdate_ab.shortChannelId))) val channels7 = getOutgoingChannels(true) assert(channels7.isEmpty) // We should receive the updated channel update containing the new shortChannelId: channelRelayer ! WrappedLocalChannelUpdate(LocalChannelUpdate(null, channelId_ab, ShortChannelId(42), a, None, channelUpdate_ab.copy(shortChannelId = ShortChannelId(42)), makeCommitments(channelId_ab, 100000 msat, 200000 msat))) val channels8 = getOutgoingChannels(true) assert(channels8.size === 1) assert(channels8.head.channelUpdate.shortChannelId === ShortChannelId(42)) } } object ChannelRelayerSpec { val paymentPreimage: ByteVector32 = randomBytes32() val paymentHash: ByteVector32 = Crypto.sha256(paymentPreimage) val outgoingAmount: MilliSatoshi = 1000000 msat val outgoingExpiry: CltvExpiry = CltvExpiry(400000) val outgoingNodeId: PublicKey = randomKey().publicKey val shortId1: ShortChannelId = ShortChannelId(111111) val shortId2: ShortChannelId = ShortChannelId(222222) val channelIds = Map( shortId1 -> randomBytes32(), shortId2 -> randomBytes32() ) def createValidIncomingPacket(amountIn: MilliSatoshi, expiryIn: CltvExpiry, payload: ChannelRelayPayload): IncomingPaymentPacket.ChannelRelayPacket = { val add_ab = UpdateAddHtlc(channelId = randomBytes32(), id = 123456, amountIn, paymentHash, expiryIn, emptyOnionPacket) ChannelRelayPacket(add_ab, payload, emptyOnionPacket) } def createLocalUpdate(shortChannelId: ShortChannelId, balance: MilliSatoshi = 10000000 msat, capacity: Satoshi = 500000 sat, enabled: Boolean = true, htlcMinimum: MilliSatoshi = 0 msat, timestamp: TimestampSecond = 0 unixsec, feeBaseMsat: MilliSatoshi = 1000 msat, feeProportionalMillionths: Long = 100): LocalChannelUpdate = { val channelId = channelIds(shortChannelId) val update = ChannelUpdate(ByteVector64(randomBytes(64)), Block.RegtestGenesisBlock.hash, shortChannelId, timestamp, ChannelUpdate.ChannelFlags(isNode1 = true, isEnabled = enabled), CltvExpiryDelta(100), htlcMinimum, feeBaseMsat, feeProportionalMillionths, Some(capacity.toMilliSatoshi)) val commitments = PaymentPacketSpec.makeCommitments(channelId, testAvailableBalanceForSend = balance, testCapacity = capacity) LocalChannelUpdate(null, channelId, shortChannelId, outgoingNodeId, None, update, commitments) } }
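
// A minimal, hypothetical sketch (not eclair's actual relayer code) of the preference
// ordering that the "select preferred channels" test above exercises: among the channels
// to the requested node that can carry the payment, prefer the lowest capacity first and
// the lowest balance second, falling back to the next candidate on failure.
object ChannelPreferenceSketch {
  final case class CandidateChannel(shortChannelId: Long, balanceMsat: Long, capacitySat: Long)

  /** Candidates able to carry `amountMsat`, best first (smallest capacity, then smallest balance). */
  def preferredOrder(channels: Seq[CandidateChannel], amountMsat: Long): Seq[CandidateChannel] =
    channels.filter(_.balanceMsat >= amountMsat).sortBy(c => (c.capacitySat, c.balanceMsat))
}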
ACINQ/eclair
eclair-core/src/test/scala/fr/acinq/eclair/payment/relay/ChannelRelayerSpec.scala
Scala
apache-2.0
27,852
package fpinscala
package parsing

import SliceableTypes._
import scala.util.matching.Regex

/*
This implementation is a bit trickier than the one in `Reference.scala`.
The main change is to add another piece of state to `ParseState`, an
`isSliced` flag, and an additional `Slice` constructor to `Result`.
If the `isSliced` flag is set, parsers avoid building a meaningful
result--see in particular the overridden implementations for `map`,
`map2`, and `many`.

This implementation runs up against some limitations of Scala's
type system--Scala does not appropriately refine type parameters when
pattern matching. Keep reading for more details on this.
*/
object SliceableTypes {

  /* A parser is a kind of state action that can fail.
   * This type is slightly fancier than the one discussed in the chapter,
   * to support efficient slicing. If the parser is surrounded by
   * a `slice` combinator, the `isSliced` field of `ParseState` will
   * be `true`, and we return a `Slice` output.
   */
  type Parser[+A] = ParseState => Result[A]

  /** `isSliced` indicates if the current parser is surrounded by a
    * `slice` combinator. This lets us avoid building up values that
    * will end up getting thrown away.
    *
    * There are several convenience functions on `ParseState` to make
    * implementing some of the combinators easier.
    */
  case class ParseState(loc: Location, isSliced: Boolean) {
    // some convenience functions
    def advanceBy(numChars: Int): ParseState =
      copy(loc = loc.copy(offset = loc.offset + numChars))
    def input: String = loc.input.substring(loc.offset)
    def unslice = copy(isSliced = false)
    def reslice(s: ParseState) = copy(isSliced = s.isSliced)
    def slice(n: Int) = loc.input.substring(loc.offset, loc.offset + n)
  }

  /** The result of a parse--a `Parser[A]` returns a `Result[A]`.
    *
    * There are three cases:
    *   - Success(a,n): a is the value, n is # of consumed characters
    *   - Slice(n): a successful slice; n is the # of consumed characters
    *   - Failure(e,isCommitted): a failing parse
    *
    * As usual, we define some helper functions on `Result`.
    * Defining functions on `Result` gives us better type
    * information--there are cases (see `map` and `map2` below) where
    * Scala will not appropriately refine type information when
    * pattern matching on `Result`.
    */
  sealed trait Result[+A] {
    def extract(input: String): Either[ParseError,A]
    def slice: Result[String]
    /* Used by `attempt`. */
    def uncommit: Result[A] = this match {
      case Failure(e,true) => Failure(e,false)
      case _ => this
    }
    /* Used by `flatMap` */
    def addCommit(isCommitted: Boolean): Result[A] = this match {
      case Failure(e,c) => Failure(e, c || isCommitted)
      case _ => this
    }
    /* Used by `scope`, `label`. */
    def mapError(f: ParseError => ParseError): Result[A] = this match {
      case Failure(e,c) => Failure(f(e),c)
      case _ => this
    }
    def advanceSuccess(n: Int): Result[A]
  }

  case class Slice(length: Int) extends Result[String] {
    def extract(s: String) = Right(s.substring(0,length))
    def slice = this
    def advanceSuccess(n: Int) = Slice(length+n)
  }

  case class Success[+A](get: A, length: Int) extends Result[A] {
    def extract(s: String) = Right(get)
    def slice = Slice(length)
    def advanceSuccess(n: Int) = Success(get, length+n)
  }

  case class Failure(get: ParseError, isCommitted: Boolean) extends Result[Nothing] {
    def extract(s: String) = Left(get)
    def slice = this
    def advanceSuccess(n: Int) = this
  }

  /** Returns -1 if s, starting at offset, begins with s2; otherwise returns
    * the first index where the two strings differ. If s2 is longer than the
    * remaining input, returns s.length - offset.
*/ def firstNonmatchingIndex(s: String, s2: String, offset: Int): Int = { var i = 0 while (i+offset < s.length && i < s2.length) { if (s.charAt(i+offset) != s2.charAt(i)) return i i += 1 } if (s.length-offset >= s2.length) -1 else s.length-offset } } object Sliceable extends Parsers[Parser] { def run[A](p: Parser[A])(s: String): Either[ParseError,A] = { val s0 = ParseState(Location(s), false) p(s0).extract(s) } // consume no characters and succeed with the given value def succeed[A](a: A): Parser[A] = s => Success(a, 0) def or[A](p: Parser[A], p2: => Parser[A]): Parser[A] = s => p(s) match { case Failure(e,false) => p2(s) case r => r // committed failure or success skips running `p2` } /* * `Result` is an example of a Generalized Algebraic Data Type (GADT), * which means that not all the data constructors of `Result` have * the same type. In particular, `Slice` _refines_ the `A` type * parameter to be `String`. If we pattern match on a `Result` * and obtain a `Slice`, we expect to be able to assume that `A` was * in fact `String` and use this type information elsewhere. * * Unfortunately, Scala doesn't quite support this. Let's look * at an example, `map`. */ /* Pattern matching on Slice should refine the type `A` to `String`, * and allow us to call `f(s.slice(n))`, since `f` accepts an * `A` which is known to be `String`. We resort to a cast here. */ override def map[A,B](p: Parser[A])(f: A => B): Parser[B] = s => p(s) match { case Success(a,n) => Success(f(a),n) case Slice(n) => Success(f(s.slice(n).asInstanceOf[A]),n) case f@Failure(_,_) => f } /* See this gist for more information, examples, and discussion * of Scala's GADT support: * https://gist.github.com/1369239 */ /* This implementation is rather delicate. Since we need an `A` * to generate the second parser, we need to run the first parser * 'unsliced', even if the `flatMap` is wrapped in a `slice` call. * Once we have the `A` and have generated the second parser to * run, we can 'reslice' the second parser. * * Note that this implementation is less efficient than it could * be in the case where the choice of the second parser does not * depend on the first (as in `map2`). In that case, we could * continue to run the first parser sliced. * * Again, note the cast needed. 
*/ def flatMap[A,B](f: Parser[A])(g: A => Parser[B]): Parser[B] = s => f(s.unslice) match { case Success(a,n) => g(a)(s.advanceBy(n).reslice(s)) .addCommit(n != 0) .advanceSuccess(n) case Slice(n) => g(s.slice(n).asInstanceOf[A])(s.advanceBy(n).reslice(s)) .advanceSuccess(n) case f@Failure(_,_) => f } // other functions are quite similar to impls in `Reference.scala` def string(w: String): Parser[String] = { val msg = "'" + w + "'" s => { val i = firstNonmatchingIndex(s.loc.input, w, s.loc.offset) if (i == -1) { // they matched if (s.isSliced) Slice(w.length) else Success(w, w.length) } else Failure(s.loc.advanceBy(i).toError(msg), i != 0) } } // note, regex matching is 'all-or-nothing' - failures are // uncommitted def regex(r: Regex): Parser[String] = { val msg = "regex " + r s => r.findPrefixOf(s.input) match { case None => Failure(s.loc.toError(msg), false) case Some(m) => if (s.isSliced) Slice(m.length) else Success(m,m.length) } } def scope[A](msg: String)(p: Parser[A]): Parser[A] = s => p(s).mapError(_.push(s.loc,msg)) def label[A](msg: String)(p: Parser[A]): Parser[A] = s => p(s).mapError(_.label(msg)) def fail[A](msg: String): Parser[A] = s => Failure(s.loc.toError(msg), true) def attempt[A](p: Parser[A]): Parser[A] = s => p(s).uncommit def slice[A](p: Parser[A]): Parser[String] = s => p(s.copy(isSliced = true)).slice /* As with `map`, we require casts in a few places. */ override def map2[A,B,C](p: Parser[A], p2: => Parser[B])(f: (A,B) => C): Parser[C] = s => p(s) match { case Success(a,n) => val s2 = s.advanceBy(n); p2(s2) match { case Success(b,m) => Success(f(a,b),n+m) case Slice(m) => Success(f(a,s2.slice(m).asInstanceOf[B]), n+m) case f@Failure(_,_) => f } case Slice(n) => val s2 = s.advanceBy(n); p2(s2) match { case Success(b,m) => Success(f(s.slice(n).asInstanceOf[A],b),n+m) case Slice(m) => if (s.isSliced) Slice(n+m).asInstanceOf[Result[C]] else Success(f(s.slice(n).asInstanceOf[A],s2.slice(m).asInstanceOf[B]), n+m) case f@Failure(_,_) => f } case f@Failure(_,_) => f } override def product[A,B](p: Parser[A], p2: => Parser[B]): Parser[(A,B)] = map2(p,p2)((_,_)) /* We provide an overridden version of `many` that accumulates * the list of results using a monolithic loop. This avoids * stack overflow errors. */ override def many[A](p: Parser[A]): Parser[List[A]] = s => { var nConsumed: Int = 0 if (s.isSliced) { def go(p: Parser[String], offset: Int): Result[String] = p(s.advanceBy(offset)) match { case f@Failure(e,true) => f case Failure(e,_) => Slice(offset) case Slice(n) => go(p, offset+n) case Success(_,_) => sys.error("sliced parser should not return success, only slice") } go(p.slice, 0).asInstanceOf[Result[List[A]]] } else { val buf = new collection.mutable.ListBuffer[A] def go(p: Parser[A], offset: Int): Result[List[A]] = { p(s.advanceBy(offset)) match { case Success(a,n) => buf += a; go(p, offset+n) case f@Failure(e,true) => f case Failure(e,_) => Success(buf.toList,offset) case Slice(n) => buf += s.input.substring(offset,offset+n). asInstanceOf[A] go(p, offset+n) } } go(p, 0) } } }
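
// Usage sketch for the slicing optimisation above, using only combinators defined in this
// file (`run`, `slice`, `many`, `regex`); the input string is a made-up example.
object SliceableUsageSketch {
  import Sliceable._
  // Because the parser is wrapped in `slice`, `many` accumulates only a character count
  // (a `Slice`) instead of building a List[String] that would be thrown away.
  def demo: Either[ParseError, String] =
    run(slice(many(regex("[0-9]".r))))("123abc") // expected: Right("123")
}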
damien-neveu/functional-programming-in-scala
answers/src/main/scala/fpinscala/parsing/instances/Sliceable.scala
Scala
mit
9,959
/* * Copyright 2011-2022 GatlingCorp (https://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.core.action import io.gatling.commons.util.Clock import io.gatling.core.session.{ Expression, Session } import io.gatling.core.stats.StatsEngine import io.gatling.core.util.NameGen private final class GroupStart(groupName: Expression[String], val statsEngine: StatsEngine, val clock: Clock, val next: Action) extends ExitableAction with NameGen { override val name: String = genName("groupStart") override def execute(session: Session): Unit = recover(session) { groupName(session).map { group => val newSession = session.enterGroup(group, clock.nowMillis) // [fl] // // // [fl] next ! newSession } } }
gatling/gatling
gatling-core/src/main/scala/io/gatling/core/action/GroupStart.scala
Scala
apache-2.0
1,303
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import scala.collection.immutable.{HashSet => HSet} import scala.collection.immutable.Queue import scala.collection.mutable.{LinkedHashMap => LHMap} import scala.collection.mutable.ArrayBuffer import org.apache.spark.sql.test.SharedSparkSession case class IntClass(value: Int) case class SeqClass(s: Seq[Int]) case class ListClass(l: List[Int]) case class QueueClass(q: Queue[Int]) case class MapClass(m: Map[Int, Int]) case class LHMapClass(m: LHMap[Int, Int]) case class ComplexClass(seq: SeqClass, list: ListClass, queue: QueueClass) case class ComplexMapClass(map: MapClass, lhmap: LHMapClass) case class InnerData(name: String, value: Int) case class NestedData(id: Int, param: Map[String, InnerData]) package object packageobject { case class PackageClass(value: Int) } class DatasetPrimitiveSuite extends QueryTest with SharedSparkSession { import testImplicits._ test("toDS") { val data = Seq(1, 2, 3, 4, 5, 6) checkDataset( data.toDS(), data: _*) } test("as case class / collect") { val ds = Seq(1, 2, 3).toDS().as[IntClass] checkDataset( ds, IntClass(1), IntClass(2), IntClass(3)) assert(ds.collect().head == IntClass(1)) } test("map") { val ds = Seq(1, 2, 3).toDS() checkDataset( ds.map(_ + 1), 2, 3, 4) } test("mapPrimitive") { val dsInt = Seq(1, 2, 3).toDS() checkDataset(dsInt.map(_ > 1), false, true, true) checkDataset(dsInt.map(_ + 1), 2, 3, 4) checkDataset(dsInt.map(_ + 8589934592L), 8589934593L, 8589934594L, 8589934595L) checkDataset(dsInt.map(_ + 1.1F), 2.1F, 3.1F, 4.1F) checkDataset(dsInt.map(_ + 1.23D), 2.23D, 3.23D, 4.23D) val dsLong = Seq(1L, 2L, 3L).toDS() checkDataset(dsLong.map(_ > 1), false, true, true) checkDataset(dsLong.map(e => (e + 1).toInt), 2, 3, 4) checkDataset(dsLong.map(_ + 8589934592L), 8589934593L, 8589934594L, 8589934595L) checkDataset(dsLong.map(_ + 1.1F), 2.1F, 3.1F, 4.1F) checkDataset(dsLong.map(_ + 1.23D), 2.23D, 3.23D, 4.23D) val dsFloat = Seq(1F, 2F, 3F).toDS() checkDataset(dsFloat.map(_ > 1), false, true, true) checkDataset(dsFloat.map(e => (e + 1).toInt), 2, 3, 4) checkDataset(dsFloat.map(e => (e + 123456L).toLong), 123457L, 123458L, 123459L) checkDataset(dsFloat.map(_ + 1.1F), 2.1F, 3.1F, 4.1F) checkDataset(dsFloat.map(_ + 1.23D), 2.23D, 3.23D, 4.23D) val dsDouble = Seq(1D, 2D, 3D).toDS() checkDataset(dsDouble.map(_ > 1), false, true, true) checkDataset(dsDouble.map(e => (e + 1).toInt), 2, 3, 4) checkDataset(dsDouble.map(e => (e + 8589934592L).toLong), 8589934593L, 8589934594L, 8589934595L) checkDataset(dsDouble.map(e => (e + 1.1F).toFloat), 2.1F, 3.1F, 4.1F) checkDataset(dsDouble.map(_ + 1.23D), 2.23D, 3.23D, 4.23D) val dsBoolean = Seq(true, false).toDS() checkDataset(dsBoolean.map(e => !e), false, true) } test("mapPrimitiveArray") { val dsInt = Seq(Array(1, 2), 
Array(3, 4)).toDS() checkDataset(dsInt.map(e => e), Array(1, 2), Array(3, 4)) checkDataset(dsInt.map(e => null: Array[Int]), null, null) val dsDouble = Seq(Array(1D, 2D), Array(3D, 4D)).toDS() checkDataset(dsDouble.map(e => e), Array(1D, 2D), Array(3D, 4D)) checkDataset(dsDouble.map(e => null: Array[Double]), null, null) } test("filter") { val ds = Seq(1, 2, 3, 4).toDS() checkDataset( ds.filter(_ % 2 == 0), 2, 4) } test("filterPrimitive") { val dsInt = Seq(1, 2, 3).toDS() checkDataset(dsInt.filter(_ > 1), 2, 3) val dsLong = Seq(1L, 2L, 3L).toDS() checkDataset(dsLong.filter(_ > 1), 2L, 3L) val dsFloat = Seq(1F, 2F, 3F).toDS() checkDataset(dsFloat.filter(_ > 1), 2F, 3F) val dsDouble = Seq(1D, 2D, 3D).toDS() checkDataset(dsDouble.filter(_ > 1), 2D, 3D) val dsBoolean = Seq(true, false).toDS() checkDataset(dsBoolean.filter(e => !e), false) } test("foreach") { val ds = Seq(1, 2, 3).toDS() val acc = sparkContext.longAccumulator ds.foreach(acc.add(_)) assert(acc.value == 6) } test("foreachPartition") { val ds = Seq(1, 2, 3).toDS() val acc = sparkContext.longAccumulator ds.foreachPartition((it: Iterator[Int]) => it.foreach(acc.add(_))) assert(acc.value == 6) } test("reduce") { val ds = Seq(1, 2, 3).toDS() assert(ds.reduce(_ + _) == 6) } test("groupBy function, keys") { val ds = Seq(1, 2, 3, 4, 5).toDS() val grouped = ds.groupByKey(_ % 2) checkDatasetUnorderly( grouped.keys, 0, 1) } test("groupBy function, map") { val ds = Seq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11).toDS() val grouped = ds.groupByKey(_ % 2) val agged = grouped.mapGroups { (g, iter) => val name = if (g == 0) "even" else "odd" (name, iter.size) } checkDatasetUnorderly( agged, ("even", 5), ("odd", 6)) } test("groupBy function, flatMap") { val ds = Seq("a", "b", "c", "xyz", "hello").toDS() val grouped = ds.groupByKey(_.length) val agged = grouped.flatMapGroups { (g, iter) => Iterator(g.toString, iter.mkString) } checkDatasetUnorderly( agged, "1", "abc", "3", "xyz", "5", "hello") } test("Arrays and Lists") { checkDataset(Seq(Seq(1)).toDS(), Seq(1)) checkDataset(Seq(Seq(1.toLong)).toDS(), Seq(1.toLong)) checkDataset(Seq(Seq(1.toDouble)).toDS(), Seq(1.toDouble)) checkDataset(Seq(Seq(1.toFloat)).toDS(), Seq(1.toFloat)) checkDataset(Seq(Seq(1.toByte)).toDS(), Seq(1.toByte)) checkDataset(Seq(Seq(1.toShort)).toDS(), Seq(1.toShort)) checkDataset(Seq(Seq(true)).toDS(), Seq(true)) checkDataset(Seq(Seq("test")).toDS(), Seq("test")) checkDataset(Seq(Seq(Tuple1(1))).toDS(), Seq(Tuple1(1))) checkDataset(Seq(Array(1)).toDS(), Array(1)) checkDataset(Seq(Array(1.toLong)).toDS(), Array(1.toLong)) checkDataset(Seq(Array(1.toDouble)).toDS(), Array(1.toDouble)) checkDataset(Seq(Array(1.toFloat)).toDS(), Array(1.toFloat)) checkDataset(Seq(Array(1.toByte)).toDS(), Array(1.toByte)) checkDataset(Seq(Array(1.toShort)).toDS(), Array(1.toShort)) checkDataset(Seq(Array(true)).toDS(), Array(true)) checkDataset(Seq(Array("test")).toDS(), Array("test")) checkDataset(Seq(Array(Tuple1(1))).toDS(), Array(Tuple1(1))) } test("arbitrary sequences") { checkDataset(Seq(Queue(1)).toDS(), Queue(1)) checkDataset(Seq(Queue(1.toLong)).toDS(), Queue(1.toLong)) checkDataset(Seq(Queue(1.toDouble)).toDS(), Queue(1.toDouble)) checkDataset(Seq(Queue(1.toFloat)).toDS(), Queue(1.toFloat)) checkDataset(Seq(Queue(1.toByte)).toDS(), Queue(1.toByte)) checkDataset(Seq(Queue(1.toShort)).toDS(), Queue(1.toShort)) checkDataset(Seq(Queue(true)).toDS(), Queue(true)) checkDataset(Seq(Queue("test")).toDS(), Queue("test")) checkDataset(Seq(Queue(Tuple1(1))).toDS(), Queue(Tuple1(1))) 
checkDataset(Seq(ArrayBuffer(1)).toDS(), ArrayBuffer(1)) checkDataset(Seq(ArrayBuffer(1.toLong)).toDS(), ArrayBuffer(1.toLong)) checkDataset(Seq(ArrayBuffer(1.toDouble)).toDS(), ArrayBuffer(1.toDouble)) checkDataset(Seq(ArrayBuffer(1.toFloat)).toDS(), ArrayBuffer(1.toFloat)) checkDataset(Seq(ArrayBuffer(1.toByte)).toDS(), ArrayBuffer(1.toByte)) checkDataset(Seq(ArrayBuffer(1.toShort)).toDS(), ArrayBuffer(1.toShort)) checkDataset(Seq(ArrayBuffer(true)).toDS(), ArrayBuffer(true)) checkDataset(Seq(ArrayBuffer("test")).toDS(), ArrayBuffer("test")) checkDataset(Seq(ArrayBuffer(Tuple1(1))).toDS(), ArrayBuffer(Tuple1(1))) } test("sequence and product combinations") { // Case classes checkDataset(Seq(SeqClass(Seq(1))).toDS(), SeqClass(Seq(1))) checkDataset(Seq(Seq(SeqClass(Seq(1)))).toDS(), Seq(SeqClass(Seq(1)))) checkDataset(Seq(List(SeqClass(Seq(1)))).toDS(), List(SeqClass(Seq(1)))) checkDataset(Seq(Queue(SeqClass(Seq(1)))).toDS(), Queue(SeqClass(Seq(1)))) checkDataset(Seq(ListClass(List(1))).toDS(), ListClass(List(1))) checkDataset(Seq(Seq(ListClass(List(1)))).toDS(), Seq(ListClass(List(1)))) checkDataset(Seq(List(ListClass(List(1)))).toDS(), List(ListClass(List(1)))) checkDataset(Seq(Queue(ListClass(List(1)))).toDS(), Queue(ListClass(List(1)))) checkDataset(Seq(QueueClass(Queue(1))).toDS(), QueueClass(Queue(1))) checkDataset(Seq(Seq(QueueClass(Queue(1)))).toDS(), Seq(QueueClass(Queue(1)))) checkDataset(Seq(List(QueueClass(Queue(1)))).toDS(), List(QueueClass(Queue(1)))) checkDataset(Seq(Queue(QueueClass(Queue(1)))).toDS(), Queue(QueueClass(Queue(1)))) val complex = ComplexClass(SeqClass(Seq(1)), ListClass(List(2)), QueueClass(Queue(3))) checkDataset(Seq(complex).toDS(), complex) checkDataset(Seq(Seq(complex)).toDS(), Seq(complex)) checkDataset(Seq(List(complex)).toDS(), List(complex)) checkDataset(Seq(Queue(complex)).toDS(), Queue(complex)) // Tuples checkDataset(Seq(Seq(1) -> Seq(2)).toDS(), Seq(1) -> Seq(2)) checkDataset(Seq(List(1) -> Queue(2)).toDS(), List(1) -> Queue(2)) checkDataset(Seq(List(Seq("test1") -> List(Queue("test2")))).toDS(), List(Seq("test1") -> List(Queue("test2")))) // Complex checkDataset(Seq(ListClass(List(1)) -> Queue("test" -> SeqClass(Seq(2)))).toDS(), ListClass(List(1)) -> Queue("test" -> SeqClass(Seq(2)))) } test("arbitrary maps") { checkDataset(Seq(Map(1 -> 2)).toDS(), Map(1 -> 2)) checkDataset(Seq(Map(1.toLong -> 2.toLong)).toDS(), Map(1.toLong -> 2.toLong)) checkDataset(Seq(Map(1.toDouble -> 2.toDouble)).toDS(), Map(1.toDouble -> 2.toDouble)) checkDataset(Seq(Map(1.toFloat -> 2.toFloat)).toDS(), Map(1.toFloat -> 2.toFloat)) checkDataset(Seq(Map(1.toByte -> 2.toByte)).toDS(), Map(1.toByte -> 2.toByte)) checkDataset(Seq(Map(1.toShort -> 2.toShort)).toDS(), Map(1.toShort -> 2.toShort)) checkDataset(Seq(Map(true -> false)).toDS(), Map(true -> false)) checkDataset(Seq(Map("test1" -> "test2")).toDS(), Map("test1" -> "test2")) checkDataset(Seq(Map(Tuple1(1) -> Tuple1(2))).toDS(), Map(Tuple1(1) -> Tuple1(2))) checkDataset(Seq(Map(1 -> Tuple1(2))).toDS(), Map(1 -> Tuple1(2))) checkDataset(Seq(Map("test" -> 2.toLong)).toDS(), Map("test" -> 2.toLong)) checkDataset(Seq(LHMap(1 -> 2)).toDS(), LHMap(1 -> 2)) checkDataset(Seq(LHMap(1.toLong -> 2.toLong)).toDS(), LHMap(1.toLong -> 2.toLong)) checkDataset(Seq(LHMap(1.toDouble -> 2.toDouble)).toDS(), LHMap(1.toDouble -> 2.toDouble)) checkDataset(Seq(LHMap(1.toFloat -> 2.toFloat)).toDS(), LHMap(1.toFloat -> 2.toFloat)) checkDataset(Seq(LHMap(1.toByte -> 2.toByte)).toDS(), LHMap(1.toByte -> 2.toByte)) checkDataset(Seq(LHMap(1.toShort 
-> 2.toShort)).toDS(), LHMap(1.toShort -> 2.toShort)) checkDataset(Seq(LHMap(true -> false)).toDS(), LHMap(true -> false)) checkDataset(Seq(LHMap("test1" -> "test2")).toDS(), LHMap("test1" -> "test2")) checkDataset(Seq(LHMap(Tuple1(1) -> Tuple1(2))).toDS(), LHMap(Tuple1(1) -> Tuple1(2))) checkDataset(Seq(LHMap(1 -> Tuple1(2))).toDS(), LHMap(1 -> Tuple1(2))) checkDataset(Seq(LHMap("test" -> 2.toLong)).toDS(), LHMap("test" -> 2.toLong)) } test("SPARK-25817: map and product combinations") { // Case classes checkDataset(Seq(MapClass(Map(1 -> 2))).toDS(), MapClass(Map(1 -> 2))) checkDataset(Seq(Map(1 -> MapClass(Map(2 -> 3)))).toDS(), Map(1 -> MapClass(Map(2 -> 3)))) checkDataset(Seq(Map(MapClass(Map(1 -> 2)) -> 3)).toDS(), Map(MapClass(Map(1 -> 2)) -> 3)) checkDataset(Seq(Map(MapClass(Map(1 -> 2)) -> MapClass(Map(3 -> 4)))).toDS(), Map(MapClass(Map(1 -> 2)) -> MapClass(Map(3 -> 4)))) checkDataset(Seq(LHMap(1 -> MapClass(Map(2 -> 3)))).toDS(), LHMap(1 -> MapClass(Map(2 -> 3)))) checkDataset(Seq(LHMap(MapClass(Map(1 -> 2)) -> 3)).toDS(), LHMap(MapClass(Map(1 -> 2)) -> 3)) checkDataset(Seq(LHMap(MapClass(Map(1 -> 2)) -> MapClass(Map(3 -> 4)))).toDS(), LHMap(MapClass(Map(1 -> 2)) -> MapClass(Map(3 -> 4)))) checkDataset(Seq(LHMapClass(LHMap(1 -> 2))).toDS(), LHMapClass(LHMap(1 -> 2))) checkDataset(Seq(Map(1 -> LHMapClass(LHMap(2 -> 3)))).toDS(), Map(1 -> LHMapClass(LHMap(2 -> 3)))) checkDataset(Seq(Map(LHMapClass(LHMap(1 -> 2)) -> 3)).toDS(), Map(LHMapClass(LHMap(1 -> 2)) -> 3)) checkDataset(Seq(Map(LHMapClass(LHMap(1 -> 2)) -> LHMapClass(LHMap(3 -> 4)))).toDS(), Map(LHMapClass(LHMap(1 -> 2)) -> LHMapClass(LHMap(3 -> 4)))) checkDataset(Seq(LHMap(1 -> LHMapClass(LHMap(2 -> 3)))).toDS(), LHMap(1 -> LHMapClass(LHMap(2 -> 3)))) checkDataset(Seq(LHMap(LHMapClass(LHMap(1 -> 2)) -> 3)).toDS(), LHMap(LHMapClass(LHMap(1 -> 2)) -> 3)) checkDataset(Seq(LHMap(LHMapClass(LHMap(1 -> 2)) -> LHMapClass(LHMap(3 -> 4)))).toDS(), LHMap(LHMapClass(LHMap(1 -> 2)) -> LHMapClass(LHMap(3 -> 4)))) val complex = ComplexMapClass(MapClass(Map(1 -> 2)), LHMapClass(LHMap(3 -> 4))) checkDataset(Seq(complex).toDS(), complex) checkDataset(Seq(Map(1 -> complex)).toDS(), Map(1 -> complex)) checkDataset(Seq(Map(complex -> 5)).toDS(), Map(complex -> 5)) checkDataset(Seq(Map(complex -> complex)).toDS(), Map(complex -> complex)) checkDataset(Seq(LHMap(1 -> complex)).toDS(), LHMap(1 -> complex)) checkDataset(Seq(LHMap(complex -> 5)).toDS(), LHMap(complex -> 5)) checkDataset(Seq(LHMap(complex -> complex)).toDS(), LHMap(complex -> complex)) // Tuples checkDataset(Seq(Map(1 -> 2) -> Map(3 -> 4)).toDS(), Map(1 -> 2) -> Map(3 -> 4)) checkDataset(Seq(LHMap(1 -> 2) -> Map(3 -> 4)).toDS(), LHMap(1 -> 2) -> Map(3 -> 4)) checkDataset(Seq(Map(1 -> 2) -> LHMap(3 -> 4)).toDS(), Map(1 -> 2) -> LHMap(3 -> 4)) checkDataset(Seq(LHMap(1 -> 2) -> LHMap(3 -> 4)).toDS(), LHMap(1 -> 2) -> LHMap(3 -> 4)) checkDataset(Seq(LHMap((Map("test1" -> 1) -> 2) -> (3 -> LHMap(4 -> "test2")))).toDS(), LHMap((Map("test1" -> 1) -> 2) -> (3 -> LHMap(4 -> "test2")))) // Complex checkDataset(Seq(LHMapClass(LHMap(1 -> 2)) -> LHMap("test" -> MapClass(Map(3 -> 4)))).toDS(), LHMapClass(LHMap(1 -> 2)) -> LHMap("test" -> MapClass(Map(3 -> 4)))) } test("arbitrary sets") { checkDataset(Seq(Set(1, 2, 3, 4)).toDS(), Set(1, 2, 3, 4)) checkDataset(Seq(Set(1.toLong, 2.toLong)).toDS(), Set(1.toLong, 2.toLong)) checkDataset(Seq(Set(1.toDouble, 2.toDouble)).toDS(), Set(1.toDouble, 2.toDouble)) checkDataset(Seq(Set(1.toFloat, 2.toFloat)).toDS(), Set(1.toFloat, 2.toFloat)) 
checkDataset(Seq(Set(1.toByte, 2.toByte)).toDS(), Set(1.toByte, 2.toByte)) checkDataset(Seq(Set(1.toShort, 2.toShort)).toDS(), Set(1.toShort, 2.toShort)) checkDataset(Seq(Set(true, false)).toDS(), Set(true, false)) checkDataset(Seq(Set("test1", "test2")).toDS(), Set("test1", "test2")) checkDataset(Seq(Set(Tuple1(1), Tuple1(2))).toDS(), Set(Tuple1(1), Tuple1(2))) checkDataset(Seq(HSet(1, 2)).toDS(), HSet(1, 2)) checkDataset(Seq(HSet(1.toLong, 2.toLong)).toDS(), HSet(1.toLong, 2.toLong)) checkDataset(Seq(HSet(1.toDouble, 2.toDouble)).toDS(), HSet(1.toDouble, 2.toDouble)) checkDataset(Seq(HSet(1.toFloat, 2.toFloat)).toDS(), HSet(1.toFloat, 2.toFloat)) checkDataset(Seq(HSet(1.toByte, 2.toByte)).toDS(), HSet(1.toByte, 2.toByte)) checkDataset(Seq(HSet(1.toShort, 2.toShort)).toDS(), HSet(1.toShort, 2.toShort)) checkDataset(Seq(HSet(true, false)).toDS(), HSet(true, false)) checkDataset(Seq(HSet("test1", "test2")).toDS(), HSet("test1", "test2")) checkDataset(Seq(HSet(Tuple1(1), Tuple1(2))).toDS(), HSet(Tuple1(1), Tuple1(2))) checkDataset(Seq(Seq(Some(1), None), Seq(Some(2))).toDF("c").as[Set[Integer]], Seq(Set[Integer](1, null), Set[Integer](2)): _*) } test("nested sequences") { checkDataset(Seq(Seq(Seq(1))).toDS(), Seq(Seq(1))) checkDataset(Seq(List(Queue(1))).toDS(), List(Queue(1))) } test("nested maps") { checkDataset(Seq(Map(1 -> LHMap(2 -> 3))).toDS(), Map(1 -> LHMap(2 -> 3))) checkDataset(Seq(LHMap(Map(1 -> 2) -> 3)).toDS(), LHMap(Map(1 -> 2) -> 3)) } test("nested set") { checkDataset(Seq(Set(HSet(1, 2), HSet(3, 4))).toDS(), Set(HSet(1, 2), HSet(3, 4))) checkDataset(Seq(HSet(Set(1, 2), Set(3, 4))).toDS(), HSet(Set(1, 2), Set(3, 4))) } test("package objects") { import packageobject._ checkDataset(Seq(PackageClass(1)).toDS(), PackageClass(1)) } test("SPARK-19104: Lambda variables in ExternalMapToCatalyst should be global") { val data = Seq.tabulate(10)(i => NestedData(1, Map("key" -> InnerData("name", i + 100)))) val ds = spark.createDataset(data) checkDataset(ds, data: _*) } test("special floating point values") { import org.scalatest.exceptions.TestFailedException // Spark distinguishes -0.0 and 0.0 intercept[TestFailedException] { checkDataset(Seq(-0.0d).toDS(), 0.0d) } intercept[TestFailedException] { checkAnswer(Seq(-0.0d).toDF(), Row(0.0d)) } intercept[TestFailedException] { checkDataset(Seq(-0.0f).toDS(), 0.0f) } intercept[TestFailedException] { checkAnswer(Seq(-0.0f).toDF(), Row(0.0f)) } intercept[TestFailedException] { checkDataset(Seq(Tuple1(-0.0)).toDS(), Tuple1(0.0)) } intercept[TestFailedException] { checkAnswer(Seq(Tuple1(-0.0)).toDF(), Row(Row(0.0))) } intercept[TestFailedException] { checkDataset(Seq(Seq(-0.0)).toDS(), Seq(0.0)) } intercept[TestFailedException] { checkAnswer(Seq(Seq(-0.0)).toDF(), Row(Seq(0.0))) } val floats = Seq[Float](-0.0f, 0.0f, Float.NaN) checkDataset(floats.toDS(), floats: _*) val arrayOfFloats = Seq[Array[Float]](Array(0.0f, -0.0f), Array(-0.0f, Float.NaN)) checkDataset(arrayOfFloats.toDS(), arrayOfFloats: _*) val doubles = Seq[Double](-0.0d, 0.0d, Double.NaN) checkDataset(doubles.toDS(), doubles: _*) val arrayOfDoubles = Seq[Array[Double]](Array(0.0d, -0.0d), Array(-0.0d, Double.NaN)) checkDataset(arrayOfDoubles.toDS(), arrayOfDoubles: _*) val tuples = Seq[(Float, Float, Double, Double)]( (0.0f, -0.0f, 0.0d, -0.0d), (-0.0f, Float.NaN, -0.0d, Double.NaN)) checkDataset(tuples.toDS(), tuples: _*) val complex = Map(Array(Seq(Tuple1(Double.NaN))) -> Map(Tuple2(Float.NaN, null))) checkDataset(Seq(complex).toDS(), complex) } }
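
// A standalone sketch of why the "special floating point values" test above must distinguish
// -0.0 from 0.0 (plain JVM behaviour, no Spark required; the values are illustrative):
object NegativeZeroSketch {
  def main(args: Array[String]): Unit = {
    println(-0.0d == 0.0d) // true: primitive comparison treats them as equal
    println(java.lang.Double.doubleToRawLongBits(-0.0d) ==
      java.lang.Double.doubleToRawLongBits(0.0d)) // false: the bit patterns differ
  }
}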
goldmedal/spark
sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala
Scala
apache-2.0
19,111
package demo

/*
 * Copyright (C) 24/08/16 // mathieu.leclaire@openmole.org
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

import org.scalajs.dom.Element
import org.scalajs.dom.raw.{Event, HTMLButtonElement, HTMLElement, MouseEvent}
import scaladget.bootstrapnative.bsn._
import scaladget.tools._
import scalatags.JsDom.TypedTag
import scalatags.JsDom.all._
import rx._

object PopoverDemo extends Demo {

  val sc = sourcecode.Text {

    import scaladget.bootstrapnative.Popup._

    val buttonStyle: ModifierSeq = Seq(
      btn_default,
      marginRight := 5
    )

    //SIMPLE POPOVERS
    val simplePopovers = div(
      h2("Simple popovers"),
      div(paddingTop := 20)("Simple popovers containing text, or simple content with no events to be fired and with basic trigger modes (click, hover)."),
      button("Left", buttonStyle).popover(vForm(width := 100)(label("Nice content", label_danger).render, span("An important message").render), Left, title = Some("Check this !")).render,
      button("Title", buttonStyle).popover("Popover on hover with Title", Top, title = Some("Pop title")).render,
      button("Dismissible", buttonStyle).popover("Dismissible Popover on hover with Title", Top, HoverPopup, Some("Pop title"), true).render,
      inputTag("")(width := 320, marginTop := 10, placeholder := "Bottom (click)").popover("Tooltip on click on bottom", Bottom, ClickPopup).render
    )

    //MANUAL POPOVERS
    val BUTTON1_ID = uuID.short("b")
    val BUTTON2_ID = uuID.short("b")

    def actions(element: HTMLElement): Boolean = {
      element.id match {
        case BUTTON1_ID =>
          println(s"button 1 with ID $BUTTON1_ID clicked")
          element.parentNode.replaceChild(span("YO"), element)
          true
        case BUTTON2_ID =>
          println(s"button 2 with ID $BUTTON2_ID clicked")
          true
        case _ =>
          println("unknown")
          false
      }
    }

    def buildManualPopover(trigger: TypedTag[HTMLButtonElement], title: String, position: PopupPosition) = {
      val but1 = button("button1", btn_primary)(id := BUTTON1_ID, margin := 10)
      val but2 = button("button2", btn_primary)(id := BUTTON2_ID, margin := 10)

      lazy val pop1 = trigger.popover(
        div(
          span(
            but1,
            but2
          ).render
        ).toString,
        position,
        Manual
      )
      lazy val pop1Render = pop1.render

      pop1Render.onclick = { (e: Event) =>
        if (Popover.current.now == pop1) Popover.hide
        else {
          Popover.current.now match {
            case Some(p) => Popover.toggle(p)
            case _ =>
          }
          Popover.toggle(pop1)
        }
        e.stopPropagation
      }
      pop1Render
    }

    val manualPopovers = div(
      h2("Manual popovers"),
      div(paddingTop := 20)("Manual popovers, i.e. popovers built with custom interaction rules. " +
        "Here is an example with a set of exclusive popovers, which stay alive when clicked."),
      div(paddingTop := 10)(
        (1 until 100).map { i =>
          buildManualPopover(
            button(s"Button ${i.toString}", buttonStyle),
            "Popover on click on bottom",
            Left)
        }
      )
    )

    org.scalajs.dom.document.body.onclick = { (e: Event) =>
      if (!actions(e.target.asInstanceOf[HTMLElement]))
        if (!e.target.asInstanceOf[HTMLElement].className.contains("popover-content"))
          Popover.hide
    }

    div(simplePopovers, manualPopovers).render
  }

  val elementDemo = new ElementDemo {
    def title: String = "Popover"

    def code: String = sc.source

    def element: Element = sc.value

    override def codeWidth: Int = 9
  }
}
mathieuleclaire/scaladget
demo/src/main/scala/demo/PopoverDemo.scala
Scala
agpl-3.0
4,311
package com.datastax.spark.connector

import java.nio.ByteBuffer

import com.datastax.driver.core.{ProtocolVersion, Row, UDTValue => DriverUDTValue}
import com.datastax.spark.connector.types.TypeConverter.StringConverter

import scala.collection.JavaConversions._

import com.datastax.spark.connector.util.ByteBufferUtil

trait AbstractGettableData {

  protected def fieldNames: IndexedSeq[String]
  protected def fieldValues: IndexedSeq[AnyRef]

  @transient
  private[connector] lazy val _indexOf =
    fieldNames.zipWithIndex.toMap.withDefaultValue(-1)

  @transient
  private[connector] lazy val _indexOfOrThrow = _indexOf.withDefault { name =>
    throw new ColumnNotFoundException(
      s"Column not found: $name. " +
        s"Available columns are: ${fieldNames.mkString("[", ", ", "]")}")
  }

  /** Total number of columns in this row. Includes columns with null values. */
  def length = fieldValues.size

  /** Total number of columns in this row. Includes columns with null values. */
  def size = fieldValues.size

  /** Returns true if column value is Cassandra null */
  def isNullAt(index: Int): Boolean =
    fieldValues(index) == null

  /** Returns true if column value is Cassandra null */
  def isNullAt(name: String): Boolean = {
    fieldValues(_indexOfOrThrow(name)) == null
  }

  /** Returns index of column with given name or -1 if column not found */
  def indexOf(name: String): Int =
    _indexOf(name)

  /** Returns the name of the i-th column. */
  def nameOf(index: Int): String =
    fieldNames(index)

  /** Returns true if column with given name is defined and has an
    * entry in the underlying value array, i.e. was requested in the result set.
    * For columns having null value, returns true. */
  def contains(name: String): Boolean =
    _indexOf(name) != -1

  /** Displays the content in human readable form, including the names and values of the columns */
  def dataAsString =
    fieldNames
      .zip(fieldValues)
      .map(kv => kv._1 + ": " + StringConverter.convert(kv._2))
      .mkString("{", ", ", "}")

  override def toString = dataAsString

  override def equals(o: Any) = o match {
    case o: AbstractGettableData =>
      if (this.fieldValues.length == o.length) {
        this.fieldValues.zip(o.fieldValues).forall { case (mine, yours) => mine == yours }
      } else false
    case _ => false
  }
}

object AbstractGettableData {

  /* ByteBuffers are not serializable, so we need to convert them to something that is
     serializable. Array[Byte] seems a reasonable candidate. Additionally converts
     Java collections to Scala ones. */
  private[connector] def convert(obj: Any)(implicit protocolVersion: ProtocolVersion): AnyRef = {
    obj match {
      case bb: ByteBuffer => ByteBufferUtil.toArray(bb)
      case list: java.util.List[_] => list.view.map(convert).toList
      case set: java.util.Set[_] => set.view.map(convert).toSet
      case map: java.util.Map[_, _] => map.view.map { case (k, v) => (convert(k), convert(v)) }.toMap
      case udtValue: DriverUDTValue => UDTValue.fromJavaDriverUDTValue(udtValue)
      case other => other.asInstanceOf[AnyRef]
    }
  }

  /** Deserializes the given field from the DataStax Java Driver `Row` into the appropriate Java type.
    * If the field is null, returns null (not Scala Option).
*/ def get(row: Row, index: Int)(implicit protocolVersion: ProtocolVersion): AnyRef = { val columnDefinitions = row.getColumnDefinitions val columnType = columnDefinitions.getType(index) val bytes = row.getBytesUnsafe(index) if (bytes != null) convert(columnType.deserialize(bytes, protocolVersion)) else null } def get(row: Row, name: String)(implicit protocolVersion: ProtocolVersion): AnyRef = { val index = row.getColumnDefinitions.getIndexOf(name) get(row, index) } def get(value: DriverUDTValue, name: String)(implicit protocolVersion: ProtocolVersion): AnyRef = { val valueType = value.getType.getFieldType(name) val bytes = value.getBytesUnsafe(name) if (bytes != null) convert(valueType.deserialize(bytes, protocolVersion)) else null } } /** Thrown when the requested column does not exist in the result set. */ class ColumnNotFoundException(message: String) extends Exception(message)
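
// Usage sketch (hypothetical row, not part of the connector API): a minimal concrete
// AbstractGettableData showing the name/index helpers defined above in action.
private object ExampleGettableData extends AbstractGettableData {
  protected def fieldNames: IndexedSeq[String] = IndexedSeq("id", "name")
  protected def fieldValues: IndexedSeq[AnyRef] = IndexedSeq(Int.box(1), null)
  // indexOf("name") == 1, isNullAt("name") == true, contains("missing") == false;
  // isNullAt("missing") would throw ColumnNotFoundException via _indexOfOrThrow.
}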
willgalen/REVEL
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/AbstractGettableData.scala
Scala
apache-2.0
4,278
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sample.helloworld import org.apache.predictionio.controller._ import scala.io.Source import scala.collection.immutable.HashMap // all data need to be serializable class MyTrainingData( // list of (day, temperature) tuples val temperatures: List[(String, Double)] ) extends Serializable class MyQuery( val day: String ) extends Serializable class MyModel( val temperatures: HashMap[String, Double] ) extends Serializable { override def toString = temperatures.toString } class MyPredictedResult( val temperature: Double ) extends Serializable case class MyDataSourceParams(val multiplier: Int ) extends Params class MyDataSource extends LDataSource[ MyTrainingData, EmptyEvaluationInfo, MyQuery, EmptyActualResult] { /* override this to return Training Data only */ override def readTraining(): MyTrainingData = { val lines = Source.fromFile("../data/helloworld/data.csv").getLines() .toList.map{ line => val data = line.split(",") (data(0), data(1).toDouble) } new MyTrainingData(lines) } } class MyAlgorithm extends LAlgorithm[ MyTrainingData, MyModel, MyQuery, MyPredictedResult] { override def train(pd: MyTrainingData): MyModel = { // calculate average value of each day val average = pd.temperatures .groupBy(_._1) // group by day .mapValues{ list => val tempList = list.map(_._2) // get the temperature tempList.sum / tempList.size } // trait Map is not serializable, use concrete class HashMap new MyModel(HashMap[String, Double]() ++ average) } override def predict(model: MyModel, query: MyQuery): MyPredictedResult = { val temp = model.temperatures(query.day) new MyPredictedResult(temp) } } // factory object MyEngineFactory extends IEngineFactory { override def apply() = { /* SimpleEngine only requires one DataSouce and one Algorithm */ new SimpleEngine( classOf[MyDataSource], classOf[MyAlgorithm] ) } }
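
// Quick self-contained check of the averaging logic used in MyAlgorithm.train above
// (in-memory sample data, no PredictionIO runtime needed):
object AverageCheck {
  def main(args: Array[String]): Unit = {
    val temperatures = List(("Mon", 10.0), ("Mon", 20.0), ("Tue", 5.0))
    val average = temperatures
      .groupBy(_._1)
      .mapValues { list => list.map(_._2).sum / list.size }
    println(average) // Map(Mon -> 15.0, Tue -> 5.0)
  }
}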
himanshudhami/PredictionIO
examples/experimental/scala-local-helloworld/HelloWorld.scala
Scala
apache-2.0
2,851
package com.geocommit import com.surftools.BeanstalkClient.Job import net.liftweb.json.JsonAST._ import net.liftweb.json.Printer._ object Implicit { implicit def job2RichJob(j: Job): RichJob = new RichJob(j) implicit def json2ByteArray(j: JValue): Array[Byte] = compact(render(j)).getBytes("UTF-8") implicit def byteArray2String(b: Array[Byte]): String = new String(b, "UTF-8") implicit def string2ByteArray(s: String): Array[Byte] = s.getBytes("UTF-8") } /* vim: set sw=4 */
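
// Usage sketch (hypothetical values): with these implicits in scope, a JValue can be passed
// wherever UTF-8 bytes are expected, and bytes convert back to a String.
object ImplicitUsageSketch {
  import Implicit._
  import net.liftweb.json.JsonAST.JString

  val bytes: Array[Byte] = JString("geocommit") // via json2ByteArray: compact-rendered UTF-8
  val text: String = bytes                      // via byteArray2String: "\"geocommit\""
}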
dsp/geocommit-web
fetchservice/src/main/scala/com/geocommit/Implicit.scala
Scala
mit
530
/***
 *   Copyright 2014 Rackspace US, Inc.
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 */
package com.rackspace.com.papi.components.checker.step.startend

import java.util.HashMap

import javax.servlet.FilterChain

import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.base.StepContext
import com.rackspace.com.papi.components.checker.step.results._

import scala.util.matching.Regex

//
//  Like MethodFail, but fails only if the current method is not
//  matched against the method regex
//
class MethodFailMatch(id : String, label : String, val method : Regex, priority : Long) extends MethodFail(id, label, priority) {
  private val allowHeaders = new HashMap[String, String](1)
  allowHeaders.put("Allow", method.toString.replaceAll("\\|",", "))

  override def check(req : CheckerServletRequest,
                     resp : CheckerServletResponse,
                     chain : FilterChain,
                     context : StepContext) : Option[Result] = {
    var result : Option[Result] = super.check(req, resp, chain, context)
    if (result != None) {
      req.getMethod() match {
        case method() => result = None
        case _ => result = Some(new MethodFailResult (result.get.message+". The Method does not match the pattern: '"+method+"'",
                                                      context,
                                                      id,
                                                      priority,
                                                      allowHeaders.clone.asInstanceOf[java.util.Map[String,String]])) // Augment our parent's result with match info
      }
    }
    result
  }
}
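
// Side note as a runnable sketch: the `case method() =>` above uses Regex's extractor,
// which only matches when the whole string matches the pattern (hypothetical values):
object RegexExtractorSketch {
  def main(args: Array[String]): Unit = {
    val method = "GET|POST".r
    "PUT" match {
      case method() => println("method allowed")
      case _        => println("method not allowed") // printed: PUT matches neither alternative
    }
  }
}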
tylerroyal/api-checker
core/src/main/scala/com/rackspace/com/papi/components/checker/step/startend/MethodFailMatch.scala
Scala
apache-2.0
2,228
package uk.gov.homeoffice.rabbitmq import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.{Future, Promise} import org.json4s.JValue import org.json4s.JsonAST.JNothing import org.json4s.jackson.JsonMethods._ import org.scalactic.{Bad, Good, Or} import com.rabbitmq.client.{Channel, ConfirmListener, MessageProperties} import grizzled.slf4j.Logging import uk.gov.homeoffice.json.{JsonError, JsonValidator} trait Publisher extends Logging { this: JsonValidator with Queue with Rabbit => def publish(json: JValue): Future[JValue Or JsonError] = { val promise = Promise[JValue Or JsonError]() def ack = promise success Good(json) def nack = promise success Bad(JsonError(json, Some(s"Rabbit NACK - Failed to publish JSON ${if (json == JNothing) "" else pretty(render(json))}"))) def onError(t: Throwable) = promise failure RabbitException(JsonError(json, Some("Failed to publish JSON"), Some(t))) validate(json) match { case Good(j) => publish(j, queue, ack, nack, onError) case Bad(e) => publishError(e).map(promise success Bad(_)) } promise.future } def publishError(e: JsonError): Future[JsonError] = { publish(e, errorQueue) } def publishAlert(e: JsonError) = Future { publish(e, alertQueue) } private[rabbitmq] def publish(e: JsonError, queue: Channel => String): Future[JsonError] = { val promise = Promise[JsonError]() def ack = promise success e def nack = promise success e.copy(error = Some(s"Rabbit NACK - Failed to publish error JSON: ${e.error}")) def onError(t: Throwable) = promise failure RabbitException(e.copy(error = Some(s"Failed to publish error JSON${e.error.fold("")(e => ": " + e)}"))) publish(e.asJson, queue, ack, nack, onError) promise.future } private[rabbitmq] def publish(json: JValue, queue: Channel => String, ack: => Any, nack: => Any, onError: Throwable => Any) = Future { try { val channel = connection.createChannel() channel.confirmSelect() channel.addConfirmListener(new ConfirmListener { def handleAck(deliveryTag: Long, multiple: Boolean) = ack def handleNack(deliveryTag: Long, multiple: Boolean) = nack }) info(s"Publishing to $connection:${queue(channel)} ${pretty(render(json))}") channel.basicPublish("", queue(channel), MessageProperties.PERSISTENT_BASIC, compact(render(json)).getBytes) } catch { case t: Throwable => error(t) onError(t) } } }
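
// Caller-side sketch (hypothetical wiring; `publisher` mixes in JsonValidator with Queue with Rabbit):
// the returned Future completes with Good(json) on broker ACK, Bad(JsonError) on NACK or failed
// validation, and fails with RabbitException when publishing itself throws.
//
//   publisher.publish(json).map {
//     case Good(j) => info(s"published ${compact(render(j))}")
//     case Bad(e)  => warn(s"rejected: ${e.error}")
//   }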
UKHomeOffice/rtp-rabbit-lib
src/main/scala/uk/gov/homeoffice/rabbitmq/Publisher.scala
Scala
mit
2,471
package com.krux.hyperion.contrib.activity.email

import java.io.{ File, FilenameFilter }
import java.nio.file.{ Files, Paths }
import javax.activation.{ DataHandler, FileDataSource }
import javax.mail.Message.RecipientType
import javax.mail._
import javax.mail.internet.{ InternetAddress, MimeBodyPart, MimeMessage, MimeMultipart }

import scopt.OptionParser

object SendEmailActivity {

  case class Options(
    host: Option[String] = None,
    port: Option[Int] = None,
    username: Option[String] = None,
    password: Option[String] = None,
    from: Option[String] = None,
    to: Seq[String] = Seq.empty,
    cc: Seq[String] = Seq.empty,
    bcc: Seq[String] = Seq.empty,
    subject: Option[String] = None,
    body: Option[String] = None,
    starttls: Boolean = false,
    debug: Boolean = false
  )

  def apply(options: Options): Boolean = {
    // Set the SMTP properties
    val props = System.getProperties
    options.host.foreach(host => props.put("mail.smtp.host", host))
    options.port.foreach(port => props.put("mail.smtp.port", port.toString))
    options.username.foreach(username => props.put("mail.smtp.user", username))
    options.password.foreach(password => props.put("mail.smtp.password", password))

    if (options.username.nonEmpty || options.password.nonEmpty) {
      println("Enabling auth")
      props.put("mail.smtp.auth", "true")
    }

    props.put("mail.smtp.starttls.enable", options.starttls.toString)
    props.put("mail.smtp.debug", options.debug.toString)

    // Open a session using the properties
    print("Opening session...")
    val session = Session.getDefaultInstance(props, new Authenticator {
      override def getPasswordAuthentication: PasswordAuthentication = {
        println("Authenticating...")
        new PasswordAuthentication(options.username.get, options.password.get)
      }
    })
    println("done.")

    try {
      println("Creating message...")

      // Create a new message
      val message = new MimeMessage(session)
      val multipart = new MimeMultipart()

      // Set the from address
      options.from.orElse(options.username).flatMap(from => InternetAddress.parse(from, false).toSeq.headOption).foreach(from => message.setFrom(from))

      // Add the primary recipients
      options.to match {
        case Seq() =>
        case recipients => message.setRecipients(RecipientType.TO, InternetAddress.parse(recipients.mkString(","), false).asInstanceOf[Array[Address]])
      }

      // Add the carbon copy recipients
      options.cc match {
        case Seq() =>
        case recipients => message.setRecipients(RecipientType.CC, InternetAddress.parse(recipients.mkString(","), false).asInstanceOf[Array[Address]])
      }

      // Add the blind carbon copy recipients
      options.bcc match {
        case Seq() =>
        case recipients => message.setRecipients(RecipientType.BCC, InternetAddress.parse(recipients.mkString(","), false).asInstanceOf[Array[Address]])
      }

      // Set the subject
      options.subject.foreach(subject => message.setSubject(subject))

      // Set the body text
      options.body.foreach { body =>
        val part = new MimeBodyPart()
        part.setText(body)
        multipart.addBodyPart(part)
      }

      // Add the attachments
      println("Checking for attachments...")
      (1 until 11).map(n => s"INPUT${n}_STAGING_DIR").flatMap(v => Option(System.getenv(v))).map(n => Paths.get(n).toFile).foreach { f =>
        f.listFiles(new FilenameFilter {
          override def accept(dir: File, name: String): Boolean = Files.size(Paths.get(dir.getAbsolutePath, name)) > 0
        }).foreach { file =>
          println(s"Adding attachment $file")

          val part = new MimeBodyPart()
          part.setDataHandler(new DataHandler(new FileDataSource(file)))
          part.setFileName(file.getName)
          multipart.addBodyPart(part)
        }
      }

      message.setContent(multipart)

      // Get the SMTP transport
      val transport = session.getTransport("smtp")

      try {
        // Connect to the SMTP server
        println("Connecting...")
        transport.connect()

        // Send the message
        print("Sending...")
        transport.sendMessage(message, message.getAllRecipients)
        println("done.")
      } finally {
        // Close the SMTP connection
        transport.close()
      }

      true
    } catch {
      case e: MessagingException =>
        System.err.println()
        System.err.println(e.getMessage)
        false
    }
  }

  def main(args: Array[String]): Unit = {
    val parser = new OptionParser[Options]("hyperion-email-activity") {
      note("Common options:")
      help("help").text("prints this usage text\n")

      opt[String]('H', "host").valueName("SERVER").optional().action((x, c) => c.copy(host = Option(x)))
        .text("Connects to SERVER to send message (default: localhost)\n")

      opt[Int]('P', "port").valueName("PORT").optional().action((x, c) => c.copy(port = Option(x)))
        .text("Connects to PORT on SERVER to send message (default: 25)\n")

      opt[String]('u', "username").valueName("USERNAME").optional().action((x, c) => c.copy(username = Option(x)))
        .text("Uses USERNAME to authenticate to SERVER\n")

      opt[String]('p', "password").valueName("PASSWORD").optional().action((x, c) => c.copy(password = Option(x)))
        .text("Uses PASSWORD to authenticate to SERVER\n")

      opt[String]("from").valueName("ADDRESS").required().action((x, c) => c.copy(from = Option(x)))
        .text("Sets the From/Sender to ADDRESS\n")

      opt[Seq[String]]("to").valueName("ADDRESS").required().unbounded().action((x, c) => c.copy(to = c.to ++ x))
        .text("Adds ADDRESS as a To recipient\n")

      opt[Seq[String]]("cc").valueName("ADDRESS").optional().unbounded().action((x, c) => c.copy(cc = c.cc ++ x))
        .text("Adds ADDRESS as a CC recipient\n")

      opt[Seq[String]]("bcc").valueName("ADDRESS").optional().unbounded().action((x, c) => c.copy(bcc = c.bcc ++ x))
        .text("Adds ADDRESS as a BCC recipient\n")

      opt[String]('s', "subject").valueName("SUBJECT").required().action((x, c) => c.copy(subject = Option(x)))
        .text("Sets the message subject to SUBJECT\n")

      opt[String]('B', "body").valueName("MESSAGE").required().action((x, c) => c.copy(body = Option(x)))
        .text("Sets the message body to MESSAGE\n")

      opt[Unit]("starttls").optional().action((_, c) => c.copy(starttls = true))
        .text("Start TLS when connecting to SERVER\n")

      opt[Unit]("debug").optional().action((_, c) => c.copy(debug = true))
        .text("Enables debug messages\n")
    }

    if (!parser.parse(args, Options()).exists(apply)) System.exit(3)
  }
}
sethyates/hyperion
contrib/activity/email/src/main/scala/com/krux/hyperion/contrib/activity/email/SendEmailActivity.scala
Scala
apache-2.0
6,736
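A minimal usage sketch for the activity above, assuming it is on the classpath; the host, credentials, and addresses below are invented placeholders, not values from the source.

object SendEmailExample {
  def main(args: Array[String]): Unit = {
    // Placeholder SMTP endpoint and message fields (hypothetical values).
    val options = SendEmailActivity.Options(
      host = Some("smtp.example.com"),
      port = Some(587),
      username = Some("mailer"),
      password = Some("secret"),
      from = Some("noreply@example.com"),
      to = Seq("team@example.com"),
      subject = Some("Pipeline finished"),
      body = Some("The activity completed."),
      starttls = true
    )
    // apply returns false on a MessagingException; mirror main's exit code.
    if (!SendEmailActivity(options)) sys.exit(3)
  }
}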
package controllers

import javax.inject.Inject

import actors.RequestActor
import actors.RequestActor._
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import models.db.Tables.customReads
import play.api.Logger
import play.api.libs.json._
import play.api.libs.ws.WSClient
import play.api.mvc.{Action, Controller}
import services.db.DBService

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

class Application @Inject() (ws: WSClient, system: ActorSystem, database: DBService) extends Controller {

  implicit val timeout = Timeout(5 seconds)

  private val requestActor = system.actorOf(RequestActor.props(ws, database))

  requestActor ! RequestNestoriaData

  def index() = Action {
    Ok(views.html.main("Nestoria api")(views.html.interactive()))
  }

  def search() = Action.async(parse.json) { request =>
    val queryData = (request.body \ "keywords").as[List[String]]
    (requestActor ? LookForKeywords(queryData)).mapTo[JsValue].map(Ok(_))
  }
}
alexFrankfurt/nest-api
app/controllers/Application.scala
Scala
mit
1,035
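For reference, a self-contained sketch (play-json only) of the request body shape the search action above expects; the keyword values are invented.

import play.api.libs.json._

object SearchPayloadExample extends App {
  // The controller reads a "keywords" array from the JSON request body.
  val body: JsValue = Json.obj("keywords" -> Seq("flat", "london"))
  val queryData = (body \ "keywords").as[List[String]]
  println(queryData) // List(flat, london)
}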
package com.ironcorelabs.davenport
package error

import com.couchbase.client.core.CouchbaseException
import codec.DecodeError

sealed trait CouchbaseError extends CouchbaseException

//Splitting these ADT entries didn't work well.
// scalastyle:off line.size.limit
final case class InvalidPasswordException(bucketName: String) extends CouchbaseException(s"Invalid password for bucket '$bucketName'") with CouchbaseError
final case class DocumentDoesNotExistException(documentKey: String, bucketName: String) extends CouchbaseException(s"Document '$documentKey' in '$bucketName' does not exist.") with CouchbaseError
final case class BucketDoesNotExistException(bucketName: String) extends CouchbaseException(s"'$bucketName' does not exist.") with CouchbaseError
final case class DocumentAlreadyExistsException(id: String) extends CouchbaseException(s"Document with id '$id' already exists.") with CouchbaseError
final case class CouchbaseOutOfMemoryException() extends CouchbaseException("Couchbase is out of memory.") with CouchbaseError
final case class CASMismatchException(id: String) extends CouchbaseException(s"The passed in CAS for '$id' didn't match the expected.")
final case class RequestTooBigException() extends CouchbaseException("The request was too big.")
final case class TemporaryFailureException() extends CouchbaseException("The couchbase cluster had a transient error. Try your request again.")
// scalastyle:on line.size.limit

//For the following I need to defer which constructor to call in the base class which I cannot do using the normal syntax.
//The following was taken from Seth's answer on SO: http://stackoverflow.com/a/3299832/1226945
trait DocumentDecodeFailedException extends CouchbaseException with CouchbaseError

object DocumentDecodeFailedException {
  def apply(cause: DecodeError): DocumentDecodeFailedException = cause match {
    case DecodeError(message, None) => new CouchbaseException(message) with DocumentDecodeFailedException
    case DecodeError(message, Some(ex)) => new CouchbaseException(message, ex) with DocumentDecodeFailedException
  }
}
IronCoreLabs/davenport
src/main/scala/com/ironcorelabs/davenport/error/CouchbaseError.scala
Scala
mit
2,094
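A small sketch of the deferred-constructor factory above; the DecodeError shapes are inferred from the pattern match in the companion object, the import path is assumed from the file's own relative import, and the messages are invented.

import com.ironcorelabs.davenport.error.DocumentDecodeFailedException
import com.ironcorelabs.davenport.codec.DecodeError // path assumed from `import codec.DecodeError` above

object DecodeFailureExample extends App {
  // Without a cause: the single-argument CouchbaseException constructor is used.
  val noCause = DocumentDecodeFailedException(DecodeError("unexpected json shape", None))
  // With a cause: the (message, cause) constructor is used instead.
  val withCause = DocumentDecodeFailedException(
    DecodeError("unexpected json shape", Some(new RuntimeException("parse failure"))))
  assert(noCause.getMessage == "unexpected json shape")
  assert(withCause.getCause != null)
}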
/*
 * Copyright © 2016 Flipkart.com
 */
package com.flipkart.connekt.commons.tests.dao

import java.util.UUID

import com.flipkart.connekt.commons.dao.DaoFactory
import com.flipkart.connekt.commons.entities.{AppUserConfiguration, Channel}
import com.flipkart.connekt.commons.tests.CommonsBaseTest

class UserConfigurationDaoTest extends CommonsBaseTest {

  val id = UUID.randomUUID().toString.substring(20)
  val userConfig = new AppUserConfiguration(userId = id, channel = Channel.PUSH, queueName = s"connekt-ut-push-$id", platforms = "android,ios,windows,openweb", maxRate = 1000)

  lazy val dao = DaoFactory.getUserConfigurationDao

  "UserInfoDao test" should "add user info" in {
    noException should be thrownBy dao.addUserConfiguration(userConfig)
  }

  "UserInfoDao test" should "get user info" in {
    dao.getUserConfiguration(id, Channel.PUSH).get shouldEqual userConfig
  }
}
Flipkart/connekt
commons/src/test/scala/com/flipkart/connekt/commons/tests/dao/UserConfigurationDaoTest.scala
Scala
mit
1,461
package com.sothr.imagetools.ui.util

import java.awt.Desktop
import java.io.File

import com.sothr.imagetools.engine.util.PropertiesService
import grizzled.slf4j.Logging

/**
 * Created by Drew Short on 8/31/2014.
 */
object FileUtil extends Logging {

  def openInEditor(file: File) = {
    PropertiesService.OS.toLowerCase match {
      // Open file on windows
      case os if os.startsWith("windows") => openFileWindows(file)
      case os if os.startsWith("linux") => openFileLinux(file)
      case _ => error(s"Do not know how to open editor for OS: ${PropertiesService.OS}, ${PropertiesService.OS_VERSION}, ${PropertiesService.OS_ARCH}")
    }
  }

  private def openFileWindows(file: File) = {
    Desktop.getDesktop.open(file)
  }

  private def openFileLinux(file: File) = {
    Runtime.getRuntime.exec(s"xdg-open ${file.getAbsolutePath}")
  }
}
warricksothr/ImageTools
gui/src/main/scala/com/sothr/imagetools/ui/util/FileUtil.scala
Scala
mit
863
package com.cloudera.sa.sparkstreaming.seqwriter

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.Seconds
import org.apache.hadoop.io.Text
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.NullWritable

class SeqFileWriterExample {
  def main(args: Array[String]) {
    // All ten positional arguments are required.
    if (args.length < 10) {
      System.out.println("SeqFileWriterExample {host} {port} {outputFolder} {rollInterval} {rollSize} {rollCount} {idleTimeout} {numberOfFilesPerExecutor} {filePrefix} {filePostfix}")
      return
    }

    val host = args(0)
    val port = args(1)
    val outputFolder = args(2)
    val rollInterval = args(3).toLong
    val rollSize = args(4).toLong
    val rollCount = args(5).toLong
    val idleTimeout = args(6).toLong
    val numberOfFilesPerExecutor = args(7).toInt
    val filePrefix = args(8)
    val filePostfix = args(9)

    println("host:" + host)
    println("port:" + Integer.parseInt(port))

    val sparkConf = new SparkConf().setAppName("SeqFileWriterExample")
    sparkConf.set("spark.cleaner.ttl", "120000")

    val sc = new SparkContext(sparkConf)
    val ssc = new StreamingContext(sc, Seconds(1))

    val lines = ssc.socketTextStream(host, port.toInt)

    val ss = new StreamingSeq(outputFolder, rollInterval, rollSize, rollCount, idleTimeout,
      numberOfFilesPerExecutor, filePrefix, filePostfix, classOf[NullWritable], classOf[Text],
      new Configuration, sc)
    ss.writeToSeqFiles[String](lines, (t) => (NullWritable.get(), new Text(t)))

    ssc.start
  }
}
tmalaska/SparkStreamingSeqSink
src/main/scala/com/cloudera/sa/sparkstreaming/seqwriter/Example.scala
Scala
apache-2.0
1,684
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.spark.sql

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.elasticsearch.hadoop.serialization.field.FieldExtractor
import org.elasticsearch.spark.serialization.ScalaMapFieldExtractor

class DataFrameFieldExtractor extends ScalaMapFieldExtractor {

  override protected def extractField(target: AnyRef): AnyRef = {
    var obj = target
    for (in <- 0 until getFieldNames.size()) {
      val field = getFieldNames.get(in)
      obj = obj match {
        case (row: Row, struct: StructType) =>
          val index = struct.fieldNames.indexOf(field)
          if (index < 0) {
            FieldExtractor.NOT_FOUND
          } else {
            row(index).asInstanceOf[AnyRef]
          }
        case _ => super.extractField(target)
      }
    }
    obj
  }
}
xjrk58/elasticsearch-hadoop
spark/sql-20/src/main/scala/org/elasticsearch/spark/sql/DataFrameFieldExtractor.scala
Scala
apache-2.0
1,633
package com.cyrusinnovation.computation

/*
 * Copyright 2014 Cyrus Innovation, LLC. Licensed under Apache license 2.0.
 */

import com.cyrusinnovation.computation.util.Log

/** Operates on a set of facts (contained in a Map[Symbol, Any] to return a result in
 * the same form. Implementers must extend the `compute` method to specify the specific
 * computation, and the `resultKey` method allowing the results to be identified in the
 * returned map.
 */
trait Computation {

  /** Takes a set of facts and returns a new set of facts made up of the original set of
   * facts plus the result. This method is generally the one called by clients to execute
   * computations
   *
   * @param facts  A map whose keys the computation will use to identify the values
   *               to be operated on in the computation.
   * @return       A new map consisting of the original map of facts plus an entry
   *               whose key is `resultKey` and whose value is the result of the
   *               computation.
   */
  def compute(facts: Map[Symbol, Any]): Map[Symbol, Any] = {
    val domain = new Domain(facts, true)
    val results = compute(domain)
    results.facts
  }

  /** Takes a domain of facts and returns a new domain of facts made up of the original set of
   * facts plus the result. This method specifies the details of the computation and must be
   * implemented by classes that mix in this trait.
   *
   * @param domain A `Domain` containing the facts to be operated on as well as
   *               additional metadata.
   * @return       A new domain consisting of the original domain of facts plus
   *               an entry whose key is `resultKey` and whose value is the result
   *               of the computation. The metadata of the domain may also be different
   *               from the metadata in the input domain.
   */
  def compute(domain: Domain): Domain

  /** Returns the symbol that identifies the results of the computation in the domain of facts
   * returned by `compute`. This method must be implemented by classes that mix in this trait.
   */
  def resultKey: Symbol
}

object Computation {
  def createInputMappings(inputMap: Map[String, Symbol]): String = {
    val inputMappings = if (inputMap == null) Map() else inputMap
    inputMappings.foldLeft("") {
      (soFar, keyValuePair) => {
        val valWithType = keyValuePair._1
        val domainKey = keyValuePair._2
        val theType = valWithType.split(""":\s*""").last
        soFar + s"""val $valWithType = domainFacts.get($domainKey).get.asInstanceOf[$theType]\n"""
      }
    }
  }
}

/** A computation instantiated from a Scala expression passed into the constructor as a string,
 * along with various additional configurations (see constructor params). When the computation's `compute`
 * method is called, the computation will execute against an arbitrary Scala map (a `Map[Any, Any]`)
 * and return a `Map[Any, Any]` containing the results.
 *
 * @constructor Instantiate a SimpleComputation. Compilation of the computation expression
 *              occurs in the constructor of the computation.
 * @param packageName            A java package name for the computation, used to hinder naming collisions.
 *                               This package will be used as the package for the class compiled from the
 *                               computation string.
 * @param name                   A name for the computation. This should follow Java camel case style
 *                               and contain no spaces, since a class is going to be compiled from it.
 * @param description            Free text describing the rule.
 * @param imports                A list of strings, each of which is a fully qualified class name or
 *                               otherwise valid Scala identifier/expression that is supplied to an import
 *                               statement (not including the word "import").
 * @param computationExpression  A string that is source code for a valid Scala expression, inside curly
 *                               braces, containing free variables which will be bound by the keys in the
 *                               input and output maps.
 * @param inputMapWithTypes      A map whose keys are the free variables in the transformationExpression,
 *                               with their types, separated by a colon as in a Scala type annotation
 *                               (space allowed). The values of the map are the keys that will be applied
 *                               to the incoming domain of facts in order to select the values with which
 *                               to bind the variables.
 * @param resultKey              The key that will be used to identify the result of the computation
 *                               in the outgoing domain of facts.
 * @param securityConfiguration  An instance of the SecurityConfiguration trait indicating what packages
 *                               are safe to load, what classes in those packages are unsafe to load, and
 *                               where the Java security policy file for the current security manager is.
 * @param computationEngineLog   An instance of `com.cyrusinnovation.computation.util.Log`. A convenience
 *                               case class `com.cyrusinnovation.computation.util.ComputationEngineLog`
 *                               extends this trait and wraps an slf4j log passed to its constructor.
 * @param shouldPropagateExceptions If a computation fails to compile or if it throws an exception
 *                               on application, it can throw an exception up the stack, or simply
 *                               log and return the domain it was passed.
 */
class SimpleComputation(packageName: String,
                        name: String,
                        description: String,
                        imports: List[String],
                        computationExpression: String,
                        inputMapWithTypes: Map[String, Symbol],
                        val resultKey: Symbol,
                        securityConfiguration: SecurityConfiguration,
                        computationEngineLog: Log,
                        shouldPropagateExceptions: Boolean = true) extends Computation {

  private var enabled = true
  private var fullExpression = SimpleComputation.createFunctionBody(computationExpression, inputMapWithTypes, resultKey)

  private val transformationFunction: Map[Symbol, Any] => Map[Symbol, Any] =
    try {
      EvalSimpleComputationString(packageName, imports, name, fullExpression, securityConfiguration).newInstance
    } catch {
      case t: Throwable => {
        computationEngineLog.error("Computation failed to compile", t)
        enabled = false
        if (shouldPropagateExceptions) throw t else (x) => Map()
      }
    }

  val disabledComputationWarning = s"Disabled computation called: ${packageName}.${name}"

  /** Takes a domain of facts and returns a new domain of facts made up of the original set of
   * facts plus the result. Implements `compute` on the `Computation` trait. This method will
   * propagate exceptions or not depending on whether the `shouldPropagateExceptions` constructor
   * parameter is set.
   */
  def compute(domain: Domain): Domain = {
    if (enabled) {
      try {
        computationEngineLog.debug(s"${packageName}.${name}: Input: ${domain.facts}")
        val newFacts: Map[Symbol, Any] = transformationFunction(domain.facts)
        computationEngineLog.debug(s"${packageName}.${name}: Results: ${newFacts}")
        Domain.combine(newFacts, domain)
      } catch {
        case t: Throwable => {
          computationEngineLog.error(s"Computation threw exception when processing data: ${domain.facts}", t)
          if (shouldPropagateExceptions) throw t else domain
        }
      }
    } else {
      computationEngineLog.warn(disabledComputationWarning)
      domain
    }
  }
}

object SimpleComputation {
  def createFunctionBody(computationExpression: String, inputMap: Map[String, Symbol], resultKey: Symbol) = {
    val inputAssignments = Computation.createInputMappings(inputMap)

    s"""$inputAssignments
       | ( { $computationExpression } : Option[Any]) match {
       |   case Some(value) => Map($resultKey -> value)
       |   case None => Map()
       |}""".stripMargin
  }
}
psfblair/computation-engine
core/src/main/scala/com/cyrusinnovation/computation/Computation.scala
Scala
apache-2.0
9,029
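A hedged construction-and-use sketch for the class above. The SecurityConfiguration and Log arguments are stand-ins (this file does not define concrete instances), and the expression and keys are invented.

import com.cyrusinnovation.computation._
import com.cyrusinnovation.computation.util.Log

object SimpleComputationExample {
  // `securityConfig` and `log` are placeholders for real instances.
  def build(securityConfig: SecurityConfiguration, log: Log): SimpleComputation =
    new SimpleComputation(
      packageName = "example.computations",
      name = "MaxTestValueComputation",
      description = "Take the maximum of a list of test values",
      imports = List(),
      computationExpression = "Some(testValues.max)", // must evaluate to an Option[Any]
      inputMapWithTypes = Map("testValues: List[Int]" -> 'testValues),
      resultKey = 'maxTestValue,
      securityConfiguration = securityConfig,
      computationEngineLog = log
    )

  def run(computation: SimpleComputation): Unit = {
    // compute returns the input facts plus the result under resultKey.
    val results = computation.compute(Map('testValues -> List(2, 5, 3)))
    assert(results('maxTestValue) == 5)
  }
}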
package com.sksamuel.elastic4s.searches.aggs

import com.sksamuel.elastic4s.script.ScriptDefinition
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import com.sksamuel.exts.OptionImplicits._

case class PercentilesAggregationDefinition(name: String,
                                            field: Option[String] = None,
                                            missing: Option[AnyRef] = None,
                                            format: Option[String] = None,
                                            script: Option[ScriptDefinition] = None,
                                            percents: Seq[Double] = Nil,
                                            compression: Option[Double] = None,
                                            pipelines: Seq[PipelineAggregationDefinition] = Nil,
                                            subaggs: Seq[AggregationDefinition] = Nil,
                                            metadata: Map[String, AnyRef] = Map.empty) extends AggregationDefinition {

  type T = PercentilesAggregationDefinition

  def percents(first: Double, rest: Double*): T = percents(first +: rest)
  def percents(percents: Iterable[Double]): T = copy(percents = percents.toSeq)
  def compression(compression: Double): T = copy(compression = compression.some)
  def format(format: String): T = copy(format = format.some)
  def field(field: String): T = copy(field = field.some)
  def missing(missing: AnyRef): T = copy(missing = missing.some)
  def script(script: ScriptDefinition): T = copy(script = script.some)

  override def pipelines(pipelines: Iterable[PipelineAggregationDefinition]): T = copy(pipelines = pipelines.toSeq)
  override def subAggregations(aggs: Iterable[AggregationDefinition]): T = copy(subaggs = aggs.toSeq)
  override def metadata(map: Map[String, AnyRef]): PercentilesAggregationDefinition = copy(metadata = map)
}
tyth/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/PercentilesAggregationDefinition.scala
Scala
apache-2.0
1,924
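A fluent-style construction sketch using the copy-based setters above; the aggregation name and field are invented.

import com.sksamuel.elastic4s.searches.aggs.PercentilesAggregationDefinition

object PercentilesExample {
  // Each setter returns a new immutable copy, so calls chain freely.
  val agg = PercentilesAggregationDefinition("load_time_percentiles")
    .field("load_time_millis")
    .percents(50.0, 95.0, 99.0)
    .compression(200.0)
    .format("0.00")
}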
/*
 * Copyright 2015-2016 Snowflake Computing
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.snowflake.spark.snowflake.benchmarks

import java.io.{BufferedWriter, File, FileWriter, Writer}
import java.util.Properties

import net.snowflake.spark.snowflake.pushdowns.SnowflakeStrategy
import net.snowflake.spark.snowflake.{IntegrationSuiteBase, SnowflakeConnectorUtils}
import org.apache.spark.sql.DataFrame
import org.scalatest.exceptions.TestFailedException

import scala.collection.mutable

trait PerformanceSuite extends IntegrationSuiteBase {

  protected final val runOptionAccepted = Set[String](
    "all",
    "jdbc-source",
    "s3-all",
    "s3-parquet",
    "s3-csv",
    "snowflake-all-with-stage",
    "snowflake-all",
    "snowflake-stage",
    "snowflake-with-pushdown",
    "snowflake-partial-pushdown"
  )

  protected final var fullPushdown: Boolean = false
  protected final var partialPushdown: Boolean = false
  protected final var internalStage: Boolean = false
  protected final var s3CSV: Boolean = false
  protected final var s3Parquet: Boolean = false
  protected final var jdbcSource: Boolean = false

  protected final val outputFormatAccepted = Set[String]("csv", "print", "both")

  protected var dataSources: mutable.LinkedHashMap[String, Map[String, DataFrame]]

  protected final var headersWritten: Boolean = false
  protected final var fileWriter: Option[Writer] = None

  protected final var currentSource: String = ""

  // For implementing classes to add their own required config params
  protected var requiredParams: mutable.LinkedHashMap[String, String]
  protected var acceptedArguments: mutable.LinkedHashMap[String, Set[String]]

  /** Configuration string for run mode for benchmarks */
  protected var runOption: String = ""

  /** Configuration string for output: simple print or CSV */
  protected var outputFormat: String = ""

  protected final var runTests: Boolean = true

  // Maintain session state to make sure it is restored upon finishing of suite
  protected final var sessionStatus: Boolean = false

  protected final var jdbcProperties: Properties = new Properties
  protected final var jdbcURL: String = ""

  override def beforeAll(): Unit = {
    super.beforeAll()

    sessionStatus = sparkSession.experimental.extraStrategies
      .exists(s => s.isInstanceOf[SnowflakeStrategy])

    try {
      runOption = getConfigValue("runOption")
      outputFormat = getConfigValue("outputFormat")

      for ((k, _) <- requiredParams)
        requiredParams.put(k, getConfigValue(k).toLowerCase)
    } catch {
      case t: TestFailedException =>
        if (t.getMessage contains MISSING_PARAM_ERROR) {
          runTests = false
          val reqParams = requiredParams.keySet.mkString(", ")
          // scalastyle:off println
          println(
            s"""One or more required parameters for running the benchmark suite was missing: runOption, outputFormat, $reqParams. Skipping ${getClass.getSimpleName}.""")
          // scalastyle:on println
        } else throw t
      case e: Exception => throw e
    }

    if (runTests) {
      verifyParams()
      if (outputFormat == "csv" || outputFormat == "both") prepareCSV()
    }

    internalStage = Set("all", "snowflake-all-with-stage", "snowflake-stage") contains runOption

    partialPushdown = Set(
      "all",
      "snowflake-all",
      "snowflake-all-with-stage",
      "snowflake-partial-pushdown"
    ) contains runOption

    fullPushdown = Set(
      "all",
      "snowflake-all",
      "snowflake-all-with-stage",
      "snowflake-with-pushdown"
    ) contains runOption

    jdbcSource = Set("all", "jdbc-source") contains runOption
    s3Parquet = Set("all", "s3-all", "s3-parquet") contains runOption
    s3CSV = Set("all", "s3-all", "s3-csv") contains runOption

    jdbcURL = s"""jdbc:snowflake://${params.sfURL}"""

    jdbcProperties.put("db", params.sfDatabase)
    jdbcProperties.put("schema", params.sfSchema) // Has a default
    jdbcProperties.put("user", params.sfUser)

    params.privateKey match {
      case Some(_) => jdbcProperties.put("privateKey", params.privateKey)
      case None => jdbcProperties.put("password", params.sfPassword)
    }
    jdbcProperties.put("ssl", params.sfSSL) // Has a default

    // Optional properties
    if (params.sfAccount.isDefined) {
      jdbcProperties.put("account", params.sfAccount.get)
    }
    if (params.sfWarehouse.isDefined) {
      jdbcProperties.put("warehouse", params.sfWarehouse.get)
    }
    if (params.sfRole.isDefined) {
      jdbcProperties.put("role", params.sfRole.get)
    }
  }

  protected final def verifyParams(): Unit = {
    val argCheckMap = mutable.LinkedHashMap() ++= acceptedArguments
    argCheckMap.put("runOption", runOptionAccepted)
    argCheckMap.put("outputFormat", outputFormatAccepted)

    val fullParams = mutable.LinkedHashMap() ++= requiredParams
    fullParams.put("runOption", runOption)
    fullParams.put("outputFormat", outputFormat)

    for ((param, acceptedSet) <- argCheckMap) {
      val argValue = fullParams
        .getOrElse(param, fail(s"Required parameter $param missing."))
      if (!acceptedSet.contains(argValue) && !acceptedSet.contains("*")) {
        fail(
          s"""Value $argValue not accepted for parameter $param. Accepted values are: ${acceptedSet.mkString(", ")} """)
      }
    }
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    SnowflakeConnectorUtils.setPushdownSession(sparkSession, sessionStatus)
    if (fileWriter.isDefined) {
      fileWriter.get.close()
    }
  }

  /**
   * Run the query using the given runtime configurations (S3-direct, with pushdown,
   * without pushdown, etc., outputting using also the provided config value.
   */
  protected final def testQuery(query: String, name: String = "unnamed"): Unit = {

    val failedMessage = s"""Failed: $name. No result generated."""

    // Skip if invalid configs
    if (!runTests) return

    var columnWriters = new mutable.ListBuffer[(String, String) => Option[String]]
    var outputHeaders = new mutable.ListBuffer[String]

    if (partialPushdown) {
      columnWriters += runWithSnowflake(pushdown = false)
      outputHeaders += s"""Only filter/proj pushdowns"""
    }
    if (fullPushdown) {
      columnWriters += runWithSnowflake(pushdown = true)
      outputHeaders += s"""With full pushdowns"""
    }
    if (internalStage) {
      columnWriters += runWithSnowflakeStage(pushdown = false)
      outputHeaders += s"""Only filter/proj pushdowns, internal stage"""
      columnWriters += runWithSnowflakeStage(pushdown = true)
      outputHeaders += s"""With full pushdowns, internal stage"""
    }
    if (jdbcSource) {
      columnWriters += runWithoutSnowflake(format = "jdbc")
      outputHeaders += s"""Using Spark JDBC Source"""
    }
    if (s3Parquet) {
      columnWriters += runWithoutSnowflake(format = "parquet")
      outputHeaders += s"""Direct from S3 Parquet"""
    }
    if (s3CSV) {
      columnWriters += runWithoutSnowflake(format = "csv")
      outputHeaders += s"""Direct from S3 CSV"""
    }

    val results = columnWriters.map(f => f(query, name).getOrElse(failedMessage))

    if (outputFormat == "print" || outputFormat == "both") {
      // scalastyle:off println
      outputHeaders
        .zip(results)
        .foreach(x => println(name + ", " + x._1 + ": " + x._2))
      // scalastyle:on println
    }

    if (outputFormat == "csv" || outputFormat == "both") {
      writeToCSV(outputHeaders, results, name)
    }
  }

  protected final def writeToCSV(headers: Seq[String], results: Seq[String], name: String): Unit = {
    val writer = {
      if (fileWriter.isEmpty) prepareCSV()
      fileWriter.get
    }

    if (!headersWritten) {
      headersWritten = true
      writer.write("Name, " + headers.mkString(", ") + "\n")
    }

    writer.write(name + ", " + results.mkString(", ") + "\n")
  }

  protected final def prepareCSV(): Unit = {
    if (fileWriter.isEmpty) {
      try {
        val outputFile = new File(getConfigValue("outputFile"))
        fileWriter = Some(new BufferedWriter(new FileWriter(outputFile, false)))
      } catch {
        case e: Exception =>
          if (fileWriter.isDefined) {
            fileWriter.get.close()
          }
          throw e
      }
    }
  }

  protected final def runWithSnowflake(pushdown: Boolean)(sql: String, name: String): Option[String] = {

    if (currentSource != "snowflake") {
      dataSources.foreach {
        case (tableName: String, sources: Map[String, DataFrame]) =>
          val df: DataFrame = sources.getOrElse(
            "snowflake",
            fail("Snowflake datasource missing for snowflake performance test."))
          df.createOrReplaceTempView(tableName)
      }
      currentSource = "snowflake"
    }

    val state = sessionStatus
    SnowflakeConnectorUtils.setPushdownSession(sparkSession, pushdown)
    val result = executeSqlBenchmarkStatement(sql, name)
    SnowflakeConnectorUtils.setPushdownSession(sparkSession, state)
    result
  }

  protected final def runWithSnowflakeStage(pushdown: Boolean)(sql: String, name: String): Option[String] = {

    if (currentSource != "snowflake-stage") {
      dataSources.foreach {
        case (tableName: String, sources: Map[String, DataFrame]) =>
          val df: DataFrame = sources.getOrElse(
            "snowflake-stage",
            fail("Snowflake-Stage datasource missing for snowflake performance test."))
          df.createOrReplaceTempView(tableName)
      }
      currentSource = "snowflake-stage"
    }

    val state = sessionStatus
    SnowflakeConnectorUtils.setPushdownSession(sparkSession, pushdown)
    val result = executeSqlBenchmarkStatement(sql, name)
    SnowflakeConnectorUtils.setPushdownSession(sparkSession, state)
    result
  }

  private def executeSqlBenchmarkStatement(sql: String, name: String): Option[String] = {
    try {
      val t1 = System.nanoTime()
      sparkSession.sql(sql).collect()
      Some(((System.nanoTime() - t1) / 1e9d).toString)
    } catch {
      case _: Exception =>
        // scalastyle:off println
        println(s"""Query $name failed.""")
        // scalastyle:on println
        None
    }
  }

  /* Used for running direct from S3, or JDBC Source */
  protected final def runWithoutSnowflake(format: String)(sql: String, name: String): Option[String] = {
    if (currentSource != format) {
      dataSources.foreach {
        case (tableName: String, sources: Map[String, DataFrame]) =>
          val df: DataFrame = sources.getOrElse(
            format,
            fail(s"$format datasource missing for snowflake performance test."))
          df.createOrReplaceTempView(tableName)
      }
      currentSource = format
    }

    executeSqlBenchmarkStatement(sql, name)
  }
}
snowflakedb/spark-snowflakedb
src/it/scala/net/snowflake/spark/snowflake/benchmarks/PerformanceSuite.scala
Scala
apache-2.0
11,781
package org.coroutines

import org.coroutines.common._
import scala.collection._
import scala.language.experimental.macros
import scala.reflect.macros.whitebox.Context

/** Declares basic data types and analysis utilities. */
trait Analyzer[C <: Context] {
  val c: C

  import c.universe._

  case class Zipper(above: Zipper, left: List[Tree], ctor: List[Tree] => Tree) {
    def append(x: Tree) = Zipper(above, x :: left, ctor)
    def isRoot = above == null
    def result: Tree = {
      var z = this
      while (z.above != null) z = z.ascend
      z.ctor(z.left.reverse)
    }
    def ascend: Zipper =
      if (above == null) sys.error("cannot ascend")
      else Zipper(above.above, ctor(left.reverse) :: above.left, above.ctor)
    def descend(ctor: List[Tree] => Tree) = Zipper(this, Nil, ctor)
  }

  class VarInfo(
    val uid: Int,
    val origtree: Tree,
    val sym: Symbol,
    val isArg: Boolean,
    val table: Table
  ) {
    private var rawstackpos: (Int, Int) = null
    val tpe = sym.info
    val name = sym.name.toTermName
    def stackpos: (Int, Int) = {
      assert(rawstackpos != null, s"Variable '$sym' without computed stack position.")
      rawstackpos
    }
    def isWide = tpe =:= typeOf[Double] || tpe =:= typeOf[Long]
    def width: Int = if (isWide) 2 else 1
    def stackpos_=(v: (Int, Int)) = rawstackpos = v
    def isUnitType = tpe =:= typeOf[Unit]
    def isAnyType = tpe =:= typeOf[Any]
    def isRefType = Analyzer.this.isRefType(tpe)
    def isValType = Analyzer.this.isValType(tpe)
    val defaultValue: Tree = {
      if (isRefType) q"null"
      else if (tpe =:= typeOf[Boolean]) q"false"
      else if (tpe =:= typeOf[Byte]) q"0.toByte"
      else if (tpe =:= typeOf[Short]) q"0.toShort"
      else if (tpe =:= typeOf[Char]) q"0.toChar"
      else if (tpe =:= typeOf[Int]) q"0"
      else if (tpe =:= typeOf[Float]) q"0.0f"
      else if (tpe =:= typeOf[Long]) q"0L"
      else if (tpe =:= typeOf[Double]) q"0.0"
      else sys.error(s"Unknown type: $tpe")
    }
    private def encodeInt(t: Tree): Tree = {
      if (tpe =:= typeOf[Boolean]) q"if ($t) 1 else 0"
      else if (tpe =:= typeOf[Byte]) q"$t.toInt"
      else if (tpe =:= typeOf[Short]) q"$t.toInt"
      else if (tpe =:= typeOf[Char]) q"$t.toInt"
      else if (tpe =:= typeOf[Int]) q"$t"
      else if (tpe =:= typeOf[Float]) q"_root_.java.lang.Float.floatToIntBits($t)"
      else sys.error(s"Cannot encode type $tpe as Int.")
    }
    private def encodeWide(t: Tree): (Tree, Tree) = {
      val nme = TermName(c.freshName("v"))
      val enc =
        if (tpe =:= typeOf[Long]) q"$t"
        else if (tpe =:= typeOf[Double]) q"_root_.java.lang.Double.doubleToLongBits($t)"
        else sys.error(s"Cannot encode wide type $tpe.")
      (q"val $nme = $enc", q"$nme")
    }
    private def decodeInt(t: Tree): Tree = {
      if (tpe =:= typeOf[Boolean]) q"($t != 0)"
      else if (tpe =:= typeOf[Byte]) q"$t.toByte"
      else if (tpe =:= typeOf[Short]) q"$t.toShort"
      else if (tpe =:= typeOf[Char]) q"$t.toChar"
      else if (tpe =:= typeOf[Int]) q"$t"
      else if (tpe =:= typeOf[Float]) q"_root_.java.lang.Float.intBitsToFloat($t)"
      else sys.error(s"Cannot decode type $tpe from Long.")
    }
    private def decodeWide(t: Tree): Tree = {
      if (tpe =:= typeOf[Long]) q"$t"
      else if (tpe =:= typeOf[Double]) q"_root_.java.lang.Double.longBitsToDouble($t)"
      else sys.error(s"Cannot decode wide type $tpe.")
    }
    val initialValue: Tree = {
      val t = if (isArg) q"$name" else defaultValue
      if (isRefType) t else t
    }
    val stackname = {
      if (isRefType) TermName("$refstack")
      else TermName("$valstack")
    }
    val stacktpe = {
      if (isRefType) typeOf[AnyRef]
      else typeOf[Int]
    }
    def pushTree(implicit t: Table): Tree = {
      if (isWide) {
        val (decl, ident) = encodeWide(initialValue)
        q"""
          $decl
          _root_.org.coroutines.common.Stack.push[$stacktpe](
            $$c.$stackname, ($ident & 0xffffffff).toInt, ${t.initialStackSize})
          _root_.org.coroutines.common.Stack.push[$stacktpe](
            $$c.$stackname, (($ident >>> 32) & 0xffffffff).toInt, ${t.initialStackSize})
        """
      } else q"""
        _root_.org.coroutines.common.Stack.push[$stacktpe](
          $$c.$stackname, ${encodeInt(initialValue)}, ${t.initialStackSize})
      """
    }
    def popTree = {
      if (isWide) q"""
        _root_.org.coroutines.common.Stack.pop[$stacktpe]($$c.$stackname)
        _root_.org.coroutines.common.Stack.pop[$stacktpe]($$c.$stackname)
      """
      else q"""
        _root_.org.coroutines.common.Stack.pop[$stacktpe]($$c.$stackname)
      """
    }
    def storeTree(coroutine: Tree, x: Tree): Tree = {
      if (isWide) {
        val (decl, v) = encodeWide(x)
        q"""
          $decl
          _root_.org.coroutines.common.Stack.set[$stacktpe](
            $coroutine.$stackname, ${stackpos._1 + 0}, ($v & 0xffffffff).toInt)
          _root_.org.coroutines.common.Stack.set[$stacktpe](
            $coroutine.$stackname, ${stackpos._1 + 1}, (($v >>> 32) & 0xffffffff).toInt)
        """
      } else {
        val encoded = {
          if (isUnitType) q"$x.asInstanceOf[AnyRef]"
          else if (isAnyType) q"$x.asInstanceOf[AnyRef]"
          else if (isRefType) x
          else encodeInt(x)
        }
        q"""
          _root_.org.coroutines.common.Stack.set[$stacktpe](
            $coroutine.$stackname, ${stackpos._1}, $encoded)
        """
      }
    }
    def loadTree(coroutine: Tree): Tree = {
      if (isWide) {
        val nme0 = TermName(c.freshName("v"))
        val nme1 = TermName(c.freshName("v"))
        val decoded = decodeWide(q"($nme1.toLong << 32) | $nme0")
        q"""
          val $nme0 = _root_.org.coroutines.common.Stack.get[$stacktpe](
            $coroutine.$stackname, ${stackpos._1 + 0})
          val $nme1 = _root_.org.coroutines.common.Stack.get[$stacktpe](
            $coroutine.$stackname, ${stackpos._1 + 1})
          $decoded
        """
      } else {
        if (isUnitType) q"()"
        else {
          val t = q"""
            _root_.org.coroutines.common.Stack.get[$stacktpe](
              $coroutine.$stackname, ${stackpos._1})
          """
          if (isRefType) q"$t.asInstanceOf[$tpe]" else decodeInt(t)
        }
      }
    }
    override def toString = s"VarInfo($uid, $sym)"
  }

  class Table(private val lambda: Tree) {
    val q"(..$args) => $body" = lambda
    val yieldType = inferYieldType(body)
    val returnType = inferReturnType(body)
    val returnValueMethodName = Analyzer.this.returnValueMethodName(returnType.tpe)
    private var varCount = 0
    private var nodeCount = 0L
    private var subgraphCount = 0L
    val vars = mutable.LinkedHashMap[Symbol, VarInfo]()
    val topChain = Chain(new BlockInfo(None), Nil, this, null)
    val untyper = new ByTreeUntyper[c.type](c)(lambda)
    def initialStackSize: Int = 4
    object names {
      val coroutineParam = TermName(c.freshName())
    }
    def newVarUid(): Int = {
      val c = varCount
      varCount += 1
      c
    }
    def newNodeUid(): Long = {
      val c = nodeCount
      nodeCount += 1
      c
    }
    def newSubgraphUid(): Long = {
      val c = subgraphCount
      subgraphCount += 1
      c
    }
    def foreach[U](f: ((Symbol, VarInfo)) => U): Unit = vars.foreach(f)
    def contains(s: Symbol) = vars.contains(s)
    def apply(s: Symbol) = vars(s)
    def refvars = vars.filter(_._2.isRefType)
    def valvars = vars.filter(_._2.isValType)
  }

  class BlockInfo(val tryuids: Option[(Long, Long)]) {
    val decls = mutable.LinkedHashMap[Symbol, VarInfo]()
    val occurrences = mutable.LinkedHashMap[Symbol, VarInfo]()
    val assignments = mutable.LinkedHashMap[Symbol, VarInfo]()
    def copyWithoutVars = new BlockInfo(tryuids)
    override def toString = {
      s"[decl = ${decls.map(_._1.name).mkString(", ")}, " +
        s"occ = ${occurrences.map(_._1.name).mkString(", ")}, " +
        s"ass = ${assignments.map(_._1.name).mkString(", ")}, " +
        s"tryuids = $tryuids]"
    }
  }

  case class Chain(
    info: BlockInfo, decls: List[(Symbol, VarInfo)], table: Table, parent: Chain
  ) {
    def alldecls: List[(Symbol, VarInfo)] = {
      decls ::: (if (parent != null) parent.alldecls else Nil)
    }
    def contains(s: Symbol): Boolean = {
      decls.exists(_._1 == s) || (parent != null && parent.contains(s))
    }
    def ancestors: List[Chain] = {
      if (parent != null) this :: parent.ancestors else this :: Nil
    }
    def chainForDecl(s: Symbol): Option[Chain] = {
      if (decls.exists(_._1 == s)) Some(this)
      else if (parent != null) parent.chainForDecl(s)
      else None
    }
    def isDescendantOf(that: Chain): Boolean = {
      (this.info == that.info && this.decls.length >= that.decls.length) ||
        (parent != null && parent.isDescendantOf(that))
    }
    def isAssigned(s: Symbol): Boolean = {
      info.assignments.contains(s)
    }
    def isAssignedInAncestors(s: Symbol): Boolean = {
      isAssigned(s) || (parent != null && parent.isAssignedInAncestors(s))
    }
    def isDeclared(s: Symbol): Boolean = {
      info.decls.contains(s)
    }
    def isDeclaredInAncestors(s: Symbol): Boolean = {
      isDeclared(s) || (parent != null && parent.isDeclaredInAncestors(s))
    }
    def isOccurring(s: Symbol): Boolean = {
      info.occurrences.contains(s)
    }
    def isOccurringInAncestors(s: Symbol): Boolean = {
      isOccurring(s) || (parent != null && parent.isOccurringInAncestors(s))
    }
    def withDecl(valdef: Tree, isArg: Boolean): Chain = {
      val sym = valdef.symbol
      val varinfo = table.vars.get(sym) match {
        case Some(varinfo) => varinfo
        case None => new VarInfo(table.newVarUid, valdef, sym, isArg, table)
      }
      table.vars(sym) = varinfo
      Chain(info, (sym, varinfo) :: decls, table, parent)
    }
    def takeDecls(n: Int) = Chain(info, decls.take(n), table, parent)
    def descend(tryuids: Option[(Long, Long)] = None) =
      Chain(new BlockInfo(tryuids), Nil, table, this)
    def copyWithoutBlocks: Chain = {
      val nparent = if (parent == null) null else parent.copyWithoutBlocks
      Chain(info.copyWithoutVars, decls, table, nparent)
    }
    override def equals(that: Any) = that match {
      case that: AnyRef => this eq that
      case _ => false
    }
    override def hashCode = System.identityHashCode(this)
    override def toString = {
      val s = s"[${decls.map(_._1.name).mkString(", ")}] -> "
      if (parent != null) s + parent.toString else s
    }
    def verboseString: String = {
      val b = info.toString
      val s = s"[${decls.map(_._1.name).mkString(", ")} | <$b>] -> "
      if (parent != null) s + parent.verboseString else s
    }
  }

  object ValDecl {
    def unapply(t: Tree): Option[Tree] = t match {
      case q"$_ val $name: $_ = $_" =>
        Some(t)
      case q"$_ var $name: $_ = $_" =>
        Some(t)
      case q"{ $_ val $name: $_ = $_ }" =>
        Some(t.collect({ case t @ q"$_ val $_: $_ = $_" => t }).head)
      case q"{ $_ var $name: $_ = $_ }" =>
        Some(t.collect({ case t @ q"$_ var $_: $_ = $_" => t }).head)
      case _ =>
        None
    }
  }

  def isCoroutineDef(tpe: Type) = {
    val codefsym = typeOf[Coroutine[_, _]].typeConstructor.typeSymbol
    tpe.baseType(codefsym) != NoType
  }

  def isCoroutineDefMarker(tpe: Type) = {
    val codefsym = typeOf[Coroutine.DefMarker[_]].typeConstructor.typeSymbol
    tpe.baseType(codefsym) != NoType
  }

  def isCoroutineDefSugar0(tpe: Type) = {
    val codefsym0 = typeOf[~~~>[_, _]].typeConstructor.typeSymbol
    def hasBase(sym: Symbol) = tpe.baseType(sym) != NoType
    hasBase(codefsym0)
  }

  def isCoroutineDefSugar(tpe: Type) = {
    val codefsym0 = typeOf[~~~>[_, _]].typeConstructor.typeSymbol
    val codefsym1 = typeOf[~~>[_, _]].typeConstructor.typeSymbol
    val codefsym2 = typeOf[~>[_, _]].typeConstructor.typeSymbol
    def hasBase(sym: Symbol) = tpe.baseType(sym) != NoType
    hasBase(codefsym0) || hasBase(codefsym1) || hasBase(codefsym2)
  }

  def coroutineMethodArgs(tpe: Type): List[Type] =
    if (!isCoroutineDefSugar(tpe)) Nil
    else if (isCoroutineDefSugar0(tpe)) Nil
    else {
      val (ytpe, rtpe) = coroutineYieldReturnTypes(tpe)
      val codefsym1 = typeOf[~~>[_, _]].typeConstructor.typeSymbol
      tpe.baseType(codefsym1) match {
        case TypeRef(_, _, List(_, _)) => return List(ytpe, rtpe)
        case _ =>
      }
      val codefsym2 = typeOf[~>[_, _]].typeConstructor.typeSymbol
      val tupletpe = tpe.baseType(codefsym2) match {
        case TypeRef(_, _, List(tpe, _)) => tpe
      }
      val tuple2sym = typeOf[(_, _)].typeConstructor.typeSymbol
      tupletpe.baseType(tuple2sym) match {
        case TypeRef(_, _, tpargs) => return tpargs ++ List(ytpe, rtpe)
        case _ =>
      }
      val tuple3sym = typeOf[(_, _, _)].typeConstructor.typeSymbol
      tupletpe.baseType(tuple3sym) match {
        case TypeRef(_, _, tpargs) => return tpargs ++ List(ytpe, rtpe)
        case _ =>
      }
      sys.error(s"Not a coroutine sugar type with type params: $tpe")
    }

  def coroutineYieldReturnTypes(tpe: Type) = {
    val codefsym = typeOf[Coroutine.DefMarker[_]].typeConstructor.typeSymbol
    val tuplesym = typeOf[(_, _)].typeConstructor.typeSymbol
    tpe.baseType(codefsym) match {
      case TypeRef(_, sym, List(typetuple)) =>
        typetuple.baseType(tuplesym) match {
          case TypeRef(_, sym, List(yldtpe, rettpe)) => (yldtpe, rettpe)
        }
    }
  }

  def coroutineTypeFor(tpe: Type) = {
    val codeftpe = typeOf[Coroutine[_, _]].typeConstructor
    appliedType(codeftpe, List(tpe))
  }

  object CoroutineOp {
    def unapply(t: Tree): Option[Tree] = t match {
      case q"$qual.`package`.coroutine[$_]($_)" if isCoroutinesPkg(qual) =>
        Some(t)
      case q"$qual.`package`.yieldval[$_]($_)" if isCoroutinesPkg(qual) =>
        Some(t)
      case q"$qual.`package`.yieldto[$_]($_)" if isCoroutinesPkg(qual) =>
        Some(t)
      case q"$qual.`package`.call($_.apply(..$_))" if isCoroutinesPkg(qual) =>
        Some(t)
      case q"$co.apply(..$_)" if isCoroutineDefMarker(co.tpe) =>
        Some(t)
      case q"$co.apply[..$_](..$_)(..$_)" if isCoroutineDefSugar(co.tpe) =>
        Some(t)
      case _ =>
        None
    }
  }

  // return type is the lub of the function return type and yield argument types
  def isCoroutinesPkg(q: Tree) = q match {
    case q"org.coroutines.`package`" => true
    case q"coroutines.this.`package`" => true
    case t => false
  }

  def isRefType(tpe: Type) = !isValType(tpe)

  def isValType(tpe: Type) = {
    tpe =:= typeOf[Boolean] ||
      tpe =:= typeOf[Byte] ||
      tpe =:= typeOf[Short] ||
      tpe =:= typeOf[Char] ||
      tpe =:= typeOf[Int] ||
      tpe =:= typeOf[Float] ||
      tpe =:= typeOf[Long] ||
      tpe =:= typeOf[Double]
  }

  def typeChar(tpe: Type): Char = {
    if (isRefType(tpe)) 'L'
    else if (tpe =:= typeOf[Boolean]) 'Z'
    else if (tpe =:= typeOf[Byte]) 'B'
    else if (tpe =:= typeOf[Short]) 'S'
    else if (tpe =:= typeOf[Char]) 'C'
    else if (tpe =:= typeOf[Int]) 'I'
    else if (tpe =:= typeOf[Float]) 'F'
    else if (tpe =:= typeOf[Long]) 'J'
    else if (tpe =:= typeOf[Double]) 'D'
    else sys.error("unreachable")
  }

  def returnValueMethodName(tpe: Type) = TermName("$returnvalue$" + typeChar(tpe))

  def inferYieldType(body: Tree): Tree = {
    // yield type must correspond to the `yieldval`, `yieldto` and coroutine-apply args
    val constraintTpes = body.collect {
      case q"$qual.yieldval[$tpt]($_)" if isCoroutinesPkg(qual) =>
        tpt.tpe
      case q"$co.apply(..$_)" if isCoroutineDefMarker(co.tpe) =>
        coroutineYieldReturnTypes(co.tpe)._1
      case q"$co.apply[..$_](..$_)(..$_)" if isCoroutineDefSugar(co.tpe) =>
        coroutineYieldReturnTypes(co.tpe)._1
    }
    tq"${lub(constraintTpes).widen}"
  }

  def inferReturnType(body: Tree): Tree = tq"${lub(body.tpe :: Nil).widen}"
}
storm-enroute/coroutines
src/main/scala/org/coroutines/Analyzer.scala
Scala
bsd-3-clause
16,080
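An illustration, based on the public storm-enroute/coroutines API rather than code from this file, of the kind of lambda the Analyzer inspects: inferYieldType collects the yieldval argument types, and inferReturnType widens the type of the body's final expression.

import org.coroutines._

object AnalyzerInputExample {
  // Yield type is inferred from the yieldval arguments (Int here);
  // return type from the final expression (String here).
  val indexer = coroutine { (names: List[String]) =>
    var i = 0
    while (i < names.length) {
      yieldval(i)
      i += 1
    }
    "done"
  }
}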
package concrete
package constraint
package linear

import bitvectors.BitVector
import com.typesafe.scalalogging.LazyLogging
import concrete.constraint.extension.BinaryExt
import concrete.generator.ACBC
import concrete.util.Interval

object Eq {
  def apply(neg: Boolean, x: Variable, b: Int, y: Variable): ACBC =
    if (neg) {
      ACBC
        .withAC(new EqACNeg(x, y, b))
        .withBC(new EqBC(neg, x, b, y))
    } else {
      ACBC
        .withAC(new EqACFast(x, b, y))
    }
}

final class EqCReif(val r: Variable, val x: Variable, val y: Int) extends Constraint(Array(r, x)) {
  def advise(problemState: ProblemState, event: Event, pos: Int): Int = 2

  def check(tuple: Array[Int]): Boolean = tuple(0) == (if (tuple(1) == y) 1 else 0)

  def init(ps: ProblemState): Outcome = ps

  def revise(ps: ProblemState, mod: BitVector): Outcome = {
    val dx = ps.dom(x)
    ps.dom(r) match {
      case BooleanDomain.UNKNOWNBoolean =>
        if (dx.contains(y)) {
          if (dx.isAssigned) {
            // Necessarily grounded to the same value since not disjoint
            ps.updateDomNonEmpty(r, BooleanDomain.TRUE)
          } else {
            ps
          }
        } else {
          ps.updateDomNonEmpty(r, BooleanDomain.FALSE).entail(this)
        }
      case BooleanDomain.TRUE => ps.tryAssign(x, y)
      case BooleanDomain.FALSE => ps.removeIfPresent(x, y).entail(this)
    }
  }

  def simpleEvaluation: Int = 1

  override def toString(ps: ProblemState) = s"${r.toString(ps)} <=> ${x.toString(ps)} = $y"
}

final class EqReif(val r: Variable, val x: Variable, val y: Variable) extends Constraint(Array(r, x, y)) {
  def advise(problemState: ProblemState, event: Event, pos: Int): Int = 3

  def check(tuple: Array[Int]): Boolean = tuple(0) == (if (tuple(1) == tuple(2)) 1 else 0)

  def init(ps: ProblemState): Outcome = ps

  def revise(ps: ProblemState, mod: BitVector): Outcome = {
    val dx = ps.dom(x)
    val dy = ps.dom(y)
    ps.dom(r) match {
      case BooleanDomain.UNKNOWNBoolean =>
        if (dx disjoint dy) {
          ps.updateDomNonEmpty(r, BooleanDomain.FALSE).entail(this)
        } else if (dx.isAssigned && dy.isAssigned) {
          // Necessarily grounded to the same value since not disjoint
          ps.updateDomNonEmpty(r, BooleanDomain.TRUE).entail(this)
        } else {
          ps
        }
      case BooleanDomain.TRUE =>
        val d = dx & dy
        ps.updateDom(x, d)
          .andThen { ps =>
            if (d.size < dy.size) ps.updateDom(y, d) else ps
          }
      case BooleanDomain.FALSE =>
        (if (dx.isAssigned) ps.removeIfPresent(y, dx.head) else ps)
          .andThen { ps =>
            if (dy.isAssigned) ps.removeIfPresent(x, dy.head) else ps
          }
          .entailIf(this, ps => ps.dom(x) disjoint ps.dom(y))
    }
  }

  def simpleEvaluation: Int = 1
}

/**
 * Constraint x + b = y
 */
final class EqACFast(val x: Variable, val b: Int, val y: Variable) extends Constraint(Array(x, y)) {
  var staticEvaluation: Int = _

  def this(x: Variable, y: Variable) = this(x, 0, y)

  def advise(problemState: ProblemState, event: Event, pos: Int): Int = 2

  def check(tuple: Array[Int]): Boolean = tuple(0) + b == tuple(1)

  def init(ps: ProblemState): Outcome = {
    staticEvaluation = (ps.card(x) + ps.card(y)) / BinaryExt.GAIN_OVER_GENERAL
    ps
  }

  def revise(ps: concrete.ProblemState, mod: BitVector): Outcome = {
    val oldY = ps.dom(y)
    val newX = ps.dom(x) & oldY.shift(-b)
    ps.updateDom(x, newX)
      .andThen { ps =>
        if (newX.size < oldY.size) {
          val newY = newX.shift(b)
          ps.updateDomNonEmptyNoCheck(y, newY)
        } else {
          ps
        }
      }
  }

  override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome =
    if (ps.dom(x).shift(b) disjoint ps.dom(y)) Contradiction(scope) else ps

  def simpleEvaluation: Int = 1

  override def toString(ps: ProblemState): String =
    s"${x.toString(ps)}${if (b > 0) s" + $b" else if (b < 0) s" − ${-b}" else ""} =FAC= ${y.toString(ps)}"

  override def toString: String =
    s"$x${if (b > 0) s" + $b" else if (b < 0) s" − ${-b}" else ""} =FAC= $y"
}

/**
 * Constraint x + y = b
 *
 * @param x
 * @param y
 * @param b
 */
final class EqACNeg private[linear](
    val x: Variable,
    val y: Variable,
    val b: Int,
    val skipIntervals: Boolean = true) extends Constraint(Array(x, y)) with BCCompanion {

  def init(ps: ProblemState): ProblemState = ps

  def this(x: Variable, y: Variable) = this(x, y, 0, true)

  def check(t: Array[Int]): Boolean = t(0) + t(1) == b

  def simpleEvaluation: Int = 2

  def advise(ps: ProblemState, event: Event, pos: Int): Int =
    if (skip(ps)) -1 else ps.card(x) + ps.card(y)

  override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = {
    val xDom = ps.dom(x)
    val yDom = ps.dom(y)
    val r = (xDom.span + yDom.span).contains(b) && (
      if (xDom.size < yDom.size) {
        xDom.exists(xv => yDom.contains(b - xv))
      } else {
        yDom.exists(yv => xDom.contains(b - yv))
      })
    if (r) ps else Contradiction(scope)
  }

  def revise(ps: ProblemState, mod: BitVector): Outcome = {
    val domY = ps.dom(y)
    ps.filterDom(x)(xv => domY.contains(b - xv))
      .andThen { ps =>
        val domX = ps.dom(x)
        ps.filterDom(y)(yv => domX.contains(b - yv))
      }
  }

  override def toString(ps: ProblemState) = s"${x.toString(ps)} + ${y.toString(ps)} =AC= $b"
}

/**
 * if (neg)
 *   constraint x + y = b
 * else
 *   constraint y - x = b
 */
final class EqBC(val neg: Boolean, val x: Variable, val b: Int, val y: Variable)
  extends Constraint(Array(x, y)) with BC with LazyLogging with ItvArrayFixPoint {

  val simpleEvaluation = 2

  val ops = if (neg) Array(reviseXNeg(_), reviseYNeg(_)) else Array(reviseXPos(_), reviseYPos(_))

  def init(ps: ProblemState): ProblemState = ps

  /**
   * public
   * Constraint x = y.
   *
   * @param x
   * @param y
   */
  def this(x: Variable, y: Variable) = this(false, x, 0, y)

  def check(t: Array[Int]): Boolean = (if (neg) -t(0) else t(0)) + b == t(1)

  override def revise(ps: ProblemState, mod: BitVector): Outcome = fixPoint(ps)

  override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = {
    val xSpan = ps.span(x)
    val negX = if (neg) -xSpan else xSpan
    if ((negX + b) intersects ps.span(y)) ps else Contradiction(scope)
  }

  override def toString(ps: ProblemState) =
    s"${if (neg) "−" else ""}${x.toString(ps)}${if (b > 0) s" + $b" else if (b < 0) s" − ${-b}" else ""} =BC= ${y.toString(ps)}"

  def advise(ps: ProblemState, p: Int) = 3

  // x + b = y <=> x = y - b
  private def reviseXPos(doms: Array[Domain]): Option[Interval] = Some(doms(1).span - b)

  private def reviseYPos(doms: Array[Domain]): Option[Interval] = Some(doms(0).span + b)

  // -x + b = y <=> x = -y + b
  private def reviseXNeg(doms: Array[Domain]): Option[Interval] = Some(-doms(1).span + b)

  private def reviseYNeg(doms: Array[Domain]): Option[Interval] = Some(-doms(0).span + b)
}
concrete-cp/concrete
src/main/scala/concrete/constraint/linear/Eq.scala
Scala
lgpl-2.1
7,263
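A dependency-free sketch of the bounds rule EqBC applies for x + b = y (reviseXPos/reviseYPos above): each variable's interval is intersected with the other's, shifted by -b or +b. The Span type here is a stand-in for concrete's Interval, not part of the library.

final case class Span(lb: Int, ub: Int) {
  def shift(d: Int): Span = Span(lb + d, ub + d)
  def intersect(o: Span): Span = Span(lb max o.lb, ub min o.ub)
}

object EqBCPropagationSketch extends App {
  val b = 3
  var x = Span(0, 10)
  var y = Span(5, 20)
  // One fixpoint pass of x + 3 = y:
  x = x.intersect(y.shift(-b)) // x becomes [2, 10]
  y = y.intersect(x.shift(b))  // y becomes [5, 13]
  println(s"x=$x y=$y")
}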
/*
 * Copyright 2008 WorldWide Conferencing, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 */
package org.wso2.as
package model

import org.scala_libs.jpa.LocalEMF
import net.liftweb.jpa.RequestVarEM

object Model extends LocalEMF("jpaweb") with RequestVarEM
wso2as-developer/scala-samples
lift-jpa/web/src/main/scala/org/wso2/as/model/Model.scala
Scala
apache-2.0
775
package pl.edu.agh.iosr.iosr2015.data.streaming.twitter.client

import java.util.UUID

//
//import com.websudos.phantom.sample.ExampleModel
//import com.websudos.phantom.dsl._
//
//case class TDIDF(
//  id: Int,
//  name: String,
//  props: Map[String, String],
//  timestamp: Int,
//  test: Option[Int]
//)
//
//sealed class ExampleRecord extends CassandraTable[ExampleRecord, ExampleModel] {
//
//  object id extends UUIDColumn(this) with PartitionKey[UUID]
//  object timestamp extends DateTimeColumn(this) with ClusteringOrder[DateTime] with Ascending
//  object name extends StringColumn(this)
//  object props extends MapColumn[ExampleRecord, ExampleModel, String, String](this)
//  object test extends OptionalIntColumn(this)
//
//  def fromRow(row: Row): ExampleModel = {
//    ExampleModel(id(row), name(row), props(row), timestamp(row), test(row));
//  }
//}
IOSR-Streaming-data/IOSR2015-twitter-streaming-data-client
src/main/scala/pl/edu/agh/iosr/iosr2015/data/streaming/twitter/client/models.scala
Scala
apache-2.0
1,013
/*
 * Copyright (C) 2014-2015 by Nokia.
 * See the LICENCE.txt file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package wookie

import com.twitter.algebird.Monoid

object Conjunction extends Monoid[Boolean] with Serializable {
  override def plus(f1: Boolean, f2: Boolean): Boolean = f1 && f2
  override def zero: Boolean = true
}

object Disjunction extends Monoid[Boolean] with Serializable {
  override def plus(f1: Boolean, f2: Boolean): Boolean = f1 || f2
  override def zero: Boolean = false
}

/**
 * Boolean expression evaluation helper
 */
object Bools {

  def fold[A](f1: A => Boolean, fs: (A => Boolean)*)(boolM: Monoid[Boolean]): A => Boolean = s => {
    fs.foldLeft(f1(s))((t1, t2) => boolM.plus(t2(s), t1))
  }

  def or[A](f1: A => Boolean, fs: (A => Boolean)*): (A => Boolean) = fold(f1, fs: _*)(Disjunction)

  def and[A](f1: A => Boolean, fs: (A => Boolean)*): A => Boolean = fold(f1, fs: _*)(Conjunction)
}
elyast/wookie
app-api/src/main/scala/wookie/Bools.scala
Scala
apache-2.0
1,543
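A usage sketch for the combinators above; the predicates are invented.

import wookie.Bools

object BoolsExample extends App {
  val startsWithA: String => Boolean = _.startsWith("a")
  val shorterThan5: String => Boolean = _.length < 5
  val either = Bools.or(startsWithA, shorterThan5)
  val both = Bools.and(startsWithA, shorterThan5)
  assert(either("alphabet")) // starts with 'a'
  assert(!both("alphabet"))  // eight characters, fails the length test
  assert(both("ant"))        // satisfies both predicates
}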
import com.typesafe.sbt.packager.docker.{DockerPlugin, Cmd, DockerKeys}
import sbt._

object CommonDockerSettingsPlugin extends AutoPlugin with DockerKeys {

  override def trigger = allRequirements

  override def requires = DockerPlugin

  def appLogLevel = sys.props.getOrElse("ECOMMERCE_LOG_LEVEL", default = "INFO")

  override lazy val projectSettings = Seq(
    dockerBaseImage := "newicom/docker-alpine-java:latest",
    dockerCommands ++= Seq(
      Cmd("ENV", s"ES_HOST=127.0.0.1 ES_PASSWORD=changeit ECOMMERCE_LOG_LEVEL=$appLogLevel")
    )
  )
}
pawelkaczor/ddd-leaven-akka-v2
project/CommonDockerSettingsPlugin.scala
Scala
mit
562
package com.twitter.finagle.http2.transport.server

import com.twitter.finagle.Stack.Params
import com.twitter.finagle.netty4.http.handler.UriValidatorHandler
import com.twitter.finagle.netty4.http.{Http2CodecName, HttpCodecName}
import com.twitter.finagle.param.Stats
import com.twitter.finagle.stats.InMemoryStatsReceiver
import io.netty.channel._
import io.netty.channel.embedded.EmbeddedChannel
import io.netty.handler.ssl.{ApplicationProtocolNames, SslHandler, SslHandshakeCompletionEvent}
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfter
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.funsuite.AnyFunSuite

class ServerNpnOrAlpnHandlerTest extends AnyFunSuite with BeforeAndAfter with MockitoSugar {

  val http2 = ApplicationProtocolNames.HTTP_2
  val http11 = ApplicationProtocolNames.HTTP_1_1

  var pipeline: ChannelPipeline = null
  var sslHandler: SslHandler = null

  val stats = new InMemoryStatsReceiver
  var params = Params.empty + Stats(stats)

  before {
    stats.clear()

    val channel = new EmbeddedChannel()
    pipeline = channel.pipeline()

    val init = new ChannelInitializer[Channel] {
      def initChannel(ch: Channel): Unit = {}
    }

    sslHandler = mock[SslHandler]
    doCallRealMethod().when(sslHandler).userEventTriggered(any[ChannelHandlerContext], anyObject())
    pipeline.addLast(sslHandler)

    val handler = new ServerNpnOrAlpnHandler(init, params)
    pipeline.addLast(handler)

    val dummyHttp11Codec = new ChannelHandlerAdapter() {}
    pipeline.addLast(HttpCodecName, dummyHttp11Codec)
    pipeline.addLast(UriValidatorHandler.HandlerName, UriValidatorHandler)
  }

  test("Replaces http codec with http/2 codec when h2 negotiated & records stat") {
    when(sslHandler.applicationProtocol()).thenReturn(http2)
    pipeline.fireUserEventTriggered(SslHandshakeCompletionEvent.SUCCESS)

    assert(pipeline.names().contains(Http2CodecName))
    assert(!pipeline.names().contains(HttpCodecName))
    assert(!pipeline.names().contains(UriValidatorHandler.HandlerName))
    assert(stats.counters(Seq("upgrade", "success")) == 1)
  }

  test("Leaves http codec in place when http/1.1 is negotiated & doesn't record stat") {
    when(sslHandler.applicationProtocol()).thenReturn(http11)
    pipeline.fireUserEventTriggered(SslHandshakeCompletionEvent.SUCCESS)

    assert(pipeline.names().contains(HttpCodecName))
    assert(pipeline.names().contains(UriValidatorHandler.HandlerName))
    assert(!pipeline.names().contains(Http2CodecName))
    assert(stats.counters(Seq("upgrade", "success")) == 0)
  }
}
twitter/finagle
finagle-http2/src/test/scala/com/twitter/finagle/http2/transport/server/ServerNpnOrAlpnHandlerTest.scala
Scala
apache-2.0
2,615
package com.seanshubin.todo.persistence.domain import java.io.{ByteArrayInputStream, ByteArrayOutputStream} import java.nio.charset.StandardCharsets import javax.servlet.{ServletInputStream, ServletOutputStream} import org.eclipse.jetty.server.{HttpChannel, HttpInput, Request} import org.scalatest.FunSuite import scala.collection.mutable.ArrayBuffer /* test-driven-012 Get a contract between us and jetty, so we isolate our business logic from jetty specific details */ class HandlerAdapterTest extends FunSuite { val charset = StandardCharsets.UTF_8 test("forward request and response") { //given val responseStatusCode = 12345 val responseBody = "response body" val responseValue = ResponseValue(responseStatusCode, responseBody) val delegate = new RequestValueHandlerStub(responseValue) val handlerAdapter = new HandlerAdapter(delegate, charset) val target = "the target" val baseRequest = new StubRequest val request = new StubHttpServletRequest( method = "the method", pathInfo = "/path info", inputStreamContent = "request content" ) val response = new StubHttpServletResponse //when handlerAdapter.handle(target, baseRequest, request, response) //then assert(delegate.requests.size === 1) assert(delegate.requests.head.method === "the method") assert(delegate.requests.head.path === "/path info") assert(delegate.requests.head.body === "request content") assert(response.status === responseStatusCode) assert(response.outputStream.content === "response body") assert(baseRequest.isHandled === true) } class RequestValueHandlerStub(response: ResponseValue) extends RequestValueHandler { val requests = new ArrayBuffer[RequestValue] override def handle(request: RequestValue): ResponseValue = { requests.append(request) response } } val channel: HttpChannel = null val input: HttpInput = null class StubRequest extends Request(channel, input) { } class StubHttpServletRequest(val method: String, val pathInfo: String, val inputStreamContent: String) extends HttpServletRequestNotImplemented { val inputStream = new StubServletInputStream(inputStreamContent) override def getMethod: String = method override def getPathInfo: String = pathInfo override def getInputStream: ServletInputStream = inputStream } class StubHttpServletResponse extends HttpServletResponseNotImplemented { var status = -1 val outputStream = new StubServletOutputStream override def setStatus(sc: Int): Unit = status = sc override def getOutputStream: ServletOutputStream = outputStream } class StubServletInputStream(content: String) extends ServletInputStreamNotImplemented { val delegate = new ByteArrayInputStream(content.getBytes(charset)) override def read(): Int = delegate.read() } class StubServletOutputStream extends ServletOutputStreamNotImplemented { val delegate = new ByteArrayOutputStream() def content: String = new String(delegate.toByteArray, charset) override def write(b: Int): Unit = delegate.write(b) } }
SeanShubin/todo-persistence
domain/src/test/scala/com/seanshubin/todo/persistence/domain/HandlerAdapterTest.scala
Scala
unlicense
3,209
/*
 * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
 */
package play.api.libs

import org.apache.commons.codec.digest.DigestUtils
import org.apache.commons.codec.binary.Hex

/**
 * Utilities for Codecs operations.
 */
object Codecs {

  /**
   * Computes the SHA-1 digest for a byte array.
   *
   * @param bytes the data to hash
   * @return the SHA-1 digest, encoded as a hex string
   */
  def sha1(bytes: Array[Byte]): String = DigestUtils.sha1Hex(bytes)

  /**
   * Computes the MD5 digest for a byte array.
   *
   * @param bytes the data to hash
   * @return the MD5 digest, encoded as a hex string
   */
  def md5(bytes: Array[Byte]): String = DigestUtils.md5Hex(bytes)

  /**
   * Computes the SHA-1 digest for a `String`.
   *
   * @param text the text to hash
   * @return the SHA-1 digest, encoded as a hex string
   */
  def sha1(text: String): String = DigestUtils.sha1Hex(text)

  /**
   * Converts a byte array into an array of characters denoting its hexadecimal representation.
   */
  def toHex(array: Array[Byte]): Array[Char] = Hex.encodeHex(array)

  /**
   * Converts a byte array into a `String` denoting its hexadecimal representation.
   */
  def toHexString(array: Array[Byte]): String = Hex.encodeHexString(array)

  /**
   * Transforms a hexadecimal `String` into a byte array.
   */
  def hexStringToByte(hexString: String): Array[Byte] = Hex.decodeHex(hexString.toCharArray)
}
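// Hedged usage sketch (not part of the original file); the digest values in
// the comments are the well-known SHA-1/MD5 hashes of "hello".
object CodecsExample extends App {

  println(Codecs.sha1("hello"))
  // aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d
  println(Codecs.md5("hello".getBytes("UTF-8")))
  // 5d41402abc4b2a76b9719d911017c592
  println(Codecs.toHexString(Array[Byte](0x0a, 0x1b, 0x2c)))
  // 0a1b2c
}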
jyotikamboj/container
pf-framework/src/play/src/main/scala/play/api/libs/Codecs.scala
Scala
mit
1,425
package almhirt.streaming import almhirt.common._ import almhirt.tracking.TrackingTicket sealed trait DeliveryStatus { def isSuccess: Boolean } object DeliveryStatus { implicit class DeliveryStatusOps(self: DeliveryStatus) { def onFailure(f: Problem β‡’ Unit) { self match { case d: DeliveryJobFailed β‡’ f(d.problem) case _ β‡’ () } } } } sealed trait DeliveryJobDone extends DeliveryStatus case object UntrackedDeliveryJobDone extends DeliveryJobDone { override val isSuccess = true } final case class TrackedDeliveryJobDone(ticket: TrackingTicket) extends DeliveryJobDone { override val isSuccess = true } object DeliveryJobDone { def apply(): DeliveryJobDone = UntrackedDeliveryJobDone def apply(ticket: Option[TrackingTicket]): DeliveryJobDone = ticket match { case Some(t) β‡’ TrackedDeliveryJobDone(t) case None β‡’ UntrackedDeliveryJobDone } def unapply(d: DeliveryStatus): Option[Option[TrackingTicket]] = d match { case UntrackedDeliveryJobDone β‡’ Some(None) case TrackedDeliveryJobDone(ticket) β‡’ Some(Some(ticket)) case _ β‡’ None } } sealed trait DeliveryJobFailed extends DeliveryStatus { def problem: Problem } final case class UntrackedDeliveryJobFailed(problem: Problem) extends DeliveryJobFailed { override val isSuccess = false } final case class TrackedDeliveryJobFailed(problem: Problem, ticket: TrackingTicket) extends DeliveryJobFailed { override val isSuccess = false } object DeliveryJobFailed { def apply(problem: Problem): DeliveryJobFailed = UntrackedDeliveryJobFailed(problem) def apply(problem: Problem, ticket: Option[TrackingTicket]): DeliveryJobFailed = ticket match { case Some(t) β‡’ TrackedDeliveryJobFailed(problem, t) case None β‡’ UntrackedDeliveryJobFailed(problem) } def unapply(d: DeliveryStatus): Option[(Problem, Option[TrackingTicket])] = d match { case UntrackedDeliveryJobFailed(problem) β‡’ Some((problem, None)) case TrackedDeliveryJobFailed(problem, ticket) β‡’ Some((problem, Some(ticket))) case _ β‡’ None } }
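// Hedged sketch (not part of the original file): consuming the ADT above via
// its extractors. It assumes it sits next to the definitions above, so Problem
// and TrackingTicket are already in scope.
object DeliveryStatusExample {

  def describe(status: DeliveryStatus): String = status match {
    case DeliveryJobDone(Some(ticket))      => s"delivered, tracked as $ticket"
    case DeliveryJobDone(None)              => "delivered, untracked"
    case DeliveryJobFailed(problem, ticket) => s"failed (ticket = $ticket): $problem"
  }

  // The DeliveryStatusOps implicit class in the companion object also allows
  // reacting to failures only:
  def logFailure(status: DeliveryStatus): Unit =
    status.onFailure(problem => println(s"delivery failed: $problem"))
}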
chridou/almhirt
almhirt-core/src/main/scala/almhirt/streaming/DeliveryStatus.scala
Scala
apache-2.0
2,120
package ml.wolfe.term import ml.wolfe.{Vect, WolfeSpec} /** * @author riedel */ class TermSpecs extends WolfeSpec { import ml.wolfe.term.TermImplicits._ import ml.wolfe.util.Math._ "A matrix variable term" should { "evaluate to a matrix" in { val x = Matrices(2, 3).Var val tensor2 = matrix(Seq(1, 2, 3), Seq(4, 5, 6)) val result = x.eval(x := tensor2) result should equal(tensor2) } "provide its constant gradient" in { val x = Matrices(2, 3).Var val tensor2 = matrix(Seq(1, 2, 3), Seq(4, 5, 6)) val result = x.diff(x)(x := tensor2) result.toArray should equal(new MatrixDom(2: Int, 3: Int).one.toArray) } } "A Tuple2Var term" should { "evaluate to a tuple2" in { val dom = Doubles x Doubles val x = dom.Var val result = x.eval(x :=(1.0, 2.0)) result should be(1.0, 2.0) } "provide its first argument" ignore { val dom = Doubles x Doubles val x = dom.Var val arg1 = x._1 arg1.eval(x :=(2.0, 1.0)) should be(2.0) } } "A dot product term" should { "evaluate to the value of a dot product" in { val x = Vectors(2).Var val dot = x dot x val result = dot.eval(x := vector(2.0, 3.0)) result should be(13.0) } "provide its gradient for identical variables" in { val x = Vectors(2).Var val dot = x dot x val result = dot.diff(x)(x := vector(2.0, 3.0)) result should equal(vector(4.0, 6.0)) } "provide its gradient for different variables " in { val x = Vectors(2).Var val y = Vectors(2).Var val dot = x dot y dot.diff(x)(x := vector(2.0, 3.0), y := vector(1.0, 2.0)) should equal(vector(1.0, 2.0)) dot.diff(y)(x := vector(2.0, 3.0), y := vector(1.0, 2.0)) should equal(vector(2.0, 3.0)) } } "A matrix-vector product term" should { "evaluate to the value of a matrix-vector product" in { val x = Vectors(2).Var val A = Matrices(3, 2).Var val op = A * x val tensor2 = matrix(Seq(1, 0), Seq(1, 1), Seq(2, 1)) val result = op.eval(A := tensor2, x := vector(2, 4)) result should equal(vector(2.0, 6.0, 8.0)) } "provide its gradient for different variables given an upstream error vector" ignore { val AVar = Matrices(3, 2).Var val xVar = Vectors(2).Var val term = AVar * xVar val A = matrix(Seq(1, 0.5), Seq(1, 1), Seq(2, 1)) val x = vector(2, 4) val e = vector(1.0, 0.5, 1.5) // val dA = term.gradient(Seq(A, x), e, Seq(AVar))(0) // val dx = term.gradient(Seq(A, x), e, Seq(xVar))(1) // // //todo: test the gradient // println(dx) // println(dA) } } "A product" should { "evaluate to the product of its arguments" in { val x = Doubles.Var val y = Doubles.Var val term = x * y * x * 0.5 term.eval(x := 2.0, y := 3.0) should be(6.0) } "calculate its gradient" in { val x = Doubles.Var val y = Doubles.Var val term = x * y * x * 0.5 term.diff(x)(x := 2.0, y := 3.0) should be(6.0) term.diff(y)(x := 2.0, y := 3.0) should be(2.0) } } "A div" should { "evaluate to the division of its arguments" in { val x = Doubles.Var val y = Doubles.Var val term = (x / y) * 2.0 term.eval(x := 2.0, y := 0.5) should be(8.0) } "calculate its gradient" in { val x = Doubles.Var val y = Doubles.Var val term = (x / y) * 2.0 term.diff(x)(x := 2.0, y := 0.5) should be(4.0) term.diff(y)(x := 2.0, y := 0.5) should be(-16.0) } } "An iverson bracket" should { "evaluate to 0 if a predicate is false, and 1 otherwise" in { val x = Bools.Var val term = I(x) term.eval(x := false) should be(0.0) term.eval(x := true) should be(1.0) } } "Composing log, sigmoid and dot products" should { "provide a logistic loss matrix factorization objective" in { val x = Vectors(2).Var val y = Vectors(2).Var val term = log(sigm(x dot y)) term.eval(x := vector(1.0, 2.0), y := vector(2.0, 3.0)) should 
equal(math.log(sigmoid(8.0))) } "provide the gradient of a logistic loss matrix factorization objective" in { val x = Vectors(2).Var val y = Vectors(2).Var val term = log(sigm(x dot y)) val result = term.diff(x)(x := vector(1.0, 2.0), y := vector(2.0, 3.0)) val prob = sigmoid(8.0) result should equal(vector(2.0, 3.0) * (1.0 - prob)) } } "A term with discrete variables" should { "provide its argmax" in { val dom = Bools x Bools val x = dom.Var val result = argmax(dom) { x => I(x._1 && x._2) } result.eval() should be(true, true) } "provide a partial argmax" in { val y = Bools.Var val term = argmax(Bools)(x => I(x === y)) term.eval(y := true) should be(true) term.eval(y := false) should be(false) } } "A sequence term" should { "evaluate to a sequence" in { val dom = Seqs(Doubles, 3) val x = Doubles.Var val y = Doubles.Var val term = dom.Term(x, y, x) term.eval(x := 1.0, y := 2.0) should be(Seq(1.0, 2.0, 1.0)) } "provide its gradient" in { val dom = Seqs(Doubles, 3) val x = Doubles.Var val y = Doubles.Var val term = dom.Term(x, y, x) term.diff(x)(x := 10.0, y := 3.0) should be(2.0) term.diff(y)(x := 10.0, y := 3.0) should be(1.0) } } "A structured loss" should { "evaluate to its value" ignore { implicit val Scores = Seqs(Doubles, 3) implicit val Output = Seqs(Bools, 3) def model(scores: Scores.Term)(y: Output.Term) = sum(0 until y.length) { i => I(y(i)) * scores(i) } def loss(gold: Output.Term)(scores: Scores.Term) = max(Output) { model(scores) } - model(scores)(gold) val term = loss(Output.Const(IndexedSeq(true, false, true)))(Scores.Const(IndexedSeq(1.0, 1.0, -1.0))) term.eval() should be(2.0) } "calculate its gradient for tied weights" ignore { implicit val Scores = Seqs(Doubles, 3) implicit val Output = Seqs(Bools, 3) def model(scores: Scores.Term)(y: Output.Term) = sum(0 until y.length) { i => I(y(i)) * scores(i) } def loss(gold: Output.Term)(scores: Scores.Term) = max(Output) { model(scores) } - model(scores)(gold) val weight = Doubles.Var val weights = Scores.Term(weight, weight, weight) val term = loss(Output.Const(IndexedSeq(false, false, true)))(weights) term.diff(weight)(weight := 1.0) should be(2.0) term.diff(weight)(weight := -1.0) should be(-1.0) } } "A quadratic objective" should { "provide its gradient" in { val x = Doubles.Var val obj = (x * 4.0) - (x * x) obj.diff(x)(x := 0.0) should be(4.0) obj.diff(x)(x := 12.0) should be(-20.0) } } "Exhaustive max-marginalizing" should { "provide the exact max marginals" in { val x = Bools.Var val y = Bools.Var val term = I(x === y) val result = term.maxMarginals(x, y)(Map(false -> 1.0, true -> 2.0))() result should be(Map(false -> 2.0, true -> 3.0)) } } "A binding" should { "bind variables to the value of a different term" in { val x = Doubles.Var val t = (x + 1.0) | x << 2.0 t.eval() should be(3.0) } "bind variables to the value of a different term in a nested way" in { val x = Doubles.Var val y = Doubles.Var val t = x | x << (y + 1.0) | y << 2.0 t.eval() should be(3.0) } } "A choice term" should { "evaluate to the right branch depending on the condition" in { val x = Ints.Var val t = choice(x)(1.0.toConst, 2.0.toConst) t.eval(x := 0) should be(1.0) t.eval(x := 1) should be(2.0) } } "An ifthenelse term" should { "evaluate to the right branch depending on the condition" in { val x = Bools.Var val t = ifThenElse(x)(1.0.toConst)(2.0) t.eval(x := true) should be(1.0) t.eval(x := false) should be(2.0) } } "A stochastic term over an empty sequence" should { "not throw a null pointer exception" in { import TermImplicits.Seqs implicit val rand = 
ml.wolfe.util.Math.random @domain case class Theta(params: IndexedSeq[Vect]) implicit val Thetas = Theta.Values(Seqs(Vectors(2), 4)) @domain case class User(items: IndexedSeq[Int]) implicit val Items = Ints(0 until 4) implicit val Users = User.Values(Seqs(Items, 0, 2)) val users = Seq(User(IndexedSeq()), User(IndexedSeq(1))).toConst def loss(t: Thetas.Term): DoubleTerm = { val user = mem(users.sampleSequential) sum(user.items) { ix => t.params(ix) dot t.params(ix) } } val init = Settings(Thetas.createRandomSetting(random.nextGaussian())) argmax(Thetas)(t => loss(t).argmaxBy(Argmaxer.adaGrad(AdaGradParameters(100, 0.01, initParams = init)))).eval() } // "also not throw a null pointer exception" ignore { // // import TermImplicits.Seqs // @domain case class Param(x: Vect, y: Vect) // implicit val Params = Param.Values(Vectors(1), Vectors(1)) // // val length = Ints(0 until 5).Var // val t = Params.Var // val obj = sum(length)(t.x dot t.y, t.x dot t.y) // // val evaluator = obj.evaluator() // evaluator.input(0).disc(0) = 1 // Params.copyValue(Param(vector(1),vector(1)),evaluator.input(1)) // evaluator.eval.eval()(Execution(0)) // evaluator.input(1).clearChangeRecord() // evaluator.input(0).disc(0) = 2 // evaluator.eval.eval()(Execution(0)) // // // implicit val rand = ml.wolfe.util.Math.random // // // // @domain case class Theta(params: IndexedSeq[Vect]) // // implicit val Thetas = Theta.Values(Seqs(Vectors(2), 4)) // // // // @domain case class User(items: IndexedSeq[Int]) // // implicit val Items = Ints(0 until 4) // // implicit val Users = User.Values(Seqs(Items, 0, 2)) // // val users = Seq(User(IndexedSeq()), User(IndexedSeq(1))).toConst // // // // def loss(t: Thetas.Term): DoubleTerm = { // // val user = mem(users.sampleSequential) // // sum(user.items) { ix => t.params(ix) dot t.params(ix) } // // } // // // } } }
wolfe-pack/wolfe
wolfe-core/src/test/scala/ml/wolfe/term/TermSpecs.scala
Scala
apache-2.0
10,531
package weaponmark

import com.diogonunes.jcdp.color.ColoredPrinter
import com.diogonunes.jcdp.color.api.Ansi.{Attribute, BColor, FColor}

/** Prints multiple command line examples with explanations. */
case class Example(cp: ColoredPrinter) {
  private def print() = {
    val cmdColor = FColor.YELLOW
    val descColor = FColor.CYAN
    cp.setForegroundColor(FColor.YELLOW)
    cp.println("Benchmark weapon performance in Mage: The Ascension (WW4600 - Revised Edition)")
    cp.println("Works for martial arts, brawling, melee, and firearm weapons.")
    cp.clear()
    cp.println("\nweaponmark [OPTIONS] weaponName hitDice damageDice [hitDifficulty] [damageDifficulty]\n")
    cp.println(" Examples:\n", Attribute.CLEAR, cmdColor, BColor.BLACK)
    cp.println(""" 1) weaponmark -l "Long Sword" 8 7""", Attribute.CLEAR, cmdColor, BColor.BLACK)
    cp.println("""      Benchmark a "Long Sword" (quotes needed for space) with 8 hit dice,
      7 damage dice, and default 6 hit difficulty and 6 damage difficulty.
      Wielded as a specialty (-l) = bonus roll to hit on 10""", Attribute.DARK, descColor, BColor.BLACK)
    cp.println(""" 2) weaponmark -lv "Heavy Kick" 8 6 7""", Attribute.CLEAR, cmdColor, BColor.BLACK)
    cp.println("""      Benchmark a "Heavy Kick" (quotes needed for space) with 8 hit dice,
      6 damage dice, 7 hit difficulty, and the default 6 damage difficulty.
      Wielded as a specialty (-l) and running verbose (-v) to show miss-related
      stats.""", Attribute.CLEAR, descColor, BColor.BLACK)
    cp.println(""" 3) weaponmark -la2 -m2 -x1 -y2 "Pistols (dual-wield)" 8 5""", Attribute.CLEAR, cmdColor, BColor.BLACK)
    cp.println("""      Dual wield pistols, specialty use (-l = bonus roll on 10). Two actions
      a turn (-a2) including a 2-use multi-action (-m2) for a total of 3
      attempts per turn. Multi-action hit dice mod penalties are 1 and 2
      (-x1 and -y2) instead of default values (2,3). The Pistols have 8 hit
      dice and do 5 dice of damage.""", Attribute.CLEAR, descColor, BColor.BLACK)
    cp.println(""" 4) weaponmark -i""", Attribute.CLEAR, cmdColor, BColor.BLACK)
    cp.println("""      Interactive mode. You will be queried for all needed input. Using
      --interactive or no parameters at all will also start the interactive
      mode.
    """, Attribute.CLEAR, descColor, BColor.BLACK)
    cp.clear()
  }
  print()
}
locke8/weaponmark
src/main/scala/weaponmark/Example.scala
Scala
mit
2,384
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package spark.scheduler

import spark.scheduler.cluster.Pool
import spark.scheduler.cluster.SchedulingMode.SchedulingMode

/**
 * Low-level task scheduler interface, implemented by both ClusterScheduler and LocalScheduler.
 * These schedulers get sets of tasks submitted to them from the DAGScheduler for each stage,
 * and are responsible for sending the tasks to the cluster, running them, retrying if there
 * are failures, and mitigating stragglers. They return events to the DAGScheduler through
 * the TaskSchedulerListener interface.
 */
private[spark] trait TaskScheduler {

  def rootPool: Pool

  def schedulingMode: SchedulingMode

  def start(): Unit

  // Invoked after the system has successfully initialized (typically in the SparkContext).
  // YARN uses this to bootstrap allocation of resources based on preferred locations,
  // wait for slave registrations, etc.
  def postStartHook() { }

  // Disconnect from the cluster.
  def stop(): Unit

  // Submit a sequence of tasks to run.
  def submitTasks(taskSet: TaskSet): Unit

  // Set a listener for upcalls. This is guaranteed to be set before submitTasks is called.
  def setListener(listener: TaskSchedulerListener): Unit

  // Get the default level of parallelism to use in the cluster, as a hint for sizing jobs.
  def defaultParallelism(): Int
}
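// Hedged sketch (not from the Spark codebase): the smallest shape an
// implementation of the trait above can take. The Pool instance and
// SchedulingMode value are assumed to be supplied by the caller, and
// submitTasks is a no-op stand-in for real dispatching.
private[spark] class NoOpTaskScheduler(
    override val rootPool: Pool,
    override val schedulingMode: SchedulingMode) extends TaskScheduler {

  private var listener: TaskSchedulerListener = _

  override def start(): Unit = ()

  override def stop(): Unit = ()

  // A real scheduler would hand these tasks to executors and report results
  // back through the listener.
  override def submitTasks(taskSet: TaskSet): Unit = ()

  override def setListener(l: TaskSchedulerListener): Unit = { listener = l }

  override def defaultParallelism(): Int = 1
}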
bavardage/spark
core/src/main/scala/spark/scheduler/TaskScheduler.scala
Scala
apache-2.0
2,113
package br.gov.lexml.parser.pl.linker import akka.actor._ import scala.language.postfixOps import br.gov.lexml.parser.pl.block._ import br.gov.lexml.parser.pl.rotulo._ import scala.xml._ import scala.concurrent.duration._ import akka.actor.SupervisorStrategy._ import grizzled.slf4j.Logger import scala.concurrent.Await //import akka.config.Supervision._ import akka.pattern.ask import akka.routing.SmallestMailboxPool object Linker { val logger = Logger(this.getClass) val system = ActorSystem("linker") val strategy = OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) { case _ : java.io.IOException => Restart case _ : Exception => Escalate } private val linkerRouter = system.actorOf(Props[LinkerActor].withRouter(SmallestMailboxPool(8, supervisorStrategy = strategy))) def findLinks(urnContexto : String, ns : Seq[Node]) : (List[String],List[Node]) = { logger.info(s"findLinks: urnContexto = $urnContexto, ns=$ns") import akka.util.Timeout implicit val timeout = Timeout(30 seconds) val msg = (urnContexto,ns) import system.dispatcher val f = (linkerRouter ? msg).mapTo[(List[Node],Set[String])] map { case (nl,links) β‡’ (links.toList,nl) } logger.info(s"findLinks: waiting for result....") val res = Await.result(f,timeout.duration) logger.info(s"findLinks: result = $res") res } private def processaAlteracao(a: Alteracao, links: List[URN]): Alteracao = { val mr = MatchResult.fromAlteracao(a,links) val a1 = a copy (matches = Some(mr)) mr.first.map(_.updateAlteracao(a1)).getOrElse(a1) } private def getLinks(d: Dispositivo): List[URN] = d.rotulo match { case _: RotuloArtigo β‡’ for { (dd: Dispositivo) ← d.conteudo.toList.collect { case (d: Dispositivo) β‡’ d }; l ← getLinks(dd) } yield l case _ β‡’ d.links.flatMap(URN.fromString(_)) } def paraCadaAlteracao(bl: List[Block]) = { def f(d: Dispositivo): Block β‡’ Block = (b: Block) β‡’ b match { case a: Alteracao β‡’ { val links = getLinks(d) processaAlteracao(a, links) } case dd: Dispositivo β‡’ dd.replaceChildren(dd.children.map(f(dd))) case x β‡’ x } def g(b: Block) = b match { case dd: Dispositivo β‡’ dd.replaceChildren(dd.children.map(f(dd))) case x β‡’ x } bl.map(g) } }
lexml/lexml-parser-projeto-lei
src/main/scala/br/gov/lexml/parser/pl/linker/Linker.scala
Scala
gpl-2.0
2,374
package pl.touk.nussknacker.test

import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.time.{Millis, Seconds, Span}

/**
 * The default ScalaFutures patience is timeout = 150ms and interval = 15ms. That works in the ideal
 * scenario, where asynchronous tasks are very short and the global ExecutionContext runs without delays.
 * But... in the real world task durations vary widely, and on slow environments (like Travis) that causes
 * occasional timeouts. So we need to be more patient.
 */
trait PatientScalaFutures extends ScalaFutures with Eventually {

  final override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = scaled(Span(5, Seconds)), interval = scaled(Span(50, Millis)))

}
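// Hedged usage sketch (not part of the original file): mixing the trait into a
// suite so that futureValue / whenReady pick up the 5s/50ms patience. The
// AnyFlatSpec base class (ScalaTest 3.1+) is an assumption for illustration.
import org.scalatest.flatspec.AnyFlatSpec
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

class SlowServiceSpec extends AnyFlatSpec with PatientScalaFutures {

  "a slow future" should "still complete within the scaled 5 second timeout" in {
    val result = Future { Thread.sleep(500); 42 }
    assert(result.futureValue == 42) // default 150ms patience would time out here
  }
}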
TouK/nussknacker
utils/test-utils/src/main/scala/pl/touk/nussknacker/test/PatientScalaFutures.scala
Scala
apache-2.0
748
package IFDS class TestHelper { val foo = Method("foo") val bar = Method("bar") val a = Node("a") val b = Node("b") val b1 = Node("b1") val b2 = Node("b2") val c = Node("c") val c0 = Node("c0") val d = Node("d") val e = Node("e") val f = Node("f") val g = Node("g") val h = Node("h") val i = Node("i") val unused = Node("unused") val x = Node("x") val y = Node("y") val z = Node("z") def flow(source: String, targets: String*) = FlowFunc(1, source, targets: _*) def flow(times: Int, source: String, targets: String*) = FlowFunc(times, source, targets: _*) def mergeFlowAfterReturnSite: SimpleIFDSProblem = { val calAX = CallEdge(a, bar, Set(flow("0", "x"))) val ctrAB = CallToReturnEdge(a, b, Set(flow("0", "b"))) val norXY = NormalEdge(x, y, Set(flow("x", "y"))) val retYB = ReturnEdge(Some(a), y, Some(b), bar, Set(flow("y", "c"))) val norBC = NormalEdge(b, c, Set(flow("b", "1"), flow("c", "1"))) val normalEdges: Set[NormalEdge] = Set(norXY, norBC) val callEdges: Set[CallEdge] = Set(calAX) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrAB) val returnEdges: Set[ReturnEdge] = Set(retYB) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a), bar -> Set(x) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, x -> bar, y -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), false ) } def notAllFlowsUsed: SimpleIFDSProblem = { // This demonstrates the flow val norAB = NormalEdge(a, b, Set(flow("0", "x"))) val norBC = NormalEdge(b, c, Set(flow("x", "x"))) val norDE = NormalEdge(d, e, Set(flow("y", "y", "z"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norAB, norBC, norDE) val methodToStartPoints: Map[Method, Set[Node]] = Map( bar -> Set(a), foo -> Set(d) ) val stmtToMethod: Map[Node, Method] = Map( a -> bar, b -> bar, c -> bar, d -> foo, e -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, Set(), Set(), Set(), stmtToMethod, Set(a), false ) } def unexpectedFact: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.happyPath from Heros. // but changed fact val norAB = NormalEdge(a, b, Set(flow("0", "x"))) val norBC = NormalEdge(b, c, Set(flow("unexpected fact", "x"))) val calCFoo = CallEdge(c, foo, Set(flow("x", "y"))) val ctrCF = CallToReturnEdge(c, f, Set(flow("x", "x"))) val norDE = NormalEdge(d, e, Set(flow("y", "y", "z"))) val retEF = ReturnEdge(Some(c), e, Some(f), foo, Set(flow("z", "u"), flow("y"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC, norDE) val callEdges: Set[CallEdge] = Set(calCFoo) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrCF) val returnEdges: Set[ReturnEdge] = Set(retEF) val methodToStartPoints: Map[Method, Set[Node]] = Map( bar -> Set(a), foo -> Set(d) ) val stmtToMethod: Map[Node, Method] = Map( a -> bar, b -> bar, c -> bar, f -> bar, d -> foo, e -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), false ) } def artificalReturnEdgeForNoCallersCase: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.artificalReturnEdgeForNoCallersCase from Heros. 
val norAB = NormalEdge(a, b, Set(flow("0", "1"))) val retBNull = ReturnEdge(None, b, None, foo, Set(flow("1", "1"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB) val returnEdges: Set[ReturnEdge] = Set(retBNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, Set(), Set(), stmtToMethod, Set(a), true ) } def unbalancedReturn: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.unbalancedReturns from Heros. val norAB = NormalEdge(a, b, Set(flow("0", "1"))) val retBY = ReturnEdge(Some(x), b, Some(y), foo, Set(flow("1", "1"))) val norYZ = NormalEdge(y, z, Set(flow("1", "2"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norYZ) val returnEdges: Set[ReturnEdge] = Set(retBY) val methodToStartPoints: Map[Method, Set[Node]] = Map( bar -> Set(unused), foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, y -> bar, z -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, Set(), Set(), stmtToMethod, Set(a), true ) } def branch: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.branch from Heros. val norAB2 = NormalEdge(a, b2, Set(flow("0", "x"))) val norAB1 = NormalEdge(a, b1, Set(flow("0", "x"))) val norB1C = NormalEdge(b1, c, Set(flow("x", "x", "y"))) val norB2C = NormalEdge(b2, c, Set(flow("x", "x"))) val norCD = NormalEdge(c, d, Set(flow("x", "z"), flow("y", "w"))) val norDE = NormalEdge(d, e, Set(flow("z"), flow("w"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB2, norAB1, norB1C, norB2C, norCD, norDE) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b1 -> foo, b2 -> foo, c -> foo, d -> foo, e -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, Set(), Set(), Set(), stmtToMethod, Set(a), false ) } def reuseSummaryForRecursiveCall: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.reuseSummaryForRecursiveCall from Heros. val calABar = CallEdge(a, bar, Set(flow("0", "1"))) val ctrAB = CallToReturnEdge(a, b, Set(flow("0"))) val norBC = NormalEdge(b, c, Set(flow("2", "3"))) val norGI = NormalEdge(g, i, Set(flow("1", "1"))) val norGH = NormalEdge(g, h, Set(flow("1", "1"))) val calIBar = CallEdge(i, bar, Set(flow("1", "1"))) val ctrIH = CallToReturnEdge(i, h, Set(flow("1"))) val retHB = ReturnEdge(Some(a), h, Some(b), bar, Set(flow("1"), flow("2", "2"))) val retHH = ReturnEdge(Some(i), h, Some(h), bar, Set(flow("1", "2"), flow("2", "2"))) //////////// val normalEdges: Set[NormalEdge] = Set(norBC, norGI, norGH) val callEdges: Set[CallEdge] = Set(calABar, calIBar) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrAB, ctrIH) val returnEdges: Set[ReturnEdge] = Set(retHB, retHH) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a), bar -> Set(g) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, g -> bar, h -> bar, i -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), false ) } def reuseSummary: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.reuseSummary from Heros. 
val calABar = CallEdge(a, bar, Set(flow("0", "x"))) val ctrAB = CallToReturnEdge(a, b, Set(flow("0", "y"))) val calBBar = CallEdge(b, bar, Set(flow("y", "x"))) val ctrBC = CallToReturnEdge(b, c, Set(flow("y"))) val norCC0 = NormalEdge(c, c0, Set(flow("w", "0"))) val norDE = NormalEdge(d, e, Set(flow("x", "z"))) val retEB = ReturnEdge(Some(a), e, Some(b), bar, Set(flow("z", "y"))) val retEC = ReturnEdge(Some(b), e, Some(c), bar, Set(flow("z", "w"))) //////////// val normalEdges: Set[NormalEdge] = Set(norCC0, norDE) val callEdges: Set[CallEdge] = Set(calABar, calBBar) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrAB, ctrBC) val returnEdges: Set[ReturnEdge] = Set(retEB, retEC) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a), bar -> Set(d) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, c0 -> foo, d -> bar, e -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), false ) } def happyPath: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.happyPath from Heros. val norAB = NormalEdge(a, b, Set(flow("0", "x"))) val norBC = NormalEdge(b, c, Set(flow("x", "x"))) val calCFoo = CallEdge(c, foo, Set(flow("x", "y"))) val ctrCF = CallToReturnEdge(c, f, Set(flow("x", "x"))) val norDE = NormalEdge(d, e, Set(flow("y", "y", "z"))) val retEF = ReturnEdge(Some(c), e, Some(f), foo, Set(flow("z", "u"), flow("y"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC, norDE) val callEdges: Set[CallEdge] = Set(calCFoo) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrCF) val returnEdges: Set[ReturnEdge] = Set(retEF) val methodToStartPoints: Map[Method, Set[Node]] = Map( bar -> Set(a), foo -> Set(d) ) val stmtToMethod: Map[Node, Method] = Map( a -> bar, b -> bar, c -> bar, f -> bar, d -> foo, e -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), false ) } def possiblyUninitializedVariables: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from IFDSSolverTest.happyPath from Heros. val norAB = NormalEdge(a, b, Set(flow("0", "x"), flow("0", "y"))) val norBC = NormalEdge(b, c, Set(flow("y", "y"), flow("x"))) val calCFoo = CallEdge(c, foo, Set(flow("y", "y"))) val ctrCF = CallToReturnEdge(c, f, Set(flow("x", "x"))) val norDE = NormalEdge(d, e, Set(flow("x", "x"), flow("y", "y"))) val retEF = ReturnEdge(Some(c), e, Some(f), foo, Set(flow("y", "y"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC, norDE) val callEdges: Set[CallEdge] = Set(calCFoo) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrCF) val returnEdges: Set[ReturnEdge] = Set(retEF) val methodToStartPoints: Map[Method, Set[Node]] = Map( bar -> Set(a), foo -> Set(d) ) val stmtToMethod: Map[Node, Method] = Map( a -> bar, b -> bar, c -> bar, f -> bar, d -> foo, e -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), false ) } }
packlnd/IFDS-RA
src/test/scala/IFDS/TestHelper.scala
Scala
mit
11,479
/**
 * Copyright (C) 2012-2013 Kaj Magnus Lindberg (born 1979)
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package debiki

import com.debiki.core.Prelude._
import play.{api => p}
import play.api.Play
import play.api.Play.current

// COULD remove, see comments just below:
@deprecated("Remove once the BoneCP bug has been fixed", since = "a while ago")
object Debiki {

  // Play's BoneCP v.0.7.1 is broken and unusable, and SBT refuses to use version 0.8 RC1,
  // which supposedly has fixed the problem. Use the Postgres connection pool instead.
  // The problem is:
  //   java.sql.SQLException: Timed out waiting for a free available connection.
  //   at com.jolbox.bonecp.BoneCP.getConnection(BoneCP.java:503)
  //   ~[com.jolbox.bonecp-0.7.1.RELEASE.jar:0.7.1.RELEASE
  // See: http://stackoverflow.com/questions/15480506/
  //        heroku-play-bonecp-connection-issues/15500442#15500442
  def getPostgreSqlDataSource(): javax.sql.DataSource = {

    if (Play.isTest)
      return p.db.DB.getDataSource("test")

    def configPrefix = ""

    def configStr(path: String) =
      Play.configuration.getString(configPrefix + path) getOrElse
        runErr("DwE93KI2", "Config value missing: "+ path)

    // I've hardcoded the test database credentials here, so it cannot possibly
    // happen that you accidentally connect to the prod database. (You would never
    // name the prod schema "debiki_test_0_0_2_empty", with "auto-dropped" as the
    // password, right?)
    def user =
      if (Play.isTest) "debiki_test_0_0_2_empty"
      else configStr("debiki.pgsql.user")

    def password =
      if (Play.isTest) "auto-dropped"
      else configStr("debiki.pgsql.password")

    val ds = new org.postgresql.ds.PGPoolingDataSource()
    ds.setDataSourceName("DebikiPostgreConnCache"+ math.random)
    val server = configStr("debiki.pgsql.server")
    val port = configStr("debiki.pgsql.port").toInt
    val dbName = configStr("debiki.pgsql.database")
    ds.setServerName(server)
    ds.setPortNumber(port)
    ds.setDatabaseName(dbName)
    ds.setUser(user)
    ds.setPassword(password)
    ds.setInitialConnections(2)
    ds.setMaxConnections(10)
    ds.setPrepareThreshold(3)
    play.Logger.info(s"""Connected to database: $server:$port/$dbName as $user""")
    ds
  }

}

// vim: fdm=marker et ts=2 sw=2 tw=80 fo=tcqn list ft=scala
debiki/debiki-server-old
app/debiki/Debiki.scala
Scala
agpl-3.0
3,024
package org.broadinstitute.dsde.snoop import javax.sql.DataSource import akka.actor.{ActorSystem, Props} import akka.io.IO import com.mchange.v2.c3p0.ComboPooledDataSource import com.wordnik.swagger.model.ApiInfo import org.broadinstitute.dsde.snoop.dataaccess.SnoopSubmissionController import org.broadinstitute.dsde.snoop.ws._ import spray.can.Http import akka.pattern.ask import akka.util.Timeout import scala.concurrent.duration._ import com.typesafe.config.{Config, ConfigFactory} import java.io.File import scala.reflect.runtime.universe._ import scala.sys.process.Process object Boot extends App { private def setupSsl(conf: Config): Unit = { System.setProperty("javax.net.ssl.trustStore", conf.getString("ssl.truststore")) System.setProperty("javax.net.ssl.trustStorePassword", conf.getString("ssl.tsPasswd")) System.setProperty("javax.net.ssl.keyStore", conf.getString("ssl.keystore")) System.setProperty("javax.net.ssl.keyStorePassword", conf.getString("ssl.ksPasswd")) Process("env", None, "JAVA_TOOL_OPTIONS" -> "-Djavax.net.ssl.trustStore=/etc/truststore;-Djavax.net.ssl.trustStorePassword=truststore;-Djavax.net.ssl.keyStore=/etc/keystore;-Djavax.net.ssl.keyStorePassword=keystore") } def createDataSource(jdbcDriver: String, jdbcUrl: String, jdbcUser: String, jdbcPassword: String, c3p0MaxStatementsOption: Option[Int]): DataSource = { val comboPooledDataSource = new ComboPooledDataSource comboPooledDataSource.setDriverClass(jdbcDriver) comboPooledDataSource.setJdbcUrl(jdbcUrl) comboPooledDataSource.setUser(jdbcUser) comboPooledDataSource.setPassword(jdbcPassword) c3p0MaxStatementsOption.map(comboPooledDataSource.setMaxStatements) comboPooledDataSource } private def startup(): Unit = { val conf = ConfigFactory.parseFile(new File("/etc/snoop.conf")) setupSsl(conf) // we need an ActorSystem to host our application in implicit val system = ActorSystem("snoop") val zamboniApi = StandardZamboniApi(conf.getString("zamboni.server")) val snoopSubmissionController = SnoopSubmissionController(createDataSource( conf.getString("database.jdbc.driver"), conf.getString("database.jdbc.url"), conf.getString("database.jdbc.user"), conf.getString("database.jdbc.password"), if (conf.hasPath("database.c3p0.maxStatements")) Option(conf.getInt("database.c3p0.maxStatements")) else None ), conf.getString("database.slick.driver")) def executionServiceConstructor(): WorkflowExecutionService = ZamboniWorkflowExecutionService(zamboniApi, conf.getString("workflow.sandbox.bucket"), conf.getString("workflow.sandbox.keyPrefix"), snoopSubmissionController , StandardAnalysisCallbackHandler(conf.getString("vault.server"), conf.getString("vault.queryPath")), GcsOutputRepository(conf.getString("workflow.sandbox.email"), new File(conf.getString("workflow.sandbox.p12")))) val swaggerConfig = conf.getConfig("swagger") val swaggerService = new SwaggerService( swaggerConfig.getString("apiVersion"), swaggerConfig.getString("baseUrl"), swaggerConfig.getString("apiDocs"), swaggerConfig.getString("swaggerVersion"), Seq(typeOf[RootSnoopApiService], typeOf[WorkflowExecutionApiService]), Option(new ApiInfo( swaggerConfig.getString("info"), swaggerConfig.getString("description"), swaggerConfig.getString("termsOfServiceUrl"), swaggerConfig.getString("contact"), swaggerConfig.getString("license"), swaggerConfig.getString("licenseUrl")) )) val service = system.actorOf(SnoopApiServiceActor.props(executionServiceConstructor, swaggerService), "snoop-service") implicit val timeout = Timeout(5.seconds) // start a new HTTP server on port 8080 with our 
service actor as the handler IO(Http) ? Http.Bind(service, interface = "0.0.0.0", port = 8080) } startup() }
broadinstitute/snoop
src/main/scala/org/broadinstitute/dsde/snoop/Boot.scala
Scala
bsd-3-clause
3,966
// See LICENSE for license details.

package librefpga.test

import Chisel.iotesters.{ChiselFlatSpec, Driver, PeekPokeTester}
import librefpga.LUTn
import chisel3._

class LUTnUnitTester(c: LUTn) extends PeekPokeTester(c) {
  private val lutn = c

  poke(lutn.io.cfg.sen, 0)
  step(1)

  /* test default outputs (0) */
  expect(lutn.io.logic.out, 0)
  expect(lutn.io.cfg.sout, 0)

  for (i <- 0 to 15) {
    poke(lutn.io.logic.in, i)
    step(1)
    expect(lutn.io.logic.out, 0)
  }

  val rnd_gen = new scala.util.Random()

  /* generate random test patterns to exercise the look-up table */
  for (i <- 0 to 100) {
    val rnum = UInt(rnd_gen.nextInt(65536))

    /* shift the 16-bit configuration in through the scan chain */
    for (j <- 0 to 15) {
      poke(lutn.io.cfg.sen, true)
      poke(lutn.io.cfg.sin, rnum(j))
      step(1)
    }

    poke(lutn.io.cfg.sen, false)
    step(1)

    /* every input address must now read back the programmed bit */
    for (j <- 0 to 15) {
      poke(lutn.io.logic.in, j)
      step(1)
      expect(lutn.io.logic.out, rnum(j))
    }
  }
}

class LUTnTester extends ChiselFlatSpec {
  private val backendNames = Array[String]("firrtl", "verilator")
  for (backendName <- backendNames) {
    "LUTn" should s"reproduce a randomly shifted-in configuration on its output (with $backendName)" in {
      Driver(() => new LUTn, backendName) {
        c => new LUTnUnitTester(c)
      } should be (true)
    }
  }
}
xfguo/librefpga
src/test/scala/fpga/test/LUT4UnitTest.scala
Scala
apache-2.0
1,342
package modules import com.google.inject.{ AbstractModule, Provides } import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository import com.mohiva.play.silhouette.api.services._ import com.mohiva.play.silhouette.api.util._ import com.mohiva.play.silhouette.api.{ Environment, EventBus } import com.mohiva.play.silhouette.impl.authenticators._ import com.mohiva.play.silhouette.impl.daos.DelegableAuthInfoDAO import com.mohiva.play.silhouette.impl.providers._ import com.mohiva.play.silhouette.impl.providers.oauth1._ import com.mohiva.play.silhouette.impl.providers.oauth1.secrets.{ CookieSecretProvider, CookieSecretSettings } import com.mohiva.play.silhouette.impl.providers.oauth1.services.PlayOAuth1Service import com.mohiva.play.silhouette.impl.providers.oauth2._ import com.mohiva.play.silhouette.impl.providers.oauth2.state.{ CookieStateProvider, CookieStateSettings, DummyStateProvider } import com.mohiva.play.silhouette.impl.providers.openid.YahooProvider import com.mohiva.play.silhouette.impl.providers.openid.services.PlayOpenIDService import com.mohiva.play.silhouette.impl.repositories.DelegableAuthInfoRepository import com.mohiva.play.silhouette.impl.services._ import com.mohiva.play.silhouette.impl.util._ import models.User import models.daos._ import models.services._ import net.ceedubs.ficus.Ficus._ import net.ceedubs.ficus.readers.ArbitraryTypeReader._ import net.codingwell.scalaguice.ScalaModule import play.api.Configuration import play.api.libs.concurrent.Execution.Implicits._ import play.api.libs.openid.OpenIdClient import play.api.libs.ws.WSClient import play.api.Play import play.api.Play.current import scala.concurrent.duration._ import java.util.concurrent.TimeUnit case class RestEnvironment(env: Environment[User, JWTAuthenticator]) /** * The Guice module which wires all Silhouette dependencies. */ class SilhouetteModule extends AbstractModule with ScalaModule { /** * Configures the module. */ def configure() { bind[UserService].to[UserServiceImpl] bind[UserDAO].to[UserDAOImpl] bind[DelegableAuthInfoDAO[PasswordInfo]].to[PasswordInfoDAO] bind[DelegableAuthInfoDAO[OAuth1Info]].to[OAuth1InfoDAO] bind[DelegableAuthInfoDAO[OAuth2Info]].to[OAuth2InfoDAO] bind[DelegableAuthInfoDAO[OpenIDInfo]].to[OpenIDInfoDAO] bind[CacheLayer].to[PlayCacheLayer] bind[IDGenerator].toInstance(new SecureRandomIDGenerator()) bind[PasswordHasher].toInstance(new BCryptPasswordHasher) bind[FingerprintGenerator].toInstance(new DefaultFingerprintGenerator(false)) bind[EventBus].toInstance(EventBus()) bind[Clock].toInstance(Clock()) bind[MantraService].to[MantraServiceImpl] bind[MantraDAO].to[MantraDAOImpl] bind[AccumulationService].to[AccumulationServiceImpl] bind[AccumulationDAO].to[AccumulationDAOImpl] bind[GatheringService].to[GatheringServiceImpl] bind[GoalDAO].to[GoalDAOImpl] bind[GatheringDAO].to[GatheringDAOImpl] } /** * Provides the HTTP layer implementation. * * @param client Play's WS client. * @return The HTTP layer implementation. */ @Provides def provideHTTPLayer(client: WSClient): HTTPLayer = new PlayHTTPLayer(client) /** * Provides the Silhouette environment. * * @param userService The user service implementation. * @param authenticatorService The authentication service implementation. * @param eventBus The event bus instance. * @return The Silhouette environment. 
 */
  @Provides
  def provideEnvironment(
    userService: UserService,
    authenticatorService: AuthenticatorService[CookieAuthenticator],
    eventBus: EventBus): Environment[User, CookieAuthenticator] = {

    Environment[User, CookieAuthenticator](
      userService,
      authenticatorService,
      Seq(),
      eventBus
    )
  }

  /**
   * Provides the Silhouette environment.
   *
   * @param userService The user service implementation.
   * @param authenticatorService The authentication service implementation.
   * @param eventBus The event bus instance.
   * @return The Silhouette environment.
   */
  @Provides
  def provideEnvironment(
    userService: UserService,
    authenticatorService: AuthenticatorService[JWTAuthenticator],
    eventBus: EventBus): RestEnvironment = {

    RestEnvironment(
      Environment[User, JWTAuthenticator](
        userService,
        authenticatorService,
        Seq(),
        eventBus
      ))
  }

  /**
   * Provides the social provider registry.
   *
   * @param facebookProvider The Facebook provider implementation.
   * @param googleProvider The Google provider implementation.
   * @param vkProvider The VK provider implementation.
   * @param clefProvider The Clef provider implementation.
   * @param twitterProvider The Twitter provider implementation.
   * @param xingProvider The Xing provider implementation.
   * @param yahooProvider The Yahoo provider implementation.
   * @return The social provider registry.
   */
  @Provides
  def provideSocialProviderRegistry(
    facebookProvider: FacebookProvider,
    googleProvider: GoogleProvider,
    vkProvider: VKProvider,
    clefProvider: ClefProvider,
    twitterProvider: TwitterProvider,
    xingProvider: XingProvider,
    yahooProvider: YahooProvider): SocialProviderRegistry = {

    SocialProviderRegistry(Seq(
      googleProvider,
      facebookProvider,
      twitterProvider,
      vkProvider,
      xingProvider,
      yahooProvider,
      clefProvider
    ))
  }

  /**
   * Provides the cookie authenticator service.
   *
   * @param fingerprintGenerator The fingerprint generator implementation.
   * @param idGenerator The ID generator implementation.
   * @param configuration The Play configuration.
   * @param clock The clock instance.
   * @return The authenticator service.
   */
  @Provides
  def provideAuthenticatorService(
    fingerprintGenerator: FingerprintGenerator,
    idGenerator: IDGenerator,
    configuration: Configuration,
    clock: Clock): AuthenticatorService[CookieAuthenticator] = {

    val config = configuration.underlying.as[CookieAuthenticatorSettings]("silhouette.authenticator")
    new CookieAuthenticatorService(config, None, fingerprintGenerator, idGenerator, clock)
  }

  /**
   * Provides the JWT authenticator service used by the REST environment.
   *
   * @param idGenerator The ID generator implementation.
   * @return The authenticator service.
*/ @Provides def provideAuthenticatorService(idGenerator: IDGenerator): AuthenticatorService[JWTAuthenticator] = { val authenticatorService: AuthenticatorService[JWTAuthenticator] = { val idleTimeout = FiniteDuration(Play.configuration.getMilliseconds("silhouette.authenticator.authenticatorIdleTimeout").getOrElse(1800000L), TimeUnit.MILLISECONDS) val expiry = FiniteDuration(Play.configuration.getMilliseconds("silhouette.authenticator.authenticatorExpiry").getOrElse(43200000L), TimeUnit.MILLISECONDS) val settings = JWTAuthenticatorSettings( headerName = Play.configuration.getString("silhouette.authenticator.headerName").getOrElse { "X-Auth-Token" }, issuerClaim = Play.configuration.getString("silhouette.authenticator.issueClaim").getOrElse { "play-silhouette" }, encryptSubject = Play.configuration.getBoolean("silhouette.authenticator.encryptSubject").getOrElse { true }, authenticatorIdleTimeout = Some(idleTimeout), // This feature is disabled by default to prevent the generation of a new JWT on every request authenticatorExpiry = expiry, sharedSecret = Play.configuration.getString("play.crypto.secret").get) new JWTAuthenticatorService( settings = settings, dao = None, idGenerator = idGenerator, clock = Clock()) } authenticatorService } /** * Provides the auth info repository. * * @param passwordInfoDAO The implementation of the delegable password auth info DAO. * @param oauth1InfoDAO The implementation of the delegable OAuth1 auth info DAO. * @param oauth2InfoDAO The implementation of the delegable OAuth2 auth info DAO. * @param openIDInfoDAO The implementation of the delegable OpenID auth info DAO. * @return The auth info repository instance. */ @Provides def provideAuthInfoRepository( passwordInfoDAO: DelegableAuthInfoDAO[PasswordInfo], oauth1InfoDAO: DelegableAuthInfoDAO[OAuth1Info], oauth2InfoDAO: DelegableAuthInfoDAO[OAuth2Info], openIDInfoDAO: DelegableAuthInfoDAO[OpenIDInfo]): AuthInfoRepository = { new DelegableAuthInfoRepository(passwordInfoDAO, oauth1InfoDAO, oauth2InfoDAO, openIDInfoDAO) } /** * Provides the avatar service. * * @param httpLayer The HTTP layer implementation. * @return The avatar service implementation. */ @Provides def provideAvatarService(httpLayer: HTTPLayer): AvatarService = new GravatarService(httpLayer) /** * Provides the OAuth1 token secret provider. * * @param configuration The Play configuration. * @param clock The clock instance. * @return The OAuth1 token secret provider implementation. */ @Provides def provideOAuth1TokenSecretProvider(configuration: Configuration, clock: Clock): OAuth1TokenSecretProvider = { val settings = configuration.underlying.as[CookieSecretSettings]("silhouette.oauth1TokenSecretProvider") new CookieSecretProvider(settings, clock) } /** * Provides the OAuth2 state provider. * * @param idGenerator The ID generator implementation. * @param configuration The Play configuration. * @param clock The clock instance. * @return The OAuth2 state provider implementation. */ @Provides def provideOAuth2StateProvider(idGenerator: IDGenerator, configuration: Configuration, clock: Clock): OAuth2StateProvider = { val settings = configuration.underlying.as[CookieStateSettings]("silhouette.oauth2StateProvider") new CookieStateProvider(settings, idGenerator, clock) } /** * Provides the credentials provider. * * @param authInfoRepository The auth info repository implementation. * @param passwordHasher The default password hasher implementation. * @return The credentials provider. 
*/ @Provides def provideCredentialsProvider( authInfoRepository: AuthInfoRepository, passwordHasher: PasswordHasher): CredentialsProvider = { new CredentialsProvider(authInfoRepository, passwordHasher, Seq(passwordHasher)) } /** * Provides the Facebook provider. * * @param httpLayer The HTTP layer implementation. * @param stateProvider The OAuth2 state provider implementation. * @param configuration The Play configuration. * @return The Facebook provider. */ @Provides def provideFacebookProvider( httpLayer: HTTPLayer, stateProvider: OAuth2StateProvider, configuration: Configuration): FacebookProvider = { new FacebookProvider(httpLayer, stateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.facebook")) } /** * Provides the Google provider. * * @param httpLayer The HTTP layer implementation. * @param stateProvider The OAuth2 state provider implementation. * @param configuration The Play configuration. * @return The Google provider. */ @Provides def provideGoogleProvider( httpLayer: HTTPLayer, stateProvider: OAuth2StateProvider, configuration: Configuration): GoogleProvider = { new GoogleProvider(httpLayer, stateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.google")) } /** * Provides the VK provider. * * @param httpLayer The HTTP layer implementation. * @param stateProvider The OAuth2 state provider implementation. * @param configuration The Play configuration. * @return The VK provider. */ @Provides def provideVKProvider( httpLayer: HTTPLayer, stateProvider: OAuth2StateProvider, configuration: Configuration): VKProvider = { new VKProvider(httpLayer, stateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.vk")) } /** * Provides the Clef provider. * * @param httpLayer The HTTP layer implementation. * @param configuration The Play configuration. * @return The Clef provider. */ @Provides def provideClefProvider(httpLayer: HTTPLayer, configuration: Configuration): ClefProvider = { new ClefProvider(httpLayer, new DummyStateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.clef")) } /** * Provides the Twitter provider. * * @param httpLayer The HTTP layer implementation. * @param tokenSecretProvider The token secret provider implementation. * @param configuration The Play configuration. * @return The Twitter provider. */ @Provides def provideTwitterProvider( httpLayer: HTTPLayer, tokenSecretProvider: OAuth1TokenSecretProvider, configuration: Configuration): TwitterProvider = { val settings = configuration.underlying.as[OAuth1Settings]("silhouette.twitter") new TwitterProvider(httpLayer, new PlayOAuth1Service(settings), tokenSecretProvider, settings) } /** * Provides the Xing provider. * * @param httpLayer The HTTP layer implementation. * @param tokenSecretProvider The token secret provider implementation. * @param configuration The Play configuration. * @return The Xing provider. */ @Provides def provideXingProvider( httpLayer: HTTPLayer, tokenSecretProvider: OAuth1TokenSecretProvider, configuration: Configuration): XingProvider = { val settings = configuration.underlying.as[OAuth1Settings]("silhouette.xing") new XingProvider(httpLayer, new PlayOAuth1Service(settings), tokenSecretProvider, settings) } /** * Provides the Yahoo provider. * * @param cacheLayer The cache layer implementation. * @param httpLayer The HTTP layer implementation. * @param client The OpenID client implementation. * @param configuration The Play configuration. * @return The Yahoo provider. 
*/ @Provides def provideYahooProvider( cacheLayer: CacheLayer, httpLayer: HTTPLayer, client: OpenIdClient, configuration: Configuration): YahooProvider = { val settings = configuration.underlying.as[OpenIDSettings]("silhouette.yahoo") new YahooProvider(httpLayer, new PlayOpenIDService(client, settings), settings) } }
leannenorthrop/play-mantra-accumulations
app/modules/SilhouetteModule.scala
Scala
apache-2.0
14,451
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel package scala.dsl import org.junit.Test import builder.RouteBuilder import junit.framework.Assert._ /** * Test case for rollback */ class RollbackTest extends ScalaTestSupport { var handled = false @Test def testSimple() { test { try { template.requestBody("direct:a", "The Joker") fail("Expected a RollbackExchangeException") } catch { // oh no, not the Joker again, let's send Batman case e: RuntimeCamelException if (e.getCause.isInstanceOf[RollbackExchangeException]) => template.requestBody("direct:a", "Batman") case unknown : Throwable => fail("We didn't expect " + unknown) } } } @Test def testBlock() { "mock:b" expect { _.count = 2 } "mock:ok" expect { _.count = 1 } test { try { template.requestBody("direct:b", "Lex Luthor") fail("Expected a RollbackExchangeException") } catch { // oh no, not Lex Luthor again, let's send Superman case e: RuntimeCamelException if (e.getCause.isInstanceOf[RollbackExchangeException]) => template.requestBody("direct:b", "Superman") case unknown : Throwable => fail("We didn't expect " + unknown) } } } val builder = new RouteBuilder { //START SNIPPET: simple "direct:a" to("mock:a") when(_.in != "Batman") rollback //END SNIPPET: simple //START SNIPPET: block "direct:b" ==> { to("mock:b") choice { when(_.in != "Superman") { rollback } otherwise to "mock:ok" } } //END SNIPPET: block } }
coderczp/camel
components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/RollbackTest.scala
Scala
apache-2.0
2,476
package com.originate.scalypher import com.originate.scalypher.action.ActionItem import com.originate.scalypher.action.ReturnAction import com.originate.scalypher.action.ReturnAll import com.originate.scalypher.action.ReturnDistinct import com.originate.scalypher.action.ReturnReference import com.originate.scalypher.path.AnyNode import com.originate.scalypher.path.AnyRelationship import com.originate.scalypher.path.Node import com.originate.scalypher.path.Path import com.originate.scalypher.path.Relationship import com.originate.scalypher.types._ import com.originate.scalypher.where.Reference trait MatchCreateQuery extends Query { def returnAction: Option[ReturnAction] def returns(reference: ActionItem, rest: ActionItem*): MatchCreateQuery = withReturnAction(ReturnReference(reference, rest: _*)) def returnDistinct(reference: ActionItem, rest: ActionItem*): MatchCreateQuery = withReturnAction(ReturnDistinct(reference, rest: _*)) def returnAll: MatchCreateQuery = withReturnAction(ReturnAll) def getReturnColumns: Set[String] = returnAction map matchActionToReturnColumns getOrElse Set.empty protected def withReturnAction(action: ReturnAction): MatchCreateQuery protected def forcedCreateReferenceables: Set[Identifiable] protected def modifiedReferenceableMap: IdentifiableMap = { val forcedMap = identifiableMap filterKeys (forcedCreateReferenceables contains _) createMap ++ forcedMap } protected def cleanedCreatePath: Path protected def createMap: IdentifiableMap protected def cleanPathAndExtractMap(path: Path, matchPaths: Seq[Path]): (Path, IdentifiableMap) = { val overlapReferenceables = matchPaths flatMap (_.identifiables) intersect path.identifiables.toSeq val relevantMap = identifiableMap filterKeys { key => overlapReferenceables contains key } val pathTransform = relevantMap.foldLeft(PathTranform(path)) { case (acc @ PathTranform(path, map), (identifiable, identifier)) => identifiable match { case node: Node => val newNode = AnyNode() PathTranform(path.replaceNode(node, newNode), map - identifiable + (newNode -> identifier)) case relationship: Relationship => val newRelationship = AnyRelationship() PathTranform(path.replaceRelationship(relationship, newRelationship), map - identifiable + (newRelationship -> identifier)) case _ => acc } } (pathTransform.path, pathTransform.map) } private case class PathTranform(path: Path, map: IdentifiableMap = Map[Identifiable, String]()) }
Originate/scalypher
src/main/scala/query/MatchCreateQuery.scala
Scala
mit
2,603
package ca.uqam.euler.nicolas import Util.gcd object Problem033 { case class Fraction(n: Int, d: Int) { override def toString = n + "/" + d def *(x: Fraction) = new Fraction(n * x.n, d * x.d) def simplified = { val g = gcd(n, d) new Fraction(n / g, d / g) } def equivalent(that: Fraction) = this.simplified == that.simplified } def main(args: Array[String]) = Answer { def other(c: Char, s: String) = (if (s(0) == c) s(1) else s(0)).asDigit val xs = for { d ← 10 to 99 den = d.toString n ← 10 until d num = n.toString c ← num.distinct if c != '0' && den.contains(c) f1 = Fraction(n, d) f2 = Fraction(other(c, num), other(c, den)) if f1 equivalent f2 } yield f1 xs.reduce(_ * _).simplified.d } }
nicolaspayette/project-euler
src/main/scala/ca/uqam/euler/nicolas/Problem033.scala
Scala
mit
814
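For context, a standalone sketch of the "curious fraction" property Problem033 above searches for (self-contained; the `Fraction` and `gcd` here mirror the ones in the file): 49/98 equals 1/2, and naively cancelling the shared digit 9 yields 4/8, which is also 1/2.

object CuriousFractionSketch extends App {
  def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b)
  case class Fraction(n: Int, d: Int) {
    def simplified = { val g = gcd(n, d); Fraction(n / g, d / g) }
  }
  val original  = Fraction(49, 98).simplified // Fraction(1, 2)
  val cancelled = Fraction(4, 8).simplified   // also Fraction(1, 2)
  println(original == cancelled)              // true: 49/98 is "curious"
}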
package org.jetbrains.plugins.scala package codeInspection package collections import com.intellij.testFramework.EditorTestUtil.{SELECTION_END_TAG => END, SELECTION_START_TAG => START} /** * Nikolay.Tropin * 5/30/13 */ class FindIsDefinedTest extends OperationsOnCollectionInspectionTest { override val classOfInspection = classOf[FindEmptyCheckInspection] override val hint = ScalaInspectionBundle.message("find.isDefined.hint") def testFindIsDefined(): Unit = { val selected = s"""val valueIsGoodEnough: (Any) => Boolean = _ => true |Nil$START.find(valueIsGoodEnough).isDefined$END""".stripMargin checkTextHasError(selected) val text = """val valueIsGoodEnough: (Any) => Boolean = _ => true |Nil.find(valueIsGoodEnough).isDefined""".stripMargin val result = """val valueIsGoodEnough: (Any) => Boolean = _ => true |Nil.exists(valueIsGoodEnough)""".stripMargin testQuickFix(text, result, hint) } def testInfix(): Unit = { val selected = s"(Nil$START find (_ => true)) isDefined$END" checkTextHasError(selected) val text = "(Nil find (_ => true)) isDefined" val result = "Nil exists (_ => true)" testQuickFix(text, result, hint) } def testNotEqNoneInfix(): Unit = { val selected = s"(Nil$START find (_ => true)) != None$END" checkTextHasError(selected) val text = "(Nil find (_ => true)) != None" val result = "Nil exists (_ => true)" testQuickFix(text, result, hint) } def testNotEqNone(): Unit = { val selected = s"Nil$START.find(_ => true) != None$END" checkTextHasError(selected) val text = "Nil.find(_ => true) != None" val result = "Nil.exists(_ => true)" testQuickFix(text, result, hint) } } class FindIsEmptyTest extends OperationsOnCollectionInspectionTest { override val classOfInspection = classOf[FindEmptyCheckInspection] override val hint = ScalaInspectionBundle.message("find.isEmpty.hint") def testEqNone(): Unit = { val selected = s"Nil$START.find(_ => true) == None$END" checkTextHasError(selected) val text = "Nil.find(_ => true) == None" val result = "!Nil.exists(_ => true)" testQuickFix(text, result, hint) } def testIsEmpty(): Unit = { val selected = s"Nil$START.find(_ => true).isEmpty$END" checkTextHasError(selected) val text = "Nil.find(_ => true).isEmpty" val result = "!Nil.exists(_ => true)" testQuickFix(text, result, hint) } }
JetBrains/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/codeInspection/collections/FindIsDefinedTest.scala
Scala
apache-2.0
2,474
package adt.bson.mongo.async.client import java.util.concurrent.TimeUnit import adt.bson.mongo.MongoBlock import adt.bson.mongo.async.{AsyncBatchCursor, MongoSingleCallback} import adt.bson.{BsonJavaScript, BsonObject} import com.mongodb.CursorType import com.mongodb.client.model.MapReduceAction import play.api.libs.functional.syntax._ import scala.collection.convert.Wrappers.MutableBufferWrapper import scala.collection.mutable import scala.concurrent.{Future, Promise} /** * Substitutes [[com.mongodb.async.client.MongoIterable]] */ class MongoIterable[+T](underlying: JavaMongoIterable[T]) { def foreach[U](block: T => U): Unit = { underlying.forEach(new MongoBlock(block), MongoSingleCallback.throwAnyException) } def completeForEach[U](f: T => U): Future[Unit] = { val promise = Promise[Unit]() underlying.forEach(new MongoBlock(f), MongoSingleCallback.complete(promise).contramap(_ => ())) promise.future } def sequence(): Future[Seq[T]] = { val target = MutableBufferWrapper(mutable.Buffer.empty[T]) val promise = Promise[Seq[T]]() underlying.into(target, new MongoSingleCallback[MutableBufferWrapper[T]]({ result => promise.tryComplete(result.map(_.underlying)) })) promise.future } def batchSize(batchSize: Int): MongoIterable[T] = new MongoIterable(underlying.batchSize(batchSize)) def batchCursor(): Future[AsyncBatchCursor[T]] = { val promise = Promise[AsyncBatchCursor[T]]() underlying.batchCursor(MongoSingleCallback.complete(promise).contramap(new AsyncBatchCursor(_))) promise.future } def first(): Future[Option[T]] = { val promise = Promise[Option[T]]() underlying.first(MongoSingleCallback.complete(promise).contramap(Option(_))) promise.future } def map[U](f: T => U): MongoIterable[U] = new MongoIterable(underlying.map(f)) } /** * Substitutes [[com.mongodb.async.client.AggregateIterable]] */ class AggregateIterable[+T](underlying: JavaAggregateIterable[T]) extends MongoIterable(underlying) { def useCursor(useCursor: Boolean): AggregateIterable[T] = new AggregateIterable(underlying.useCursor(useCursor)) def maxTime(maxTime: Long, timeUnit: TimeUnit): AggregateIterable[T] = new AggregateIterable(underlying.maxTime(maxTime, timeUnit)) def toCollection: Future[Unit] = { val promise = Promise[Unit]() underlying.toCollection(MongoSingleCallback.complete(promise).contramap(_ => ())) promise.future } override def batchSize(batchSize: Int): AggregateIterable[T] = new AggregateIterable(underlying.batchSize(batchSize)) def allowDiskUse(allowDiskUse: Boolean): AggregateIterable[T] = new AggregateIterable(underlying.allowDiskUse(allowDiskUse)) } /** * Substitutes [[com.mongodb.async.client.DistinctIterable]] */ class DistinctIterable[+T](underlying: JavaDistinctIterable[T]) extends MongoIterable(underlying) { def maxTime(maxTime: Long, timeUnit: TimeUnit): DistinctIterable[T] = new DistinctIterable(underlying.maxTime(maxTime, timeUnit)) def filter(filter: BsonObject): DistinctIterable[T] = new DistinctIterable(underlying.filter(filter)) } /** * Substitutes [[com.mongodb.async.client.FindIterable]] */ class FindIterable[+T](underlying: JavaFindIterable[T]) extends MongoIterable(underlying) { def oplogReplay(oplogReplay: Boolean): FindIterable[T] = new FindIterable(underlying.oplogReplay(oplogReplay)) def sort(sort: BsonObject): FindIterable[T] = new FindIterable(underlying.sort(sort)) def skip(skip: Int): FindIterable[T] = new FindIterable(underlying.skip(skip)) def projection(projection: BsonObject): FindIterable[T] = new FindIterable(underlying.projection(projection)) def partial(partial: Boolean): 
FindIterable[T] = new FindIterable(underlying.partial(partial)) def cursorType(cursorType: CursorType): FindIterable[T] = new FindIterable(underlying.cursorType(cursorType)) def modifiers(modifiers: BsonObject): FindIterable[T] = new FindIterable(underlying.modifiers(modifiers)) def noCursorTimeout(noCursorTimeout: Boolean): FindIterable[T] = new FindIterable(underlying.noCursorTimeout(noCursorTimeout)) def filter(filter: BsonObject): FindIterable[T] = new FindIterable(underlying.filter(filter)) def maxTime(maxTime: Long, timeUnit: TimeUnit): FindIterable[T] = new FindIterable(underlying.maxTime(maxTime, timeUnit)) def limit(limit: Int): FindIterable[T] = new FindIterable(underlying.limit(limit)) } /** * Substitutes [[com.mongodb.async.client.ListCollectionsIterable]] */ class ListCollectionsIterable[+T](underlying: JavaListCollectionsIterable[T]) extends MongoIterable(underlying) { def maxTime(maxTime: Long, timeUnit: TimeUnit): ListCollectionsIterable[T] = new ListCollectionsIterable(underlying.maxTime(maxTime, timeUnit)) def filter(filter: BsonObject): ListCollectionsIterable[T] = new ListCollectionsIterable(underlying.filter(filter)) } /** * Substitutes [[com.mongodb.async.client.ListDatabasesIterable]] */ class ListDatabasesIterable[+T](underlying: JavaListDatabasesIterable[T]) extends MongoIterable(underlying) { def maxTime(maxTime: Long, timeUnit: TimeUnit): ListDatabasesIterable[T] = new ListDatabasesIterable(underlying.maxTime(maxTime, timeUnit)) } /** * Substitutes [[com.mongodb.async.client.ListIndexesIterable]] */ class ListIndexesIterable[+T](underlying: JavaListIndexesIterable[T]) extends MongoIterable(underlying) { def maxTime(maxTime: Long, timeUnit: TimeUnit): ListIndexesIterable[T] = new ListIndexesIterable(underlying.maxTime(maxTime, timeUnit)) } /** * Substitutes [[com.mongodb.async.client.MapReduceIterable]] */ class MapReduceIterable[+T](underlying: JavaMapReduceIterable[T]) extends MongoIterable(underlying) { def collectionName(collectionName: String): MapReduceIterable[T] = new MapReduceIterable(underlying.collectionName(collectionName)) def nonAtomic(nonAtomic: Boolean): MapReduceIterable[T] = new MapReduceIterable(underlying.nonAtomic(nonAtomic)) def databaseName(databaseName: String): MapReduceIterable[T] = new MapReduceIterable(underlying.databaseName(databaseName)) def jsMode(jsMode: Boolean): MapReduceIterable[T] = new MapReduceIterable(underlying.jsMode(jsMode)) def scope(scope: BsonObject): MapReduceIterable[T] = new MapReduceIterable(underlying.scope(scope)) def sort(sort: BsonObject): MapReduceIterable[T] = new MapReduceIterable(underlying.sort(sort)) def finalizeFunction(finalizeFunction: BsonJavaScript): MapReduceIterable[T] = new MapReduceIterable(underlying.finalizeFunction(finalizeFunction.value)) def verbose(verbose: Boolean): MapReduceIterable[T] = new MapReduceIterable(underlying.verbose(verbose)) def toCollection: Future[Unit] = { val promise = Promise[Unit]() underlying.toCollection(MongoSingleCallback.complete(promise).contramap(_ => ())) promise.future } def sharded(sharded: Boolean): MapReduceIterable[T] = new MapReduceIterable(underlying.sharded(sharded)) def filter(filter: BsonObject): MapReduceIterable[T] = new MapReduceIterable(underlying.filter(filter)) def maxTime(maxTime: Long, timeUnit: TimeUnit): MapReduceIterable[T] = new MapReduceIterable(underlying.maxTime(maxTime, timeUnit)) def action(action: MapReduceAction): MapReduceIterable[T] = new MapReduceIterable(underlying.action(action)) def limit(limit: Int): MapReduceIterable[T] = new 
MapReduceIterable(underlying.limit(limit)) }
jeffmay/bson-adt
bson-adt-mongo3-async/src/main/scala/adt/bson/mongo/async/client/MongoIterables.scala
Scala
apache-2.0
7,467
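A hedged usage sketch of the Future-based wrappers above (not from the repo; assumes it sits in the same package so `FindIterable` and `adt.bson.BsonObject` are in scope, and takes a pre-built `BsonObject` query rather than constructing one):

import scala.concurrent.Future

// The wrappers turn Mongo's callback-style async API into plain Futures.
def firstMatch(docs: FindIterable[BsonObject], query: BsonObject): Future[Option[BsonObject]] =
  docs.filter(query).limit(1).first()

def allMatches(docs: FindIterable[BsonObject], query: BsonObject): Future[Seq[BsonObject]] =
  docs.filter(query).sequence()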
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.computations import org.mockito.Mockito.when import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec} import org.scalatestplus.mockito.MockitoSugar import uk.gov.hmrc.ct.BoxValidationFixture import uk.gov.hmrc.ct.box.CtValidation import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever class CP87aSpec extends WordSpec with MockitoSugar with Matchers with BoxValidationFixture[ComputationsBoxRetriever] with BeforeAndAfterEach { val boxRetriever = mock[ComputationsBoxRetriever] override def setUpMocks = { when(boxRetriever.cp87Input()).thenReturn(CP87Input(Some(50))) } testBoxIsZeroOrPositive("CP87a", CP87a.apply) "when non empty" when { "fail validation when greater than CP87" in { CP87a(Some(100)).validate(boxRetriever) shouldBe Set(CtValidation(Some("CP87a"), "error.CP87a.exceeds.max",Some(List("50")))) } "pass validation when lesser than CP87" in { CP87a(Some(40)).validate(boxRetriever) shouldBe Set() } } }
hmrc/ct-calculations
src/test/scala/uk/gov/hmrc/ct/computations/CP87aSpec.scala
Scala
apache-2.0
1,618
object NegativeId extends Enumeration { val Negative = Value(-1, "minus") val Zero = Value(0, "zero") val Positive = Value(1, "plus") def fromInt(id: Int) = values find (_.id == id) match { case Some(v) => v case None => null } } object Test extends App { println(NegativeId.fromInt(-1)) println(NegativeId.fromInt(0)) println(NegativeId.fromInt(1)) }
som-snytt/dotty
tests/run/t3950.scala
Scala
apache-2.0
379
package se.lu.nateko.cp.job import scala.util.Try import java.util.UUID object ReportValidator { val WAIT_TIME = 5 private val emailRegex = """^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}$""".r def validate(report: Report): Try[CandidateInfo] = for{ _ <- validateCandidateInfo(report.candidateInfo); _ <- validateTime(report.assignmentId); _ <- validateAnswer(report) } yield report.candidateInfo private def validateTime(id: UUID): Try[Unit] = Try{ if(id.version != 1) throw new Exception("Wrong assignment id!") val currentTimestamp = AssignmentGenerator.makeId.timestamp val elapsedSeconds = (currentTimestamp - id.timestamp).toDouble / 1e7 if(elapsedSeconds <= 0) throw new Exception("Assignment from the future? Are you cheating?") if(elapsedSeconds >= WAIT_TIME) throw new Exception("Too late!") } private def validateAnswer(report: Report): Try[Unit] = Try{ val id = report.assignmentId val question = AssignmentGenerator.makeAssignment(id).question val answer = report.answer if(question.country != answer.country) throw new Exception("Wrong country!") val perCapita = question.annualCO2 * 1000 / question.population val deviation = Math.abs(perCapita - answer.annualCO2PerCapita) / perCapita if(deviation > 1e-4) throw new Exception("Wrong numeric value!") } private def validateCandidateInfo(info: CandidateInfo): Try[Unit] = Try{ if(info.firstName.isEmpty) throw new Exception("First name was empty!") if(info.lastName.isEmpty) throw new Exception("Last name was empty!") if(emailRegex.findFirstIn(info.email).isEmpty) throw new Exception("Wrong email address!") } }
ICOS-Carbon-Portal/meta
jobAd/src/main/scala/se/lu/nateko/cp/job/ReportValidator.scala
Scala
gpl-3.0
1,636
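The `/ 1e7` in validateTime above converts version-1 UUID timestamps (100-nanosecond ticks) into seconds; a standalone sketch of just that arithmetic, hedged as an illustration:

import java.util.UUID

// Only valid for time-based (version 1) UUIDs; timestamp() throws otherwise.
def elapsedSeconds(earlier: UUID, later: UUID): Double = {
  require(earlier.version == 1 && later.version == 1)
  (later.timestamp - earlier.timestamp).toDouble / 1e7 // 1e7 ticks per second
}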
/** * Magmanics Licensing. This web application allows for centralized control * of client application activation, with optional configuration parameters * to control licensable features, and storage of supplementary information * about the client machine. Client applications may interface with this * central server (for activation) using libraries licenced under an * alternative licence. * * Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.magmanics.licensing.datalayer.model import com.magmanics.licensing.model.Customer import org.testng.Assert._ import org.testng.annotations.Test /** * @author James Baxter <j.w.baxter@gmail.com> * @since 12-Jun-2010 */ class CustomerTest { //focus on extra methods @Test def enabledByDefault { assertTrue(new Customer(name = "Edwards").enabled) } @Test def customerNameRetrievable { assertEquals(new Customer(name = "Edwards").name, "Edwards") } @Test def customerNameCanBeChanged { val customer = new Customer(name = "Customer one") customer.name = "XXX" assertEquals(customer.name, "XXX") } }
manicmonkey/licensing
Licensing-Server/src/test/scala/com/magmanics/licensing/datalayer/model/CustomerTest.scala
Scala
gpl-3.0
1,766
package gitbucket.core.plugin import gitbucket.core.controller.Context import gitbucket.core.service.RepositoryService import gitbucket.core.view.Markdown import play.twirl.api.Html /** * A render engine to render content to HTML. */ trait Renderer { /** * Render the given request to HTML. */ def render(request: RenderRequest): Html } object MarkdownRenderer extends Renderer { override def render(request: RenderRequest): Html = { import request._ Html(Markdown.toHtml(fileContent, repository, enableWikiLink, enableRefsLink, enableAnchor)(context)) } } object DefaultRenderer extends Renderer { override def render(request: RenderRequest): Html = { import request._ Html( s"<tt>${ fileContent.split("(\\r\\n)|\\n").map(xml.Utility.escape(_)).mkString("<br/>") }</tt>" ) } } case class RenderRequest(filePath: List[String], fileContent: String, branch: String, repository: RepositoryService.RepositoryInfo, enableWikiLink: Boolean, enableRefsLink: Boolean, enableAnchor: Boolean, context: Context)
intermezzo-fr/gitbucket
src/main/scala/gitbucket/core/plugin/Renderer.scala
Scala
apache-2.0
1,088
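For illustration, a minimal custom `Renderer` in the same shape as the two built-ins above (the object name and `<pre>` styling are hypothetical, not part of GitBucket):

object PreformattedRenderer extends Renderer {
  override def render(request: RenderRequest): Html = {
    // Escape the raw content and wrap it in a <pre> block.
    Html(s"<pre>${xml.Utility.escape(request.fileContent)}</pre>")
  }
}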
/*********************************************************************** * Copyright (c) 2013-2022 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.index import org.locationtech.geomesa.index.metadata.GeoMesaMetadata class InMemoryMetadata[T] extends GeoMesaMetadata[T] { import scala.collection.mutable.{ Map => mMap } private val schemas = mMap.empty[String, mMap[String, T]] override def getFeatureTypes: Array[String] = synchronized(schemas.keys.toArray) override def insert(typeName: String, key: String, value: T): Unit = synchronized { schemas.getOrElseUpdate(typeName, mMap.empty[String, T]).put(key, value) } override def insert(typeName: String, kvPairs: Map[String, T]): Unit = synchronized { val m = schemas.getOrElseUpdate(typeName, mMap.empty[String, T]) kvPairs.foreach { case (k, v) => m.put(k, v) } } override def remove(typeName: String, key: String): Unit = synchronized { schemas.get(typeName).foreach(_.remove(key)) } override def remove(typeName: String, keys: Seq[String]): Unit = keys.foreach(remove(typeName, _)) override def read(typeName: String, key: String, cache: Boolean): Option[T] = synchronized { schemas.get(typeName).flatMap(_.get(key)) } override def scan(typeName: String, prefix: String, cache: Boolean): Seq[(String, T)] = synchronized { schemas.get(typeName) match { case None => Seq.empty case Some(m) => m.filterKeys(_.startsWith(prefix)).toSeq } } override def delete(typeName: String): Unit = synchronized { schemas.remove(typeName) } override def invalidateCache(typeName: String, key: String): Unit = {} override def backup(typeName: String): Unit = {} override def close(): Unit = {} /** * table cache should be cleared up when catalog is deleted in database */ override def resetCache(): Unit = {} }
locationtech/geomesa
geomesa-index-api/src/test/scala/org/locationtech/geomesa/index/InMemoryMetadata.scala
Scala
apache-2.0
2,207
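A short usage sketch of the in-memory store above (the type name, keys, and values are illustrative):

val metadata = new InMemoryMetadata[String]
metadata.insert("observations", "attributes", "name:String,dtg:Date")
metadata.insert("observations", Map("geom" -> "Point", "stats" -> "enabled"))
metadata.read("observations", "attributes", cache = false) // Some("name:String,dtg:Date")
metadata.scan("observations", "ge", cache = false)         // Seq("geom" -> "Point")
metadata.delete("observations")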
package scalashop import org.scalameter._ import common._ import java.util.concurrent._ object HorizontalBoxBlurRunner { val standardConfig = config( Key.exec.minWarmupRuns -> 5, Key.exec.maxWarmupRuns -> 10, Key.exec.benchRuns -> 10, Key.verbose -> true ) withWarmer(new Warmer.Default) def main(args: Array[String]): Unit = { val radius = 3 val width = 1920 val height = 1080 val src = new Img(width, height) val dst = new Img(width, height) val seqtime = standardConfig measure { HorizontalBoxBlur.blur(src, dst, 0, height, radius) } println(s"sequential blur time: $seqtime ms") val numTasks = 32 val partime = standardConfig measure { HorizontalBoxBlur.parBlur(src, dst, numTasks, radius) } println(s"fork/join blur time: $partime ms") println(s"speedup: ${seqtime / partime}") } } /** A simple, trivially parallelizable computation. */ object HorizontalBoxBlur { /** Blurs the rows of the source image `src` into the destination image `dst`, * starting with `from` and ending with `end` (non-inclusive). * * Within each row, `blur` traverses the pixels by going from left to right. */ def blur(src: Img, dst: Img, from: Int, end: Int, radius: Int): Unit = { // Each destination pixel is the box-blur kernel applied at the same source position for (y <- from until end) { for (x <- 0 until src.width) { val rgba = boxBlurKernel(src, x, y, radius) dst.update(x, y, rgba) } } } /** Blurs the rows of the source image in parallel using `numTasks` tasks. * * Parallelization is done by striping the source image `src` into * `numTasks` separate strips, where each strip is composed of some number of * rows. */ def parBlur(src: Img, dst: Img, numTasks: Int, radius: Int): Unit = { // Split the rows into at most `numTasks` strips (ceiling division so no row // is lost), blur each strip in its own task, then join all tasks val rowsPerTask = { if (src.height % numTasks == 0) src.height / numTasks else src.height / numTasks + 1 } var tasks = List(): List[ForkJoinTask[Unit]] for (row <- 0 until src.height by rowsPerTask) { tasks = task { val bottom = src.height min (row + rowsPerTask) blur(src, dst, row, bottom, radius) } :: tasks } tasks.foreach(_.join()) } }
huajianmao/learning
coursera/parprog1/week1/scalashop/src/main/scala/scalashop/HorizontalBoxBlur.scala
Scala
mit
2,312
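The striping in `parBlur` relies on ceiling division so the final strip absorbs any remainder rows; a standalone sketch of just that partitioning (independent of the image types):

// (height + numTasks - 1) / numTasks is the same ceiling division the
// if/else in parBlur computes.
def strips(height: Int, numTasks: Int): Seq[(Int, Int)] = {
  val rowsPerTask = (height + numTasks - 1) / numTasks
  (0 until height by rowsPerTask).map(from => (from, (from + rowsPerTask) min height))
}
// strips(10, 4) == Vector((0, 3), (3, 6), (6, 9), (9, 10))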
package eu.timepit.refined.cats import eu.timepit.refined.api.{Max, Min} import org.scalacheck.{Arbitrary, Prop, Properties} import org.scalacheck.Prop._ class NonNegShiftSpec extends Properties("NonNegShift") { final def createProperty[A: Arbitrary: Min: NonNegShift](implicit num: Numeric[A]): Prop = { import num.{abs, gteq, lt, plus, zero} forAll { (a: A) => gteq(a, zero) ==> (NonNegShift[A].shift(a) == a) } && forAll { (a: A) => lt(a, zero) ==> (NonNegShift[A].shift(a) == plus(a, abs(Min[A].min))) } } property("shift Byte") = createProperty[Byte] property("shift Short") = createProperty[Short] property("shift Int") = createProperty[Int] property("shift Long") = createProperty[Long] } class NegShiftSpec extends Properties("NegShift") { final def createProperty[A: Arbitrary: Max: NegShift](implicit num: Numeric[A]): Prop = { import num.{gteq, lt, minus, one, zero} forAll { (a: A) => lt(a, zero) ==> (NegShift[A].shift(a) == a) } && forAll { (a: A) => gteq(a, zero) ==> (NegShift[A].shift(a) == minus(minus(a, Max[A].max), one)) } } property("shift Byte") = createProperty[Byte] property("shift Short") = createProperty[Short] property("shift Int") = createProperty[Int] property("shift Long") = createProperty[Long] }
fthomas/refined
modules/cats/shared/src/test/scala/eu/timepit/refined/cats/ShiftSpec.scala
Scala
mit
1,327
package org.tomahna.scalaresume.resume import play.api.libs.json.JsSuccess import play.api.libs.json.Reads case class Resume( basics: Basics, work: List[Work], volunteer: List[Volunteer], education: List[Education], awards: List[Award], publications: List[Publication], skills: List[Skill], languages: List[Language], interests: List[Interest], references: List[Reference] ) object Resume { implicit val resumeReads: Reads[Resume] = Reads { json => JsSuccess( Resume( (json \ "basics").as[Basics], (json \ "work").as[List[Work]], (json \ "volunteer").as[List[Volunteer]], (json \ "education").as[List[Education]], (json \ "awards").as[List[Award]], (json \ "publications").as[List[Publication]], (json \ "skills").as[List[Skill]], (json \ "languages").as[List[Language]], (json \ "interests").as[List[Interest]], (json \ "references").as[List[Reference]] ) ) } }
Tomahna/scalaresume
resume/src/main/scala/org/tomahna/scalaresume/resume/Resume.scala
Scala
mit
1,021
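Given the implicit `resumeReads` above, decoding is a one-liner with Play JSON's standard entry points; a minimal sketch (the `source` string is assumed to hold a complete JSON resume, and `Reads` instances for the nested types are assumed to exist):

import play.api.libs.json.Json

def parseResume(source: String): Resume =
  Json.parse(source).as[Resume] // throws JsResultException on malformed input

def parseResumeSafely(source: String) =
  Json.parse(source).validate[Resume] // JsSuccess(resume) or JsError(errors)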
package me.enkode.j8 import java.util.Optional trait JavaOptionalSupport[T] { def jOptional: Optional[T] def asScala = if (jOptional.isPresent) { Option(jOptional.get()) } else { None } }
kender/java8-converters
src/main/scala/me/enkode/j8/JavaOptionalSupport.scala
Scala
mit
207
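The trait above becomes ergonomic once wired to an implicit conversion; a minimal sketch (the object and implicit class names are invented for illustration):

import java.util.Optional

object OptionalSyntax {
  // The val parameter implements the trait's abstract `jOptional` member.
  implicit class RichOptional[T](val jOptional: Optional[T]) extends JavaOptionalSupport[T]
}

// Usage:
//   import OptionalSyntax._
//   Optional.of("value").asScala      // Some("value")
//   Optional.empty[String]().asScala  // None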
package uk.gov.gds.location.importer.helpers import org.joda.time.DateTime import uk.gov.gds.location.importer.model._ import uk.gov.gds.location.importer.model.CodeLists._ import scala.Some object TestHelpers { lazy val startDate = new DateTime().minusDays(100) lazy val lastUpdatedDate = new DateTime().minusDays(50) lazy val endDate = new DateTime().minusDays(50) def deliveryPoint(uprn: String) = DeliveryPoint( uprn, Some("subBuildingName"), Some("buildingName"), Some("buildingNumber"), Some("dependantThoroughfareName"), Some("thoroughfareName"), Some("doubleDependantLocality"), Some("dependantLocality"), "POSTCODE", startDate, None, lastUpdatedDate ) def blpu(uprn: String) = BLPU( uprn, Some(BlpuStateCode.inUse), Some(LogicalStatusCode.approved), 1.1, 2.2, "1234", startDate, None, lastUpdatedDate, "S", "postcode") def lpi(uprn: String, usrn: String) = LPI( uprn, usrn, Some(LogicalStatusCode.approved), startDate, None, lastUpdatedDate, Some("1"), Some("a"), Some("2"), Some("b"), Some("pao text"), Some("3"), Some("c"), Some("4"), Some("d"), Some("sao text"), Some("area name"), Some(true), "ENG" ) def street(usrn: String) = Street(usrn, Some(StreetRecordTypeCode.numberedStreet), Some(StreetStateCode.open), Some(StreetSurfaceCode.mixed), Some(StreetClassificationCode.allVehicles), startDate, None, lastUpdatedDate) def streetDescriptor(usrn: String) = StreetDescriptor(usrn, "description", Some("locality"), Some("town"), "admin area", "ENG") def classification(uprn: String) = Classification(uprn, "code", startDate, None, lastUpdatedDate, "primaryUse", Some("secondaryUse")) def organisation(uprn: String) = Organisation(uprn, "organisation", startDate, None, lastUpdatedDate) def streetWithDescription(fileName: String, streetDescriptor: StreetDescriptor, s: Street) = StreetWithDescription( streetDescriptor.usrn, streetDescriptor.streetDescription, streetDescriptor.localityName, streetDescriptor.townName, streetDescriptor.administrativeArea, s.recordType.map(r => r.toString), s.state.map(r => r.toString), s.surface.map(r => r.toString), s.classification.map(r => r.toString), fileName ) def codePoint = CodePoint("postcode", "country", "district", "name", 1.1, 2.2, 1.1, 2.2, "nhs-region", "nhs", "ward", "county") }
alphagov/location-data-importer
src/test/scala/uk/gov/gds/location/importer/helpers/TestHelpers.scala
Scala
mit
2,497
/* Copyright (c) 2017 KAPSARC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.economicsl.auctions.messages import org.economicsl.core.{Price, Quantity, Tradable} import scala.collection.GenIterable /** Mixin trait providing a schedule of price-quantity pairs for an order. * * @author davidrpugh * @since 0.1.0 */ trait PriceQuantitySchedule[+T <: Tradable] { type PricePoint = (Price, Quantity) /** A schedule is a step-wise specification of an `Order` to buy (or sell) various quantities * of a `Tradable` at specific, discrete price-points. */ def schedule: GenIterable[PricePoint] def tradable: T }
EconomicSL/auctions
src/main/scala/org/economicsl/auctions/messages/PriceQuantitySchedule.scala
Scala
apache-2.0
1,127
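A minimal concrete implementation of the trait, hedged: the two price points are invented, and this assumes `Price` and `Quantity` expose the usual `apply` constructors from `org.economicsl.core` (imports as in the file above).

// Hypothetical step-wise supply schedule: 100 units offered at 10, 50 more at 12.
case class TwoStepSchedule[+T <: Tradable](tradable: T) extends PriceQuantitySchedule[T] {
  val schedule: GenIterable[PricePoint] = List(
    (Price(10), Quantity(100)),
    (Price(12), Quantity(50))
  )
}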
package db import java.util.UUID import models.TileCacheExpiration import com.trifectalabs.roadquality.v0.models._ import models.{MiniSegment, MiniSegmentToSegment} import org.joda.time.DateTime import com.vividsolutions.jts.geom.Geometry object Tables { import MyPostgresDriver.api._ import TablesHelper._ class Segments(tag: Tag) extends Table[Segment](tag, "segments") { def id = column[UUID]("id", O.PrimaryKey) def name = column[Option[String]]("name") def description = column[Option[String]]("description") def polyline = column[String]("polyline") def createdBy = column[UUID]("created_by") def hidden = column[Boolean]("hidden") override def * = (id, name, description, polyline, createdBy, hidden) <> (Segment.tupled, Segment.unapply) } val segments = TableQuery[Segments] class Users(tag: Tag) extends Table[User](tag, "users") { def id = column[UUID]("id", O.PrimaryKey) def firstName = column[String]("first_name") def lastName = column[String]("last_name") def email = column[String]("email") def city = column[Option[String]]("city") def province = column[Option[String]]("province") def country = column[Option[String]]("country") def birthdate = column[Option[DateTime]]("birthdate") def sex = column[Option[String]]("sex") def role = column[UserRole]("role") def stravaToken = column[String]("strava_token") def createdAt = column[DateTime]("created_at") def updatedAt = column[DateTime]("updated_at") def deletedAt = column[Option[DateTime]]("deleted_at") override def * = (id, firstName, lastName, email, city, province, country, birthdate, sex, role, stravaToken, createdAt, updatedAt, deletedAt) <> (User.tupled, User.unapply) } val users = TableQuery[Users] class SegmentRatings(tag: Tag) extends Table[SegmentRating](tag, "segment_ratings") { def id = column[UUID]("id") def segmentId = column[UUID]("segment_id") def userId = column[UUID]("user_id") def trafficRating = column[Int]("traffic_rating") def surfaceRating = column[Int]("surface_rating") def surface = column[SurfaceType]("surface") def pathType = column[PathType]("path_type") def createdAt = column[DateTime]("created_at") def updatedAt = column[DateTime]("updated_at") def deletedAt = column[Option[DateTime]]("deleted_at") override def * = (id, segmentId, userId, trafficRating, surfaceRating, surface, pathType, createdAt, updatedAt, deletedAt) <> (SegmentRating.tupled, SegmentRating.unapply) } val segmentRatings = TableQuery[SegmentRatings] class MiniSegments(tag: Tag) extends Table[MiniSegment](tag, "mini_segments") { def id = column[UUID]("id") def trafficRating = column[Double]("traffic_rating") def surfaceRating = column[Double]("surface_rating") def surface = column[SurfaceType]("surface") def pathType = column[PathType]("path_type") def polyline = column[String]("polyline") override def * = (id, trafficRating, surfaceRating, surface, pathType, polyline) <> (MiniSegment.tupled, MiniSegment.unapply) } val miniSegments = TableQuery[MiniSegments] class MiniSegmentsToSegments(tag: Tag) extends Table[MiniSegmentToSegment](tag, "mini_segments_to_segments") { def miniSegmentId = column[UUID]("mini_segment_id") def miniSegmentPolyline = column[Geometry]("mini_segment_polyline") def segmentId = column[UUID]("segment_id") override def * = (miniSegmentId, miniSegmentPolyline, segmentId) <> (MiniSegmentToSegment.tupled, MiniSegmentToSegment.unapply) } val miniSegmentsToSegments = TableQuery[MiniSegmentsToSegments] class BetaUserWhitelist(tag: Tag) extends Table[(String)](tag, "beta_user_whitelist") { def email = column[String]("email") override 
def * = (email) } val betaUserWhitelist = TableQuery[BetaUserWhitelist] }
trifectalabs/roadquality
api/app/db/Tables.scala
Scala
bsd-3-clause
3,878
package se.citerus.dddsample.infrastructure.persistence.hibernate class VoyageRepositoryHibernate { }
oluies/ddd-sample-scala
src/main/scala/se/citerus/dddsample/infrastructure/persistence/hibernate/VoyageRepositoryHibernate.scala
Scala
mit
107
/* Copyright (c) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.xml.combinators import scala.xml.PrettyPrinter import org.junit.{Test, Assert, Ignore} /** * This class tests simple XML pickler combinators. * * @author Iulian Dragos (iuliandragos@google.com) */ class PicklerTest extends PicklerAsserts { import Picklers._ final val URI = "testing-uri" val pprinter = new PrettyPrinter(80, 4) def pSeq2: Pickler[~[String, String]] = elem("p", URI, "pair", elem("p", URI, "a", text) ~ elem("p", URI, "b", text)) val input = (<p:pair xmlns:p="testing-uri"> <p:a>alfa</p:a> <p:b>omega</p:b> </p:pair>) val pair = new ~("alfa", "omega") @Test def testSequencePickle { val pickled = pSeq2.pickle(pair, PlainOutputStore.empty) Assert.assertEquals("Sequence pickling failed", normalize(input), normalize(pickled.rootNode)) } @Test def testSequenceUnpickle { assertSucceedsWith("Sequence unpickling failed", pair, input, pSeq2) } def pSeq3: Pickler[String ~ String ~ String] = elem("p", URI, "triple", elem("p", URI, "a", text) ~ elem("p", URI, "b", text) ~ elem("p", URI, "c", text)) val triple = new ~(new ~("alfa", "beta"), "gamma") val inputTriple = (<m:triple xmlns:m="testing-uri"> <m:a>alfa</m:a> <m:b>beta</m:b> <m:c>gamma</m:c> </m:triple>) @Test def testSequence3Unpickle { assertSucceedsWith("Sequence 3 unpickling failed", triple, inputTriple, pSeq3) } def pStrings = elem("p", URI, "strings", rep(elem("p", URI, "str", text))) @Test def testRepetition0Unpickle { val inputRep = (<p:strings xmlns:p="testing-uri"></p:strings>) val strings = List() assertSucceedsWith("Repetition with empty sequence failed", strings, inputRep, pStrings) } @Test def testRepetition1Unpickle { val inputRep = (<p:strings xmlns:p="testing-uri"> <p:str>one</p:str> </p:strings>) val strings = List("one") assertSucceedsWith("Repetition with one element failed", strings, inputRep, pStrings) } @Test def testRepetition3Unpickle { val inputRep = (<p:strings xmlns:p="testing-uri"> <p:str>one</p:str> <p:str>two</p:str> <p:str>three</p:str> </p:strings>) val strings = List("one", "two", "three") assertSucceedsWith("Repetition failed", strings, inputRep, pStrings) } @Test def testRepetition0Pickle { val inputRep = (<p:strings xmlns:p="testing-uri"></p:strings>) val strings = List() val pickled = pStrings.pickle(strings, PlainOutputStore.empty) Assert.assertEquals("Empty repetition pickling", normalize(inputRep), normalize(pickled.rootNode)) } @Test def testRepetition1Pickle { val inputRep = (<p:strings xmlns:p="testing-uri"> <p:str>one</p:str> </p:strings>) val strings = List("one") val pickled = pStrings.pickle(strings, PlainOutputStore.empty) Assert.assertEquals("Repetition of 1 element, pickling", normalize(inputRep), normalize(pickled.rootNode)) } @Test def testRepetition3Pickle { val inputRep = (<p:strings xmlns:p="testing-uri"> <p:str>one</p:str> <p:str>two</p:str> <p:str>three</p:str> </p:strings>) val strings = List("one", "two", "three") val pickled = pStrings.pickle(strings, PlainOutputStore.empty) 
Assert.assertEquals("Repetition of 3 elements, pickling", normalize(inputRep), normalize(pickled.rootNode)) } @Test def testWhen { implicit val ns = ("p", "testing-uri") val input = (<p:strings xmlns:p="testing-uri"> <p:str>one</p:str> <p:str kind="special">this is special</p:str> <p:a>a</p:a> <p:b>b</p:b> </p:strings>) val pickler = elem("strings", (when(elem("str", const(attr("kind", text), "special")), elem("str", text)) ~ elem("str", text) ~ elem("a", text) ~ elem("b", text))) val expected = new ~("this is special", "one") ~ "a" ~ "b" assertSucceedsWith("Unpickling when", expected, input, pickler) } @Test def testWhenInterleaved { implicit val ns = ("p", "testing-uri") val input = (<p:strings xmlns:p="testing-uri"> <p:b>b</p:b> <p:a>a</p:a> <p:str>one</p:str> <p:str kind="special">this is special</p:str> </p:strings>) val pickler = interleaved("strings", (when(elem("str", const(attr("kind", text), "special")), elem("str", text)) ~ elem("str", text) ~ elem("a", text) ~ elem("b", text))) val expected = new ~("this is special", "one") ~ "a" ~ "b" assertSucceedsWith("Unpickling when", expected, input, pickler) } }
mjanson/gdata-scala-client
tests/com/google/xml/combinators/PicklerTest.scala
Scala
apache-2.0
5,429
/* * Copyright Β© 2014 Teo Klestrup, Carl Dybdahl * * This file is part of Republix. * * Republix is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Republix is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Republix. If not, see <http://www.gnu.org/licenses/>. */ package republix.game import republix.io._ import republix.sim._ class SimVote(proposals: Map[(Party, GameNode), Option[Intensity]]) extends SimPhase{ def sim(model: GameModel, players: => Vector[Party], updates: In[(Party, PhaseCommand)], state: GameState, feedback: SimEffect => Unit): Unit = { var votes = Map[Party, Party]() updates.listen { case (p, VoteFor(proposer)) => votes += p -> proposer case (p, _) => feedback(Kick(p)) } } }
teozkr/republix-online
republix/src/main/scala/republix/game/SimVote.scala
Scala
agpl-3.0
1,217
object `gadt-no-approx` { def fo[U](u: U): U = (0 : Int) match { case _: u.type => val i: Int = (??? : U) // error // potentially could compile // val i2: Int = u u } }
som-snytt/dotty
tests/neg/gadt-no-approx.scala
Scala
apache-2.0
217
/* * Copyright 2016 Dennis Vriend * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package akka.persistence.jdbc import akka.actor.{ ActorRef, ActorSystem } import akka.persistence.jdbc.util.ClasspathResources import akka.testkit.TestProbe import org.scalatest._ import org.scalatest.concurrent.{ Eventually, ScalaFutures } trait SimpleSpec extends FlatSpec with Matchers with ScalaFutures with TryValues with OptionValues with Eventually with ClasspathResources with BeforeAndAfterAll with BeforeAndAfterEach with GivenWhenThen { /** * Sends the PoisonPill command to an actor and waits for it to die */ def killActors(actors: ActorRef*)(implicit system: ActorSystem): Unit = { val tp = TestProbe() actors.foreach { (actor: ActorRef) => tp watch actor system.stop(actor) tp.expectTerminated(actor) } } }
gavares/akka-persistence-jdbc
src/test/scala/akka/persistence/jdbc/SimpleSpec.scala
Scala
apache-2.0
1,383
package pimpathon.argonautTests import _root_.argonaut._ import pimpathon.PSpec import pimpathon.argonaut._ class DecodeJsonSpec extends PSpec with JsonUtil { "beforeDecode" in decoder.beforeDecode(reverse).decodeJson(json) ≑ decoder.decodeJson(reverse(json)) "compose" in decoder.compose(reverse).decodeJson(json) ≑ decoder.decodeJson(reverse(json)) "upcast" in Derived.codec.upcast[Base].decodeJson(derivedEncoded) ≑ DecodeResult.ok(derived) "mapEntries" in mapDecoder.mapEntries(reverseEntry).decodeJson(jsonMap("foo" β†’ "bar")) ≑ mapDecoder.decodeJson(jsonMap("oof" β†’ "rab")) "mapKeys" in mapDecoder.mapKeys(_.reverse).decodeJson(jsonMap("foo" β†’ "bar")) ≑ mapDecoder.decodeJson(jsonMap("oof" β†’ "bar")) "mapValues" in mapDecoder.mapValues(_.reverse).decodeJson(jsonMap("foo" β†’ "bar")) ≑ mapDecoder.decodeJson(jsonMap("foo" β†’ "rab")) }
stacycurl/pimpathon
src/test/scala/pimpathon/argonautTests/DecodeJsonTest.scala
Scala
apache-2.0
907
package ru.dgolubets.reactjs.server.util import java.io.{File, FileInputStream, InputStream} import java.security.MessageDigest import akka.util.ByteString private[server] object MD5 { def ofStream(stream: InputStream, bufferSize: Int = 4096): ByteString = { val md = MessageDigest.getInstance("MD5") val buffer = new Array[Byte](bufferSize) var read = 0 do { read = stream.read(buffer) if (read > 0) { md.update(buffer, 0, read) } } while (read > 0) ByteString(md.digest()) } def ofFile(file: File, bufferSize: Int = 4096): ByteString = { val stream = new FileInputStream(file) try { ofStream(stream, bufferSize) } finally { stream.close() } } }
DGolubets/reactjs-server
src/main/scala/ru/dgolubets/reactjs/server/util/MD5.scala
Scala
mit
743
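Usage of the helper above is direct; a small sketch that hashes a file and renders the 16-byte digest as the usual hex string (the file name is illustrative):

import java.io.File

val digest = MD5.ofFile(new File("build.sbt"))
val hex = digest.toArray.map(b => f"${b & 0xff}%02x").mkString
println(hex) // 32 lowercase hex characters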
package spark.util import java.util.concurrent.ConcurrentHashMap import scala.collection.JavaConversions import scala.collection.mutable.Map /** * This is a custom implementation of scala.collection.mutable.Map which stores the insertion * time stamp along with each key-value pair. Key-value pairs that are older than a particular * threshold time can then be removed using the clearOldValues method. This is intended to be a drop-in * replacement for scala.collection.mutable.HashMap. */ class TimeStampedHashMap[A, B] extends Map[A, B]() with spark.Logging { val internalMap = new ConcurrentHashMap[A, (B, Long)]() def get(key: A): Option[B] = { val value = internalMap.get(key) if (value != null) Some(value._1) else None } def iterator: Iterator[(A, B)] = { val jIterator = internalMap.entrySet().iterator() JavaConversions.asScalaIterator(jIterator).map(kv => (kv.getKey, kv.getValue._1)) } override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = { val newMap = new TimeStampedHashMap[A, B1] newMap.internalMap.putAll(this.internalMap) newMap.internalMap.put(kv._1, (kv._2, currentTime)) newMap } override def - (key: A): Map[A, B] = { val newMap = new TimeStampedHashMap[A, B] newMap.internalMap.putAll(this.internalMap) newMap.internalMap.remove(key) newMap } override def += (kv: (A, B)): this.type = { internalMap.put(kv._1, (kv._2, currentTime)) this } override def -= (key: A): this.type = { internalMap.remove(key) this } override def update(key: A, value: B) { this += ((key, value)) } override def apply(key: A): B = { val value = internalMap.get(key) if (value == null) throw new NoSuchElementException() value._1 } override def filter(p: ((A, B)) => Boolean): Map[A, B] = { JavaConversions.asScalaConcurrentMap(internalMap).map(kv => (kv._1, kv._2._1)).filter(p) } override def empty: Map[A, B] = new TimeStampedHashMap[A, B]() override def size: Int = internalMap.size override def foreach[U](f: ((A, B)) => U) { val iterator = internalMap.entrySet().iterator() while(iterator.hasNext) { val entry = iterator.next() val kv = (entry.getKey, entry.getValue._1) f(kv) } } /** * Removes old key-value pairs that have timestamp earlier than `threshTime` */ def clearOldValues(threshTime: Long) { val iterator = internalMap.entrySet().iterator() while(iterator.hasNext) { val entry = iterator.next() if (entry.getValue._2 < threshTime) { logDebug("Removing key " + entry.getKey) iterator.remove() } } } private def currentTime: Long = System.currentTimeMillis() }
koeninger/spark
core/src/main/scala/spark/util/TimeStampedHashMap.scala
Scala
bsd-3-clause
2,713
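A short usage sketch of the age-based eviction described in the doc comment above (runs inside the spark project, where the `spark.Logging` trait is available):

val cache = new TimeStampedHashMap[String, Int]
cache("a") = 1
Thread.sleep(10)
val cutoff = System.currentTimeMillis()
cache("b") = 2
cache.clearOldValues(cutoff) // evicts "a" (stamped before cutoff), keeps "b"
assert(cache.get("a").isEmpty)
assert(cache.get("b").contains(2))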
package org.scalawebtest.integration.doc._104 import org.scalawebtest.integration.json.{FitsTypeMismatchBehavior, ScalaWebTestJsonBaseSpec} import play.api.libs.json.{JsValue, Json} class FitsValuesIgnoringArrayOrderSpec extends ScalaWebTestJsonBaseSpec with FitsTypeMismatchBehavior { config.useBaseUri("http://localhost:9090") path = "/dijkstra.json" def dijkstra: JsValue = Json.parse(webDriver.getPageSource) "Dijkstra" should "contain the correct firstName and lastName and the correct universities in any order" in { dijkstra fits valuesIgnoringArrayOrder of """ |{ | "name": "Dijkstra", | "firstName": "Edsger", | "universities": | [ | { "name": "UniversitΓ€t Leiden","begin": 1948, "end": 1956 }, | { "name": "University of Texas at Austin", "begin": 1984, "end": 1999 }, | { "name": "Technische Universiteit Eindhoven", "begin": 1962, "end": 1984 }, | { "name": "Mathematisch Centrum Amsterdam", "begin": 1951, "end": 1959 } | ] |} """.stripMargin } }
unic/ScalaWebTest
scalawebtest-integration/src/it/scala/org/scalawebtest/integration/doc/_104/FitsValuesIgnoringArrayOrderSpec.scala
Scala
apache-2.0
1,087
// Since `State` is a binary type constructor, we need to partially apply it // with the `S` type argument. Thus, it is not just one monad, but an entire // family of monads, one for each type `S`. One solution is to create a class // `StateMonads` that accepts the `S` type argument and then has a _type member_ // for the fully applied `State[S, A]` type inside: class StateMonads[S] { type StateS[A] = State[S, A] // We can then declare the monad for the `StateS` type constructor: val monad = new Monad[StateS] { def unit[A](a: => A): State[S, A] = State(s => (a, s)) override def flatMap[A,B](st: State[S, A])(f: A => State[S, B]): State[S, B] = st flatMap f } } // But we don't have to create a full class like `StateMonads`. We can create // an anonymous class inline, inside parentheses, and project out its type member `f`. // This is sometimes called a "type lambda", since it's very similar to a type-level // anonymous function. def stateMonad[S] = new Monad[({type f[x] = State[S, x]})#f] { def unit[A](a: => A): State[S, A] = State(s => (a, s)) override def flatMap[A,B](st: State[S, A])(f: A => State[S, B]): State[S, B] = st flatMap f }
lucaviolanti/scala-redbook
answerkey/monads/02.answer.scala
Scala
mit
1,208
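To make the type-lambda version concrete, a small usage sketch that threads an Int counter through a computation (this assumes the book's `State` case class with a `run: S => (A, S)` field, as used elsewhere in the chapter):

val m = stateMonad[Int]
val tick: State[Int, Int] = State(s => (s, s + 1)) // return the counter, then increment it
val twoTicks: State[Int, (Int, Int)] =
  m.flatMap(tick)(a => m.flatMap(tick)(b => m.unit((a, b))))
// twoTicks.run(0) == ((0, 1), 2)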