code | repo_name | path | language | license | size
stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M
---|---|---|---|---|---
package com.wavesplatform.mining.microblocks
import cats.syntax.applicativeError._
import cats.syntax.bifunctor._
import cats.syntax.either._
import com.wavesplatform.account.KeyPair
import com.wavesplatform.block.Block.BlockId
import com.wavesplatform.block.{Block, MicroBlock}
import com.wavesplatform.metrics._
import com.wavesplatform.mining._
import com.wavesplatform.mining.microblocks.MicroBlockMinerImpl._
import com.wavesplatform.network.{MicroBlockInv, _}
import com.wavesplatform.settings.MinerSettings
import com.wavesplatform.state.Blockchain
import com.wavesplatform.state.appender.MicroblockAppender
import com.wavesplatform.transaction.{BlockchainUpdater, Transaction}
import com.wavesplatform.utils.ScorexLogging
import com.wavesplatform.utx.UtxPool
import com.wavesplatform.utx.UtxPool.PackStrategy
import io.netty.channel.group.ChannelGroup
import kamon.Kamon
import monix.eval.Task
import monix.execution.schedulers.SchedulerService
import scala.concurrent.duration._
class MicroBlockMinerImpl(
setDebugState: MinerDebugInfo.State => Unit,
allChannels: ChannelGroup,
blockchainUpdater: BlockchainUpdater with Blockchain,
utx: UtxPool,
settings: MinerSettings,
minerScheduler: SchedulerService,
appenderScheduler: SchedulerService,
nextMicroBlockSize: Int => Int
) extends MicroBlockMiner
with ScorexLogging {
private val microBlockBuildTimeStats = Kamon.timer("miner.forge-microblock-time").withoutTags()
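/** Repeatedly forges microblocks for `account` on top of `accumulatedBlock`:
  * on `Success` it recurses with the updated block and constraint, on `Retry` it
  * retries with the same arguments after a one second delay, and on `Stop` it
  * switches the debug state back to block mining. */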
def generateMicroBlockSequence(
account: KeyPair,
accumulatedBlock: Block,
restTotalConstraint: MiningConstraint,
lastMicroBlock: Long
): Task[Unit] =
generateOneMicroBlockTask(account, accumulatedBlock, restTotalConstraint, lastMicroBlock)
.flatMap {
case res @ Success(newBlock, newConstraint) =>
Task.defer(generateMicroBlockSequence(account, newBlock, newConstraint, res.nanoTime))
case Retry =>
Task
.defer(generateMicroBlockSequence(account, accumulatedBlock, restTotalConstraint, lastMicroBlock))
.delayExecution(1 second)
case Stop =>
setDebugState(MinerDebugInfo.MiningBlocks)
Task(log.debug("MicroBlock mining completed, block is full"))
}
.recover { case e => log.error("Error mining microblock", e) }
private[mining] def generateOneMicroBlockTask(
account: KeyPair,
accumulatedBlock: Block,
restTotalConstraint: MiningConstraint,
lastMicroBlock: Long
): Task[MicroBlockMiningResult] = {
val packTask = Task.cancelable[(Option[Seq[Transaction]], MiningConstraint)] { cb =>
@volatile var cancelled = false
minerScheduler.execute { () =>
val mdConstraint = MultiDimensionalMiningConstraint(
restTotalConstraint,
OneDimensionalMiningConstraint(
nextMicroBlockSize(settings.maxTransactionsInMicroBlock),
TxEstimators.one,
"MaxTxsInMicroBlock"
)
)
val packStrategy =
if (accumulatedBlock.transactionData.isEmpty) PackStrategy.Limit(settings.microBlockInterval)
else PackStrategy.Estimate(settings.microBlockInterval)
log.trace(s"Starting pack for ${accumulatedBlock.id()} with $packStrategy, initial constraint is $mdConstraint")
val (unconfirmed, updatedMdConstraint) =
concurrent.blocking(
Instrumented.logMeasure(log, "packing unconfirmed transactions for microblock")(
utx.packUnconfirmed(
mdConstraint,
packStrategy,
() => cancelled
)
)
)
log.trace(s"Finished pack for ${accumulatedBlock.id()}")
val updatedTotalConstraint = updatedMdConstraint.constraints.head
cb.onSuccess(unconfirmed -> updatedTotalConstraint)
}
Task.eval {
cancelled = true
}
}
packTask.flatMap {
case (Some(unconfirmed), updatedTotalConstraint) if unconfirmed.nonEmpty =>
val delay = {
val delay = System.nanoTime() - lastMicroBlock
val requiredDelay = settings.microBlockInterval.toNanos
if (delay >= requiredDelay) Duration.Zero else (requiredDelay - delay).nanos
}
for {
_ <- Task.now(if (delay > Duration.Zero) log.trace(s"Sleeping ${delay.toMillis} ms before applying microBlock"))
_ <- Task.sleep(delay)
_ = log.trace(s"Generating microBlock for ${account.toAddress}, constraints: $updatedTotalConstraint")
blocks <- forgeBlocks(account, accumulatedBlock, unconfirmed)
.leftWiden[Throwable]
.liftTo[Task]
(signedBlock, microBlock) = blocks
blockId <- appendMicroBlock(microBlock)
_ <- broadcastMicroBlock(account, microBlock, blockId)
} yield {
if (updatedTotalConstraint.isFull) Stop
else Success(signedBlock, updatedTotalConstraint)
}
case (_, updatedTotalConstraint) =>
if (updatedTotalConstraint.isFull) {
log.trace(s"Stopping forging microBlocks, the block is full: $updatedTotalConstraint")
Task.now(Stop)
} else {
log.trace("UTX is empty, retrying")
Task.now(Retry)
}
}
}
private def broadcastMicroBlock(account: KeyPair, microBlock: MicroBlock, blockId: BlockId): Task[Unit] =
Task(if (allChannels != null) allChannels.broadcast(MicroBlockInv(account, blockId, microBlock.reference)))
private def appendMicroBlock(microBlock: MicroBlock): Task[BlockId] =
MicroblockAppender(blockchainUpdater, utx, appenderScheduler)(microBlock)
.flatMap {
case Left(err) => Task.raiseError(MicroBlockAppendError(microBlock, err))
case Right(v) => Task.now(v)
}
private def forgeBlocks(
account: KeyPair,
accumulatedBlock: Block,
unconfirmed: Seq[Transaction]
): Either[MicroBlockMiningError, (Block, MicroBlock)] =
microBlockBuildTimeStats.measureSuccessful {
for {
signedBlock <- Block
.buildAndSign(
version = blockchainUpdater.currentBlockVersion,
timestamp = accumulatedBlock.header.timestamp,
reference = accumulatedBlock.header.reference,
baseTarget = accumulatedBlock.header.baseTarget,
generationSignature = accumulatedBlock.header.generationSignature,
txs = accumulatedBlock.transactionData ++ unconfirmed,
signer = account,
featureVotes = accumulatedBlock.header.featureVotes,
rewardVote = accumulatedBlock.header.rewardVote
)
.leftMap(BlockBuildError)
microBlock <- MicroBlock
.buildAndSign(signedBlock.header.version, account, unconfirmed, accumulatedBlock.id(), signedBlock.signature)
.leftMap(MicroBlockBuildError)
_ = BlockStats.mined(microBlock)
} yield (signedBlock, microBlock)
}
}
object MicroBlockMinerImpl {
sealed trait MicroBlockMiningResult
case object Stop extends MicroBlockMiningResult
case object Retry extends MicroBlockMiningResult
final case class Success(b: Block, totalConstraint: MiningConstraint) extends MicroBlockMiningResult {
val nanoTime: Long = System.nanoTime()
}
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/mining/microblocks/MicroBlockMinerImpl.scala | Scala | mit | 7,312 |
package org.workcraft.plugins
package stg21
import org.workcraft.util.Maybe
import org.workcraft.dependencymanager.advanced.core.EvaluationContext
import org.workcraft.dependencymanager.advanced.user.ModifiableExpressionBase
import org.workcraft.dependencymanager.advanced.user.ModifiableExpression
import java.awt.geom.Point2D
import org.workcraft.dom.visual.connections.RelativePoint
import scala.collection.mutable.WeakHashMap
import scalaz.State
import org.workcraft.interop.ModelService
import org.workcraft.dependencymanager.advanced.core.Expression
import scalaz.Lens
import org.workcraft.plugins.stg21.fields.GroupLenses
import org.workcraft.dom.visual.connections.StaticVisualConnectionData
import org.workcraft.plugins.stg21.parsing.Place
import org.workcraft.plugins.stg21.parsing.ExplicitPlacePlace
import org.workcraft.plugins.stg21.parsing.ImplicitPlace
object types {
sealed trait SignalType
object SignalType {
case object Input extends SignalType
case object Output extends SignalType
case object Internal extends SignalType
}
sealed trait TransitionDirection {
import TransitionDirection._
def symbol = this match {
case Rise => "+"
case Fall => "-"
case Toggle => "~"
}
}
object TransitionDirection {
case object Rise extends TransitionDirection
case object Fall extends TransitionDirection
case object Toggle extends TransitionDirection
}
type InstanceNumber = Int
type Transition = (TransitionLabel, InstanceNumber)
sealed trait TransitionLabel
case class DummyLabel(name: String) extends TransitionLabel
case class SignalLabel(signal: Id[Signal], direction: TransitionDirection) extends TransitionLabel
case class Signal(name: String, direction: SignalType)
object Signal extends fields.SignalLenses
case class Id[T](id: Int) {
def upCast[B >: T]: Id[B] = Id(id)
def downCast[B <: T]: Id[B] = Id(id)
}
case class ExplicitPlace(initialMarking: Int, name: String)
object ExplicitPlace extends fields.ExplicitPlaceLenses
sealed trait Arc
case class ConsumingArc(from: Id[ExplicitPlace], to: Id[Transition]) extends Arc
case class ProducingArc(from: Id[Transition], to: Id[ExplicitPlace]) extends Arc
case class ImplicitPlaceArc(from: Id[Transition], to: Id[Transition], initialMarking: Int) extends Arc
sealed trait StgConnectable
case class NodeConnectable(n: StgNode) extends StgConnectable
case class ArcConnectable(a: Id[Arc]) extends StgConnectable
case class Col[T](map: Map[Id[T], T], nextFreeId: Id[T]) {
def remove(id: Id[T]): Col[T] = Col[T](map - id, nextFreeId)
def lookup(key: Id[T]): Option[T] = map.get(key)
def apply(key: Id[T]) = map(key)
def unsafeLookup(key: Id[T]): T = lookup(key).get
def insert(key: Id[T])(value: T) = copy(map = map + ((key, value)))
def keys: List[Id[T]] = map.keys.toList
def values: List[T] = map.values.toList
}
import StateExtensions._
object Col {
def empty[T] = Col[T](Map.empty, Id[T](0))
def uncheckedLook[T] = (id: Id[T]) => Lens[Col[T], T](_.lookup(id).get, (col, v) => update(id)(x => v) ~> col)
def add[T](t: T): State[Col[T], Id[T]] = state(col => {
(Col[T](col.map + ((col.nextFreeId, t)), Id[T](col.nextFreeId.id + 1)), col.nextFreeId)
})
def remove[T](t: Id[T]): State[Col[T], Boolean] = state(col => {
(Col[T](col.map - t, col.nextFreeId), col.map.contains(t))
})
def update[T](t: Id[T])(f: T => T): State[Col[T], Boolean] = state(col => {
col.map.get(t) match {
case None => (col, false)
case Some(x) => (Col[T](col.map + (t -> f(x)), col.nextFreeId), true)
}
})
}
case class MathStg(
signals: Col[Signal],
transitions: Col[Transition],
places: Col[ExplicitPlace],
arcs: Col[Arc]) {
def initialMarking: Map[Place, Int] = {
import scalaz._
import Scalaz._
places.map.map { case (pid, p) => (ExplicitPlacePlace(pid): Place, p.initialMarking) } |+|
(arcs.map.toList >>= { case (aid, arc @ ImplicitPlaceArc(_, _, m)) => List((ImplicitPlace(aid.downCast), m)); case _ => List() }).toMap
}
def setInitialMarking(marking : Map[Place, Int]) : MathStg = {
marking.foldLeft(this){case (s, (p, m)) => s.setInitialMarking(p,m)}
}
def setInitialMarking(p : Place, m : Int) : MathStg = p match {
case ImplicitPlace(aid) => copy(arcs = Col.update(aid.upCast[Arc]){case ImplicitPlaceArc (a,b,_) => ImplicitPlaceArc (a,b,m)} ~> arcs)
case ExplicitPlacePlace(pid) => copy (places = Col.update(pid){case ExplicitPlace(_,n) => ExplicitPlace(m,n)} ~> places)
}
}
sealed trait StgNode
case class ExplicitPlaceNode(p: Id[ExplicitPlace]) extends StgNode
case class TransitionNode(t: Id[Transition]) extends StgNode
type VisualArc = StaticVisualConnectionData
implicit def decorateVisualArc(arc: Arc) = new {
def firstAndSecond: (StgNode, StgNode) = arc match {
case ProducingArc(t, p) => (TransitionNode(t), ExplicitPlaceNode(p))
case ConsumingArc(p, t) => (ExplicitPlaceNode(p), TransitionNode(t))
case ImplicitPlaceArc(t1, t2, _) => (TransitionNode(t1), TransitionNode(t2))
}
def first = firstAndSecond._1
def second = firstAndSecond._2
}
case class Group(info: VisualInfo)
object Group extends GroupLenses
case class VisualInfo(position: Point2D.Double, parent: Option[Id[Group]])
object VisualInfo extends fields.VisualInfoLenses
sealed trait VisualNode
case class StgVisualNode(n: StgNode) extends VisualNode
case class GroupVisualNode(g: Id[Group]) extends VisualNode
sealed trait VisualEntity
case class NodeVisualEntity(n: VisualNode) extends VisualEntity
case class ArcVisualEntity(a: Id[Arc]) extends VisualEntity
case class VisualModel[N, A](
groups: Col[Group],
arcs: Map[A, VisualArc],
nodesInfo: Map[N, VisualInfo])
case class VisualStg(
math: MathStg,
visual: VisualModel[StgNode, Id[Arc]])
object VisualStg extends fields.VisualStgLenses {
val empty = VisualStg(MathStg.empty, VisualModel.empty)
}
object VisualModel extends fields.VisualModelLenses {
def empty[N, A] = VisualModel[N, A](Col.empty, Map.empty, Map.empty)
def addNode[N, A](node: N, where: Point2D.Double): State[VisualModel[N, A], Unit] = state((m: Map[N, VisualInfo]) => (m + ((node, VisualInfo(where, None))), ())).on(nodesInfo)
def removeNode[N, A](node: N): State[VisualModel[N, A], Boolean] = state((m: Map[N, VisualInfo]) => (m - node, m.contains(node))).on(nodesInfo)
}
val MATH_STG_SERVICE_HANDLE: ModelService[Expression[MathStg]] = ModelService.createNewService(classOf[Expression[MathStg]], "STG representation of the underlying model");
object MathStg extends org.workcraft.plugins.stg21.fields.MathStgLenses {
val empty = MathStg(Col.empty, Col.empty, Col.empty, Col.empty)
}
}
| tuura/workcraft-2.2 | STGPlugin21/src/main/scala/org/workcraft/plugins/stg21/stg.scala | Scala | gpl-3.0 | 6,902 |
package ghpages.examples
import ghpages.GhPagesMacros
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.vdom.prefix_<^._
import ghpages.examples.util.SideBySide
object HelloMessageExample {
def content = SideBySide.Content(jsSource, source, main())
lazy val main = addIntro(HelloMessage withProps "John", _(scalaPortOf("A Simple Component")))
val jsSource =
"""
|var HelloMessage = React.createClass({displayName: 'HelloMessage',
| render: function() {
| return React.createElement("div", null, "Hello ", this.props.name);
| }
|});
|
|React.render(React.createElement(HelloMessage, {name: "John"}), mountNode);
""".stripMargin
val source =
s"""
|${GhPagesMacros.exampleSource}
|
|React.render(HelloMessage("John"), mountNode)""".stripMargin
// EXAMPLE:START
val HelloMessage = ReactComponentB[String]("HelloMessage")
.render(name => <.div("Hello ", name))
.build
// EXAMPLE:END
}
| vcarrera/scalajs-react | gh-pages/src/main/scala/ghpages/examples/HelloMessageExample.scala | Scala | apache-2.0 | 1,014 |
package org.jetbrains.plugins.scala.codeInspection.types
import com.intellij.codeInspection.LocalInspectionTool
import com.intellij.testFramework.EditorTestUtil
import org.jetbrains.plugins.scala.codeInspection.{ScalaInspectionBundle, ScalaQuickFixTestBase}
import org.jetbrains.plugins.scala.externalLibraries.kindProjector.inspections.KindProjectorSimplifyTypeProjectionInspection
import org.jetbrains.plugins.scala.project.settings.ScalaCompilerConfiguration
/**
* Author: Svyatoslav Ilinskiy
* Date: 7/6/15
*/
class KindProjectorSimplifyTypeProjectionTest extends ScalaQuickFixTestBase {
override protected val classOfInspection: Class[_ <: LocalInspectionTool] =
classOf[KindProjectorSimplifyTypeProjectionInspection]
override protected val description: String =
ScalaInspectionBundle.message("kind.projector.simplify.type")
private def testFix(text: String, res: String): Unit =
testQuickFix(text, res, description)
override protected def setUp(): Unit = {
super.setUp()
val defaultProfile = ScalaCompilerConfiguration.instanceIn(getProject).defaultProfile
val newSettings = defaultProfile.getSettings.copy(
plugins = defaultProfile.getSettings.plugins :+ "kind-projector"
)
defaultProfile.setSettings(newSettings)
}
def testEitherInline(): Unit = {
val code = s"def a: $START({type A[Beta] = Either[Int, Beta]})#A$END"
checkTextHasError(code)
val text = "def a: ({type A[Beta] = Either[Int, Beta]})#A"
val res = "def a: Either[Int, ?]"
testFix(text, res)
}
def testParametersWrongOrder(): Unit = {
val code = s"def a: $START({type L[A, B] = Either[B, A]})#L$END"
checkTextHasError(code)
val text = "def a: ({type L[A, B] = Either[B, A]})#L"
val res = "def a: Lambda[(A, B) => Either[B, A]] "
testFix(text, res)
}
def testTwoParameters(): Unit = {
val code = s"def a: $START({type A[-Alpha, +Gamma] = Function2[Alpha, String, Gamma]})#A$END"
checkTextHasError(code)
val text = "def a: ({type A[-Alpha, +Gamma] = Function2[Alpha, String, Gamma]})#A"
val res = "def a: Function2[-?, String, +?]"
testFix(text, res)
}
def testRepeatedParams(): Unit = {
val code = s"def a: $START({type A[A] = (A, A)})#A$END"
checkTextHasError(code)
val text = "def a: ({type A[A] = (A, A)})#A"
val res = "def a: Lambda[A => (A, A)]"
testFix(text, res)
}
def testCovariant(): Unit = {
val code = s"def a: $START({type A[+A, B] = Either[A, Option[B]]})#A$END"
checkTextHasError(code)
val text = "def a: ({type A[+A, B] = Either[A, Option[B]]})#A"
val res = "def a: Lambda[(`+A`, B) => Either[A, Option[B]]]"
testFix(text, res)
}
def testHigherKind(): Unit = {
val code = s"def a: $START({type A[A, B[_]] = B[A]})#A$END"
checkTextHasError(code)
val text = "def a: ({type A[A, B[_]] = B[A]})#A"
val res = "def a: Lambda[(A, B[_]) => B[A]]"
testFix(text, res)
}
def testBound(): Unit = {
val code = s"def a: $START({type B[A <: Any] = (A, A)})#B$END"
checkTextHasError(code)
val text = "def a: ({type B[A <: Any] = (A, A)})#B"
val res = "def a: Lambda[`A <: Any` => (A, A)]"
testFix(text, res)
}
def testTwoBound(): Unit = {
val code = s"def a: $START({type B[A >: Int <: Any] = (A, A)})#B$END"
checkTextHasError(code)
val text = "def a: ({type B[A >: Int <: Any] = (A, A)})#B"
val res = "def a: Lambda[`A >: Int <: Any` => (A, A)]"
testFix(text, res)
}
def testMultipleVariantBounds(): Unit = {
val code = s"def a: $START({type B[-C >: Int, +A <: Any] = (A, A, C)})#B$END"
checkTextHasError(code)
val text = "def a: ({type B[-C >: Int, +A <: Any] = (A, A, C)})#B"
val res = "def a: Lambda[(`-C >: Int`, `+A <: Any`) => (A, A, C)]"
testFix(text, res)
}
def testParameterizedBounds(): Unit = {
val code = s"def a: ({type B[C >: List[Int], +A <: Any] = (A, A, C)})#B"
checkTextHasNoErrors(code)
}
def testMixingBounds(): Unit = {
val code = s"def a: ({type B[C >: Int with String] = (C, C)})#B"
checkTextHasNoErrors(code)
}
def testExistentialBounds(): Unit = {
val code = s"def a: ({type B[C >: Array[X] forSome { type x }] = (C, C)})#B"
checkTextHasNoErrors(code)
}
def testAliasNoParam(): Unit = {
val code = "def a: ({type Lambda$ = String})#Lambda$"
checkTextHasNoErrors(code)
}
def testTupleInline(): Unit = {
val code = s"def a: $START({type R[A] = Tuple2[A, Double]})#R$END"
checkTextHasError(code)
val text = "def a: ({type R[A] = Tuple2[A, Double]})#R"
val res = "def a: Tuple2[?, Double]"
testFix(text, res)
}
def testHigherKindInline(): Unit = {
val code = s"def d: $START({type R[F[_], +B] = Either[F, B]})#R$END"
checkTextHasError(code)
val text = "def d: ({type R[F[_], +B] = Either[F, B]})#R"
val res = "def d: Either[?[_], +?]"
testFix(text, res)
}
def testTypeBoundsNoInline(): Unit = {
val code = s"def w: $START({type R[A <: String] = List[A]})#R$END"
checkTextHasError(code)
val text = "def w: ({type R[A <: String] = List[A]})#R"
val res = "def w: Lambda[`A <: String` => List[A]]"
testFix(text, res)
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/codeInspection/types/KindProjectorSimplifyTypeProjectionTest.scala | Scala | apache-2.0 | 5,217 |
package models
import scalikejdbc._
import skinny.orm.{Alias, SkinnyCRUDMapperWithId}
case class Train(id: Long, patternId: Long, trainClass: String, name: String) {
def pattern()(implicit session: DBSession = AutoSession): Option[Pattern] = Pattern.findById(patternId)
}
object Train extends SkinnyCRUDMapperWithId[Long, Train] {
override def defaultAlias: Alias[Train] = createAlias("t")
val t = defaultAlias
override def extract(rs: WrappedResultSet, n: ResultName[Train]): Train = autoConstruct(rs, n)
override def idToRawValue(id: Long): Any = id
override def rawValueToId(value: Any): Long = value.toString.toLong
def deleteWithTable(trainId: Long)(implicit session: DBSession = AutoSession): Boolean = {
TimeTable.deleteBy(sqls.eq(TimeTable.column.trainId, trainId)) >= 1 &&
Train.deleteById(trainId) == 1
}
}
case class TrainBuilder(patternId: Long, trainClass: String, name: String) {
def save()(implicit session: DBSession): Long = {
Train.createWithAttributes(
'patternId -> patternId,
'trainClass -> trainClass,
'name -> name
)
}
}
| ponkotuy/train-analyzer | app/models/Train.scala | Scala | apache-2.0 | 1,111 |
package com.harborx.api.system
import com.harborx.api.{HxAppComponents, OneAppPerTestWithComponents}
import org.scalamock.scalatest.MockFactory
import org.scalatest._
import org.scalatestplus.play.{PlaySpec, _}
import play.api.ApplicationLoader.Context
import play.api.mvc._
import play.api.test.FakeRequest
import play.api.test.Helpers._
import scala.concurrent.Future
class SystemSpec extends PlaySpec with OneAppPerTestWithComponents[HxAppComponents] with MustMatchers with MockFactory {
override def createComponents(context: Context) = new HxAppComponents(context)
"System controller" must {
"return OK when call GET /example" in {
val request = FakeRequest(GET, "/example")
val response = route(app, request)
response.isDefined mustEqual true
val result: Future[Result] = response.get
status(result) mustEqual OK
contentAsString(result) mustEqual "If you can see this, it means DI of Configuration is success!"
}
"return Test when call GET /env" in {
val request = FakeRequest(GET, "/env")
val response = route(app, request)
response.isDefined mustEqual true
val result: Future[Result] = response.get
status(result) mustEqual OK
contentAsString(result) mustEqual "current mode is:Test"
}
}
}
| harborx/play-di-example | play-macwire/test/com/harborx/api/system/SystemSpec.scala | Scala | mit | 1,303 |
package net.litola
import java.io.File
import org.scalatest.FunSpec
class SassCompilerSpec extends FunSpec {
describe("SassCompiler") {
it("should compile well-formed scss file") {
val scssFile = new File("src/test/resources/ok.scss")
val (full, minified, deps) = SassCompiler.compile(scssFile, Nil)
assert(full.replaceAll("""/\* line.* \*/\n""", "") === ".test {\n display: none; }\n")
assert(minified.orNull === ".test{display:none}\n")
assert(deps.length === 1)
assert(deps(0).getName === "ok.scss")
}
it("should compile well-formed scss file containing import") {
val scssFile = new File("src/test/resources/ok_import.scss")
val (full, minified, deps) = SassCompiler.compile(scssFile, Nil)
assert(full.replaceAll("""/\* line.* \*/\n""", "") === ".test-import {\n color: black; }\n\n.test {\n display: none; }\n")
assert(minified.orNull === ".test-import{color:black}.test{display:none}\n")
assert(deps.length === 2)
assert(deps(0).getName === "_imported.scss")
assert(deps(1).getName === "ok_import.scss")
}
it("should fail to compile malformed scss file") {
val scssFile = new File("src/test/resources/broken.scss")
val thrown = intercept[com.typesafe.sbt.web.CompileProblemsException] {
SassCompiler.compile(scssFile, Nil)
}
val expectedMessage =
"""Compilation error [ Error: Invalid CSS after " display: none;": expected "}", was "" ]"""
assert(thrown.problems.head.position().line().get() === 3)
assert(thrown.problems.head.message() === expectedMessage)
}
}
}
| arakcheev/play-sass | src/test/scala/net/litola/SassCompilerSpec.scala | Scala | mit | 1,639 |
package com.chatwork.quiz.misc
import org.scalatest.{ Matchers, FunSpec }
import scala.math.Ordering.IntOrdering
class BTreeSpec extends FunSpec with Matchers {
describe("BTree#size") {
it("should return the number of elements in the BTree") {
BTree(Branch(Leaf(1), 2, Leaf(3))).size shouldBe 3
BTree(Leaf(1)).size shouldBe 1
}
}
describe("BTree#max") {
it("should return the max value in the BTree") {
BTree(Branch(Leaf(1), 2, Leaf(3))).max shouldBe 3
}
}
describe("BTree#min") {
it("should return the min value in the BTree") {
BTree(Branch(Leaf(1), 2, Leaf(3))).min shouldBe 1
}
}
describe("BTree#sum") {
it("should return the sum of values in the BTree") {
BTree(Branch(Leaf(1), 2, Leaf(3))).sum shouldBe 6
}
}
describe("BTree#avg") {
it("should return the average of values in the BTree") {
BTree(Branch(Leaf(1), 2, Leaf(3))).avg shouldBe 2.0d
}
}
describe("BTree#find") {
it("should return a node has the value in the BTree") {
BTree(Branch(Leaf(1), 2, Leaf(3))).find(1) shouldBe Some(Leaf(1))
}
}
describe("BTree#apply") {
it("should return a new BTree from List[Int]") {
BTree(List(1, 2, 3)) shouldEqual BTree(Branch(Leaf(1), 2, Leaf(3)))
}
}
}
| kazzna/scala-quiz | src/test/scala/com/chatwork/quiz/misc/BTreeSpec.scala | Scala | mit | 1,296 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen.calls
import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
import org.apache.flink.table.codegen.{CodeGenerator, GeneratedExpression}
import org.apache.flink.table.codegen.calls.CallGenerator.generateCallIfArgsNotNull
class DateFormatCallGen extends CallGenerator {
override def generate(codeGenerator: CodeGenerator,
operands: Seq[GeneratedExpression])
: GeneratedExpression = {
if (operands.last.literal) {
val formatter = codeGenerator.addReusableDateFormatter(operands.last)
generateCallIfArgsNotNull(codeGenerator.nullCheck, STRING_TYPE_INFO, operands) {
terms => s"$formatter.print(${terms.head})"
}
} else {
generateCallIfArgsNotNull(codeGenerator.nullCheck, STRING_TYPE_INFO, operands) {
terms => s"""
|org.apache.flink.table.runtime.functions.
|DateTimeFunctions$$.MODULE$$.dateFormat(${terms.head}, ${terms.last});
""".stripMargin
}
}
}
}
| jinglining/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/calls/DateFormatCallGen.scala | Scala | apache-2.0 | 1,823 |
package com.datawizards.splot.charts
import com.datawizards.splot.SPlotBaseTest
import com.datawizards.splot.api.implicits._
import com.datawizards.splot.builders.PlotBuilder
import com.datawizards.splot.model.PlotAxisValues.XAxisValueTypeString
import com.datawizards.splot.model.PlotType
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BarPlotTest extends SPlotBaseTest {
test("Bar") {
val data = Seq(1, 4, 9)
data.plotBar(unitTestsDevice)
val plot = getLastPlot
assertResult(PlotType.Bar) {
plot.plotType
}
assertResult(1, "series count") {
plot.series.size
}
assertPlotXYAxisValues(Seq(1, 2, 3), data, getLastPlotFirstSeries)
}
test("String x values") {
val xs = Seq("c1", "c2", "c3")
val ys = Seq(1, 2, 3)
val data = xs zip ys
data.plotBar(unitTestsDevice)
assertResult(PlotType.Bar) {
getLastPlot.plotType
}
assertPlotXYAxisValues(xs, ys, getLastPlotFirstSeries)
}
test("Change title") {
val data = Seq(1.0, 4.0, 9.0)
data
.buildPlot()
.bar()
.titles("main title", "x title", "y title")
.display(unitTestsDevice)
val plot = getLastPlot
assertResult("main title") {
plot.title
}
assertResult("x title") {
plot.xTitle
}
assertResult("y title") {
plot.yTitle
}
}
test("Multiple series") {
val data = Seq(
("series1","b1",11),
("series1","b2",12),
("series2","b1",21),
("series2","b2",22),
("series3","b1",31),
("series3","b2",32),
("series3","b3",33)
)
data
.buildPlot()
.bar(_._2, _._3)
.seriesBy(_._1)
.display(unitTestsDevice)
assertPlotXYAxisValues(Seq("b1", "b2"), Seq(11, 12), getLastPlot.findSeriesByName("series1"))
assertPlotXYAxisValues(Seq("b1", "b2", "b3"), Seq(31, 32, 33), getLastPlot.findSeriesByName("series3"))
}
test("Multiple columns with sequence of X") {
val data = Seq(
("col1", 101),
("col1", 102),
("col1", 103),
("col2", 201),
("col2", 202)
)
data
.buildPlot()
.bar(_._2)
.colsBy(_._1)
.display(unitTestsDevice)
val plotsGrid = getLastPlotsGrid
val plotCol1 = plotsGrid.plotsMap(PlotBuilder.DefaultSingleGroup, new XAxisValueTypeString("col1"))
val plotCol2 = plotsGrid.plotsMap(PlotBuilder.DefaultSingleGroup, new XAxisValueTypeString("col2"))
assertPlotXYAxisValues(Seq(1,2,3), Seq(101,102,103), plotCol1.series.head)
assertPlotXYAxisValues(Seq(1,2), Seq(201,202), plotCol2.series.head)
}
test("IterablePlot[T].plotBar(x)") {
Seq((11,"a"),(12,"b")).plotBar(_._1)
assert(getLastPlot.plotType == PlotType.Bar)
assertPlotXYAxisValues(Seq(1,2), Seq(11,12), getLastPlotFirstSeries)
}
test("IterablePlot[T].plotBar(x,y)") {
Seq((11,"a"),(12,"b")).plotBar(_._2, _._1)
assert(getLastPlot.plotType == PlotType.Bar)
assertPlotXYAxisValues(Seq("a", "b"), Seq(11,12), getLastPlotFirstSeries)
}
test("IterableDoublePlot.plotBar()") {
Seq(11.0, 12.0).plotBar()
assert(getLastPlot.plotType == PlotType.Bar)
assertPlotXYAxisValues(Seq(1,2), Seq(11.0,12.0), getLastPlotFirstSeries)
}
test("IterableIntPlot.plotBar()") {
Seq(11, 12).plotBar()
assert(getLastPlot.plotType == PlotType.Bar)
assertPlotXYAxisValues(Seq(1,2), Seq(11,12), getLastPlotFirstSeries)
}
test("IterableIntPlot.plotBar(device)") {
Seq(11, 12).plotBar(unitTestsDevice)
assert(getLastPlot.plotType == PlotType.Bar)
assertPlotXYAxisValues(Seq(1,2), Seq(11,12), getLastPlotFirstSeries)
}
test("IterablePairOfXYAxis.plotBar(x,y)") {
Seq(("a",11),("b",12)).plotBar()
assert(getLastPlot.plotType == PlotType.Bar)
assertPlotXYAxisValues(Seq("a", "b"), Seq(11,12), getLastPlotFirstSeries)
}
}
| piotr-kalanski/SPlot | src/test/scala/com/datawizards/splot/charts/BarPlotTest.scala | Scala | apache-2.0 | 3,903 |
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.input.CharSequenceReader
object IntParser extends Parsers {
type Elem = Char
def digit = elem("a digit (character between '0' and '9')",
(c => c >= '0' && c <= '9')) ^^ (_ - '0')
def digits = rep1(digit) ^^ (numbers => (0 /: numbers)((a,b) => 10*a+b))
def sign = elem('+') ^^^ 1 | elem('-') ^^^ -1
def intNumber = opt(sign)~digits ^^ {
case Some(s)~num => s*num
case None~num => num
}
def apply(str: String): Either[String,Int] = {
import scala.util.parsing.input.CharSequenceReader
val input = new CharSequenceReader(str)
intNumber(input) match {
case Success(num, _) => Right(num)
case NoSuccess(msg, remainder) =>
Left(msg + " at " + remainder.pos +
" instead of " + remainder.first)
}
}
}
| grzegorzbalcerek/scala-book-examples | examples/IntParser.scala | Scala | mit | 854 |
package controllers
import javax.inject.Inject
import com.mohiva.play.silhouette.api.Authenticator.Implicits._
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.exceptions.ProviderException
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.api.util.{ Clock, Credentials }
import com.mohiva.play.silhouette.impl.exceptions.IdentityNotFoundException
import com.mohiva.play.silhouette.impl.providers._
import com.typesafe.config.Config
import forms.SignInForm
import net.ceedubs.ficus.Ficus._
import play.api.Configuration
import play.api.i18n.{ I18nSupport, Messages, MessagesApi }
import play.api.libs.concurrent.Execution.Implicits._
import play.api.mvc._
import _root_.services.UserService
import utils.auth.DefaultEnv
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.language.postfixOps
/**
* The `Sign In` controller.
*
* @param silhouette The Silhouette stack.
* @param userService The user service implementation.
* @param authInfoRepository The auth info repository implementation.
* @param credentialsProvider The credentials provider.
* @param socialProviderRegistry The social provider registry.
* @param configuration The Play configuration.
* @param clock The clock instance.
*/
class SignInController @Inject() (
silhouette: Silhouette[DefaultEnv],
userService: UserService,
authInfoRepository: AuthInfoRepository,
credentialsProvider: CredentialsProvider,
socialProviderRegistry: SocialProviderRegistry,
configuration: Configuration,
components: ControllerComponents,
clock: Clock)
extends AbstractController(components) with I18nSupport {
/**
* Views the `Sign In` page.
*
* @return The result to display.
*/
def view: Action[AnyContent] = silhouette.UnsecuredAction.async { implicit request =>
Future.successful(Ok(
views.html.authLayout(
"login-view",
"")(
views.html.signinForm(routes.SocialAuthController.authenticate("idme").url))))
}
/**
* Handles the submitted form.
*
* @return The result to display.
*/
def submit: Action[AnyContent] = silhouette.UnsecuredAction.async { implicit request =>
SignInForm.form.bindFromRequest.fold(
errors => Future.successful(
Redirect(routes.SignInController.view())
.flashing("error" -> errors.errorsAsJson.toString())),
data => {
val credentials = Credentials(data.email, data.password)
credentialsProvider.authenticate(credentials).flatMap { loginInfo =>
val result = Redirect("/")
userService.retrieve(loginInfo).flatMap {
case Some(user) =>
val c: Config = configuration.underlying
silhouette.env.authenticatorService.create(loginInfo).map {
case authenticator if data.rememberMe =>
authenticator.copy(
expirationDateTime = clock.now + c.as[FiniteDuration]("silhouette.authenticator.rememberMe.authenticatorExpiry"),
idleTimeout = c.getAs[FiniteDuration]("silhouette.authenticator.rememberMe.authenticatorIdleTimeout"),
cookieMaxAge = c.getAs[FiniteDuration]("silhouette.authenticator.rememberMe.cookieMaxAge"))
case authenticator => authenticator
}.flatMap { authenticator =>
silhouette.env.eventBus.publish(LoginEvent(user, request))
silhouette.env.authenticatorService.init(authenticator).flatMap { v =>
silhouette.env.authenticatorService.embed(v, result)
}
}
case None => Future.failed(new IdentityNotFoundException("Couldn't find user"))
}
}.recover {
case e: ProviderException =>
Redirect(routes.SignInController.view()).flashing("error" -> Messages("invalid.credentials"))
}
})
}
}
| vetafi/vetafi-web | app/controllers/SignInController.scala | Scala | apache-2.0 | 3,948 |
package $organization$
import com.imageworks.migration._
class Migrate_$datetime$_$migration_name;format="Camel"$
extends Migration
{
def up() {
}
def down() {
}
}
| akiomik/scala-migrations.g8 | src/main/g8/src/main/scala/$organization__packaged$/Migrate_$datetime$_$migration_name__Camel$.scala | Scala | mit | 178 |
package reactivemongo.api.bson
/**
* Implicit conversions for handler & values types
* between `reactivemongo.bson` and `reactivemongo.api.bson` .
*
* {{{
* import reactivemongo.api.bson.compat._
* }}}
*
* For more specific imports, see [[ValueConverters]]
* and [[HandlerConverters]] .
*/
package object compat extends ValueConverters with HandlerConverters
| ornicar/ReactiveMongo | bson-compat/src/main/scala/package.scala | Scala | apache-2.0 | 370 |
package tshrdlu.twitter
/**
* Copyright 2013 Jason Baldridge
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import twitter4j.TwitterStreamFactory
/**
* Extend this to have a stream that is configured and ready to use.
*/
trait StreamInstance {
val stream = new TwitterStreamFactory().getInstance
}
/**
* An example of how to obtain a certain number of tweets from the
* sample stream and cluster them.
*/
object ClusterStream extends StreamInstance {
val engTweets = new EnglishStatusAccumulator
def main(args: Array[String]) {
val Array(numClusters, numTweets) = args.map(_.toInt)
stream.addListener(engTweets)
println("Collecting " + numTweets + " tweets.")
stream.sample
while (engTweets.count < numTweets) { Thread.sleep(1) }
stream.shutdown
println("Running kmeans.")
val clustered = new StatusClusterer()(engTweets.tweets, numClusters)
println("Examples from the clusters found.\n")
for ((id,cluster) <- clustered) {
println("Cluster " + id)
cluster.take(5).foreach(println)
println
}
}
}
| utcompling/tshrdlu | src/main/scala/tshrdlu/twitter/Stream.scala | Scala | apache-2.0 | 1,601 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.triplerush
import com.signalcollect.util.IntHashMap
import com.signalcollect.util.IntValueHashMap
import java.util.concurrent.locks.ReadWriteLock
import java.util.concurrent.locks.ReentrantReadWriteLock
import scala.io.Source
import scala.collection.mutable.ResizableArray
import scala.collection.mutable.ArrayBuffer
trait Dictionary {
def contains(s: String): Boolean
def apply(s: String): Int
def apply(id: Int): String
// Can only be called when there are no concurrent writes.
def unsafeDecode(id: Int): String
def unsafeGetEncoded(s: String): Int
def decode(id: Int): Option[String]
def clear
}
class HashMapDictionary(
initialSize: Int = 32768,
rehashFraction: Float = 0.5f) extends Dictionary {
private val lock = new ReentrantReadWriteLock
private val read = lock.readLock
private val write = lock.writeLock
private var id2String = new ArrayBuffer[String](initialSize)
id2String += "*" // Wildcard entry at 0
private var string2Id = new IntValueHashMap[String](initialSize, rehashFraction)
private var maxId = 0
def clear {
write.lock
try {
maxId = 0
id2String = new ArrayBuffer[String](initialSize)
id2String += "*" // Wildcard entry at 0
string2Id = new IntValueHashMap[String](initialSize, rehashFraction)
} finally {
write.unlock
}
}
def contains(s: String): Boolean = {
read.lock
try {
val hasExistingEncoding = string2Id.get(s) != 0
hasExistingEncoding
} finally {
read.unlock
}
}
@inline final def unsafeGetEncoded(s: String): Int = {
string2Id.get(s)
}
def reserveId: Int = {
write.lock
var reserved = 0
try {
maxId += 1
id2String += null
reserved = maxId
} finally {
write.unlock
}
reserved
}
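/** Returns the id for `s`, assigning a fresh one if the string has not been seen before.
  * The lookup is done under the read lock; only when no encoding exists yet is the
  * write lock taken to insert the new mapping. */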
def apply(s: String): Int = {
read.lock
val existingEncoding: Int = try {
string2Id.get(s)
} finally {
read.unlock
}
if (existingEncoding == 0) {
write.lock
try {
val id = {
maxId += 1
maxId
}
string2Id.put(s, id)
id2String += s
id
} finally {
write.unlock
}
} else {
existingEncoding
}
}
def apply(id: Int): String = {
read.lock
try {
id2String(id)
} finally {
read.unlock
}
}
/**
* Returns null if no entry with the given id is found.
*
* Only call if there are no concurrent modifications of the dictionary.
*/
@inline final def unsafeDecode(id: Int): String = {
id2String(id)
}
def decode(id: Int): Option[String] = {
read.lock
try {
val decoded = apply(id)
if (decoded != null) {
Some(decoded)
} else {
None
}
} finally {
read.unlock
}
}
/**
* File format:
* http://dbpedia.org/resource/Kauffman_%28crater%29 -> 5421181
* http://dbpedia.org/resource/Watersports -> 2654992
*
* Warning: this has to be done before any other dictionary entries are added.
*/
def loadFromFile(fileName: String) {
assert(string2Id.isEmpty)
assert(id2String.isEmpty)
println(s"Parsing dictionary from $fileName.")
def parseEntry(line: String): (Int, String) = {
val split = line.split(" -> ")
val string = split(0)
val id = split(1).toInt
(id, string)
}
val entries = Source.fromFile(fileName).getLines
write.lock
var entriesAdded = 0
try {
for (entry <- entries) {
val (id, string) = parseEntry(entry)
maxId = math.max(id, maxId)
string2Id.put(string, id)
id2String += string
entriesAdded += 1
if (entriesAdded % 10000 == 0) {
println(s"Added $entriesAdded to dictionary so far...")
}
}
} finally {
write.unlock
}
println(s"Finished loading. Total entries added: $entriesAdded.")
}
}
| jacqueslk/triplerush-filter | src/main/scala/com/signalcollect/triplerush/Dictionary.scala | Scala | apache-2.0 | 4,640 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import java.util
import com.intellij.psi.impl.source.tree.LazyParseablePsiElement
import com.intellij.psi.{PsiClass, PsiElement, PsiElementVisitor, PsiModifiableCodeBlock}
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScValue, ScVariable}
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
class ScBlockExprImpl(text: CharSequence) extends LazyParseablePsiElement(ScalaElementTypes.BLOCK_EXPR, text)
with ScBlockExpr with PsiModifiableCodeBlock {
//todo: bad architecture to have it duplicated here, as ScBlockExprImpl is not instance of ScalaPsiElementImpl
override def getContext: PsiElement = {
context match {
case null => super.getContext
case _ => context
}
}
override def toString: String = "BlockExpression"
override def isAnonymousFunction: Boolean = caseClauses.isDefined
protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](aClass: Class[T]): Array[T] = {
val result: util.List[T] = new util.ArrayList[T]
var cur: PsiElement = getFirstChild
while (cur != null) {
if (aClass.isInstance(cur)) result.add(cur.asInstanceOf[T])
cur = cur.getNextSibling
}
result.toArray[T](java.lang.reflect.Array.newInstance(aClass, result.size).asInstanceOf[Array[T]])
}
protected def findChildByClassScala[T >: Null <: ScalaPsiElement](aClass: Class[T]): T = {
var cur: PsiElement = getFirstChild
while (cur != null) {
if (aClass.isInstance(cur)) return cur.asInstanceOf[T]
cur = cur.getNextSibling
}
null
}
def shouldChangeModificationCount(place: PsiElement): Boolean = {
var parent = getParent
while (parent != null) {
parent match {
case f: ScFunction => f.returnTypeElement match {
case Some(ret) => return false
case None =>
if (!f.hasAssign) return false
return ScalaPsiUtil.shouldChangeModificationCount(f)
}
case v: ScValue => return ScalaPsiUtil.shouldChangeModificationCount(v)
case v: ScVariable => return ScalaPsiUtil.shouldChangeModificationCount(v)
case t: PsiClass => return true
case bl: ScBlockExprImpl => return bl.shouldChangeModificationCount(this)
case _ =>
}
parent = parent.getParent
}
false
}
override def accept(visitor: ScalaElementVisitor) = {visitor.visitBlockExpression(this)}
override def accept(visitor: PsiElementVisitor) {
visitor match {
case s: ScalaElementVisitor => accept(s)
case _ => super.accept(visitor)
}
}
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScBlockExprImpl.scala | Scala | apache-2.0 | 2,842 |
package hclu.hreg.common
/**
* Wraps a string so that the value is serialized as a JSON object rather than as a plain string,
* e.g.
* {"value":"Some text"} instead of "Some text"
*/
case class StringJsonWrapper(value: String)
| tsechov/hclu-registry | backend/src/main/scala/hclu/hreg/common/StringJsonWrapper.scala | Scala | apache-2.0 | 216 |
package com.landoop.streamreactor.connect.hive.sink
import com.datamountaineer.streamreactor.common.utils.JarManifest
import java.util
import com.landoop.streamreactor.connect.hive.sink.config.HiveSinkConfigDef
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.sink.SinkConnector
import scala.collection.JavaConverters._
class HiveSinkConnector extends SinkConnector {
val logger = org.slf4j.LoggerFactory.getLogger(getClass.getName)
private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
private var props: util.Map[String, String] = _
override def version(): String = manifest.version()
override def taskClass(): Class[_ <: Task] = classOf[HiveSinkTask]
override def config(): ConfigDef = HiveSinkConfigDef.config
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Creating hive sink connector")
this.props = props
}
override def stop(): Unit = ()
override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] = {
logger.info(s"Creating $maxTasks tasks config")
List.fill(maxTasks)(props).asJava
}
} | datamountaineer/stream-reactor | kafka-connect-hive/src/main/scala/com/landoop/streamreactor/connect/hive/sink/HiveSinkConnector.scala | Scala | apache-2.0 | 1,202 |
package fly.play.aws
import org.specs2.mutable.Specification
object AwsErrorSpec extends Specification {
"AwsError" should {
"create a correct error from XML" in {
val xml = <ErrorResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
<Error>
<Type>Sender</Type>
<Code>InvalidClientTokenId</Code>
<Message>The security token included in the request is invalid.</Message>
</Error>
<RequestId>6fbc48cc-c16a-11e1-bd3b-1529eff94a35</RequestId>
</ErrorResponse>
AwsError(403, xml) must beLike {
case AwsError(403, "InvalidClientTokenId", "The security token included in the request is invalid.", Some(x)) if (x == xml) => ok
}
}
"create a correct error from XML without the correct root element" in {
val xml = <Error>
<Code>NoSuchKey</Code>
<Message>The specified key does not exist.</Message>
<Key>nonExistingElement</Key>
<RequestId>F5944A57D5444A0E</RequestId>
<HostId>z85KnjetGT4/VVHxTYLdK7ykqQxygZCVBM6dI/ALBvw93f/0eUIiDKp3V5aDr8L/</HostId>
</Error>
AwsError(403, xml) must beLike {
case AwsError(403, "NoSuchKey", "The specified key does not exist.", Some(x)) if (x == xml) => ok
}
}
}
} | Rhinofly/play-s3 | src/test/scala/fly/play/aws/AwsErrorSpec.scala | Scala | mit | 1,404 |
package org.usagram.clarify.validator
import org.usagram.clarify.Indefinite
import org.usagram.clarify.error.RequireAtLeast
class AtLeast[-V](limit: Int) extends Validator[Iterable[V]] {
def validate(value: Iterable[V]) =
failIf(value.size < limit) {
RequireAtLeast(limit)
}
def characters: AtLeastCharacters =
AtLeastCharacters(limit)
}
object AtLeast {
val validator = {
val limit: Validator[Int] = GreaterThanOrEqualTo.one
limit
}
def apply[V](limit: Int): AtLeast[V] =
validator(Indefinite(limit) label "limit").resolve(new AtLeast(_))
def one[V]: AtLeast[V] = apply(1)
}
| takkkun/clarify | core/src/main/scala/org/usagram/clarify/validator/AtLeast.scala | Scala | mit | 625 |
package cromwell.util.docker
import javax.xml.bind.DatatypeConverter
import cromwell.core.ErrorOr
import cromwell.util.TryUtil
import wdl4s.values._
import scala.util.{Failure, Success, Try}
import scalaz.Scalaz._
import scalaz.{Failure => FailureZ, Success => SuccessZ, _}
case class DockerHash(hashType: String, hashString: String) {
val digest = s"$hashType:$hashString"
}
object DockerHash {
/**
* Creates a unique hash from a sequence of hashes.
* If there is a single hash, returns just the single hash string.
* If there is more than one hash, concatenates the hash strings together, and then md5s the resulting string.
* Returns a failure if there are zero hashes, or if there is more than one hash type.
*
* @param hashCollectionType A description of the collection.
* @param dockerHashes The sequence of hashes to rehash.
* @return A single docker hash.
*/
def fromSeq(hashCollectionType: String, dockerHashes: Seq[DockerHash]): Try[DockerHash] = {
dockerHashes.size match {
case 0 =>
// Need at least one docker hash to hash.
Failure(new IllegalArgumentException("docker hashes is empty"))
case 1 =>
// Just use the original hash, but precede it with the collection type.
val dockerHash = dockerHashes.head
Success(dockerHash.copy(s"$hashCollectionType-${dockerHash.hashType}"))
case _ =>
// Concatenate the hash strings together, then md5 the result to create another hash.
val hashTypes = dockerHashes.map(_.hashType).distinct
hashTypes.size match {
case 1 =>
val hashType = s"$hashCollectionType-${hashTypes.head}-md5"
val hashString = dockerHashes.map(_.hashString).mkString("").md5Sum
Success(DockerHash(hashType, hashString))
case _ => Failure(new IllegalArgumentException(s"found more than one docker hash type: $hashTypes"))
}
}
}
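// Illustrative sketch (not from the original source): how `fromSeq` combines hashes.
// The collection label "layers" and the hex digests below are made-up example values.
//
//   val layers = Seq(
//     DockerHash("sha256", "00112233445566778899aabbccddeeff"),
//     DockerHash("sha256", "ffeeddccbbaa99887766554433221100"))
//   // A single hash type but more than one hash: the hash strings are concatenated
//   // and md5'd, yielding Success(DockerHash("layers-sha256-md5", <md5 of the concatenation>)).
//   DockerHash.fromSeq("layers", layers)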
/**
* Creates a unique hash from a sequence of hashes.
* If there is a single hash, returns just the single hash string.
* If there is more than one hash, concatenates the hash strings together, and then md5s the resulting string.
* Returns a failure if there are zero hashes, or if there is more than one hash type.
*
* @param hashCollectionType A description of the collection.
* @param dockerHashes The sequence of hashes to rehash.
* @return A single docker hash.
*/
def fromTries(hashCollectionType: String, dockerHashes: Seq[Try[DockerHash]]): Try[DockerHash] = {
TryUtil.sequence(dockerHashes).flatMap(fromSeq(hashCollectionType, _))
}
/**
* Parses a string like "type:12345678" into a DockerHash(type, hash)
*/
def fromDigest(digest: String): Try[DockerHash] = {
digest.indexOf(':') match {
case -1 => fromHash("unknown", digest)
case index => fromHash(digest.substring(0, index), digest.substring(index + 1))
}
}
/** Creates a docker hash, after validating the hash string. */
def fromHash(hashType: String, hashString: String): Try[DockerHash] = {
validateHashString(hashString) match {
case SuccessZ(_) => Success(DockerHash(hashType, hashString))
case FailureZ(e) =>
val errorMessages = e.toList.mkString(", ")
Failure(new IllegalArgumentException(s"hashString '$hashString' is not valid: $errorMessages"))
}
}
/** Validates the hash string. */
private def validateHashString(hashString: String): ErrorOr[String] = {
val validation = validateHashStringHex(hashString) +++ validateHashStringLength(hashString)
// Turn the concatenated results back into just the hashString.
validation map { _ => hashString }
}
/** Return the hash if it's valid hex, or the exception message. */
private def validateHashStringHex(hashString: String): ErrorOr[String] = {
// We only want to know that we _could_ parse the hash.
Validation
.fromTryCatchNonFatal(DatatypeConverter.parseHexBinary(hashString))
.map(_ => hashString)
.leftMap(_.getMessage)
.toValidationNel
}
/** Return the hash if it has a valid length, or an error message. */
private def validateHashStringLength(hashString: String): ErrorOr[String] = {
val length = hashString.length
if (length == 8 || length == 32 || length == 64) {
hashString.successNel
} else {
s"unexpected hash length: $length".failureNel
}
}
}
| cowmoo/cromwell | engine/src/main/scala/cromwell/util/docker/DockerHash.scala | Scala | bsd-3-clause | 4,468 |
/*
* Copyright 2014 Alan Rodas Bonjour
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.alanrodas.scaland.cli
import java.text.ParseException
import language.implicitConversions
import com.alanrodas.scaland.cli.runtime._
/**
* Provides a set of implicit conversions between different types of values.
*
* Among others, it provides conversions between [[Flag]], [[Argument]] and
* [[Value]] to a [[Boolean]] and [[Option]]
*
* It also provides a set of conversions between [[String]] and different
* basic types. This is especially useful when using the ''valueAs'' methods,
* provided in some classes.
*/
object implicits {
/**
* This exception is thrown when a string fails to parse to a given type.
* It is a more general case of exceptions such as
* [[NumberFormatException]], as it covers not only numbers
* but any other type conversion stated in [[implicits]].
*
* @param message The error message to show
*/
class ParseException(message : String) extends RuntimeException(message) {
def this() = this("")
}
/**
* Transforms a [[Definable]] into a [[Boolean]]
*
* This allows you to test a definable value for existence just by fetching it, e.g.
* {{{
* val arg : Argument
* if (arg) "Defined as " + arg.value else "Not Defined"
* }}}
*
* This is especially useful when defining the callback of a command, e.g.
*
* {{{
* root accepts (arg named "test" as "Default") does {cmd =>
* println(
* if (cmd argument "test") "Defined as " + (cmd argument "test").value
* else "Not defined"
* )
* }
* }}}
*/
implicit def definable2bool(arg : Definable) : Boolean = arg.isDefined
/**
* Transforms an [[Argument]] into an [[Option]].
*
* This allows you to work with an argument as an optional value, only defined when
* the argument was user defined, e.g.
* {{{
* val arg : Argument
* arg.fold("Not Defined"){value => "Defined as " + value}
* }}}
*
* This is especially useful when defining the callback of a command, e.g.
*
* {{{
* root accepts (arg named "test" as "Default") does {cmd =>
* println(
* arg.fold("Not Defined"){value => "Defined as " + value}
* )
* }
* }}}
*/
implicit def argument2option[T](arg : Argument[T]) : Option[Seq[T]] = {
if (arg.isDefined) Some(arg.values)
else None
}
/**
* Transforms a [[Value]] into an [[Option]].
*
* This allows you to work with a value as an optional value, only defined when
* the value was user defined, e.g.
* {{{
* val arg : Value
* arg.fold("Not Defined"){value => "Defined as " + value}
* }}}
*
* This is especially useful when defining the callback of a command, e.g.
*
* {{{
* root receives (value named "test" as "Default") does {cmd =>
* println(
* arg.fold("Not Defined"){value => "Defined as " + value}
* )
* }
* }}}
*/
implicit def value2option[T](value : Value[T]) : Option[T] =
if (value.isDefined) Some(value.value) else None
/**
* Transforms from [[String]] to [[Int]]
*
* @throws ParseException if the string does not represent a valid Int.
*/
implicit def stringToInt(s : String) : Int = {
val maybeInt = stringToSomeInt(s)
if (maybeInt.isEmpty) throw new ParseException(s"$s cannot be parsed as an Int.")
maybeInt.get
}
/**
* Transforms from [[String]] to [[Double]]
*
* @throws ParseException if the string does not represent a valid Double.
*/
implicit def stringToDouble(s : String) : Double = {
val maybeDouble = stringToSomeDouble(s)
if (maybeDouble.isEmpty) throw new ParseException(s"$s cannot be parsed as a Double.")
maybeDouble.get
}
/**
* Transforms from [[String]] to [[Float]]
*
* @throws ParseException if the string does not represent a valid Float.
*/
implicit def stringToFloat(s : String) : Float = {
val maybeFloat = stringToSomeFloat(s)
if (maybeFloat.isEmpty) throw new ParseException(s"$s cannot be parsed as a Float.")
maybeFloat.get
}
/**
* Transforms from [[String]] to [[Boolean]]
*
* Returns ''true'' if the string matches any of ''true'', ''t'', ''yes'', ''y'', ''on'' or ''1'',
* and ''false'' if it matches ''false'', ''f'', ''no'', ''n'', ''off'' or ''0''.
*
* @throws ParseException if the string matches neither set.
*/
implicit def stringToBool(s : String) : Boolean = {
val maybeBool = stringToSomeBool(s)
if (maybeBool.isEmpty) throw new ParseException(s"$s cannot be parsed as a Boolean.")
maybeBool.get
}
/**
* Transforms from [[String]] to [[Option]] of [[Int]]
*
* Returns ''Some(x)'' if the string represents a valid Int ''x'', and ''None''
* otherwise.
*/
implicit def stringToSomeInt(s : String) : Option[Int] =
try {Some(Integer.decode(s))} catch {case nfe : NumberFormatException => None }
/**
* Transforms from [[String]] to [[Option]] of [[Double]]
*
* Returns ''Some(x)'' if the string represents a valid Double ''x'', and ''None''
* otherwise.
*/
implicit def stringToSomeDouble(s : String) : Option[Double] =
try {Some(java.lang.Double.valueOf(s))} catch {case nfe : NumberFormatException => None }
/**
* Transforms from [[String]] to [[Option]] of [[Float]]
*
* Returns ''Some(x)'' if the string represents a valid Float ''x'', and ''None''
* otherwise.
*/
implicit def stringToSomeFloat(s : String) : Option[Float] =
try {Some(java.lang.Float.valueOf(s))} catch {case nfe : NumberFormatException => None }
/**
* Transforms from [[String]] to [[Option]] of [[Boolean]]
*
* Returns ''Some(true)'' if the string matches any of ''true'', ''t'',
* ''yes'', ''y'', ''on'' or ''1'', returns ''Some(false)'' if it matches
* ''false'', ''f'', ''no'', ''n'', ''off'' or ''0'' and ''None'' otherwise.
*/
implicit def stringToSomeBool(s : String) : Option[Boolean] = {
if (Set("true", "on", "yes", "t", "y", "1").contains(s.toLowerCase)) Some(true)
else if (Set("false", "off", "no", "f", "n", "0").contains(s.toLowerCase)) Some(false)
else None
}
/**
* Transforms from [[String]] to [[Option]] of [[String]]
*
   * Returns ''Some(s)'' if the string is not empty, and ''None'' otherwise.
*/
implicit def stringToSomeString(s : String) : Option[String] =
if (s.isEmpty) None else Some(s)
}
| alanrodas/scaland | cli/src/main/scala/com/alanrodas/scaland/cli/implicits.scala | Scala | apache-2.0 | 6,928 |
/**
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.datasource.mongodb.rdd
import com.mongodb.casbah.Imports._
import com.stratio.datasource.Config
import com.stratio.datasource.mongodb.reader.MongodbReader
import org.apache.spark._
import org.apache.spark.sql.sources.Filter
/**
* MongoRDD values iterator.
*
* @param taskContext Spark task context.
* @param partition Spark partition.
* @param config Configuration object.
 * @param requiredColumns Fields to prune the projection to.
 * @param filters Query filters to push down.
*/
class MongodbRDDIterator(
taskContext: TaskContext,
partition: Partition,
config: Config,
requiredColumns: Array[String],
filters: Array[Filter])
extends Iterator[DBObject] {
protected var finished = false
private var closed = false
private var initialized = false
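  // The reader is created lazily on first use (hasNext/next); `initialized` lets
  // close() skip cleanup when the iterator was never consumed.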
lazy val reader = {
initialized = true
initReader()
}
// Register an on-task-completion callback to close the input stream.
taskContext.addTaskCompletionListener((context: TaskContext) => closeIfNeeded())
override def hasNext: Boolean = {
!finished && reader.hasNext
}
override def next(): DBObject = {
if (!hasNext) {
throw new NoSuchElementException("End of stream")
}
reader.next()
}
def closeIfNeeded(): Unit = {
if (!closed) {
close()
closed = true
}
}
protected def close(): Unit = {
if (initialized) {
reader.close()
}
}
def initReader() = {
    val reader = new MongodbReader(config, requiredColumns, filters)
reader.init(partition)
reader
}
} | pfcoperez/spark-mongodb | spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/rdd/MongodbRDDIterator.scala | Scala | apache-2.0 | 2,144 |
package com.twitter.finagle.stats
import com.twitter.util.{Future, Await}
import org.junit.runner.RunWith
import org.scalatest.{Matchers, FunSuite}
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BroadcastStatsReceiverTest extends FunSuite
with Matchers
{
test("counter") {
val recv1 = new InMemoryStatsReceiver
val recvA = recv1.scope("scopeA")
val recvB = recv1.scope("scopeB")
val broadcastA = BroadcastStatsReceiver(Seq(recv1, recvA))
val counterA = broadcastA.counter("hi")
assert(None == recv1.counters.get(Seq("hi")))
assert(None == recv1.counters.get(Seq("scopeA", "hi")))
counterA.incr(1)
assert(1 == recv1.counters(Seq("hi")))
assert(1 == recv1.counters(Seq("scopeA", "hi")))
val broadcastB = BroadcastStatsReceiver(Seq(recv1, recvB))
val counterB = broadcastB.counter("hi")
assert(None == recv1.counters.get(Seq("scopeB", "hi")))
counterB.incr(1)
assert(2 == recv1.counters(Seq("hi")))
assert(1 == recv1.counters(Seq("scopeA", "hi")))
assert(1 == recv1.counters(Seq("scopeB", "hi")))
}
test("stat") {
val recv1 = new InMemoryStatsReceiver
val recvA = recv1.scope("scopeA")
val recvB = recv1.scope("scopeB")
val broadcastA = BroadcastStatsReceiver(Seq(recv1, recvA))
val statA = broadcastA.stat("hi")
assert(None == recv1.stats.get(Seq("hi")))
assert(None == recv1.stats.get(Seq("scopeA", "hi")))
statA.add(5f)
assert(Seq(5f) == recv1.stats(Seq("hi")))
assert(Seq(5f) == recv1.stats(Seq("scopeA", "hi")))
val broadcastB = BroadcastStatsReceiver(Seq(recv1, recvB))
val statB = broadcastB.stat("hi")
assert(None == recv1.stats.get(Seq("scopeB", "hi")))
statB.add(10f)
assert(Seq(5f, 10f) == recv1.stats(Seq("hi")).sorted)
assert(Seq(5f) == recv1.stats(Seq("scopeA", "hi")))
assert(Seq(10f) == recv1.stats(Seq("scopeB", "hi")))
}
test("gauge") {
val recv1 = new InMemoryStatsReceiver
val recvA = recv1.scope("scopeA")
val broadcastA = BroadcastStatsReceiver(Seq(recv1, recvA))
assert(None == recv1.gauges.get(Seq("hi")))
assert(None == recv1.gauges.get(Seq("scopeA", "hi")))
val gaugeA = broadcastA.addGauge("hi") { 5f }
assert(5f == recv1.gauges(Seq("hi"))())
assert(5f == recv1.gauges(Seq("scopeA", "hi"))())
gaugeA.remove()
assert(None == recv1.gauges.get(Seq("hi")))
assert(None == recv1.gauges.get(Seq("scopeA", "hi")))
}
test("scope") {
val base = new InMemoryStatsReceiver
val scoped = base.scope("scoped")
val subscoped = BroadcastStatsReceiver(Seq(base, scoped)).scope("subscoped")
val counter = subscoped.counter("yolo")
counter.incr(9)
assert(9 == base.counters(Seq("subscoped", "yolo")))
assert(9 == base.counters(Seq("scoped", "subscoped", "yolo")))
}
test("scopeSuffix") {
val base = new InMemoryStatsReceiver
val scoped = base.scope("scoped")
val subscoped = BroadcastStatsReceiver(Seq(base, scoped))
.scopeSuffix("suffixed")
.scope("sub")
val counter = subscoped.counter("yolo")
counter.incr(9)
assert(9 == base.counters(Seq("sub", "suffixed", "yolo")))
assert(9 == base.counters(Seq("scoped", "sub", "suffixed", "yolo")))
}
test("time") {
val recv1 = new InMemoryStatsReceiver
val recv2 = new InMemoryStatsReceiver
val recv = BroadcastStatsReceiver(Seq(recv1, recv2))
val statName = Seq("meh")
recv1.stats.get(statName).isEmpty should be(true)
recv2.stats.get(statName).isEmpty should be(true)
val stat = recv.stat("meh")
Stat.time(stat) { () }
recv1.stats(statName).size should be(1)
recv2.stats(statName).size should be(1)
Stat.time(stat) { () }
recv1.stats(statName).size should be(2)
recv2.stats(statName).size should be(2)
}
test("timeFuture") {
val recv1 = new InMemoryStatsReceiver
val recv2 = new InMemoryStatsReceiver
val recv = BroadcastStatsReceiver(Seq(recv1, recv2))
val statName = Seq("meh")
recv1.stats.get(statName).isEmpty should be(true)
recv2.stats.get(statName).isEmpty should be(true)
Await.result(Stat.timeFuture(recv.stat("meh"))(Future.Unit))
recv1.stats(statName).size should be(1)
recv2.stats(statName).size should be(1)
Await.result(Stat.timeFuture(recv.stat("meh"))(Future.Unit))
recv1.stats(statName).size should be(2)
recv2.stats(statName).size should be(2)
}
}
| edombowsky/util | util-stats/src/test/scala/com/twitter/finagle/stats/BroadcastStatsReceiverTest.scala | Scala | apache-2.0 | 4,455 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka
import java.util.Properties
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import scala.util.control.NonFatal
import kafka.api._
import kafka.common.{ErrorMapping, OffsetAndMetadata, OffsetMetadataAndError, TopicAndPartition}
import kafka.consumer.{ConsumerConfig, SimpleConsumer}
import org.apache.spark.SparkException
import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
* Convenience methods for interacting with a Kafka cluster.
* See <a href="https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol">
* A Guide To The Kafka Protocol</a> for more details on individual api calls.
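 *
 * Illustrative use (broker address and topic name are hypothetical):
 * {{{
 * val kc = new KafkaCluster(Map("metadata.broker.list" -> "localhost:9092"))
 * val partitions = kc.getPartitions(Set("events"))
 * val latestOffsets = partitions.right.flatMap(kc.getLatestLeaderOffsets)
 * }}}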
* @param kafkaParams Kafka <a href="http://kafka.apache.org/documentation.html#configuration">
* configuration parameters</a>.
* Requires "metadata.broker.list" or "bootstrap.servers" to be set with Kafka broker(s),
* NOT zookeeper servers, specified in host1:port1,host2:port2 form
*/
@DeveloperApi
class KafkaCluster(val kafkaParams: Map[String, String]) extends Serializable {
import KafkaCluster.{Err, LeaderOffset, SimpleConsumerConfig}
// ConsumerConfig isn't serializable
@transient private var _config: SimpleConsumerConfig = null
def config: SimpleConsumerConfig = this.synchronized {
if (_config == null) {
_config = SimpleConsumerConfig(kafkaParams)
}
_config
}
def connect(host: String, port: Int): SimpleConsumer =
new SimpleConsumer(host, port, config.socketTimeoutMs,
config.socketReceiveBufferBytes, config.clientId)
def connectLeader(topic: String, partition: Int): Either[Err, SimpleConsumer] =
findLeader(topic, partition).right.map(hp => connect(hp._1, hp._2))
// Metadata api
// scalastyle:off
// https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-MetadataAPI
// scalastyle:on
def findLeader(topic: String, partition: Int): Either[Err, (String, Int)] = {
val req = TopicMetadataRequest(TopicMetadataRequest.CurrentVersion,
0, config.clientId, Seq(topic))
val errs = new Err
withBrokers(Random.shuffle(config.seedBrokers), errs) { consumer =>
val resp: TopicMetadataResponse = consumer.send(req)
resp.topicsMetadata.find(_.topic == topic).flatMap { tm: TopicMetadata =>
tm.partitionsMetadata.find(_.partitionId == partition)
}.foreach { pm: PartitionMetadata =>
pm.leader.foreach { leader =>
return Right((leader.host, leader.port))
}
}
}
Left(errs)
}
def findLeaders(
topicAndPartitions: Set[TopicAndPartition]
): Either[Err, Map[TopicAndPartition, (String, Int)]] = {
val topics = topicAndPartitions.map(_.topic)
val response = getPartitionMetadata(topics).right
val answer = response.flatMap { tms: Set[TopicMetadata] =>
val leaderMap = tms.flatMap { tm: TopicMetadata =>
tm.partitionsMetadata.flatMap { pm: PartitionMetadata =>
val tp = TopicAndPartition(tm.topic, pm.partitionId)
if (topicAndPartitions(tp)) {
pm.leader.map { l =>
tp -> (l.host -> l.port)
}
} else {
None
}
}
}.toMap
if (leaderMap.keys.size == topicAndPartitions.size) {
Right(leaderMap)
} else {
val missing = topicAndPartitions.diff(leaderMap.keySet)
val err = new Err
err.append(new SparkException(s"Couldn't find leaders for ${missing}"))
Left(err)
}
}
answer
}
def getPartitions(topics: Set[String]): Either[Err, Set[TopicAndPartition]] = {
getPartitionMetadata(topics).right.map { r =>
r.flatMap { tm: TopicMetadata =>
tm.partitionsMetadata.map { pm: PartitionMetadata =>
TopicAndPartition(tm.topic, pm.partitionId)
}
}
}
}
def getPartitionMetadata(topics: Set[String]): Either[Err, Set[TopicMetadata]] = {
val req = TopicMetadataRequest(
TopicMetadataRequest.CurrentVersion, 0, config.clientId, topics.toSeq)
val errs = new Err
withBrokers(Random.shuffle(config.seedBrokers), errs) { consumer =>
val resp: TopicMetadataResponse = consumer.send(req)
val respErrs = resp.topicsMetadata.filter(m => m.errorCode != ErrorMapping.NoError)
if (respErrs.isEmpty) {
return Right(resp.topicsMetadata.toSet)
} else {
respErrs.foreach { m =>
val cause = ErrorMapping.exceptionFor(m.errorCode)
val msg = s"Error getting partition metadata for '${m.topic}'. Does the topic exist?"
errs.append(new SparkException(msg, cause))
}
}
}
Left(errs)
}
// Leader offset api
// scalastyle:off
// https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetAPI
// scalastyle:on
def getLatestLeaderOffsets(
topicAndPartitions: Set[TopicAndPartition]
): Either[Err, Map[TopicAndPartition, LeaderOffset]] =
getLeaderOffsets(topicAndPartitions, OffsetRequest.LatestTime)
def getEarliestLeaderOffsets(
topicAndPartitions: Set[TopicAndPartition]
): Either[Err, Map[TopicAndPartition, LeaderOffset]] =
getLeaderOffsets(topicAndPartitions, OffsetRequest.EarliestTime)
def getLeaderOffsets(
topicAndPartitions: Set[TopicAndPartition],
before: Long
): Either[Err, Map[TopicAndPartition, LeaderOffset]] = {
getLeaderOffsets(topicAndPartitions, before, 1).right.map { r =>
r.map { kv =>
// mapValues isn't serializable, see SI-7005
kv._1 -> kv._2.head
}
}
}
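  // Invert the tp -> (host, port) map into (host, port) -> Seq(tp) so that
  // offset requests can be batched per leader broker.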
private def flip[K, V](m: Map[K, V]): Map[V, Seq[K]] =
m.groupBy(_._2).map { kv =>
kv._1 -> kv._2.keys.toSeq
}
def getLeaderOffsets(
topicAndPartitions: Set[TopicAndPartition],
before: Long,
maxNumOffsets: Int
): Either[Err, Map[TopicAndPartition, Seq[LeaderOffset]]] = {
findLeaders(topicAndPartitions).right.flatMap { tpToLeader =>
val leaderToTp: Map[(String, Int), Seq[TopicAndPartition]] = flip(tpToLeader)
val leaders = leaderToTp.keys
var result = Map[TopicAndPartition, Seq[LeaderOffset]]()
val errs = new Err
withBrokers(leaders, errs) { consumer =>
val partitionsToGetOffsets: Seq[TopicAndPartition] =
leaderToTp((consumer.host, consumer.port))
val reqMap = partitionsToGetOffsets.map { tp: TopicAndPartition =>
tp -> PartitionOffsetRequestInfo(before, maxNumOffsets)
}.toMap
val req = OffsetRequest(reqMap)
val resp = consumer.getOffsetsBefore(req)
val respMap = resp.partitionErrorAndOffsets
partitionsToGetOffsets.foreach { tp: TopicAndPartition =>
respMap.get(tp).foreach { por: PartitionOffsetsResponse =>
if (por.error == ErrorMapping.NoError) {
if (por.offsets.nonEmpty) {
result += tp -> por.offsets.map { off =>
LeaderOffset(consumer.host, consumer.port, off)
}
} else {
errs.append(new SparkException(
s"Empty offsets for ${tp}, is ${before} before log beginning?"))
}
} else {
errs.append(ErrorMapping.exceptionFor(por.error))
}
}
}
if (result.keys.size == topicAndPartitions.size) {
return Right(result)
}
}
val missing = topicAndPartitions.diff(result.keySet)
errs.append(new SparkException(s"Couldn't find leader offsets for ${missing}"))
Left(errs)
}
}
// Consumer offset api
// scalastyle:off
// https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetCommit/FetchAPI
// scalastyle:on
// this 0 here indicates api version, in this case the original ZK backed api.
private def defaultConsumerApiVersion: Short = 0
/** Requires Kafka >= 0.8.1.1. Defaults to the original ZooKeeper backed api version. */
def getConsumerOffsets(
groupId: String,
topicAndPartitions: Set[TopicAndPartition]
): Either[Err, Map[TopicAndPartition, Long]] =
getConsumerOffsets(groupId, topicAndPartitions, defaultConsumerApiVersion)
def getConsumerOffsets(
groupId: String,
topicAndPartitions: Set[TopicAndPartition],
consumerApiVersion: Short
): Either[Err, Map[TopicAndPartition, Long]] = {
getConsumerOffsetMetadata(groupId, topicAndPartitions, consumerApiVersion).right.map { r =>
r.map { kv =>
kv._1 -> kv._2.offset
}
}
}
/** Requires Kafka >= 0.8.1.1. Defaults to the original ZooKeeper backed api version. */
def getConsumerOffsetMetadata(
groupId: String,
topicAndPartitions: Set[TopicAndPartition]
): Either[Err, Map[TopicAndPartition, OffsetMetadataAndError]] =
getConsumerOffsetMetadata(groupId, topicAndPartitions, defaultConsumerApiVersion)
def getConsumerOffsetMetadata(
groupId: String,
topicAndPartitions: Set[TopicAndPartition],
consumerApiVersion: Short
): Either[Err, Map[TopicAndPartition, OffsetMetadataAndError]] = {
var result = Map[TopicAndPartition, OffsetMetadataAndError]()
val req = OffsetFetchRequest(groupId, topicAndPartitions.toSeq, consumerApiVersion)
val errs = new Err
withBrokers(Random.shuffle(config.seedBrokers), errs) { consumer =>
val resp = consumer.fetchOffsets(req)
val respMap = resp.requestInfo
val needed = topicAndPartitions.diff(result.keySet)
needed.foreach { tp: TopicAndPartition =>
respMap.get(tp).foreach { ome: OffsetMetadataAndError =>
if (ome.error == ErrorMapping.NoError) {
result += tp -> ome
} else {
errs.append(ErrorMapping.exceptionFor(ome.error))
}
}
}
if (result.keys.size == topicAndPartitions.size) {
return Right(result)
}
}
val missing = topicAndPartitions.diff(result.keySet)
errs.append(new SparkException(s"Couldn't find consumer offsets for ${missing}"))
Left(errs)
}
/** Requires Kafka >= 0.8.1.1. Defaults to the original ZooKeeper backed api version. */
def setConsumerOffsets(
groupId: String,
offsets: Map[TopicAndPartition, Long]
): Either[Err, Map[TopicAndPartition, Short]] =
setConsumerOffsets(groupId, offsets, defaultConsumerApiVersion)
def setConsumerOffsets(
groupId: String,
offsets: Map[TopicAndPartition, Long],
consumerApiVersion: Short
): Either[Err, Map[TopicAndPartition, Short]] = {
val meta = offsets.map { kv =>
kv._1 -> OffsetAndMetadata(kv._2)
}
setConsumerOffsetMetadata(groupId, meta, consumerApiVersion)
}
/** Requires Kafka >= 0.8.1.1. Defaults to the original ZooKeeper backed api version. */
def setConsumerOffsetMetadata(
groupId: String,
metadata: Map[TopicAndPartition, OffsetAndMetadata]
): Either[Err, Map[TopicAndPartition, Short]] =
setConsumerOffsetMetadata(groupId, metadata, defaultConsumerApiVersion)
def setConsumerOffsetMetadata(
groupId: String,
metadata: Map[TopicAndPartition, OffsetAndMetadata],
consumerApiVersion: Short
): Either[Err, Map[TopicAndPartition, Short]] = {
var result = Map[TopicAndPartition, Short]()
val req = OffsetCommitRequest(groupId, metadata, consumerApiVersion)
val errs = new Err
val topicAndPartitions = metadata.keySet
withBrokers(Random.shuffle(config.seedBrokers), errs) { consumer =>
val resp = consumer.commitOffsets(req)
val respMap = resp.commitStatus
val needed = topicAndPartitions.diff(result.keySet)
needed.foreach { tp: TopicAndPartition =>
respMap.get(tp).foreach { err: Short =>
if (err == ErrorMapping.NoError) {
result += tp -> err
} else {
errs.append(ErrorMapping.exceptionFor(err))
}
}
}
if (result.keys.size == topicAndPartitions.size) {
return Right(result)
}
}
val missing = topicAndPartitions.diff(result.keySet)
errs.append(new SparkException(s"Couldn't set offsets for ${missing}"))
Left(errs)
}
// Try a call against potentially multiple brokers, accumulating errors
private def withBrokers(brokers: Iterable[(String, Int)], errs: Err)
(fn: SimpleConsumer => Any): Unit = {
brokers.foreach { hp =>
var consumer: SimpleConsumer = null
try {
consumer = connect(hp._1, hp._2)
fn(consumer)
} catch {
case NonFatal(e) =>
errs.append(e)
} finally {
if (consumer != null) {
consumer.close()
}
}
}
}
}
@DeveloperApi
object KafkaCluster {
type Err = ArrayBuffer[Throwable]
/** If the result is right, return it, otherwise throw SparkException */
def checkErrors[T](result: Either[Err, T]): T = {
result.fold(
errs => throw new SparkException(errs.mkString("\\n")),
ok => ok
)
}
case class LeaderOffset(host: String, port: Int, offset: Long)
/**
* High-level kafka consumers connect to ZK. ConsumerConfig assumes this use case.
* Simple consumers connect directly to brokers, but need many of the same configs.
* This subclass won't warn about missing ZK params, or presence of broker params.
*/
class SimpleConsumerConfig private(brokers: String, originalProps: Properties)
extends ConsumerConfig(originalProps) {
val seedBrokers: Array[(String, Int)] = brokers.split(",").map { hp =>
val hpa = hp.split(":")
if (hpa.size == 1) {
throw new SparkException(s"Broker not in the correct format of <host>:<port> [$brokers]")
}
(hpa(0), hpa(1).toInt)
}
}
object SimpleConsumerConfig {
/**
* Make a consumer config without requiring group.id or zookeeper.connect,
* since communicating with brokers also needs common settings such as timeout
*/
def apply(kafkaParams: Map[String, String]): SimpleConsumerConfig = {
// These keys are from other pre-existing kafka configs for specifying brokers, accept either
val brokers = kafkaParams.get("metadata.broker.list")
.orElse(kafkaParams.get("bootstrap.servers"))
.getOrElse(throw new SparkException(
"Must specify metadata.broker.list or bootstrap.servers"))
val props = new Properties()
kafkaParams.foreach { case (key, value) =>
// prevent warnings on parameters ConsumerConfig doesn't know about
if (key != "metadata.broker.list" && key != "bootstrap.servers") {
props.put(key, value)
}
}
Seq("zookeeper.connect", "group.id").foreach { s =>
if (!props.containsKey(s)) {
props.setProperty(s, "")
}
}
new SimpleConsumerConfig(brokers, props)
}
}
}
| gioenn/xSpark | external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaCluster.scala | Scala | apache-2.0 | 15,781 |
/**
* Copyright (C) 2014 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.oxf.test.{DocumentTestBase, ResourceManagerSupport}
import org.orbeon.oxf.xforms.action.XFormsAPI._
import org.orbeon.oxf.xforms.control.XFormsComponentControl
import org.orbeon.oxf.xml.Dom4j.elemToDocument
import org.orbeon.scaxon.SimplePath._
import org.scalatest.FunSpecLike
class ErrorSummaryTest
extends DocumentTestBase
with ResourceManagerSupport
with FunSpecLike {
describe("fr:error-summary") {
it("#1689: show errors when placed before observed") {
withTestExternalContext { _ ⇒
val doc = this setupDocument
<xh:html xmlns:xh="http://www.w3.org/1999/xhtml"
xmlns:xf="http://www.w3.org/2002/xforms"
xmlns:fr="http://orbeon.org/oxf/xml/form-runner">
<xh:head>
<xf:model>
<xf:instance>
<invalid/>
</xf:instance>
<xf:bind ref="." constraint="false()"/>
<xf:dispatch
event="xforms-ready"
name="fr-visit-all"
targetid="error-summary"/>
</xf:model>
</xh:head>
<xh:body>
<fr:error-summary id="error-summary" observer="my-group"/>
<xf:group id="my-group">
<xf:input ref="." id="my-input">
<xf:alert>alert</xf:alert>
</xf:input>
</xf:group>
</xh:body>
</xh:html>
withContainingDocument(doc) {
val errorSummary = resolveObject[XFormsComponentControl]("error-summary").get
val stateInstance = errorSummary.nestedContainer.models.head.getInstance("fr-state-instance").documentInfo
val visibleAlertCountAttr = stateInstance / "state" / "visible-counts" /@ "alert"
val visibleAlertCountValue = visibleAlertCountAttr.headOption.map(_.stringValue).getOrElse("")
assert(visibleAlertCountValue === "1")
}
}
}
}
}
| brunobuzzi/orbeon-forms | form-runner/jvm/src/test/scala/org/orbeon/oxf/fr/ErrorSummaryTest.scala | Scala | lgpl-2.1 | 2,695 |
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
package std
import Types._
import Task._
import java.io.{BufferedInputStream, BufferedReader, File, InputStream}
sealed trait MultiInTask[In <: HList]
{
def flatMap[T](f: In => Task[T]): Task[T]
def flatMapR[T](f: Results[In] => Task[T]): Task[T]
def map[T](f: In => T): Task[T]
def mapR[T](f: Results[In] => T): Task[T]
def flatFailure[T](f: Seq[Incomplete] => Task[T]): Task[T]
def mapFailure[T](f: Seq[Incomplete] => T): Task[T]
}
sealed trait SingleInTask[S]
{
def flatMapR[T](f: Result[S] => Task[T]): Task[T]
def flatMap[T](f: S => Task[T]): Task[T]
def map[T](f: S => T): Task[T]
def mapR[T](f: Result[S] => T): Task[T]
def flatFailure[T](f: Incomplete => Task[T]): Task[T]
def mapFailure[T](f: Incomplete => T): Task[T]
def dependsOn(tasks: Task[_]*): Task[S]
def andFinally(fin: => Unit): Task[S]
def doFinally(t: Task[Unit]): Task[S]
def || [T >: S](alt: Task[T]): Task[T]
def && [T](alt: Task[T]): Task[T]
}
sealed trait TaskInfo[S]
{
def describedAs(s: String): Task[S]
def named(s: String): Task[S]
}
sealed trait ForkTask[S, CC[_]]
{
def fork[T](f: S => T): CC[Task[T]]
def tasks: Seq[Task[S]]
}
sealed trait JoinTask[S, CC[_]]
{
def join: Task[CC[S]]
// had to rename from 'reduce' for 2.9.0
def reduced(f: (S,S) => S): Task[S]
}
sealed trait BinaryPipe
{
def binary[T](f: BufferedInputStream => T): Task[T]
def binary[T](sid: String)(f: BufferedInputStream => T): Task[T]
def #>(f: File): Task[Unit]
def #>(sid: String, f: File): Task[Unit]
}
sealed trait TextPipe
{
def text[T](f: BufferedReader => T): Task[T]
def text[T](sid: String)(f: BufferedReader => T): Task[T]
}
sealed trait TaskLines
{
def lines: Task[List[String]]
def lines(sid: String): Task[List[String]]
}
sealed trait ProcessPipe
{
def #| (p: ProcessBuilder): Task[Int]
def pipe(sid: String)(p: ProcessBuilder): Task[Int]
}
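/* Illustrative sketch (not part of the original file): with the implicits defined in
 * TaskExtra in scope, tasks compose roughly like this (names and values are hypothetical):
 *
 *   val a = task { 1 }
 *   val b = task { 2 }
 *   val sum = (a, b) map { (x: Int, y: Int) => x + y }  // via t2ToMulti / f2ToHfun
 *   val logged = sum andFinally { println("done") }     // via singleInputTask
 */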
trait TaskExtra
{
final def nop: Task[Unit] = constant( () )
final def constant[T](t: T): Task[T] = task(t)
final implicit def t2ToMulti[A,B](t: (Task[A],Task[B])) = multInputTask(t._1 :^: t._2 :^: KNil)
final implicit def f2ToHfun[A,B,R](f: (A,B) => R): (A :+: B :+: HNil => R) = { case a :+: b :+: HNil => f(a,b) }
final implicit def t3ToMulti[A,B,C](t: (Task[A],Task[B],Task[C])) = multInputTask(t._1 :^: t._2 :^: t._3 :^: KNil)
final implicit def f3ToHfun[A,B,C,R](f: (A,B,C) => R): (A :+: B :+: C :+: HNil => R) = { case a :+: b :+: c :+: HNil => f(a,b,c) }
final implicit def actionToTask[T](a: Action[T]): Task[T] = Task(Info(), a)
final def task[T](f: => T): Task[T] = toTask(f _)
final implicit def toTask[T](f: () => T): Task[T] = new Pure(f, false)
final def inlineTask[T](value: T): Task[T] = new Pure(() => value, true)
final implicit def upcastTask[A >: B, B](t: Task[B]): Task[A] = t map { x => x : B }
final implicit def toTasks[S](in: Seq[() => S]): Seq[Task[S]] = in.map(toTask)
final implicit def iterableTask[S](in: Seq[S]): ForkTask[S, Seq] = new ForkTask[S, Seq] {
def fork[T](f: S => T): Seq[Task[T]] = in.map(x => task(f(x)))
def tasks: Seq[Task[S]] = fork(idFun)
}
final implicit def joinAnyTasks(in: Seq[Task[_]]): JoinTask[Any, Seq] = joinTasks[Any](in map (x => x: Task[Any]))
final implicit def joinTasks[S](in: Seq[Task[S]]): JoinTask[S, Seq] = new JoinTask[S, Seq] {
def join: Task[Seq[S]] = new Join(in, (s: Seq[Result[S]]) => Right(TaskExtra.all(s)) )
def reduced(f: (S,S) => S): Task[S] = TaskExtra.reduced(in.toIndexedSeq, f)
}
import TaskExtra.{allM, anyFailM, failM, successM}
final implicit def multInputTask[In <: HList](tasks: Tasks[In]): MultiInTask[In] = new MultiInTask[In] {
def flatMapR[T](f: Results[In] => Task[T]): Task[T] = new FlatMapped(tasks, f)
def flatMap[T](f: In => Task[T]): Task[T] = flatMapR(f compose allM)
def flatFailure[T](f: Seq[Incomplete] => Task[T]): Task[T] = flatMapR(f compose anyFailM)
def mapR[T](f: Results[In] => T): Task[T] = new Mapped(tasks, f)
def map[T](f: In => T): Task[T] = mapR(f compose allM)
def mapFailure[T](f: Seq[Incomplete] => T): Task[T] = mapR(f compose anyFailM)
}
final implicit def singleInputTask[S](in: Task[S]): SingleInTask[S] = new SingleInTask[S] {
type HL = S :+: HNil
private val ml = in :^: KNil
private def headM = (_: Results[HL]).combine.head
def flatMapR[T](f: Result[S] => Task[T]): Task[T] = new FlatMapped[T, HL](ml, f ∙ headM)
def mapR[T](f: Result[S] => T): Task[T] = new Mapped[T, HL](ml, f ∙ headM)
def dependsOn(tasks: Task[_]*): Task[S] = new DependsOn(in, tasks)
def flatMap[T](f: S => Task[T]): Task[T] = flatMapR(f compose successM)
def flatFailure[T](f: Incomplete => Task[T]): Task[T] = flatMapR(f compose failM)
def map[T](f: S => T): Task[T] = mapR(f compose successM)
def mapFailure[T](f: Incomplete => T): Task[T] = mapR(f compose failM)
def andFinally(fin: => Unit): Task[S] = mapR(x => Result.tryValue[S]( { fin; x }))
def doFinally(t: Task[Unit]): Task[S] = flatMapR(x => t.mapR { tx => Result.tryValues[S](tx :: Nil, x) })
def || [T >: S](alt: Task[T]): Task[T] = flatMapR { case Value(v) => task(v); case Inc(i) => alt }
def && [T](alt: Task[T]): Task[T] = flatMap( _ => alt )
}
final implicit def toTaskInfo[S](in: Task[S]): TaskInfo[S] = new TaskInfo[S] {
def describedAs(s: String): Task[S] = in.copy(info = in.info.setDescription(s))
def named(s: String): Task[S] = in.copy(info = in.info.setName(s))
}
final implicit def pipeToProcess[Key](t: Task[_])(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): ProcessPipe = new ProcessPipe {
def #| (p: ProcessBuilder): Task[Int] = pipe0(None, p)
def pipe(sid: String)(p: ProcessBuilder): Task[Int] = pipe0(Some(sid), p)
private def pipe0(sid: Option[String], p: ProcessBuilder): Task[Int] =
for(s <- streams) yield {
val in = s.readBinary(key(t), sid)
val pio = TaskExtra.processIO(s).withInput( out => { BasicIO.transferFully(in, out); out.close() } )
(p run pio).exitValue
}
}
final implicit def binaryPipeTask[Key](in: Task[_])(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): BinaryPipe = new BinaryPipe {
def binary[T](f: BufferedInputStream => T): Task[T] = pipe0(None, f)
def binary[T](sid: String)(f: BufferedInputStream => T): Task[T] = pipe0(Some(sid), f)
def #>(f: File): Task[Unit] = pipe0(None, toFile(f))
def #>(sid: String, f: File): Task[Unit] = pipe0(Some(sid), toFile(f))
private def pipe0 [T](sid: Option[String], f: BufferedInputStream => T): Task[T] =
streams map { s => f(s.readBinary(key(in), sid)) }
private def toFile(f: File) = (in: InputStream) => IO.transfer(in, f)
}
final implicit def textPipeTask[Key](in: Task[_])(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): TextPipe = new TextPipe {
def text[T](f: BufferedReader => T): Task[T] = pipe0(None, f)
def text [T](sid: String)(f: BufferedReader => T): Task[T] = pipe0(Some(sid), f)
private def pipe0 [T](sid: Option[String], f: BufferedReader => T): Task[T] =
streams map { s => f(s.readText(key(in), sid)) }
}
final implicit def linesTask[Key](in: Task[_])(implicit streams: Task[TaskStreams[Key]], key: Task[_] => Key): TaskLines = new TaskLines {
def lines: Task[List[String]] = lines0(None)
def lines(sid: String): Task[List[String]] = lines0(Some(sid))
private def lines0 [T](sid: Option[String]): Task[List[String]] =
streams map { s => IO.readLines(s.readText(key(in), sid) ) }
}
implicit def processToTask(p: ProcessBuilder)(implicit streams: Task[TaskStreams[_]]): Task[Int] = streams map { s =>
val pio = TaskExtra.processIO(s)
(p run pio).exitValue
}
}
object TaskExtra extends TaskExtra
{
def processIO(s: TaskStreams[_]): ProcessIO =
{
def transfer(id: String) = (in: InputStream) => BasicIO.transferFully(in, s.binary(id))
new ProcessIO(BasicIO.closeOut, transfer(s.outID), transfer(s.errorID), inheritInput = {_ => false})
}
def reduced[S](i: IndexedSeq[Task[S]], f: (S, S) => S): Task[S] =
i match
{
case Seq() => error("Cannot reduce empty sequence")
case Seq(x) => x
case Seq(x, y) => reducePair(x, y, f)
case z =>
val (a, b) = i.splitAt(i.size / 2)
reducePair( reduced(a, f), reduced(b, f), f )
}
def reducePair[S](a: Task[S], b: Task[S], f: (S, S) => S): Task[S] =
(a :^: b :^: KNil) map { case x :+: y :+: HNil => f(x,y) }
def anyFailM[In <: HList]: Results[In] => Seq[Incomplete] = in =>
{
val incs = failuresM(in)
if(incs.isEmpty) expectedFailure else incs
}
def failM[T]: Result[T] => Incomplete = { case Inc(i) => i; case x => expectedFailure }
def expectedFailure = throw Incomplete(None, message = Some("Expected dependency to fail."))
def successM[T]: Result[T] => T = { case Inc(i) => throw i; case Value(t) => t }
def allM[In <: HList]: Results[In] => In = in =>
{
val incs = failuresM(in)
if(incs.isEmpty) in.down(Result.tryValue) else throw incompleteDeps(incs)
}
def failuresM[In <: HList]: Results[In] => Seq[Incomplete] = x => failures[Any](x.toList)
def all[D](in: Seq[Result[D]]) =
{
val incs = failures(in)
if(incs.isEmpty) in.map(Result.tryValue.fn[D]) else throw incompleteDeps(incs)
}
def failures[A](results: Seq[Result[A]]): Seq[Incomplete] = results.collect { case Inc(i) => i }
def incompleteDeps(incs: Seq[Incomplete]): Incomplete = Incomplete(None, causes = incs)
}
| jamesward/xsbt | tasks/standard/TaskExtra.scala | Scala | bsd-3-clause | 9,408 |
package scala.lms
package common
import java.io.PrintWriter
import scala.lms.internal._
import scala.collection.mutable.Set
import scala.reflect.SourceContext
trait SetOps extends Base {
object Set {
def apply[A:Manifest](xs: Rep[A]*)(implicit pos: SourceContext) = set_new[A](xs)
}
implicit def repSetToSetOps[A:Manifest](v: Rep[Set[A]]) = new setOpsCls(v)
class setOpsCls[A:Manifest](s: Rep[Set[A]]) {
def contains(i: Rep[A])(implicit pos: SourceContext) = set_contains(s, i)
def add(i: Rep[A])(implicit pos: SourceContext) = set_add(s, i)
def remove(i: Rep[A])(implicit pos: SourceContext) = set_remove(s, i)
def size(implicit pos: SourceContext) = set_size(s)
def clear()(implicit pos: SourceContext) = set_clear(s)
def toSeq(implicit pos: SourceContext) = set_toseq(s)
def toArray(implicit pos: SourceContext) = set_toarray(s)
}
def set_new[A:Manifest](xs: Seq[Rep[A]])(implicit pos: SourceContext) : Rep[Set[A]]
def set_contains[A:Manifest](s: Rep[Set[A]], i: Rep[A])(implicit pos: SourceContext) : Rep[Boolean]
def set_add[A:Manifest](s: Rep[Set[A]], i: Rep[A])(implicit pos: SourceContext) : Rep[Unit]
def set_remove[A:Manifest](s: Rep[Set[A]], i: Rep[A])(implicit pos: SourceContext) : Rep[Unit]
def set_size[A:Manifest](s: Rep[Set[A]])(implicit pos: SourceContext) : Rep[Int]
def set_clear[A:Manifest](s: Rep[Set[A]])(implicit pos: SourceContext) : Rep[Unit]
def set_toseq[A:Manifest](s: Rep[Set[A]])(implicit pos: SourceContext): Rep[Seq[A]]
def set_toarray[A:Manifest](s: Rep[Set[A]])(implicit pos: SourceContext): Rep[Array[A]]
}
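/* Illustrative sketch (not part of the original file): in a DSL program mixing in SetOps,
 * staged sets read much like ordinary mutable sets (values are hypothetical):
 *
 *   val s = Set[Int](unit(1), unit(2))          // Rep[Set[Int]]
 *   s.add(unit(3))
 *   val hit: Rep[Boolean] = s.contains(unit(2))
 *   val arr: Rep[Array[Int]] = s.toArray
 */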
trait SetOpsExp extends SetOps with ArrayOps with EffectExp {
case class SetNew[A:Manifest](xs: Seq[Exp[A]], mA: Manifest[A]) extends Def[Set[A]]
case class SetContains[A:Manifest](s: Exp[Set[A]], i: Exp[A]) extends Def[Boolean]
case class SetAdd[A:Manifest](s: Exp[Set[A]], i: Exp[A]) extends Def[Unit]
case class SetRemove[A:Manifest](s: Exp[Set[A]], i: Exp[A]) extends Def[Unit]
case class SetSize[A:Manifest](s: Exp[Set[A]]) extends Def[Int]
case class SetClear[A:Manifest](s: Exp[Set[A]]) extends Def[Unit]
case class SetToSeq[A:Manifest](s: Exp[Set[A]]) extends Def[Seq[A]]
case class SetToArray[A:Manifest](s: Exp[Set[A]]) extends Def[Array[A]] {
//val array = unit(manifest[A].newArray(0))
val array = NewArray[A](s.size)
}
def set_new[A:Manifest](xs: Seq[Exp[A]])(implicit pos: SourceContext) = reflectMutable(SetNew(xs, manifest[A]))
def set_contains[A:Manifest](s: Exp[Set[A]], i: Exp[A])(implicit pos: SourceContext) = SetContains(s, i)
def set_add[A:Manifest](s: Exp[Set[A]], i: Exp[A])(implicit pos: SourceContext) = reflectWrite(s)(SetAdd(s, i))
def set_remove[A:Manifest](s: Exp[Set[A]], i: Exp[A])(implicit pos: SourceContext) = reflectWrite(s)(SetRemove(s, i))
def set_size[A:Manifest](s: Exp[Set[A]])(implicit pos: SourceContext) = SetSize(s)
def set_clear[A:Manifest](s: Exp[Set[A]])(implicit pos: SourceContext) = reflectWrite(s)(SetClear(s))
def set_toseq[A:Manifest](s: Exp[Set[A]])(implicit pos: SourceContext) = SetToSeq(s)
def set_toarray[A:Manifest](s: Exp[Set[A]])(implicit pos: SourceContext) = SetToArray(s)
}
trait BaseGenSetOps extends GenericNestedCodegen {
val IR: SetOpsExp
import IR._
}
trait ScalaGenSetOps extends BaseGenSetOps with ScalaGenEffect {
val IR: SetOpsExp
import IR._
override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
case SetNew(xs, mA) => emitValDef(sym, src"collection.mutable.HashSet[$mA](" + (xs map {quote}).mkString(",") + ")")
case SetContains(s,i) => emitValDef(sym, src"$s.contains($i)")
case SetAdd(s,i) => emitValDef(sym, src"$s.add($i)")
case SetRemove(s,i) => emitValDef(sym, src"$s.remove($i)")
case SetSize(s) => emitValDef(sym, src"$s.size")
case SetClear(s) => emitValDef(sym, src"$s.clear()")
case SetToSeq(s) => emitValDef(sym, src"$s.toSeq")
case n@SetToArray(s) => //emitValDef(sym, quote(s) + ".toArray")
gen"""// workaround for refinedManifest problem
|val $sym = {
|val out = $n.array
|val in = $s.toSeq
|var i = 0
|while (i < in.length) {
|out(i) = in(i)
|i += 1
|}
|out
|}"""
case _ => super.emitNode(sym, rhs)
}
}
trait CLikeGenSetOps extends BaseGenSetOps with CLikeCodegen {
val IR: SetOpsExp
import IR._
// override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
// case _ => super.emitNode(sym, rhs)
// }
}
trait CudaGenSetOps extends CudaGenEffect with CLikeGenSetOps
trait OpenCLGenSetOps extends OpenCLGenEffect with CLikeGenSetOps
trait CGenSetOps extends CGenEffect with CLikeGenSetOps
| scalan/virtualization-lms-core | src/common/SetOps.scala | Scala | bsd-3-clause | 4,729 |
package akka
import akka.actor.{Actor, ActorSystem, Props}
/**
* Created by Om Prakash C on 16-06-2017.
*/
object SimpleActorExample extends App {
class SimpleActor extends Actor {
def receive = {
case s: String => println("String = " + s)
case i: Int => println("Int = " + i)
}
}
val system = ActorSystem("SimpleSystem")
val actor = system.actorOf(Props[SimpleActor], "SimpleActor1")
actor ! "Hi there"
actor ! 24
system.terminate()
}
| comprakash/learning-scala | concurrency/src/main/scala/akka/SimpleActorExample.scala | Scala | gpl-3.0 | 479 |
package com.wavesplatform.transaction.smart.script
import com.wavesplatform.lang.directives.Directive.extractValue
import com.wavesplatform.lang.directives.DirectiveKey._
import com.wavesplatform.lang.directives._
import com.wavesplatform.lang.directives.values._
import com.wavesplatform.lang.script.v1.ExprScript
import com.wavesplatform.lang.script.{ContractScript, Script, ScriptPreprocessor}
import com.wavesplatform.lang.utils._
import com.wavesplatform.lang.v1.compiler.{ContractCompiler, ExpressionCompiler}
import com.wavesplatform.lang.v1.estimator.ScriptEstimator
import com.wavesplatform.utils._
object ScriptCompiler extends ScorexLogging {
@Deprecated
def apply(
scriptText: String,
isAssetScript: Boolean,
estimator: ScriptEstimator
): Either[String, (Script, Long)] =
applyAndEstimate(scriptText, isAssetScript, estimator, Script.estimate, StdLibVersion.VersionDic.default)
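  /**
   * Compiles an expression, DApp or library script together with its complexity estimate.
   * Illustrative call (script text and estimator value are hypothetical):
   * {{{
   * ScriptCompiler.compile("true", estimator) // Right((script, complexity)) or Left(error)
   * }}}
   */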
def compile(
scriptText: String,
estimator: ScriptEstimator,
libraries: Map[String, String] = Map(),
defaultStdLib: => StdLibVersion = StdLibVersion.VersionDic.default
): Either[String, (Script, Long)] =
compileAndEstimate(scriptText, estimator, libraries, Script.estimate, defaultStdLib)
def compileAndEstimateCallables(
scriptText: String,
estimator: ScriptEstimator,
libraries: Map[String, String] = Map(),
defaultStdLib: => StdLibVersion = StdLibVersion.VersionDic.default
): Either[String, (Script, Script.ComplexityInfo)] =
compileAndEstimate(scriptText, estimator, libraries, Script.complexityInfo, defaultStdLib)
def compileAndEstimate[C](
scriptText: String,
estimator: ScriptEstimator,
libraries: Map[String, String] = Map(),
estimate: (Script, ScriptEstimator, Boolean) => Either[String, C],
defaultStdLib: => StdLibVersion = StdLibVersion.VersionDic.default
): Either[String, (Script, C)] =
for {
directives <- DirectiveParser(scriptText)
ds <- Directive.extractDirectives(directives, defaultStdLib)
linkedInput <- ScriptPreprocessor(scriptText, libraries, ds.imports)
result <- applyAndEstimate(linkedInput, ds.scriptType == Asset, estimator, estimate, defaultStdLib)
} yield result
private def applyAndEstimate[C](
scriptText: String,
isAssetScript: Boolean,
estimator: ScriptEstimator,
estimate: (Script, ScriptEstimator, Boolean) => Either[String, C],
defaultStdLib: => StdLibVersion // = StdLibVersion.VersionDic.default
): Either[String, (Script, C)] =
for {
directives <- DirectiveParser(scriptText)
contentType = extractValue(directives, CONTENT_TYPE)
version = extractValue(directives, STDLIB_VERSION)(Some(defaultStdLib))
scriptType = if (isAssetScript) Asset else Account
_ <- DirectiveSet(version, scriptType, contentType)
script <- tryCompile(scriptText, contentType, version, isAssetScript)
complexity <- estimate(script, estimator, !isAssetScript)
} yield (script, complexity)
private def tryCompile(src: String, cType: ContentType, version: StdLibVersion, isAssetScript: Boolean): Either[String, Script] = {
val ctx = compilerContext(version, cType, isAssetScript)
try {
cType match {
case Expression => ExpressionCompiler.compileBoolean(src, ctx).flatMap(expr => ExprScript.apply(version, expr))
case DApp => ContractCompiler.compile(src, ctx, version).flatMap(expr => ContractScript.apply(version, expr))
case Library => ExpressionCompiler.compileDecls(src, ctx).flatMap(ExprScript(version, _))
}
} catch {
case ex: Throwable =>
log.error("Error compiling script", ex)
log.error(src)
val msg = Option(ex.getMessage).getOrElse("Parsing failed: Unknown error")
Left(msg)
}
}
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/transaction/smart/script/ScriptCompiler.scala | Scala | mit | 3,881 |
package com.twitter.finagle.netty3
import com.twitter.finagle.benchmark.StdBenchAnnotations
import com.twitter.finagle.util.{BufWriter, BufReader}
import com.twitter.io.{Buf, Charsets}
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
import org.openjdk.jmh.annotations._
import scala.collection.mutable.ArrayBuffer
/**
* Benchmarks various buffer wrappers in the presence of length
* encoded fields. This is useful since many of our protocols
* (e.g. Mux) make heavy use of length encoding.
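 *
 * Each value is written as a 4-byte length prefix followed by the payload bytes
 * (see the encode/decode helpers below).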
*/
@State(Scope.Benchmark)
class BufCodecBenchmark extends StdBenchAnnotations {
import BufCodecBenchmark._
@Param(Array("100"))
var size: Int = _
private[this] var bufs: Seq[Buf] = _
private[this] var cbs: Seq[ChannelBuffer] = _
private[this] var encodedCB: ChannelBuffer = _
private[this] var encodedBuf: Buf = _
@Setup(Level.Iteration)
def setup(): Unit = {
val values = List.fill(size)("value")
bufs = values.map { case v =>
Buf.ByteArray.Owned(v.getBytes(Charsets.Utf8))
}
encodedBuf = TwitterBuf.encode(bufs)
cbs = values.map { case v =>
ChannelBuffers.wrappedBuffer(v.getBytes(Charsets.Utf8))
}
encodedCB = NettyChannelBuffer.encode(cbs)
encodedCB.markReaderIndex()
}
@Benchmark
def encodeCB(): ChannelBuffer = NettyChannelBuffer.encode(cbs)
@Benchmark
def encodeBuf(): Buf = TwitterBuf.encode(bufs)
@Benchmark
def decodeCB(): Seq[ChannelBuffer] = {
encodedCB.resetReaderIndex()
NettyChannelBuffer.decode(encodedCB)
}
@Benchmark
def decodeBuf(): Seq[Buf] = {
TwitterBuf.decode(encodedBuf)
}
@Benchmark
def roundTripCB(): Seq[ChannelBuffer] = {
NettyChannelBuffer.decode(NettyChannelBuffer.encode(cbs))
}
@Benchmark
def roundTripBuf(): Seq[Buf] = {
TwitterBuf.decode(TwitterBuf.encode(bufs))
}
}
object BufCodecBenchmark {
object NettyChannelBuffer {
def encode(values: Seq[ChannelBuffer]): ChannelBuffer = {
var iter = values.iterator
var size = 0
while (iter.hasNext) {
size += iter.next().readableBytes + 4
}
val cb = ChannelBuffers.buffer(size)
iter = values.iterator
while (iter.hasNext) {
val v = iter.next()
cb.writeInt(v.readableBytes)
cb.writeBytes(v.slice())
}
cb
}
def decode(cb: ChannelBuffer): Seq[ChannelBuffer] = {
val values = new ArrayBuffer[ChannelBuffer]
while (cb.readableBytes() > 0) {
val v = cb.readSlice(cb.readInt())
values += v
}
values
}
}
object TwitterBuf {
def encode(values: Seq[Buf]): Buf = {
var size = 0
var iter = values.iterator
while (iter.hasNext) {
size += iter.next().length + 4
}
val bw = BufWriter.fixed(size)
iter = values.iterator
while (iter.hasNext) {
iter.next() match { case v =>
bw
.writeIntBE(v.length)
.writeBytes(Buf.ByteArray.Owned.extract(v))
}
}
bw.owned()
}
def decode(buf: Buf): Seq[Buf] = {
val values = new ArrayBuffer[Buf]
val br = BufReader(buf)
while (br.remaining > 0) {
val v = br.readBytes(br.readIntBE())
values += v
}
values
}
}
} | sveinnfannar/finagle | finagle-benchmark/src/main/scala/com/twitter/finagle/netty3/BufCodecBenchmark.scala | Scala | apache-2.0 | 3,271 |
/*
* Copyright 2015 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ff7
package monsters
import scalaz._
import Scalaz._
import scala.reflect.runtime.{universe ⇒ ru}
object ReflectUtil {
private val loader = getClass.getClassLoader
private val m = ru.runtimeMirror(loader)
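  /**
   * Loads the singleton object with the given fully qualified name and checks that it
   * conforms to `A`. Illustrative call (names are hypothetical):
   * `loadObject[Monster]("ff7.monsters.Guard")` yields a right containing the `Guard`
   * instance when the object exists and extends `Monster`, and a left with a diagnostic
   * message otherwise.
   */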
def loadObject[A: ru.TypeTag](name: String): String \\/ A = for {
sym ← loadObjectSym(name)
inst ← objectInstance[A](sym)
} yield inst
private def loadObjectSym(name: String): String \\/ ru.ModuleSymbol =
for {
target ← loadClass(name + "$")
symbol ← objectSymbol(target)
} yield symbol
private def objectInstance[A: ru.TypeTag](sym: ru.ModuleSymbol): String \\/ A = {
val targetTpe = ru.typeOf[A]
if (sym.typeSignature <:< targetTpe) for {
obj ← TryE(m.reflectModule(sym))
ins ← TryE(obj.instance.asInstanceOf[A])
} yield ins
else s"[$sym] is not <:< [$targetTpe]".left
}
private def loadClass(name: String): String \\/ Class[_] =
\\/.fromTryCatchNonFatal(loader.loadClass(name))
.leftMap(_ ⇒ s"The class [$name] could not be found")
private def objectSymbol(clazz: Class[_]): String \\/ ru.ModuleSymbol =
TryE(m.moduleSymbol(clazz))
}
| knutwalker/ff7-simulator | items/src/main/scala/ff7/monsters/ReflectUtil.scala | Scala | apache-2.0 | 1,752 |
package com.readclosely.model
/*
Copyright 2009-2010 Karl Pichotta
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import _root_.net.liftweb.mapper._
import _root_.net.liftweb.util._
import com.readclosely.util._
class DeletedAnnotation extends LongKeyedMapper[DeletedAnnotation] with IdPK {
def getSingleton = DeletedAnnotation // what's the "meta" server
object annid extends MappedLong(this)
object deletionDateTimeMillis extends MappedLong(this)
object commentText extends MappedTextarea(this, DeletedAnnotation.MAX_LEN) {
override def textareaRows = 10
override def textareaCols = 50
}
object score extends MappedInt(this)
object authorID extends MappedLongForeignKey(this, User) {
override def dbIndexed_? = false
}
object passageID extends MappedLongForeignKey(this, Passage) {
override def dbIndexed_? = false
}
object sentenceID extends MappedInt(this)
//had to change since MappedDateTime sets time = 00:00:000
//object submissionDatetime extends MappedDateTime(this)
object submissionDateTimeMillis extends MappedLong(this)
//may be null.
object lastEditDateTimeMillis extends MappedLong(this)
//@todo: add dependency
object inReplyTo extends MappedLong(this) {
override def dbIndexed_? = false
}
object parentAnn extends MappedLong(this)
/**
   * Int dictating at which position in a "conversation" list this annotation appears.
   * Everything that is not a reply to something else will have replyOrder 0;
   * the first reply to a comment will have replyOrder 1, and so on.
*/
object replyOrder extends MappedInt(this) {
override def defaultValue = 0
}
object numReplies extends MappedInt(this) {
override def defaultValue = 0
}
}
/**
* The singleton that has methods for accessing the database
*/
object DeletedAnnotation extends DeletedAnnotation with LongKeyedMetaMapper[DeletedAnnotation] {
val MAX_LEN = 2000
}
| kpich/readclosely | src/main/scala/com/readclosely/model/DeletedAnnotation.scala | Scala | apache-2.0 | 2,490 |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boxdata.ejb
import boxdata.cdi.util.DtoBuilder
import boxdata.data.dto.SystemLoadDto
import org.slf4j.LoggerFactory
import collection.mutable.ListBuffer
import javax.ejb.Lock
import javax.ejb.LockType
import javax.ejb.Singleton
import javax.inject.Inject
import java.lang.management.ManagementFactory
@Singleton(name = "SystemLoadEjb")
class SystemLoadEjb {
val LOG = LoggerFactory.getLogger(getClass)
val MAX_RECORDS = 10000
val systemLoad: ListBuffer[SystemLoadDto] = ListBuffer()
val osBean = ManagementFactory.getOperatingSystemMXBean
val memoryBean = ManagementFactory.getMemoryMXBean
@Inject
var builder: DtoBuilder = _
@Lock(LockType.WRITE)
def readData() {
LOG.debug("Reading system information (load)...")
val runtime = Runtime.getRuntime
val free = runtime.freeMemory()
val total = runtime.totalMemory()
val heap = memoryBean.getHeapMemoryUsage
val nonHeap = memoryBean.getNonHeapMemoryUsage
systemLoad += builder.buildSystemLoadDto(
System.currentTimeMillis(),
osBean.getSystemLoadAverage,
total,
free,
heap,
nonHeap
)
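    // Drop the oldest samples so the in-memory history stays bounded at MAX_RECORDS.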
while (systemLoad.size > MAX_RECORDS) {
systemLoad -= systemLoad.head
}
}
@Lock(LockType.READ)
def getSystemLoad: List[SystemLoadDto] = {
systemLoad.iterator.toList
}
}
| tveronezi/boxdata | src/main/scala/boxdata/ejb/SystemLoadEjb.scala | Scala | apache-2.0 | 2,255 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
case class B860(value: Option[Int]) extends CtBoxIdentifier("Repayment amount upper bound") with CtOptionalInteger with Input with ValidatableBox[CT600BoxRetriever] {
override def validate(boxRetriever: CT600BoxRetriever): Set[CtValidation] = {
val repaymentsQ1 = boxRetriever.retrieveREPAYMENTSQ1()
repaymentsQ1.value match {
case Some(false) => validateAsMandatory(this) ++ validateZeroOrPositiveInteger(this)
case _ => Set()
}
}
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B860.scala | Scala | apache-2.0 | 1,189 |
package scala.pickling.runtime
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, json._
class Person(val name: String, val age: Int)
class PersonRuntimeTest extends FunSuite {
test("main") {
val p: Any = new Person("joe", 23)
// the following is invoking the macro to generate a Pickler[Any],
// because p has type Any.
// the trick is that we should detect that we're pickling a Person
// and switch to runtime picklers.
val pickle = p.pickle
assert(pickle.value === """
|{
| "$type": "scala.pickling.runtime.Person",
| "name": "joe",
| "age": 23
|}
""".stripMargin.trim)
}
}
| scala/pickling | core/src/test/scala/scala/pickling/runtime/PersonRuntimeTest.scala | Scala | bsd-3-clause | 684 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert2.metrics
import com.codahale.metrics.{ConsoleReporter, MetricRegistry, Slf4jReporter}
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.LazyLogging
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ReporterFactoryTest extends Specification with LazyLogging {
"ReporterFactory" should {
"load console configs" in {
val conf = ConfigFactory.parseString(
"""
|{
| type = "console"
| units = "MILLISECONDS"
|}
""".stripMargin
)
val registry = new MetricRegistry()
val reporter = ReporterFactory(conf, registry)
try {
reporter must beAnInstanceOf[ConsoleReporter]
} finally {
reporter.close()
}
}
"load slf configs" in {
val conf = ConfigFactory.parseString(
"""
|{
| type = "slf4j"
| logger = "org.locationtech.geomesa.convert2.metrics.ReporterFactoryTest"
| level = "INFO"
| rate-units = "SECONDS"
| duration-units = "MILLISECONDS"
| interval = "10 seconds"
|}
""".stripMargin
)
val registry = new MetricRegistry()
val reporter = ReporterFactory(conf, registry)
try {
reporter must beAnInstanceOf[Slf4jReporter]
} finally {
reporter.close()
}
}
}
}
| locationtech/geomesa | geomesa-convert/geomesa-convert-common/src/test/scala/org/locationtech/geomesa/convert2/metrics/ReporterFactoryTest.scala | Scala | apache-2.0 | 2,008 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import java.util.concurrent.CountDownLatch
import java.io.PrintStream
import org.scalatest.events._
import DispatchReporter.propagateDispose
import java.util.concurrent.LinkedBlockingQueue
/**
* A <code>Reporter</code> that dispatches test results to other <code>Reporter</code>s.
* Attempts to dispatch each method invocation to each contained <code>Reporter</code>,
* even if some <code>Reporter</code> methods throw <code>Exception</code>s. Catches
* <code>Exception</code>s thrown by <code>Reporter</code> methods and prints error
* messages to the standard error stream.
*
 * The primary constructor creates a new <code>DispatchReporter</code> with the specified list of <code>Reporter</code>s.
* Each object in the <code>reporters</code> list must implement <code>Reporter</code>.
*
* @param reporters the initial <code>Reporter</code>s list for this
* <code>DispatchReporter</code>
* @throws NullPointerException if <code>reporters</code> is <code>null</code>.
* @author Bill Venners
*/
private[scalatest] class DispatchReporter(val reporters: List[Reporter], val out: PrintStream) extends CatchReporter {
private case object Dispose
private val latch = new CountDownLatch(1)
// Can be either Event or Dispose.type. Be nice to capture that in the type param.
private val queue = new LinkedBlockingQueue[AnyRef]
class Propagator extends Runnable {
def run() {
var alive = true // local variable. Only used by the Propagator's thread, so no need for synchronization
class Counter {
var testsSucceededCount = 0
var testsFailedCount = 0
var testsIgnoredCount = 0
var testsCanceledCount = 0
var testsPendingCount = 0
var suitesCompletedCount = 0
var suitesAbortedCount = 0
}
val counterMap = scala.collection.mutable.Map[Int, Counter]()
def incrementCount(event: Event, f: (Counter) => Unit) {
val runStamp = event.ordinal.runStamp
if (counterMap.contains(runStamp)) {
val counter = counterMap(runStamp)
f(counter)
}
else {
val counter = new Counter
f(counter)
counterMap(runStamp) = counter
}
}
// If None, that means don't update the summary so forward the old event. If Some,
// create a new event with everything the same except the old summary replaced by the new one
def updatedSummary(oldSummary: Option[Summary], ordinal: Ordinal): Option[Summary] = {
oldSummary match {
case None if (counterMap.contains(ordinal.runStamp)) => {
// Update the RunAborted so that it is the same except it has a new Some(Summary)
val counter = counterMap(ordinal.runStamp)
Some(
Summary(
counter.testsSucceededCount,
counter.testsFailedCount,
counter.testsIgnoredCount,
counter.testsPendingCount,
counter.testsCanceledCount,
counter.suitesCompletedCount,
counter.suitesAbortedCount
)
)
}
case _ => None // Also pass the old None summary through if it isn't in the counterMap
}
}
while (alive) {
queue.take() match {
case event: Event =>
try {
              // The event will only actually be updated if it is a RunCompleted/Aborted/Stopped event with None
// as its summary and its runstamp has a counter entry. In that case, it will be given a Summary taken
// from the counter. (And the counter will be removed from the counterMap.) These are counted here, because
// they need to be counted on this side of any FilterReporters that may be in place. (In early versions of
// ScalaTest, these were wrongly being counted by the reporters themselves, so if a FilterReporter filtered
              // out TestSucceeded events, then they just weren't being counted.)
val updatedEvent =
event match {
case _: RunStarting => counterMap(event.ordinal.runStamp) = new Counter; event
case _: TestSucceeded => incrementCount(event, _.testsSucceededCount += 1); event
case _: TestFailed => incrementCount(event, _.testsFailedCount += 1); event
case _: TestIgnored => incrementCount(event, _.testsIgnoredCount += 1); event
case _: TestCanceled => incrementCount(event, _.testsCanceledCount += 1); event
case _: TestPending => incrementCount(event, _.testsPendingCount += 1); event
case _: SuiteCompleted => incrementCount(event, _.suitesCompletedCount += 1); event
case _: SuiteAborted => incrementCount(event, _.suitesAbortedCount += 1); event
case oldRunCompleted @ RunCompleted(ordinal, duration, summary, formatter, location, payload, threadName, timeStamp) =>
updatedSummary(summary, ordinal) match {
case None => oldRunCompleted
case newSummary @ Some(_) =>
counterMap.remove(ordinal.runStamp)
// Update the RunCompleted so that it is the same except it has a new Some(Summary)
RunCompleted(ordinal, duration, newSummary, formatter, location, payload, threadName, timeStamp)
}
case oldRunStopped @ RunStopped(ordinal, duration, summary, formatter, location, payload, threadName, timeStamp) =>
updatedSummary(summary, ordinal) match {
case None => oldRunStopped
case newSummary @ Some(_) =>
counterMap.remove(ordinal.runStamp)
// Update the RunStopped so that it is the same except it has a new Some(Summary)
RunStopped(ordinal, duration, newSummary, formatter, location, payload, threadName, timeStamp)
}
case oldRunAborted @ RunAborted(ordinal, message, throwable, duration, summary, formatter, location, payload, threadName, timeStamp) =>
updatedSummary(summary, ordinal) match {
case None => oldRunAborted
case newSummary @ Some(_) =>
counterMap.remove(ordinal.runStamp)
// Update the RunAborted so that it is the same except it has a new Some(Summary)
RunAborted(ordinal, message, throwable, duration, newSummary, formatter, location, payload, threadName, timeStamp)
}
case _ => event
}
for (report <- reporters)
report(updatedEvent)
}
catch {
case e: Exception =>
val stringToPrint = Resources("reporterThrew", event)
out.println(stringToPrint)
e.printStackTrace(out)
}
case Dispose =>
try {
for (reporter <- reporters)
propagateDispose(reporter)
}
catch {
case e: Exception =>
val stringToPrint = Resources("reporterDisposeThrew")
out.println(stringToPrint)
e.printStackTrace(out)
}
finally {
alive = false
latch.countDown()
}
}
}
}
}
private val propagator = new Propagator
(new Thread(propagator)).start()
def this(reporters: List[Reporter]) = this(reporters, System.out)
def this(reporter: Reporter) = this(List(reporter), System.out)
// Invokes dispose on each Reporter in this DispatchReporter's reporters list.
// This method puts an event in the queue that is being used to serialize
  // events, and at some time later the propagator's thread will attempt to invoke
// dispose on each contained Reporter, even if some Reporter's dispose methods throw
// Exceptions. This method catches any Exception thrown by
// a dispose method and handles it by printing an error message to the
// standard error stream. Once finished with that, the propagator's thread will return.
//
// This method will not return until the propagator's thread has exited.
//
def dispatchDisposeAndWaitUntilDone() {
queue.put(Dispose)
latch.await()
}
override def apply(event: Event) {
queue.put(event)
}
def doApply(event: Event) {}
def doDispose() {
dispatchDisposeAndWaitUntilDone()
}
def isDisposed = latch.getCount == 0
}
// TODO: Not a real problem, but if a DispatchReporter ever got itself into
// its list of reporters, this would end up being an infinite loop. That said,
// the first part (a DispatchReporter getting itself in there) would be the real
// bug.
private[scalatest] object DispatchReporter {
def propagateDispose(reporter: Reporter) {
reporter match {
case dispatchReporter: DispatchReporter => dispatchReporter.dispatchDisposeAndWaitUntilDone()
case resourcefulReporter: ResourcefulReporter => resourcefulReporter.dispose()
case _ =>
}
}
}
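// Illustrative usage sketch (not part of the original source). It relies only on
// the public members defined above: the List[Reporter] constructor, apply(Event)
// and dispatchDisposeAndWaitUntilDone(). Events are queued by apply and forwarded
// to every wrapped Reporter on the propagator thread; disposing blocks until that
// thread has drained the queue and exited.
private[scalatest] object DispatchReporterUsageSketch {
  def forwardAll(events: Seq[Event], targets: List[Reporter]): Unit = {
    val dispatch = new DispatchReporter(targets)
    try events.foreach(e => dispatch(e))
    finally dispatch.dispatchDisposeAndWaitUntilDone()
  }
}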
| hubertp/scalatest | src/main/scala/org/scalatest/DispatchReporter.scala | Scala | apache-2.0 | 10,230 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import fixtures.VatRegistrationFixture
import models.ContactPreference
import org.mockito.ArgumentMatchers
import org.mockito.Mockito.when
import play.api.test.FakeRequest
import testHelpers.{ControllerSpec, FutureAssertions}
import views.html.contact_preference
import scala.concurrent.Future
class ContactPreferenceControllerSpec extends ControllerSpec with VatRegistrationFixture with FutureAssertions {
val view = app.injector.instanceOf[contact_preference]
class Setup {
val controller: ContactPreferenceController = new ContactPreferenceController(
mockAuthClientConnector,
mockSessionService,
mockBusinessContactService,
view
)
mockAuthenticated()
mockWithCurrentProfile(Some(currentProfile))
}
class SubmissionSetup extends Setup {
when(mockBusinessContactService.getBusinessContact(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future(validBusinessContactDetails))
when(mockBusinessContactService.updateBusinessContact[ContactPreference](ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future(validBusinessContactDetails.contactPreference.get))
}
"showing the contact preference page" should {
"return a 200" when {
"everything is okay" in new SubmissionSetup {
callAuthorised(controller.showContactPreference) {
_ isA 200
}
}
}
"throw an exception" when {
"getBusinessContact Fails" in new Setup {
when(mockBusinessContactService.getBusinessContact(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future(throw exception))
callAuthorised(controller.showContactPreference) {
_ failedWith exception
}
}
}
}
"submitting the contact preference page" should {
val fakeRequest = FakeRequest(routes.ContactPreferenceController.showContactPreference)
"return a 400" when {
"form is empty" in new SubmissionSetup {
submitAuthorised(controller.submitContactPreference, fakeRequest.withFormUrlEncodedBody()) {
_ isA 400
}
}
}
"return a 400" when {
"user provides invalid data" in new SubmissionSetup {
submitAuthorised(controller.submitContactPreference, fakeRequest.withFormUrlEncodedBody("value" -> "BadStuff")) {
_ isA 400
}
}
}
"return a 303" when {
"user selects email and redirect to the business activity description" in new SubmissionSetup {
submitAuthorised(controller.submitContactPreference, fakeRequest.withFormUrlEncodedBody("value" -> "email")) {
_ redirectsTo controllers.registration.sicandcompliance.routes.BusinessActivityDescriptionController.show.url
}
}
}
"return a 303" when {
"user selects letter and redirect to the business activity description" in new SubmissionSetup {
submitAuthorised(controller.submitContactPreference, fakeRequest.withFormUrlEncodedBody("value" -> "letter")) {
_ redirectsTo controllers.registration.sicandcompliance.routes.BusinessActivityDescriptionController.show.url
}
}
}
"return an exception" when {
"updateBusinessContact fails" in new SubmissionSetup {
when(mockBusinessContactService.updateBusinessContact[ContactPreference](ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future(throw exception))
submitAuthorised(controller.submitContactPreference, fakeRequest.withFormUrlEncodedBody("value" -> "email")) {
_ failedWith exception
}
}
}
}
} | hmrc/vat-registration-frontend | test/controllers/ContactPreferenceControllerSpec.scala | Scala | apache-2.0 | 4,375 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.postgresql.parsers
import com.github.mauricio.async.db.postgresql.messages.backend.{ProcessData, ServerMessage}
import io.netty.buffer.ByteBuf
object BackendKeyDataParser extends MessageParser {
override def parseMessage(b: ByteBuf): ServerMessage = {
new ProcessData(b.readInt(), b.readInt())
}
}
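// Illustrative only (not part of the original source): a sketch of exercising the
// parser with a hand-built Netty buffer. The two ints are the backend process id
// and secret key carried by PostgreSQL's BackendKeyData message.
object BackendKeyDataParserSketch {
  import io.netty.buffer.Unpooled
  def parse(processId: Int, secretKey: Int): ServerMessage = {
    val buffer = Unpooled.buffer()
    buffer.writeInt(processId)
    buffer.writeInt(secretKey)
    BackendKeyDataParser.parseMessage(buffer) // yields ProcessData(processId, secretKey)
  }
}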
| dripower/postgresql-async | postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/parsers/BackendKeyDataParser.scala | Scala | apache-2.0 | 985 |
package core
import org.scalatest.{FunSpec, Matchers}
class UnionTypeSpec extends FunSpec with Matchers with helpers.ApiJsonHelpers {
val baseJson = """
{
"name": "Union Types Test",
"unions": {
"user": { %s
"types": [
{ "type": "%s", "description": "foobar" },
{ "type": "%s" }
]
}
},
"models": {
"%s": {
"fields": [
{ "name": "id", "type": "uuid" }
]
},
"guest": {
"fields": [
{ "name": "id", "type": "uuid" }
]
},
"other_random_user": {
"fields": [
{ "name": "id", "type": "long" }
]
},
"order": {
"fields": [
{ "name": "id", "type": "uuid" },
{ "name": "user", "type": "user" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"path": "/:id"
}
]
}
}
}
"""
describe("without discriminator") {
it("union types support descriptions") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "string", "uuid", "registered"))
validator.errors should be(Nil)
val union = validator.service().unions.head
union.types.find(_.`type` == "string").get.description should be(Some("foobar"))
union.types.find(_.`type` == "uuid").get.description should be(None)
}
it("union types can have primitives") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "string", "uuid", "registered"))
validator.errors should be(Nil)
validator.service().unions.head.types.map(_.`type`) should be(Seq("string", "uuid"))
}
it("union types can have models") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "guest", "registered", "registered"))
validator.errors should be(Nil)
validator.service().unions.head.types.map(_.`type`) should be(Seq("guest", "registered"))
}
it("union types can have lists") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "[guest]", "map[registered]", "registered"))
validator.errors should be(Nil)
validator.service().unions.head.types.map(_.`type`) should be(Seq("[guest]", "map[registered]"))
}
it("rejects blank types") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "guest", "", "registered"))
validator.errors should be(Seq("Union[user] type[] type must be a non empty string"))
}
it("rejects circular type") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "user", "guest", "registered"))
validator.errors should be(Seq("Union[user] cannot contain itself as one of its types or sub-types"))
}
it("rejects indirectly circular type") {
val json = """{
"name": "Union Types Test",
"unions": {
"foo": {
"types": [ { "type": "bar" } ]
},
"bar": {
"types": [ { "type": "baz" } ]
},
"baz": {
"types": [ { "type": "foo" } ]
}
},
"models": {},
"resources": {}
}"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors should be(Seq("Union[bar] cannot contain itself as one of its types or sub-types: bar->baz->foo->bar"))
}
it("rejects invalid types") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "guest", "another_user", "registered"))
validator.errors should be(Seq("Union[user] type[another_user] not found"))
}
it("validates that union names do not overlap with model names") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "string", "uuid", "user"))
validator.errors should be(Seq("Name[user] cannot be used as the name of both a model and a union type"))
}
it("validates unit type") {
TestHelper.serviceValidator(
makeApiJson(
unions = Map(
"user" -> makeUnion(
types = Seq(makeUnionType(`type` = "unit"))
)
)
)
).errors() should equal(
Seq("Union[user] Union types cannot contain unit. To make a particular field optional, use the required property.")
)
}
it("infers proper parameter type if field is common across all types") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "guest", "registered", "registered"))
validator.service().resources.head.operations.head.parameters.find(_.name == "id").getOrElse {
sys.error("Could not find guid parameter")
}.`type` should be("uuid")
}
it("infers string parameter type if type varies") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format("", "other_random_user", "registered", "registered"))
validator.service().resources.head.operations.head.parameters.find(_.name == "id").getOrElse {
sys.error("Could not find guid parameter")
}.`type` should be("string")
}
}
describe("with discriminator") {
it("union types unique discriminator") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format(""""discriminator": "type",""", "guest", "registered", "registered"))
validator.errors should be(Nil)
val union = validator.service().unions.head
union.discriminator should be(Some("type"))
}
it("validates union types discriminator that is not a defined field") {
val validator = TestHelper.serviceValidatorFromApiJson(baseJson.format(""""discriminator": "id",""", "guest", "registered", "registered"))
validator.errors should be(Seq("Union[user] discriminator[id] must be unique. Field exists on: guest, registered"))
}
}
describe("with nested union type") {
val nestedUnionTypeJson = """
{
"name": "Union Types Test",
"unions": {
"user": {
"discriminator": "%s",
"types": [
{ "type": "registered" },
{ "type": "guest" }
]
},
"guest": {
"types": [
{ "type": "uuid" },
{ "type": "anonymous" }
]
}
},
"models": {
"registered": {
"fields": [
{ "name": "id", "type": "uuid" }
]
},
"anonymous": {
"fields": [
{ "name": "id", "type": "uuid" },
{ "name": "foo", "type": "string" }
]
}
}
}
"""
it("valid discriminator") {
val validator = TestHelper.serviceValidatorFromApiJson(nestedUnionTypeJson.format("type"))
validator.errors should be(Nil)
}
it("invalid discriminator") {
val validator = TestHelper.serviceValidatorFromApiJson(nestedUnionTypeJson.format("foo"))
validator.errors should be(Seq(
"Union[user] discriminator[foo] must be unique. Field exists on: guest.anonymous"
))
}
it("non text discriminator") {
val validator = TestHelper.serviceValidatorFromApiJson(nestedUnionTypeJson.format("!@KL#"))
validator.errors should be(Seq(
"Union[user] discriminator[!@KL#]: Name can only contain a-z, A-Z, 0-9, - and _ characters, Name must start with a letter"
))
}
it("'value' is reserved for primitive wrappers") {
val validator = TestHelper.serviceValidatorFromApiJson(nestedUnionTypeJson.format("value"))
validator.errors should be(Seq(
"Union[user] discriminator[value]: The keyword[value] is reserved and cannot be used as a discriminator"
))
}
it("'implicit' is reserved for future implicit discriminators") {
val validator = TestHelper.serviceValidatorFromApiJson(nestedUnionTypeJson.format("implicit"))
validator.errors should be(Seq(
"Union[user] discriminator[implicit]: The keyword[implicit] is reserved and cannot be used as a discriminator"
))
}
}
it("infers proper parameter type if field is common across all types including primitive") {
val json = """
{
"name": "Union Types Test",
"unions": {
"user": {
"types": [
{ "type": "registered" },
{ "type": "uuid" }
]
}
},
"models": {
"registered": {
"fields": [
{ "name": "id", "type": "uuid" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"path": "/:id"
}
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.service().resources.head.operations.head.parameters.find(_.name == "id").getOrElse {
sys.error("Could not find guid parameter")
}.`type` should be("uuid")
}
it("does not allow a type from an imported service") {
// This doesn't work because there is no way the imported class
// can extend the union trait that is defined in this service.
val common = """
{
"name": "common",
"namespace": "test.common",
"models": {
"reference": {
"fields": [
{ "name": "id", "type": "string" }
]
}
}
}
"""
val uri = "http://localhost/test/common/0.0.1/service.json"
val user = s"""
{
"name": "user",
"imports": [ { "uri": "$uri" } ],
"unions": {
"expandable_user": {
"types": [
{ "type": "test.common.models.reference" },
{ "type": "user" }
]
}
},
"models": {
"user": {
"fields": [
{ "name": "id", "type": "string" }
]
}
}
}
"""
val validator = TestHelper.serviceValidatorFromApiJson(common)
validator.errors should be(Nil)
validator.service().namespace should be("test.common")
validator.service().models.map(_.name) should be(Seq("reference"))
val fetcher = MockServiceFetcher()
fetcher.add(uri, validator.service)
TestHelper.serviceValidatorFromApiJson(user, fetcher = fetcher).errors should be(
Seq("Union[expandable_user] Type[test.common.models.reference] is invalid. Cannot use an imported type as part of a union as there is no way to declare that the imported type expands the union type defined here.")
)
}
it("only one type can be marked default") {
def test(userDefault: Boolean = false, guestDefault: Boolean = false) = {
TestHelper.serviceValidator(
makeApiJson(
models = Map("user" -> makeModelWithField(), "guest" -> makeModelWithField()),
unions = Map("visitor" -> makeUnion(
discriminator = Some("discriminator"),
types = Seq(
makeUnionType(`type` = "user", default = userDefault),
makeUnionType(`type` = "guest", default = guestDefault),
)
))
)
)
}
test(userDefault = false, guestDefault = false).errors() should be(Nil)
test(userDefault = true, guestDefault = false).errors() should be(Nil)
test(userDefault = false, guestDefault = true).errors() should be(Nil)
test(userDefault = true, guestDefault = true).errors() should be(
Seq("Union[visitor] Only 1 type can be specified as default. Currently the following types are marked as default: guest, user")
)
}
}
| gheine/apidoc | core/src/test/scala/core/UnionTypeSpec.scala | Scala | mit | 11,704 |
package gapt.examples.tip.prod
import gapt.expr._
import gapt.expr.ty.TBase
import gapt.proofs.context.update.InductiveType
import gapt.proofs.Sequent
import gapt.proofs.gaptic._
import gapt.provers.viper.aip.AnalyticInductionProver
object prop_20 extends TacticsProof {
// Sorts
ctx += TBase( "sk" )
// Inductive types
ctx += InductiveType( ty"list", hoc"'nil' :list", hoc"'cons' :sk>list>list" )
ctx += InductiveType( ty"Nat", hoc"'Z' :Nat", hoc"'S' :Nat>Nat" )
//Function constants
ctx += hoc"'length' :list>Nat"
ctx += hoc"'even' :Nat>o"
ctx += hoc"'append' :list>list>list"
val sequent =
hols"""
def_head: ∀x0 ∀x1 (head(cons(x0:sk, x1:list): list): sk) = x0,
def_tail: ∀x0 ∀x1 (tail(cons(x0:sk, x1:list): list): list) = x1,
def_p: ∀x0 (p(S(x0:Nat): Nat): Nat) = x0,
def_length_0: (length(nil:list): Nat) = #c(Z: Nat),
def_length_1: ∀y ∀xs (length(cons(y:sk, xs:list): list): Nat) = S(length(xs)),
def_even_0: even(#c(Z: Nat)): o,
def_even_1: ¬even(S(#c(Z: Nat)): Nat),
def_even_2: ∀z ((even(S(S(z:Nat): Nat)) → even(z)) ∧ (even(z) → even(S(S(z))))),
def_append_0: ∀y (append(nil:list, y:list): list) = y,
def_append_1: ∀z ∀xs ∀y (append(cons(z:sk, xs:list): list, y:list): list) = cons(z, append(xs, y)),
constr_inj_0: ∀y0 ∀y1 ¬(nil:list) = cons(y0:sk, y1:list),
constr_inj_1: ∀y0 ¬#c(Z: Nat) = S(y0:Nat)
:-
goal: ∀x even(length(append(x:list, x): list): Nat)
"""
val lemma = (
( "l0" -> hof"length(nil) = Z" ) +:
( "l1" -> hof"∀y ∀xs length(cons(y, xs)) = S(length(xs))" ) +:
( "a0" -> hof"∀y append(nil, y) = y" ) +:
( "a1" -> hof"∀z ∀xs ∀y append(cons(z, xs), y) = cons(z, append(xs, y))" ) +:
Sequent() :+ ( "lemma" -> hof"∀xs ∀ys ∀y length(append(xs, cons(y,ys))) = S(length(append(xs, ys)))" ) )
val lemma_proof = AnalyticInductionProver.singleInduction( lemma, hov"xs:list" )
val proof = Lemma( sequent ) {
cut( "lemma", hof"∀xs ∀ys ∀y length(append(xs, cons(y,ys))) = S(length(append(xs, ys)))" )
insert( lemma_proof )
allR; induction( hov"x:list" ); escargot.withDeskolemization.onAllSubGoals
}
val lemma_openind_proof = Lemma( lemma ) {
allR; allR; allR; induction( hov"xs:list" )
//- BC
rewrite.many ltr ( "a0", "l1" ) in "lemma"; refl
//- SC
escargot
}
val openind = Lemma( sequent ) {
cut( "lemma", hof"∀xs ∀ys ∀y length(append(xs, cons(y,ys))) = S(length(append(xs, ys)))" )
insert( lemma_openind_proof )
allR; induction( hov"x:list" ); escargot.withDeskolemization.onAllSubGoals
}
}
| gapt/gapt | examples/tip/prod/prop_20.scala | Scala | gpl-3.0 | 2,709 |
package com.coding42.gol
import com.coding42.gol.GameOfLife.{LifeBoard, Pos}
import scalafx.scene.canvas.Canvas
import scalafx.scene.image.WritableImage
import scalafx.scene.paint.Color
/**
* Handles zoom, drag and canvas updates
*/
case class CanvasPainter(canvas: Canvas, lifeBoardOp: Option[LifeBoard], zoom: Int, offset: Pos) {
val boardWidth = lifeBoardOp.map(_.length).getOrElse(0)
val boardHeight = lifeBoardOp.map(_(0).length).getOrElse(0)
val clampedOffset = clamp(offset, -boardWidth/2, boardWidth/2-1, -boardHeight/2, boardHeight/2-1)
def withZoom(zoom: Int) = this.copy(zoom = zoom)
def withBoard(lifeBoard: LifeBoard) = this.copy(lifeBoardOp = Some(lifeBoard))
def moveOffset(movement: Pos) = this.copy(offset = clampedOffset + movement)
def paintBoard() = {
val image = new WritableImage(canvas.width.toInt, canvas.height.toInt)
lifeBoardOp.foreach { lifeBoard =>
val pixelWriter = image.pixelWriter
val mapper = imageMapper(lifeBoard, offset, zoom)
lifeBoard.zipWithIndex.par.foreach { case (row, x) =>
row.zipWithIndex.foreach { case (alive, y) =>
pixelWriter.setColor(x, y, mapper(x, y))
}
}
}
canvas.graphicsContext2D.drawImage(image, 0, 0)
}
private def imageMapper(lifeBoard: LifeBoard, offset: Pos, zoom: Int): (Int, Int) => Color = {
val widthDiv2: Int = boardWidth / 2 + clampedOffset.x
val heightDiv2: Int = boardHeight / 2 + clampedOffset.y
(x, y) => {
val newX = (x - widthDiv2) / zoom + widthDiv2
val newY = (y - heightDiv2) / zoom + heightDiv2
if( lifeBoard(newX)(newY) )
Color.Black
else
Color.White
}
}
def clamp(pos: Pos, minX: Int, maxX: Int, minY: Int, maxY: Int): Pos = {
Pos(clamp(pos.x, minX, maxX), clamp(pos.y, minY, maxY))
}
def clamp(value: Int, min: Int, max: Int): Int = {
Math.min(Math.max(value, min), max)
}
}
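// Illustrative only (not part of the original source): a sketch of how a UI layer
// might drive the painter. Each mutator returns an updated copy, so the caller keeps
// the latest CanvasPainter and repaints after every change; the Pos(dx, dy) drag
// delta is an assumed input from mouse handling.
object CanvasPainterSketch {
  def onDrag(current: CanvasPainter, dx: Int, dy: Int): CanvasPainter = {
    val moved = current.moveOffset(Pos(dx, dy))
    moved.paintBoard()
    moved
  }
  def onNewGeneration(current: CanvasPainter, board: LifeBoard): CanvasPainter = {
    val updated = current.withBoard(board)
    updated.paintBoard()
    updated
  }
}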
| adrijardi/life | src/main/scala/com/coding42/gol/CanvasPainter.scala | Scala | mit | 1,937 |
object Versions extends WebJarsVersions with ScalaJSVersions with SharedVersions
{
val scala = "2.11.6"
val akkaHttp = "1.0-RC3"
val ammonite = "0.3.0"
}
trait ScalaJSVersions {
val threejsFacade = "0.0.71-0.1.5"
val jqueryFacade = "0.6"
val jsext = "0.5"
val dom = "0.8.1"
val codemirrorFacade = "5.3-0.5"
val binding = "0.7.15"
}
//versions for libs that are shared between client and server
trait SharedVersions
{
val autowire = "0.2.5"
val scalaRx = "0.2.8"
val quicklens = "1.3.1"
val scalaTags = "0.5.1"
val scalaCSS = "0.2.0"
val productCollections = "1.4.2"
val utest = "0.3.1"
}
trait WebJarsVersions{
val jquery = "2.1.3"
val semanticUI = "1.12.3"
val selectize = "0.12.1"
val threeJs = "r71"
val codemirror = "5.3"
}
| waman/threejs-facade | project/Versions.scala | Scala | mpl-2.0 | 778 |
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import io.netty.channel.{
Channel,
ChannelHandlerContext,
SimpleChannelInboundHandler
}
import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler.HandshakeComplete
import io.netty.handler.codec.http.websocketx.{
TextWebSocketFrame,
WebSocketFrame
}
import io.netty.util.AttributeKey
import org.slf4j.LoggerFactory
import org.ensime.api._
import org.ensime.server.WebServer._
class WebSocketFrameHandler(
hookHandlers: HookHandlers
) extends SimpleChannelInboundHandler[WebSocketFrame] {
val log = LoggerFactory.getLogger(this.getClass)
val inHandlerKey: AttributeKey[String => Unit] =
AttributeKey.valueOf(classOf[String => Unit], "INHANDLER");
val outHandlerKey: AttributeKey[OutgoingHandler] =
AttributeKey.valueOf(classOf[OutgoingHandler], "OUTHANDLER");
private def setInHandler(ctx: ChannelHandlerContext,
inHandler: String => Unit): Unit =
ctx.channel().attr(inHandlerKey).set(inHandler)
private def getInHandler(ctx: ChannelHandlerContext): String => Unit =
ctx.channel().attr(inHandlerKey).get()
private def setOutHandler(ctx: ChannelHandlerContext,
outHandler: OutgoingHandler): Unit =
ctx.channel().attr(outHandlerKey).set(outHandler)
private def getOutHandler(ctx: ChannelHandlerContext): OutgoingHandler =
ctx.channel().attr(outHandlerKey).get()
private def encoderFor(subprotocol: String): SubprotocolEncoder =
subprotocol match {
case "jerky" => JerkySubprotocolEncoder
case "swanky" => SwankySubprotocolEncoder
}
private def encodedOutHandler(
ch: Channel,
encoder: SubprotocolEncoder
): OutgoingHandler = { rpcResp =>
val response = encoder.writeFrame(rpcResp)
ch.writeAndFlush(new TextWebSocketFrame(response))
}
private def encodedInHandler(inHandler: IncomingHandler,
encoder: SubprotocolEncoder): String => Unit = {
frameText =>
val rpcReq = encoder.readFrame(frameText)
inHandler(rpcReq)
}
override protected def userEventTriggered(ctx: ChannelHandlerContext,
evt: Object): Unit =
if (evt.isInstanceOf[HandshakeComplete]) {
val serverHandshakeComplete = evt.asInstanceOf[HandshakeComplete];
val subprotocol = serverHandshakeComplete.selectedSubprotocol
val encoder = encoderFor(subprotocol)
val outHandler = encodedOutHandler(ctx.channel(), encoder)
val inHandler = encodedInHandler(hookHandlers(outHandler), encoder)
setInHandler(ctx, inHandler)
setOutHandler(ctx, outHandler)
log.info("Handlers ready")
}
override protected def channelRead0(ctx: ChannelHandlerContext,
frame: WebSocketFrame): Unit =
frame match {
case txtFrame: TextWebSocketFrame =>
getInHandler(ctx)(txtFrame.text)
case _ =>
val message = "Unsupported frame type: " + frame.getClass().getName()
throw new UnsupportedOperationException(message)
}
override protected def exceptionCaught(ctx: ChannelHandlerContext,
t: Throwable): Unit = {
log.error("Error while processing WebSocket message", t)
val error = RpcResponseEnvelope(
callId = None,
payload = EnsimeServerError(t.toString)
)
getOutHandler(ctx)(error)
}
}
trait SubprotocolEncoder {
def readFrame(request: String): RpcRequestEnvelope
def writeFrame(response: RpcResponseEnvelope): String
}
object JerkySubprotocolEncoder extends SubprotocolEncoder {
import spray.json._
import org.ensime.jerky.JerkyFormats._
override def readFrame(request: String): RpcRequestEnvelope =
request.parseJson.convertTo[RpcRequestEnvelope]
override def writeFrame(response: RpcResponseEnvelope): String =
response.toJson.prettyPrint
}
object SwankySubprotocolEncoder extends SubprotocolEncoder {
import org.ensime.sexp._
import org.ensime.swanky.SwankyFormats._
override def readFrame(request: String): RpcRequestEnvelope =
request.parseSexp.convertTo[RpcRequestEnvelope]
override def writeFrame(response: RpcResponseEnvelope): String =
response.toSexp.prettyPrint
}
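// Illustrative only (not part of the original source): a sketch of what a further
// SubprotocolEncoder implementation looks like against the trait above. This one
// simply delegates to the jerky (JSON) encoder; a real encoder would define its own
// wire format and would also need to be added to encoderFor and to the list of
// negotiated subprotocols.
object DelegatingSubprotocolEncoderSketch extends SubprotocolEncoder {
  override def readFrame(request: String): RpcRequestEnvelope =
    JerkySubprotocolEncoder.readFrame(request)
  override def writeFrame(response: RpcResponseEnvelope): String =
    JerkySubprotocolEncoder.writeFrame(response)
}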
| yyadavalli/ensime-server | server/src/main/scala/org/ensime/server/WebSocketFrameHandler.scala | Scala | gpl-3.0 | 4,443 |
package net.orfjackal.dimdwarf.mq
trait MessageSender[T] {
def send(message: T)
}
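// Illustrative only (not part of the original source): a minimal in-memory
// implementation, e.g. for unit tests, that simply records every message it is
// asked to send.
class RecordingMessageSender[T] extends MessageSender[T] {
  private val recorded = scala.collection.mutable.Buffer.empty[T]
  def send(message: T): Unit = recorded += message
  def sentMessages: Seq[T] = recorded.toList
}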
| orfjackal/dimdwarf | dimdwarf-core/src/main/scala/net/orfjackal/dimdwarf/mq/MessageSender.scala | Scala | apache-2.0 | 85 |
package integrationtest
import org.scalatra.test.scalatest._
import skinny._
import skinny.controller._
import skinny.test.SkinnyTestSupport
class AssetsSpec extends ScalatraFlatSpec with SkinnyTestSupport {
addFilter(AssetsController, "/*")
it should "show react jsx template resources" in {
get("/assets/js/hello-react.js") {
status should equal(200)
header("Content-Type") should equal("application/javascript;charset=UTF-8")
body.replaceFirst("\n$", "") should equal(
"""/** @jsx React.DOM */
|React.renderComponent(
| React.DOM.h1(null, "Hello, world!"),
| document.getElementById('example')
|);
|""".stripMargin)
}
}
it should "return 304 for react jsx if If-Modified-Since specified" in {
get(uri = "/assets/js/hello-react.js", headers = Map("If-Modified-Since" -> "Thu, 31 Dec 2037 12:34:56 GMT")) {
status should equal(304)
}
}
it should "show coffee script resources" in {
get("/assets/js/echo.js") {
status should equal(200)
header("Content-Type") should equal("application/javascript;charset=UTF-8")
body.replaceFirst("^// Generated by CoffeeScript .+\n", "").replaceFirst("\n$", "") should equal("""(function() {
var echo;
echo = function(v) {
return console.log(v);
};
echo("foo");
}).call(this);""")
}
}
it should "return 304 for coffee if If-Modified-Since specified" in {
get(uri = "/assets/js/echo.js", headers = Map("If-Modified-Since" -> "Thu, 31 Dec 2037 12:34:56 GMT")) {
status should equal(304)
}
}
it should "show less resources" in {
get("/assets/css/box.css") {
status should equal(200)
header("Content-Type") should equal("text/css;charset=UTF-8")
body should equal(""".box {
color: #fe33ac;
border-color: #fdcdea;
}
""")
}
}
it should "return 304 for less if If-Modified-Since specified" in {
get(uri = "/assets/css/box.css", headers = Map("If-Modified-Since" -> "Thu, 31 Dec 2037 12:34:56 GMT")) {
status should equal(304)
}
}
it should "show scss resources" in {
get("/assets/css/variables-in-scss.css") {
status should equal(200)
header("Content-Type") should equal("text/css;charset=UTF-8")
body.replaceFirst("\n$", "") should equal("""body {
| font: 100% Helvetica, sans-serif;
| color: #333333; }""".stripMargin)
}
}
it should "return 304 for scss if If-Modified-Since specified" in {
get(uri = "/assets/css/variables-in-scss.css", headers = Map("If-Modified-Since" -> "Thu, 31 Dec 2037 12:34:56 GMT")) {
status should equal(304)
}
}
it should "show sass resources" in {
get("/assets/css/indented-sass.css") {
status should equal(200)
header("Content-Type") should equal("text/css;charset=UTF-8")
body.replaceFirst("\n$", "") should equal("""#main {
| color: blue;
| font-size: 0.3em; }""".stripMargin)
}
}
it should "return 304 for sass if If-Modified-Since specified" in {
get(uri = "/assets/css/indented-sass.css", headers = Map("If-Modified-Since" -> "Thu, 31 Dec 2037 12:34:56 GMT")) {
status should equal(304)
}
}
}
| BlackPrincess/skinny-framework | example/src/test/scala/integrationtest/AssetsSpec.scala | Scala | mit | 3,219 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import com.typesafe.scalalogging.slf4j.Logging
import com.vividsolutions.jts.geom.{Geometry, GeometryFactory}
import org.apache.accumulo.core.Constants
import org.apache.accumulo.core.data.{Mutation, Value}
import org.geotools.data.DataStore
import org.geotools.data.simple.SimpleFeatureSource
import org.geotools.factory.Hints
import org.geotools.feature.DefaultFeatureCollection
import org.joda.time.{DateTime, DateTimeZone}
import org.locationtech.geomesa.accumulo.data.AccumuloFeatureStore
import org.locationtech.geomesa.accumulo.data.AccumuloFeatureWriter.FeatureToWrite
import org.locationtech.geomesa.accumulo.index._
import org.locationtech.geomesa.features.avro.AvroSimpleFeatureFactory
import org.locationtech.geomesa.features.{SerializationType, SimpleFeatureSerializers}
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.collection.JavaConverters._
import scala.util.Random
object UnitTestEntryType {
def getTypeSpec = "POINT:String," + "LINESTRING:String," + "POLYGON:String," + "attr2:String," + spec
}
object TestData extends Logging {
val TEST_USER = "root"
val TEST_TABLE = "test_table"
val TEST_AUTHORIZATIONS = Constants.NO_AUTHS
val emptyBytes = new Value(Array[Byte]())
case class Entry(wkt: String, id: String, dt: DateTime = new DateTime(defaultDateTime))
// set up the geographic query polygon
val wktQuery = "POLYGON((45 23, 48 23, 48 27, 45 27, 45 23))"
val featureName = "feature"
val schemaEncoding =
new IndexSchemaBuilder("~")
.randomNumber(10)
.indexOrDataFlag()
.constant(featureName)
.geoHash(0, 3)
.date("yyyyMMdd")
.nextPart()
.geoHash(3, 2)
.nextPart()
.id()
.build()
def getTypeSpec(suffix: String = "2") = {
s"POINT:String,LINESTRING:String,POLYGON:String,attr$suffix:String:index=true," + spec
}
def getFeatureType(typeNameSuffix: String = "", attrNameSuffix: String = "2", tableSharing: Boolean = true) = {
val fn = s"$featureName$typeNameSuffix"
val ft: SimpleFeatureType = SimpleFeatureTypes.createType(fn, getTypeSpec(attrNameSuffix))
ft.setDtgField("dtg")
ft.setTableSharing(tableSharing)
ft
}
def buildFeatureSource(ds: DataStore, featureType: SimpleFeatureType, features: Seq[SimpleFeature]): SimpleFeatureSource = {
ds.createSchema(featureType)
val fs: AccumuloFeatureStore = ds.getFeatureSource(featureType.getTypeName).asInstanceOf[AccumuloFeatureStore]
val coll = new DefaultFeatureCollection(featureType.getTypeName)
coll.addAll(features.asJavaCollection)
logger.debug(s"Adding SimpleFeatures of type ${coll.getSchema.getTypeName} to feature store.")
fs.addFeatures(coll)
logger.debug("Done adding SimpleFeatures to feature store.")
fs
}
def getFeatureStore(ds: DataStore, simpleFeatureType: SimpleFeatureType, features: Seq[SimpleFeature]) = {
val names = ds.getNames
if(!names.contains(simpleFeatureType.getTypeName)) {
buildFeatureSource(ds, simpleFeatureType, features)
} else {
ds.getFeatureSource(simpleFeatureType.getTypeName)
}
}
// This is a quick trick to make sure that the userData is set.
lazy val featureType: SimpleFeatureType = getFeatureType()
lazy val featureEncoder = SimpleFeatureSerializers(getFeatureType(), SerializationType.AVRO)
lazy val indexValueEncoder = IndexValueEncoder(featureType)
lazy val indexEncoder = IndexSchema.buildKeyEncoder(featureType, schemaEncoding)
val defaultDateTime = new DateTime(2011, 6, 1, 0, 0, 0, DateTimeZone.forID("UTC")).toDate
// utility function that can encode multiple types of geometry
def createObject(id: String, wkt: String, dt: DateTime = new DateTime(defaultDateTime)): Seq[Mutation] = {
    val geomType: String = wkt.split("""\(""").head
val geometry: Geometry = WKTUtils.read(wkt)
val entry =
AvroSimpleFeatureFactory.buildAvroFeature(
featureType,
List(null, null, null, id, geometry, dt.toDate, dt.toDate),
s"|data|$id")
//entry.setAttribute(geomType, id)
entry.setAttribute("attr2", "2nd" + id)
indexEncoder.synchronized {
val toWrite = new FeatureToWrite(entry, "", featureEncoder, indexValueEncoder)
indexEncoder.encode(toWrite)
}
}
def createSF(e: Entry): SimpleFeature = createSF(e, featureType)
def createSF(e: Entry, sft: SimpleFeatureType): SimpleFeature = {
val geometry: Geometry = WKTUtils.read(e.wkt)
val entry =
AvroSimpleFeatureFactory.buildAvroFeature(
sft,
List(null, null, null, null, geometry, e.dt.toDate, e.dt.toDate),
s"${e.id}")
entry.setAttribute("attr2", "2nd" + e.id)
entry.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
entry
}
val points = List[Entry](
Entry("POINT(47.2 25.6)", "1"), // hit
Entry("POINT(17.2 35.6)", "2"),
Entry("POINT(87.2 15.6)", "3"),
Entry("POINT(47.2 25.6)", "4"), // hit
Entry("POINT(17.2 22.6)", "5"),
Entry("POINT(-47.2 -25.6)", "6"),
Entry("POINT(47.2 25.6)", "7"), // hit
Entry("POINT(67.2 -25.6)", "8"),
Entry("POINT(47.2 28.0)", "9"),
Entry("POINT(47.2 25.6)", "10"), // hit
Entry("POINT(47.2 25.6)", "11"), // hit
Entry("POINT(47.2 25.6)", "12"), // hit
Entry("POINT(47.2 25.6)", "13"), // hit
Entry("POINT(50.2 30.6)", "14"),
Entry("POINT(50.2 30.6)", "15"),
Entry("POINT(50.2 30.6)", "16"),
Entry("POINT(50.2 30.6)", "17"),
Entry("POINT(50.2 30.6)", "18"),
Entry("POINT(50.2 30.6)", "19"),
Entry("POINT(47.2 25.6)", "20"), // hit
Entry("POINT(47.2 25.6)", "21"), // hit
Entry("POINT(47.2 25.6)", "22"), // hit
Entry("POINT(47.2 25.6)", "23"), // hit
Entry("POINT(47.2 25.6)", "24"), // hit
Entry("POINT(47.2 25.6)", "25"), // hit
Entry("POINT(47.2 25.6)", "26"), // hit
Entry("POINT(47.2 25.6)", "27"), // hit
Entry("POINT(47.2 25.6)", "111"), // hit
Entry("POINT(47.2 25.6)", "112"), // hit
Entry("POINT(47.2 25.6)", "113"), // hit
Entry("POINT(50.2 30.6)", "114"),
Entry("POINT(50.2 30.6)", "115"),
Entry("POINT(50.2 30.6)", "116"),
Entry("POINT(50.2 30.6)", "117"),
Entry("POINT(50.2 30.6)", "118"),
Entry("POINT(50.2 30.6)", "119")
)
val allThePoints = (-180 to 180).map(lon => {
val x = lon.toString
val y = (lon / 2).toString
Entry(s"POINT($x $y)", x)
})
// add some lines to this query, both qualifying and non-qualifying
val lines = List(
Entry("LINESTRING(47.28515625 25.576171875, 48 26, 49 27)", "201"),
Entry("LINESTRING(-47.28515625 -25.576171875, -48 -26, -49 -27)", "202")
)
// add some polygons to this query, both qualifying and non-qualifying
// NOTE: Only the last of these will match the ColF set, because they tend
// to be decomposed into 15-bit (3-character) GeoHash cells.
val polygons = List(
Entry("POLYGON((44 24, 44 28, 49 27, 49 23, 44 24))", "301"),
Entry("POLYGON((-44 -24, -44 -28, -49 -27, -49 -23, -44 -24))", "302"),
Entry("POLYGON((47.28515625 25.576171875, 47.28515626 25.576171876, 47.28515627 25.576171875, 47.28515625 25.576171875))", "303")
)
val fullData = points ::: lines ::: polygons
val hugeData: Seq[Entry] = generateTestData(50000)
val mediumData: Seq[Entry] = generateTestData(1000)
def generateTestData(num: Int) = {
val rng = new Random(0)
val minTime = new DateTime(2010, 6, 1, 0, 0, 0, DateTimeZone.forID("UTC")).getMillis
val maxTime = new DateTime(2010, 8, 31, 23, 59, 59, DateTimeZone.forID("UTC")).getMillis
val pts = (1 to num).map(i => {
val wkt = "POINT(" +
(40.0 + 10.0 * rng.nextDouble()).toString + " " +
(20.0 + 10.0 * rng.nextDouble()).toString + " " +
")"
val dt = new DateTime(
math.round(minTime + (maxTime - minTime) * rng.nextDouble()),
DateTimeZone.forID("UTC")
)
Entry(wkt, (100000 + i).toString, dt)
}).toList
val gf = new GeometryFactory()
val linesPolys = pts.grouped(3).take(num/50).flatMap { threeEntries =>
val headEntry = threeEntries.head
val threeCoords = threeEntries.map(e => WKTUtils.read(e.wkt).getCoordinate)
val lineString = gf.createLineString(threeCoords.toArray)
val poly = gf.createPolygon((threeCoords :+ threeCoords.head).toArray)
val lsEntry = Entry(lineString.toString, headEntry.id+1000000, headEntry.dt)
val polyEntry = Entry(poly.toString, headEntry.id+2000000, headEntry.dt)
Seq(lsEntry, polyEntry)
}
pts ++ linesPolys
}
val pointWithNoID = List(Entry("POINT(-78.0 38.0)", null))
val shortListOfPoints = List[Entry](
Entry("POINT(47.2 25.6)", "1"), // hit
Entry("POINT(47.2 25.6)", "7"), // hit
Entry("POINT(50.2 30.6)", "117"),
Entry("POINT(50.2 30.6)", "118"),
Entry("POINT(47.2 25.6)", "4")
)
// this point's geohash overlaps with the query polygon so is a candidate result
// however, the point itself is outside of the candidate result
val geohashHitActualNotHit = List(Entry("POINT(47.999962 22.999969)", "9999"))
// 49 features
val includedDwithinPoints = Seq(
"POINT (-69.860982683110791 25.670999804594551)",
"POINT (-63.997858975742645 18.9943062994308)",
"POINT (-81.830327678750578 37.085775640526542)",
"POINT (-89.490770137868509 44.106303328073992)",
"POINT (-64.863769432654507 20.078089354931279)",
"POINT (-76.746987331939451 30.754452084293632)",
"POINT (-83.143545929322613 38.85610727559537)",
"POINT (-83.492602490558113 37.349478312620306)",
"POINT (-69.679833785334282 24.388803458126716)",
"POINT (-77.217346548139218 33.10117253660443)",
"POINT (-48.624646482440973 4.055888157616433)",
"POINT (-59.965969544921109 13.73922529393128)",
"POINT (-69.719571567790766 25.42188199205567)",
"POINT (-49.861755695550691 5.008207149016378)",
"POINT (-59.53028948688214 13.666221546357587)",
"POINT (-78.573811951638518 33.748446969499653)",
"POINT (-75.148246144186032 29.088689349563502)",
"POINT (-78.977185458964598 34.508762904115628)",
"POINT (-45.145757200454497 -0.675717483534498)",
"POINT (-70.814939235491693 24.670046948143156)",
"POINT (-63.816714527267649 18.489239068296545)",
"POINT (-54.20652730539409 9.01394728018499)",
"POINT (-71.982651812779181 26.781538560326045)",
"POINT (-51.71074903691521 7.783630450718865)",
"POINT (-57.277254589777193 11.028044049316886)",
"POINT (-51.778519694248303 7.700192534033889)",
"POINT (-54.576171577496979 9.411552717211283)",
"POINT (-58.018434745348337 13.069053319459581)",
"POINT (-79.297793388564656 33.297052361806031)",
"POINT (-54.041752176309622 8.401677730812796)",
"POINT (-77.022561401567557 31.287987114079616)",
"POINT (-54.273277144188896 8.423007576210081)",
"POINT (-82.635439242627612 37.220921020638443)",
"POINT (-66.240260183377984 22.333298874601866)",
"POINT (-63.174079891818458 18.590732503333914)",
"POINT (-49.604756624845336 3.603030252579086)",
"POINT (-51.052335953192923 7.155692678339275)",
"POINT (-79.426274623480495 34.457318692249387)",
"POINT (-50.914821524842488 5.997763902901978)",
"POINT (-58.002088202256417 13.707130381901802)",
"POINT (-82.754970200843246 37.225536788891802)",
"POINT (-58.739640682739136 13.619726121902358)",
"POINT (-85.512639282423464 40.180830488630278)",
"POINT (-88.352340439082099 44.029501612210311)",
"POINT (-71.510589787816485 27.40689166758548)",
"POINT (-47.028488437877314 2.675396523547844)",
"POINT (-69.025674259692593 23.367911342771055)",
"POINT (-67.336206873060874 22.855689772550061)",
"POINT (-45.821184492445006 1.02615639387446)",
"POINT (-59.943416863142957 15.425391686851068)"
)
// 150 features
val excludedDwithinPoints = Seq(
"POINT (-64.280357700776648 45.0)",
"POINT (-60.606123086601883 45.0)",
"POINT (-56.931888472427119 45.0)",
"POINT (-53.257653858252354 45.0)",
"POINT (-49.583419244077589 45.0)",
"POINT (-45.909184629902825 45.0)",
"POINT (-90.0 41.325765385825235)",
"POINT (-82.651530771650471 41.325765385825235)",
"POINT (-78.977296157475706 41.325765385825235)",
"POINT (-75.303061543300942 41.325765385825235)",
"POINT (-71.628826929126177 41.325765385825235)",
"POINT (-67.954592314951412 41.325765385825235)",
"POINT (-64.280357700776648 41.325765385825235)",
"POINT (-60.606123086601883 41.325765385825235)",
"POINT (-56.931888472427119 41.325765385825235)",
"POINT (-53.257653858252354 41.325765385825235)",
"POINT (-49.583419244077589 41.325765385825235)",
"POINT (-45.909184629902825 41.325765385825235)",
"POINT (-90.0 37.651530771650471)",
"POINT (-86.325765385825235 37.651530771650471)",
"POINT (-78.977296157475706 37.651530771650471)",
"POINT (-75.303061543300942 37.651530771650471)",
"POINT (-71.628826929126177 37.651530771650471)",
"POINT (-67.954592314951412 37.651530771650471)",
"POINT (-64.280357700776648 37.651530771650471)",
"POINT (-60.606123086601883 37.651530771650471)",
"POINT (-56.931888472427119 37.651530771650471)",
"POINT (-53.257653858252354 37.651530771650471)",
"POINT (-49.583419244077589 37.651530771650471)",
"POINT (-45.909184629902825 37.651530771650471)",
"POINT (-90.0 33.977296157475706)",
"POINT (-86.325765385825235 33.977296157475706)",
"POINT (-82.651530771650471 33.977296157475706)",
"POINT (-75.303061543300942 33.977296157475706)",
"POINT (-71.628826929126177 33.977296157475706)",
"POINT (-67.954592314951412 33.977296157475706)",
"POINT (-64.280357700776648 33.977296157475706)",
"POINT (-60.606123086601883 33.977296157475706)",
"POINT (-56.931888472427119 33.977296157475706)",
"POINT (-53.257653858252354 33.977296157475706)",
"POINT (-49.583419244077589 33.977296157475706)",
"POINT (-45.909184629902825 33.977296157475706)",
"POINT (-90.0 30.303061543300938)",
"POINT (-86.325765385825235 30.303061543300938)",
"POINT (-82.651530771650471 30.303061543300938)",
"POINT (-78.977296157475706 30.303061543300938)",
"POINT (-71.628826929126177 30.303061543300938)",
"POINT (-67.954592314951412 30.303061543300938)",
"POINT (-64.280357700776648 30.303061543300938)",
"POINT (-60.606123086601883 30.303061543300938)",
"POINT (-56.931888472427119 30.303061543300938)",
"POINT (-53.257653858252354 30.303061543300938)",
"POINT (-49.583419244077589 30.303061543300938)",
"POINT (-45.909184629902825 30.303061543300938)",
"POINT (-90.0 26.62882692912617)",
"POINT (-86.325765385825235 26.62882692912617)",
"POINT (-82.651530771650471 26.62882692912617)",
"POINT (-78.977296157475706 26.62882692912617)",
"POINT (-75.303061543300942 26.62882692912617)",
"POINT (-67.954592314951412 26.62882692912617)",
"POINT (-64.280357700776648 26.62882692912617)",
"POINT (-60.606123086601883 26.62882692912617)",
"POINT (-56.931888472427119 26.62882692912617)",
"POINT (-53.257653858252354 26.62882692912617)",
"POINT (-49.583419244077589 26.62882692912617)",
"POINT (-45.909184629902825 26.62882692912617)",
"POINT (-90.0 22.954592314951402)",
"POINT (-86.325765385825235 22.954592314951402)",
"POINT (-82.651530771650471 22.954592314951402)",
"POINT (-78.977296157475706 22.954592314951402)",
"POINT (-75.303061543300942 22.954592314951402)",
"POINT (-71.628826929126177 22.954592314951402)",
"POINT (-64.280357700776648 22.954592314951402)",
"POINT (-60.606123086601883 22.954592314951402)",
"POINT (-56.931888472427119 22.954592314951402)",
"POINT (-53.257653858252354 22.954592314951402)",
"POINT (-49.583419244077589 22.954592314951402)",
"POINT (-45.909184629902825 22.954592314951402)",
"POINT (-90.0 19.280357700776634)",
"POINT (-86.325765385825235 19.280357700776634)",
"POINT (-82.651530771650471 19.280357700776634)",
"POINT (-78.977296157475706 19.280357700776634)",
"POINT (-75.303061543300942 19.280357700776634)",
"POINT (-71.628826929126177 19.280357700776634)",
"POINT (-67.954592314951412 19.280357700776634)",
"POINT (-60.606123086601883 19.280357700776634)",
"POINT (-56.931888472427119 19.280357700776634)",
"POINT (-53.257653858252354 19.280357700776634)",
"POINT (-49.583419244077589 19.280357700776634)",
"POINT (-45.909184629902825 19.280357700776634)",
"POINT (-90.0 15.606123086601865)",
"POINT (-86.325765385825235 15.606123086601865)",
"POINT (-82.651530771650471 15.606123086601865)",
"POINT (-78.977296157475706 15.606123086601865)",
"POINT (-75.303061543300942 15.606123086601865)",
"POINT (-71.628826929126177 15.606123086601865)",
"POINT (-67.954592314951412 15.606123086601865)",
"POINT (-64.280357700776648 15.606123086601865)",
"POINT (-56.931888472427119 15.606123086601865)",
"POINT (-53.257653858252354 15.606123086601865)",
"POINT (-49.583419244077589 15.606123086601865)",
"POINT (-45.909184629902825 15.606123086601865)",
"POINT (-90.0 11.931888472427097)",
"POINT (-86.325765385825235 11.931888472427097)",
"POINT (-82.651530771650471 11.931888472427097)",
"POINT (-78.977296157475706 11.931888472427097)",
"POINT (-75.303061543300942 11.931888472427097)",
"POINT (-71.628826929126177 11.931888472427097)",
"POINT (-67.954592314951412 11.931888472427097)",
"POINT (-64.280357700776648 11.931888472427097)",
"POINT (-60.606123086601883 11.931888472427097)",
"POINT (-53.257653858252354 11.931888472427097)",
"POINT (-49.583419244077589 11.931888472427097)",
"POINT (-45.909184629902825 11.931888472427097)",
"POINT (-90.0 8.257653858252329)",
"POINT (-86.325765385825235 8.257653858252329)",
"POINT (-82.651530771650471 8.257653858252329)",
"POINT (-78.977296157475706 8.257653858252329)",
"POINT (-75.303061543300942 8.257653858252329)",
"POINT (-71.628826929126177 8.257653858252329)",
"POINT (-67.954592314951412 8.257653858252329)",
"POINT (-64.280357700776648 8.257653858252329)",
"POINT (-60.606123086601883 8.257653858252329)",
"POINT (-56.931888472427119 8.257653858252329)",
"POINT (-49.583419244077589 8.257653858252329)",
"POINT (-45.909184629902825 8.257653858252329)",
"POINT (-90.0 4.583419244077562)",
"POINT (-86.325765385825235 4.583419244077562)",
"POINT (-82.651530771650471 4.583419244077562)",
"POINT (-78.977296157475706 4.583419244077562)",
"POINT (-75.303061543300942 4.583419244077562)",
"POINT (-71.628826929126177 4.583419244077562)",
"POINT (-67.954592314951412 4.583419244077562)",
"POINT (-64.280357700776648 4.583419244077562)",
"POINT (-60.606123086601883 4.583419244077562)",
"POINT (-56.931888472427119 4.583419244077562)",
"POINT (-53.257653858252354 4.583419244077562)",
"POINT (-45.909184629902825 4.583419244077562)",
"POINT (-90.0 0.909184629902795)",
"POINT (-86.325765385825235 0.909184629902795)",
"POINT (-82.651530771650471 0.909184629902795)",
"POINT (-78.977296157475706 0.909184629902795)",
"POINT (-75.303061543300942 0.909184629902795)",
"POINT (-71.628826929126177 0.909184629902795)",
"POINT (-67.954592314951412 0.909184629902795)",
"POINT (-64.280357700776648 0.909184629902795)",
"POINT (-60.606123086601883 0.909184629902795)",
"POINT (-56.931888472427119 0.909184629902795)",
"POINT (-53.257653858252354 0.909184629902795)",
"POINT (-49.583419244077589 0.909184629902795)"
)
}
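// Illustrative only (not part of the original fixture): a sketch of how the sample
// entries above are typically materialised into SimpleFeatures and loaded into a
// feature store for the iterator tests. The DataStore argument stands in for an
// AccumuloDataStore that a test would obtain elsewhere.
object TestDataUsageSketch {
  def loadShortList(ds: DataStore) = {
    val features = TestData.shortListOfPoints.map(e => TestData.createSF(e))
    TestData.getFeatureStore(ds, TestData.featureType, features)
  }
}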
| lozpeng/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/iterators/TestData.scala | Scala | apache-2.0 | 20,424 |
package djinni
import djinni.ast._
import djinni.generatorTools._
import djinni.meta._
class CppMarshal(spec: Spec) extends Marshal(spec) {
override def typename(tm: MExpr): String = toCppType(tm, None)
def typename(name: String, ty: TypeDef): String = ty match {
case e: Enum => idCpp.enumType(name)
case i: Interface => idCpp.ty(name)
case r: Record => idCpp.ty(name)
}
override def fqTypename(tm: MExpr): String = toCppType(tm, Some(spec.cppNamespace))
def fqTypename(name: String, ty: TypeDef): String = ty match {
case e: Enum => withNs(Some(spec.cppNamespace), idCpp.enumType(name))
case i: Interface => withNs(Some(spec.cppNamespace), idCpp.ty(name))
case r: Record => withNs(Some(spec.cppNamespace), idCpp.ty(name))
}
override def paramType(tm: MExpr): String = toCppParamType(tm)
override def fqParamType(tm: MExpr): String = toCppParamType(tm, Some(spec.cppNamespace))
override def returnType(ret: Option[TypeRef]): String = ret.fold("void")(toCppType(_, None))
override def fqReturnType(ret: Option[TypeRef]): String = ret.fold("void")(toCppType(_, Some(spec.cppNamespace)))
override def fieldType(tm: MExpr): String = typename(tm)
override def fqFieldType(tm: MExpr): String = fqTypename(tm)
override def toCpp(tm: MExpr, expr: String): String = throw new AssertionError("cpp to cpp conversion")
override def fromCpp(tm: MExpr, expr: String): String = throw new AssertionError("cpp to cpp conversion")
def hppReferences(m: Meta, exclude: String, forwardDeclareOnly: Boolean): Seq[SymbolReference] = m match {
case p: MPrimitive => p.idlName match {
case "i8" | "i16" | "i32" | "i64" => List(ImportRef("<cstdint>"))
case _ => List()
}
case MString => List(ImportRef("<string>"))
case MDate => List(ImportRef("<chrono>"))
case MBinary => List(ImportRef("<vector>"), ImportRef("<cstdint>"))
case MOptional => List(ImportRef(spec.cppOptionalHeader))
case MList => List(ImportRef("<vector>"))
case MSet => List(ImportRef("<unordered_set>"))
case MMap => List(ImportRef("<unordered_map>"))
case d: MDef => d.defType match {
case DRecord =>
if (d.name != exclude) {
if (forwardDeclareOnly) {
List(DeclRef(s"struct ${typename(d.name, d.body)};", Some(spec.cppNamespace)))
} else {
List(ImportRef(include(d.name)))
}
} else {
List()
}
case DEnum =>
if (d.name != exclude) {
if (forwardDeclareOnly) {
List(DeclRef(s"enum class ${typename(d.name, d.body)};", Some(spec.cppNamespace)))
} else {
List(ImportRef(include(d.name)))
}
} else {
List()
}
case DInterface =>
val base = if (d.name != exclude) {
List(ImportRef("<memory>"), DeclRef(s"class ${typename(d.name, d.body)};", Some(spec.cppNamespace)))
} else {
List(ImportRef("<memory>"))
}
spec.cppNnHeader match {
case Some(nnHdr) => ImportRef(nnHdr) :: base
case _ => base
}
}
case e: MExtern => e.defType match {
// Do not forward declare extern types, they might be in arbitrary namespaces.
// This isn't a problem as extern types cannot cause dependency cycles with types being generated here
case DInterface => List(ImportRef("<memory>"), ImportRef(e.cpp.header))
case _ => List(ImportRef(e.cpp.header))
}
case p: MParam => List()
}
def cppReferences(m: Meta, exclude: String, forwardDeclareOnly: Boolean): Seq[SymbolReference] = {
// Only need to provide full definitions for cpp if forward decls were used in header
if (!forwardDeclareOnly) {
List()
} else {
m match {
case d: MDef => d.defType match {
case DRecord =>
if (d.name != exclude) {
List(ImportRef(include(d.name)))
} else {
List()
}
case DEnum =>
if (d.name != exclude) {
List(ImportRef(include(d.name)))
} else {
List()
}
case _ => List()
}
case _ => List()
}
}
}
def include(ident: String): String = q(spec.cppIncludePrefix + spec.cppFileIdentStyle(ident) + "." + spec.cppHeaderExt)
private def toCppType(ty: TypeRef, namespace: Option[String] = None): String = toCppType(ty.resolved, namespace)
private def toCppType(tm: MExpr, namespace: Option[String]): String = {
def base(m: Meta): String = m match {
case p: MPrimitive => p.cName
case MString => "std::string"
case MDate => "std::chrono::system_clock::time_point"
case MBinary => "std::vector<uint8_t>"
case MOptional => spec.cppOptionalTemplate
case MList => "std::vector"
case MSet => "std::unordered_set"
case MMap => "std::unordered_map"
case d: MDef =>
d.defType match {
case DEnum => withNs(namespace, idCpp.enumType(d.name))
case DRecord => withNs(namespace, idCpp.ty(d.name))
case DInterface => s"std::shared_ptr<${withNs(namespace, idCpp.ty(d.name))}>"
}
case e: MExtern => e.defType match {
case DInterface => s"std::shared_ptr<${e.cpp.typename}>"
case _ => e.cpp.typename
}
case p: MParam => idCpp.typeParam(p.name)
}
def expr(tm: MExpr): String = {
spec.cppNnType match {
case Some(nnType) => {
// if we're using non-nullable pointers for interfaces, then special-case
// both optional and non-optional interface types
val args = if (tm.args.isEmpty) "" else tm.args.map(expr).mkString("<", ", ", ">")
tm.base match {
case d: MDef =>
d.defType match {
case DInterface => s"${nnType}<${withNs(namespace, idCpp.ty(d.name))}>"
case _ => base(tm.base) + args
}
case MOptional =>
tm.args.head.base match {
case d: MDef =>
d.defType match {
case DInterface => s"std::shared_ptr<${withNs(namespace, idCpp.ty(d.name))}>"
case _ => base(tm.base) + args
}
case _ => base(tm.base) + args
}
case _ => base(tm.base) + args
}
}
case None =>
if (isOptionalInterface(tm)) {
// otherwise, interfaces are always plain old shared_ptr
expr(tm.args.head)
} else {
val args = if (tm.args.isEmpty) "" else tm.args.map(expr).mkString("<", ", ", ">")
base(tm.base) + args
}
}
}
expr(tm)
}
def byValue(tm: MExpr): Boolean = tm.base match {
case p: MPrimitive => true
case d: MDef => d.defType match {
case DEnum => true
case _ => false
}
case e: MExtern => e.defType match {
case DInterface => false
case DEnum => true
case DRecord => e.cpp.byValue
}
case MOptional => byValue(tm.args.head)
case _ => false
}
def byValue(td: TypeDecl): Boolean = td.body match {
case i: Interface => false
case r: Record => false
case e: Enum => true
}
  // Used during C++ generation to decide whether a parameter should be passed as const& or by value.
private def toCppParamType(tm: MExpr, namespace: Option[String] = None): String = {
val cppType = toCppType(tm, namespace)
val refType = "const " + cppType + " &"
val valueType = cppType
if(byValue(tm)) valueType else refType
}
}
| jrogers/djinni | src/source/CppMarshal.scala | Scala | apache-2.0 | 7,638 |
package org.jetbrains.plugins.scala.conversion.ast
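// Expression-level nodes of the intermediate AST built while converting Java code to Scala.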
/**
* Created by Kate Ustyuzhanina
* on 10/22/15
*/
case class ArrayAccess(expression: IntermediateNode, idxExpression: IntermediateNode)
extends IntermediateNode
case class ClassCast(operand: IntermediateNode,
castType: IntermediateNode,
isPrimitive: Boolean) extends IntermediateNode with TypedElement {
def canSimplify: Boolean =
isPrimitive && List("Int", "Long", "Double", "Float", "Byte", "Char", "Short").contains(castType.asInstanceOf[TypeConstruction].inType)
override def getType: TypeConstruction = castType.asInstanceOf[TypedElement].getType
}
case class ArrayInitializer(expresions: Seq[IntermediateNode]) extends IntermediateNode
case class BinaryExpressionConstruction(firstPart: IntermediateNode, secondPart: IntermediateNode,
                                        operation: String, inExpression: Boolean) extends IntermediateNode
case class ClassObjectAccess(expression: IntermediateNode) extends IntermediateNode
case class InstanceOfConstruction(operand: IntermediateNode,
mtype: IntermediateNode) extends IntermediateNode with TypedElement {
override def getType: TypeConstruction = mtype.asInstanceOf[TypedElement].getType
}
case class QualifiedExpression(qualifier: IntermediateNode, identifier: IntermediateNode) extends IntermediateNode
object MethodCallExpression extends IntermediateNode {
def build(reciever: IntermediateNode, methodName: String, args: IntermediateNode): MethodCallExpression = {
val identifier = methodName match {
case "this" => LiteralExpression(methodName)
case _ => LiteralExpression(escapeKeyword(methodName))
}
    MethodCallExpression(
      methodName,
      if (reciever != null) QualifiedExpression(reciever, identifier) else identifier,
      args,
      withSideEffects = false)
}
}
case class MethodCallExpression(name: String, method: IntermediateNode,
args: IntermediateNode, withSideEffects: Boolean) extends IntermediateNode
case class ExpressionList(data: Seq[IntermediateNode]) extends IntermediateNode
case class ThisExpression(value: Option[IntermediateNode]) extends IntermediateNode
case class SuperExpression(value: Option[IntermediateNode]) extends IntermediateNode
case class LiteralExpression(literal: String) extends IntermediateNode
case class ParenthesizedExpression(value: Option[IntermediateNode]) extends IntermediateNode
case class FunctionalExpression(params: IntermediateNode, body: IntermediateNode) extends IntermediateNode
object NewExpression {
def apply(mtype: IntermediateNode, arrayInitalizer: Seq[IntermediateNode],
withArrayInitalizer: Boolean = true): NewExpression = {
if (withArrayInitalizer)
NewExpression(mtype, arrayInitalizer, Seq[IntermediateNode]())
else
NewExpression(mtype, Seq[IntermediateNode](), arrayInitalizer)
}
}
case class NewExpression(mtype: IntermediateNode, arrayInitalizer: Seq[IntermediateNode],
arrayDimension: Seq[IntermediateNode]) extends IntermediateNode with TypedElement {
override def getType: TypeConstruction = mtype.asInstanceOf[TypedElement].getType
}
case class AnonymousClassExpression(anonymousClass: IntermediateNode) extends IntermediateNode
case class PolyadicExpression(args: Seq[IntermediateNode], operation: String) extends IntermediateNode
case class PrefixExpression(operand: IntermediateNode, signType: String, canBeSimplified: Boolean) extends IntermediateNode
case class PostfixExpression(operand: IntermediateNode, signType: String, canBeSimplified: Boolean) extends IntermediateNode | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/conversion/ast/ExpressionsConstruction.scala | Scala | apache-2.0 | 3,688 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.e2e
import java.io.File
import akka.actor.ActorSystem
import akka.http.scaladsl.testkit.RouteTestTimeout
import com.typesafe.config.{Config, ConfigFactory}
import org.knora.webapi.testing.tags.E2ETest
import org.knora.webapi.util.IriConversions._
import org.knora.webapi.util.{FileUtil, StringFormatter}
import org.knora.webapi.{AssertionException, E2ESpec}
import scala.concurrent.ExecutionContextExecutor
/**
* Tests [[InstanceChecker]].
*/
@E2ETest
class InstanceCheckerSpec extends E2ESpec(InstanceCheckerSpec.config) {
private implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
implicit def default(implicit system: ActorSystem): RouteTestTimeout = RouteTestTimeout(settings.defaultTimeout)
implicit val ec: ExecutionContextExecutor = system.dispatcher
private val jsonLDInstanceChecker: InstanceChecker = InstanceChecker.getJsonLDChecker
private val jsonInstanceChecker: InstanceChecker = InstanceChecker.getJsonChecker
"The InstanceChecker" should {
"accept a JSON-LD instance of anything:Thing" in {
val testDing = FileUtil.readTextFile(new File("src/test/resources/test-data/resourcesR2RV2/Testding.jsonld"))
jsonLDInstanceChecker.check(
instanceResponse = testDing,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
"reject a JSON-LD instance of anything:Thing (in the complex schema) with an extra property" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.complexThingWithExtraProperty,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "One or more instance properties are not allowed by cardinalities: http://0.0.0.0:3333/ontology/0001/anything/v2#hasExtraProperty")
}
"reject a JSON-LD instance of anything:Thing (in the complex schema) with an extra property object" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.complexThingWithExtraPropertyObject,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property http://0.0.0.0:3333/ontology/0001/anything/v2#hasBoolean has 2 objects, but its cardinality is 0-1")
}
"reject a JSON-LD instance of anything:Thing (in the complex schema) with an invalid literal type" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.complexThingWithInvalidLiteralType,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property http://api.knora.org/ontology/knora-api/v2#booleanValueAsBoolean has an object of type http://www.w3.org/2001/XMLSchema#string with literal content 'invalid literal', but type http://www.w3.org/2001/XMLSchema#boolean was expected")
}
"reject a JSON-LD instance of anything:Thing (in the complex schema) with an invalid object type" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.complexThingWithInvalidObjectType,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Instance type http://api.knora.org/ontology/knora-api/v2#DateValue is not compatible with expected class IRI http://api.knora.org/ontology/knora-api/v2#BooleanValue")
}
"reject a JSON-LD instance of anything:Thing (in the complex schema) with object content where an IRI is required" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.complexThingWithInvalidUseOfObjectInsteadOfIri,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property http://api.knora.org/ontology/knora-api/v2#textValueHasMapping requires an IRI referring to an instance of http://api.knora.org/ontology/knora-api/v2#XMLToStandoffMapping, but object content was received instead")
}
"reject a JSON-LD instance of anything:Thing (in the simple schema) with an invalid datatype" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.simpleThingWithInvalidDatatype,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/simple/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property http://0.0.0.0:3333/ontology/0001/anything/simple/v2#hasDecimal has an object of type http://api.knora.org/ontology/knora-api/simple/v2#Date with literal content 'GREGORIAN:1489 CE', but type http://www.w3.org/2001/XMLSchema#decimal was expected")
}
"reject a JSON-LD instance of anything:Thing (in the simple schema) without an rdfs:label" in {
val exception = intercept[AssertionException] {
jsonLDInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.simpleThingWithMissingLabel,
expectedClassIri = "http://0.0.0.0:3333/ontology/0001/anything/simple/v2#Thing".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property http://www.w3.org/2000/01/rdf-schema#label has 0 objects, but its cardinality is 1")
}
"accept a correct JSON instance of an admin:User" in {
jsonInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.correctUser,
expectedClassIri = "http://api.knora.org/ontology/knora-admin/v2#User".toSmartIri,
knoraRouteGet = doGetRequest
)
}
"reject a JSON instance of an admin:User with an extra property" in {
val exception = intercept[AssertionException] {
jsonInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.userWithExtraProperty,
expectedClassIri = "http://api.knora.org/ontology/knora-admin/v2#User".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "One or more instance properties are not allowed by cardinalities: extraProperty")
}
"reject a JSON instance of an admin:User without a username" in {
val exception = intercept[AssertionException] {
jsonInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.userWithMissingUsername,
expectedClassIri = "http://api.knora.org/ontology/knora-admin/v2#User".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property username has 0 objects, but its cardinality is 1")
}
"reject a JSON instance of an admin:User with an invalid literal object type" in {
val exception = intercept[AssertionException] {
jsonInstanceChecker.check(
instanceResponse = InstanceCheckerSpec.userWithInvalidObjectType,
expectedClassIri = "http://api.knora.org/ontology/knora-admin/v2#User".toSmartIri,
knoraRouteGet = doGetRequest
)
}
assert(exception.getMessage == "Property status has an object of type String with literal content 'invalidValue', but type http://www.w3.org/2001/XMLSchema#boolean was expected")
}
}
}
object InstanceCheckerSpec {
val config: Config = ConfigFactory.parseString(
"""
akka.loglevel = "DEBUG"
akka.stdout-loglevel = "DEBUG"
""".stripMargin)
val complexThingWithExtraProperty: String =
"""{
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg",
| "@type" : "anything:Thing",
| "anything:hasExtraProperty" : {
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg/values/o-j0jdxMQvanmAdpAIOcFA",
| "@type" : "knora-api:BooleanValue",
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:booleanValueAsBoolean" : true,
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:valueCreationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| }
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0"
| },
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:creationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| },
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0.20190410T084145353992Z"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}
""".stripMargin
val complexThingWithExtraPropertyObject: String =
"""{
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg",
| "@type" : "anything:Thing",
| "anything:hasBoolean" : [ {
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg/values/o-j0jdxMQvanmAdpAIOcFA",
| "@type" : "knora-api:BooleanValue",
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:booleanValueAsBoolean" : true,
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:valueCreationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| }
| }, {
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg/values/o-j0jdxMQvanmAdpAIOcFA",
| "@type" : "knora-api:BooleanValue",
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:booleanValueAsBoolean" : false,
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:valueCreationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| }
| } ],
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0"
| },
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:creationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| },
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0.20190410T084145353992Z"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}
""".stripMargin
val complexThingWithInvalidLiteralType: String =
"""{
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg",
| "@type" : "anything:Thing",
| "anything:hasBoolean" : {
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg/values/o-j0jdxMQvanmAdpAIOcFA",
| "@type" : "knora-api:BooleanValue",
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0/o=j0jdxMQvanmAdpAIOcFA"
| },
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0/o=j0jdxMQvanmAdpAIOcFA.20190410T084145353992Z"
| },
| "knora-api:valueHasUUID" : "o-j0jdxMQvanmAdpAIOcFA",
| "knora-api:booleanValueAsBoolean" : "invalid literal",
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:valueCreationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| }
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0"
| },
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:creationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| },
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0.20190410T084145353992Z"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}
""".stripMargin
val complexThingWithInvalidObjectType: String =
"""{
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg",
| "@type" : "anything:Thing",
| "anything:hasBoolean" : {
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg/values/lj35qx3vRUa6s1Q8s5Z5SA",
| "@type" : "knora-api:DateValue",
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:dateValueHasCalendar" : "GREGORIAN",
| "knora-api:dateValueHasEndEra" : "CE",
| "knora-api:dateValueHasEndYear" : 1489,
| "knora-api:dateValueHasStartEra" : "CE",
| "knora-api:dateValueHasStartYear" : 1489,
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:valueAsString" : "GREGORIAN:1489 CE",
| "knora-api:valueCreationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| }
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0"
| },
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:creationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| },
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0.20190410T084145353992Z"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}
""".stripMargin
val complexThingWithInvalidUseOfObjectInsteadOfIri: String =
"""{
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg",
| "@type" : "anything:Thing",
| "anything:hasRichtext" : {
| "@id" : "http://rdfh.ch/0001/cUnhrC1DT821lwVWQSwEgg/values/VY4XodOeSaOdttZ6rEkFPg",
| "@type" : "knora-api:TextValue",
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:textValueAsXml" : "<?xml version=\\"1.0\\" encoding=\\"UTF-8\\"?>\\n<text><p><strong>this is</strong> text</p> with standoff</text>",
| "knora-api:textValueHasMapping" : {
| "@id" : "http://rdfh.ch/standoff/mappings/StandardMapping",
| "@type" : "knora-api:XMLToStandoffMapping",
| "knora-api:hasMappingElement" : {
| "@type" : "knora-base:MappingElement",
| "knora-api:mappingHasXMLTagname" : "p"
| }
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0/VY4XodOeSaOdttZ6rEkFPg"
| },
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0/VY4XodOeSaOdttZ6rEkFPg.20190410T084145353992Z"
| },
| "knora-api:valueHasUUID" : "VY4XodOeSaOdttZ6rEkFPg",
| "knora-api:userHasPermission" : "CR",
| "knora-api:valueCreationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| }
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0"
| },
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "knora-api:attachedToUser" : {
| "@id" : "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q"
| },
| "knora-api:creationDate" : {
| "@type" : "xsd:dateTimeStamp",
| "@value" : "2019-04-10T08:41:45.353992Z"
| },
| "knora-api:hasPermissions" : "CR knora-admin:Creator|M knora-admin:ProjectMember|V knora-admin:KnownUser|RV knora-admin:UnknownUser",
| "knora-api:userHasPermission" : "CR",
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/cUnhrC1DT821lwVWQSwEgg0.20190410T084145353992Z"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}
""".stripMargin
val simpleThingWithInvalidDatatype: String =
"""
|{
| "@id" : "http://rdfh.ch/0001/oGI65x9pQkK6JhsoqavTGA",
| "@type" : "anything:Thing",
| "anything:hasDecimal" : {
| "@type" : "knora-api:Date",
| "@value" : "GREGORIAN:1489 CE"
| },
| "knora-api:arkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/oGI65x9pQkK6JhsoqavTGAE"
| },
| "knora-api:versionArkUrl" : {
| "@type" : "xsd:anyURI",
| "@value" : "http://0.0.0.0:3336/ark:/72163/1/0001/oGI65x9pQkK6JhsoqavTGAE.20190410T124515840198Z"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/simple/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/simple/v2#"
| }
|}
""".stripMargin
val simpleThingWithMissingLabel: String =
"""
|{
| "@id" : "http://rdfh.ch/0001/oGI65x9pQkK6JhsoqavTGA",
| "@type" : "anything:Thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/simple/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/simple/v2#"
| }
|}
""".stripMargin
val correctUser: String =
"""{
| "email": "[email protected]",
| "familyName": "User01",
| "givenName": "Anything",
| "groups": [
| {
| "description": "A group for thing searchers.",
| "id": "http://rdfh.ch/groups/0001/thing-searcher",
| "name": "Thing searcher",
| "project": {
| "description": [
| {
| "value": "Anything Project"
| }
| ],
| "id": "http://rdfh.ch/projects/0001",
| "keywords": [],
| "logo": null,
| "longname": "Anything Project",
| "ontologies": [
| "http://www.knora.org/ontology/0001/anything",
| "http://www.knora.org/ontology/0001/something"
| ],
| "selfjoin": false,
| "shortcode": "0001",
| "shortname": "anything",
| "status": true
| },
| "selfjoin": true,
| "status": true
| }
| ],
| "id": "http://rdfh.ch/users/9XBCrDV3SRa7kS1WwynB4Q",
| "lang": "de",
| "password": null,
| "permissions": {
| "administrativePermissionsPerProject": {
| "http://rdfh.ch/projects/0001": [
| {
| "additionalInformation": null,
| "name": "ProjectResourceCreateAllPermission",
| "permissionCode": null
| }
| ]
| },
| "groupsPerProject": {
| "http://rdfh.ch/projects/0001": [
| "http://rdfh.ch/groups/0001/thing-searcher",
| "http://www.knora.org/ontology/knora-admin#ProjectMember"
| ]
| }
| },
| "projects": [
| {
| "description": [
| {
| "value": "Anything Project"
| }
| ],
| "id": "http://rdfh.ch/projects/0001",
| "keywords": [],
| "logo": null,
| "longname": "Anything Project",
| "ontologies": [
| "http://www.knora.org/ontology/0001/anything",
| "http://www.knora.org/ontology/0001/something"
| ],
| "selfjoin": false,
| "shortcode": "0001",
| "shortname": "anything",
| "status": true
| }
| ],
| "sessionId": null,
| "status": true,
| "token": null,
| "username": "anything.user01"
|}""".stripMargin
val userWithExtraProperty: String =
"""
|{
| "username" : "test",
| "id" : "http://rdfh.ch/users/normaluser",
| "extraProperty" : "test",
| "email" : "[email protected]",
| "familyName" : "Tester",
| "givenName": "Test",
| "password" : "test",
| "lang" : "en",
| "status" : true,
| "permissions": {
| "administrativePermissionsPerProject": {
| "http://rdfh.ch/projects/0001": [
| {
| "additionalInformation": null,
| "name": "ProjectResourceCreateAllPermission",
| "permissionCode": null
| }
| ]
| },
| "groupsPerProject": {
| "http://rdfh.ch/projects/0001": [
| "http://rdfh.ch/groups/0001/thing-searcher",
| "http://www.knora.org/ontology/knora-admin#ProjectMember"
| ]
| }
| },
| "projects" : [ "http://rdfh.ch/projects/0001" ],
| "groups" : []
|}
""".stripMargin
val userWithMissingUsername: String =
"""
|{
| "id" : "http://rdfh.ch/users/normaluser",
| "email" : "[email protected]",
| "familyName" : "Tester",
| "givenName": "Test",
| "password" : "test",
| "lang" : "en",
| "status" : true,
| "permissions": {
| "administrativePermissionsPerProject": {
| "http://rdfh.ch/projects/0001": [
| {
| "additionalInformation": null,
| "name": "ProjectResourceCreateAllPermission",
| "permissionCode": null
| }
| ]
| },
| "groupsPerProject": {
| "http://rdfh.ch/projects/0001": [
| "http://rdfh.ch/groups/0001/thing-searcher",
| "http://www.knora.org/ontology/knora-admin#ProjectMember"
| ]
| }
| },
| "projects" : [ "http://rdfh.ch/projects/0001" ],
| "groups" : []
|}
""".stripMargin
val userWithInvalidObjectType: String =
"""
|{
| "id" : "http://rdfh.ch/users/normaluser",
| "username" : "test",
| "email" : "[email protected]",
| "familyName" : "Tester",
| "givenName": "Test",
| "password" : "test",
| "lang" : "en",
| "status" : "invalidValue",
| "permissions": {
| "administrativePermissionsPerProject": {
| "http://rdfh.ch/projects/0001": [
| {
| "additionalInformation": null,
| "name": "ProjectResourceCreateAllPermission",
| "permissionCode": null
| }
| ]
| },
| "groupsPerProject": {
| "http://rdfh.ch/projects/0001": [
| "http://rdfh.ch/groups/0001/thing-searcher",
| "http://www.knora.org/ontology/knora-admin#ProjectMember"
| ]
| }
| },
| "projects" : [ "http://rdfh.ch/projects/0001" ],
| "groups" : []
|}
""".stripMargin
}
| musicEnfanthen/Knora | webapi/src/test/scala/org/knora/webapi/e2e/InstanceCheckerSpec.scala | Scala | agpl-3.0 | 32,684 |
package sharry.backend.mail
import cats.effect._
import sharry.common._
import sharry.store._
import sharry.store.records.RAccount
import emil.MailAddress
import munit._
class QueriesTest extends FunSuite with StoreFixture {
test("get email from account") {
withStore { store =>
val accountId = Ident.unsafe("acc1")
val account = RAccount(
accountId,
CIIdent.unsafe("jdoe"),
AccountSource.intern,
AccountState.Active,
Password("test"),
Some("[email protected]"),
true,
0,
None,
Timestamp.Epoch
)
for {
_ <- store.transact(RAccount.insert(account))
e <- store.transact(Queries.getEmail(accountId))
_ <- IO(assertEquals(e, Some(MailAddress(Some("jdoe"), "[email protected]"))))
} yield ()
}
}
}
| eikek/sharry | modules/backend/src/test/scala/sharry/backend/mail/QueriesTest.scala | Scala | gpl-3.0 | 837 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package toplevel
package packaging
import statements.ScDeclaredElementsHolder
import base.ScStableCodeReferenceElement
import api.toplevel._
trait ScPackaging extends ScToplevelElement with ScImportsHolder with ScPackageContainer with ScDeclaredElementsHolder with ScPackageLike {
def fullPackageName: String
def getPackageName: String
def getBodyText: String
def reference: Option[ScStableCodeReferenceElement]
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/packaging/ScPackaging.scala | Scala | apache-2.0 | 501 |
package nexus.diff
import cats._
/**
* Typeclass witnessing a structure `S[_]` can be unrolled under the computation box `F[_]`.
* For example, a traced sequence `Traced[Seq[A]]` can be unrolled to `Seq[Traced[A]]`.
*
* This is a more specific abstraction than [[cats.Traverse]].
* @author Tongfei Chen
*/
trait Unroll[F[_], S[_]] {
def traverse[A, B](fa: F[A])(f: A *=> S[B]): S[F[B]]
def unroll[A](fsa: F[S[A]]): S[F[A]]
}
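// For intuition, a sketch of a hypothetical instance for plain containers (not part of
// this library), unrolling an Option of a List into a List of Options; it assumes that
// an `A *=> B` can be applied like an ordinary function:
//
//   implicit def unrollOptionList: Unroll[Option, List] = new Unroll[Option, List] {
//     def traverse[A, B](fa: Option[A])(f: A *=> List[B]): List[Option[B]] =
//       fa.fold(List.empty[Option[B]])(a => f(a).map(Option(_)))
//     def unroll[A](fsa: Option[List[A]]): List[Option[A]] =
//       fsa.fold(List.empty[Option[A]])(_.map(Option(_)))
//   }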
object Unroll {
implicit def unrollAnyId[S[_]]: Unroll[Id, S] = new Unroll[Id, S] {
def traverse[A, B](a: A)(f: A *=> S[B]) = f(a)
def unroll[A](sa: S[A]) = sa
}
}
| ctongfei/nexus | diff/src/main/scala/nexus/diff/Unroll.scala | Scala | mit | 619 |
package com.mesosphere.universe.v3.model
import com.mesosphere.cosmos.circe.Decoders._
import com.mesosphere.cosmos.circe.Encoders._
import io.circe.Decoder
import io.circe.Encoder
import io.circe.generic.semiauto.deriveDecoder
import io.circe.generic.semiauto.deriveEncoder
import java.nio.ByteBuffer
/**
* Conforms to: https://universe.mesosphere.com/v3/schema/repo#/definitions/marathon
*/
case class Marathon(v2AppMustacheTemplate: ByteBuffer)
object Marathon {
implicit val decodeMarathon: Decoder[Marathon] = deriveDecoder[Marathon]
implicit val encodeMarathon: Encoder[Marathon] = deriveEncoder[Marathon]
}
| dcos/cosmos | cosmos-common/src/main/scala/com/mesosphere/universe/v3/model/Marathon.scala | Scala | apache-2.0 | 625 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.rogach.scallop.{ScallopConfBase, Subcommand}
import spray.json.{JsObject, JsString, JsValue, RootJsonFormat}
import org.apache.openwhisk.common.{Logging, TransactionId}
import org.apache.openwhisk.core.cli.{CommandError, CommandMessages, IllegalState, WhiskCommand}
import org.apache.openwhisk.core.database.LimitsCommand.LimitEntity
import org.apache.openwhisk.core.entity.types.AuthStore
import org.apache.openwhisk.core.entity.{
DocId,
DocInfo,
DocRevision,
EntityName,
Subject,
UserLimits,
WhiskAuth,
WhiskDocumentReader
}
import org.apache.openwhisk.http.Messages
import org.apache.openwhisk.spi.SpiLoader
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.classTag
import scala.util.{Properties, Try}
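// Typical invocations (a sketch; the exact executable name depends on how this subcommand
// is wired into the admin CLI, commonly `wskadmin`):
//   wskadmin limits set <namespace> --invocationsPerMinute 100 --concurrentInvocations 50
//   wskadmin limits get <namespace>
//   wskadmin limits delete <namespace>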
class LimitsCommand extends Subcommand("limits") with WhiskCommand {
descr("manage namespace-specific limits")
val set = new Subcommand("set") {
descr("set limits for a given namespace")
val namespace = trailArg[String](descr = "the namespace to set limits for")
    // name is explicitly mentioned for backward compatibility;
    // otherwise scallop would convert it to a dash-separated name
val invocationsPerMinute =
opt[Int](
descr = "invocations per minute allowed",
argName = "INVOCATIONSPERMINUTE",
validate = _ >= 0,
name = "invocationsPerMinute",
noshort = true)
val firesPerMinute =
opt[Int](
descr = "trigger fires per minute allowed",
argName = "FIRESPERMINUTE",
validate = _ >= 0,
name = "firesPerMinute",
noshort = true)
val concurrentInvocations =
opt[Int](
descr = "concurrent invocations allowed for this namespace",
argName = "CONCURRENTINVOCATIONS",
validate = _ >= 0,
name = "concurrentInvocations",
noshort = true)
val allowedKinds =
opt[List[String]](
descr = "list of runtime kinds allowed in this namespace",
argName = "ALLOWEDKINDS",
name = "allowedKinds",
noshort = true,
default = None)
val storeActivations =
opt[String](
descr = "enable or disable storing of activations to datastore for this namespace",
argName = "STOREACTIVATIONS",
name = "storeActivations",
noshort = true,
default = None)
lazy val limits: LimitEntity =
new LimitEntity(
EntityName(namespace()),
UserLimits(
invocationsPerMinute.toOption,
concurrentInvocations.toOption,
firesPerMinute.toOption,
allowedKinds.toOption.map(_.toSet),
storeActivations.toOption.map(_.toBoolean)))
}
addSubcommand(set)
val get = new Subcommand("get") {
descr("get limits for a given namespace (if none exist, system defaults apply)")
val namespace = trailArg[String](descr = "the namespace to get limits for`")
}
addSubcommand(get)
val delete = new Subcommand("delete") {
descr("delete limits for a given namespace (system defaults apply)")
val namespace = trailArg[String](descr = "the namespace to delete limits for")
}
addSubcommand(delete)
def exec(cmd: ScallopConfBase)(implicit system: ActorSystem,
logging: Logging,
materializer: ActorMaterializer,
transid: TransactionId): Future[Either[CommandError, String]] = {
implicit val executionContext = system.dispatcher
val authStore = LimitsCommand.createDataStore()
val result = cmd match {
case `set` => setLimits(authStore)
case `get` => getLimits(authStore)
case `delete` => delLimits(authStore)
}
result.onComplete { _ =>
authStore.shutdown()
}
result
}
def setLimits(authStore: AuthStore)(implicit transid: TransactionId,
ec: ExecutionContext): Future[Either[CommandError, String]] = {
authStore
.get[LimitEntity](set.limits.docinfo)
.flatMap { limits =>
val newLimits = set.limits.revision[LimitEntity](limits.rev)
authStore.put(newLimits).map(_ => Right(CommandMessages.limitsSuccessfullyUpdated(limits.name.asString)))
}
.recoverWith {
case _: NoDocumentException =>
authStore.put(set.limits).map(_ => Right(CommandMessages.limitsSuccessfullySet(set.limits.name.asString)))
}
}
def getLimits(authStore: AuthStore)(implicit transid: TransactionId,
ec: ExecutionContext): Future[Either[CommandError, String]] = {
val info = DocInfo(LimitsCommand.limitIdOf(EntityName(get.namespace())))
authStore
.get[LimitEntity](info)
.map { le =>
val l = le.limits
val msg = Seq(
l.concurrentInvocations.map(ci => s"concurrentInvocations = $ci"),
l.invocationsPerMinute.map(i => s"invocationsPerMinute = $i"),
l.firesPerMinute.map(i => s"firesPerMinute = $i"),
l.allowedKinds.map(k => s"allowedKinds = ${k.mkString(", ")}"),
l.storeActivations.map(sa => s"storeActivations = $sa")).flatten.mkString(Properties.lineSeparator)
Right(msg)
}
.recover {
case _: NoDocumentException =>
Right(CommandMessages.defaultLimits)
}
}
def delLimits(authStore: AuthStore)(implicit transid: TransactionId,
ec: ExecutionContext): Future[Either[CommandError, String]] = {
val info = DocInfo(LimitsCommand.limitIdOf(EntityName(delete.namespace())))
authStore
.get[LimitEntity](info)
.flatMap { l =>
authStore.del(l.docinfo).map(_ => Right(CommandMessages.limitsDeleted))
}
.recover {
case _: NoDocumentException =>
Left(IllegalState(CommandMessages.limitsNotFound(delete.namespace())))
}
}
}
object LimitsCommand {
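  // Limits are stored in the auth store under a per-namespace document id,
  // e.g. namespace "guest" maps to doc id "guest/limits".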
def limitIdOf(name: EntityName) = DocId(s"${name.name}/limits")
def createDataStore()(implicit system: ActorSystem,
logging: Logging,
materializer: ActorMaterializer): ArtifactStore[WhiskAuth] =
SpiLoader
.get[ArtifactStoreProvider]
.makeStore[WhiskAuth]()(classTag[WhiskAuth], LimitsFormat, WhiskDocumentReader, system, logging, materializer)
class LimitEntity(val name: EntityName, val limits: UserLimits) extends WhiskAuth(Subject(), Set.empty) {
override def docid: DocId = limitIdOf(name)
    // There is no API to write limits, so piggyback on WhiskAuth but replace the auth JSON
    // with limits.
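    // The stored document then carries only the limit fields plus _id/_rev, e.g. roughly
    //   { "_id": "guest/limits", "invocationsPerMinute": 100, ... },
    // rather than a regular WhiskAuth record.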
override def toJson: JsObject = UserLimits.serdes.write(limits).asJsObject
}
private object LimitsFormat extends RootJsonFormat[WhiskAuth] {
override def read(json: JsValue): WhiskAuth = {
val r = Try[LimitEntity] {
val limits = UserLimits.serdes.read(json)
val JsString(id) = json.asJsObject.fields("_id")
val JsString(rev) = json.asJsObject.fields("_rev")
val Array(name, _) = id.split('/')
new LimitEntity(EntityName(name), limits).revision[LimitEntity](DocRevision(rev))
}
if (r.isSuccess) r.get else throw DocumentUnreadable(Messages.corruptedEntity)
}
override def write(obj: WhiskAuth): JsValue = obj.toDocumentRecord
}
}
| houshengbo/openwhisk | tools/admin/src/main/scala/org/apache/openwhisk/core/database/LimitsCommand.scala | Scala | apache-2.0 | 8,207 |
package sangria
import sangria.validation._
package object schema {
val IntType = ScalarType[Int]("Int",
description = Some("32-bit integer value"),
coerceOutput = ast.IntValue(_),
coerceUserInput = {
case i: Int => Right(i)
case i: Long if i.isValidInt => Right(i.toInt)
case i: BigInt if !i.isValidInt => Left(BigIntCoercionViolation)
case i: BigInt => Right(i.intValue)
case _ => Left(IntCoercionViolation)
},
coerceInput = {
case ast.IntValue(i, _) => Right(i)
case ast.BigIntValue(i, _) if !i.isValidInt => Left(BigIntCoercionViolation)
case ast.BigIntValue(i, _) => Right(i.intValue)
case _ => Left(IntCoercionViolation)
})
val LongType = ScalarType[Long]("Long",
description = Some("64-bit integer value"),
coerceOutput = l => ast.BigIntValue(BigInt(l)),
coerceUserInput = {
case i: Int => Right(i: Long)
case i: Long => Right(i)
case i: BigInt if !i.isValidLong => Left(BigLongCoercionViolation)
case i: BigInt => Right(i.longValue)
case _ => Left(LongCoercionViolation)
},
coerceInput = {
case ast.IntValue(i, _) => Right(i: Long)
case ast.BigIntValue(i, _) if !i.isValidLong => Left(BigLongCoercionViolation)
case ast.BigIntValue(i, _) => Right(i.longValue)
case _ => Left(LongCoercionViolation)
})
val BigIntType = ScalarType[BigInt]("BigInt",
description = Some("Arbitrary big integer value"),
coerceOutput = ast.BigIntValue(_),
coerceUserInput = {
case i: Int => Right(BigInt(i))
case i: Long => Right(BigInt(i))
case i: BigInt => Right(i)
case _ => Left(IntCoercionViolation)
},
coerceInput = {
case ast.IntValue(i, _) => Right(i)
case ast.BigIntValue(i, _) => Right(i)
case _ => Left(IntCoercionViolation)
})
val FloatType = ScalarType[Double]("Float",
coerceOutput = ast.FloatValue(_),
coerceUserInput = {
case i: Int => Right(i.toDouble)
case i: Long => Right(i.toDouble)
case i: BigInt if !i.isValidDouble => Left(BigDecimalCoercionViolation)
case i: BigInt => Right(i.doubleValue())
case d: Double => Right(d)
case d: BigDecimal if !d.isDecimalDouble => Left(BigDecimalCoercionViolation)
case d: BigDecimal => Right(d.doubleValue())
case _ => Left(FloatCoercionViolation)
},
coerceInput = {
case ast.FloatValue(d, _) => Right(d)
case ast.BigDecimalValue(d, _) if !d.isDecimalDouble => Left(BigDecimalCoercionViolation)
case ast.BigDecimalValue(d, _) => Right(d.doubleValue)
case ast.IntValue(i, _) => Right(i)
case ast.BigIntValue(i, _) if !i.isValidDouble => Left(BigDecimalCoercionViolation)
case ast.BigIntValue(i, _) => Right(i.doubleValue)
case _ => Left(FloatCoercionViolation)
})
val BigDecimalType = ScalarType[BigDecimal]("BigDecimal",
coerceOutput = ast.BigDecimalValue(_),
coerceUserInput = {
case i: Int => Right(BigDecimal(i))
case i: Long => Right(BigDecimal(i))
case i: BigInt => Right(BigDecimal(i))
case d: Double => Right(BigDecimal(d))
case d: BigDecimal => Right(d)
case _ => Left(FloatCoercionViolation)
},
coerceInput = {
case ast.BigDecimalValue(d, _) => Right(d)
case ast.FloatValue(d, _) => Right(BigDecimal(d))
case ast.IntValue(i, _) => Right(BigDecimal(i))
case ast.BigIntValue(i, _) => Right(BigDecimal(i))
case _ => Left(FloatCoercionViolation)
})
val BooleanType = ScalarType[Boolean]("Boolean",
coerceOutput = b => ast.BooleanValue(b),
coerceUserInput = {
case b: Boolean => Right(b)
case _ => Left(BooleanCoercionViolation)
},
coerceInput = {
case ast.BooleanValue(b, _) => Right(b)
case _ => Left(BooleanCoercionViolation)
})
val StringType = ScalarType[String]("String",
coerceOutput = s => ast.StringValue(s),
coerceUserInput = {
case s: String => Right(s)
case _ => Left(StringCoercionViolation)
},
coerceInput = {
case ast.StringValue(s, _) => Right(s)
case _ => Left(StringCoercionViolation)
})
val IDType = ScalarType[String]("ID",
coerceOutput = s => ast.StringValue(s),
coerceUserInput = {
case s: String => Right(s)
case i: Int => Right(i.toString)
case i: Long => Right(i.toString)
case i: BigInt => Right(i.toString)
case _ => Left(IDCoercionViolation)
},
coerceInput = {
case ast.StringValue(id, _) => Right(id)
case ast.IntValue(id, _) => Right(id.toString)
case ast.BigIntValue(id, _) => Right(id.toString)
case _ => Left(IDCoercionViolation)
})
val BuiltinScalars = IntType :: LongType :: BigIntType :: FloatType :: BigDecimalType :: BooleanType :: StringType :: IDType :: Nil
val IfArg = Argument("if", BooleanType, "Included when true.")
val IncludeDirective = Directive("include",
description = Some("Directs the executor to include this field or fragment only when the `if` argument is true."),
arguments = IfArg :: Nil,
onOperation = false,
onFragment = true,
onField = true,
shouldInclude = ctx => ctx.arg[Boolean](IfArg))
val SkipDirective = Directive("skip",
description = Some("Directs the executor to skip this field or fragment when the `if` argument is true."),
arguments = IfArg :: Nil,
onOperation = false,
onFragment = true,
onField = true,
shouldInclude = ctx => !ctx.arg[Boolean](IfArg))
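  // In a GraphQL document these appear as field or fragment directives, e.g.
  // `friends @include(if: $withFriends) { name }` or `... @skip(if: $minimal)`.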
val BuiltinDirectives = IncludeDirective :: SkipDirective :: Nil
def fields[Ctx, Val](fields: Field[Ctx, Val]*): List[Field[Ctx, Val]] = fields.toList
def interfaces[Ctx, Concrete](interfaces: PossibleInterface[Ctx, Concrete]*): List[PossibleInterface[Ctx, Concrete]] =
interfaces.toList
def possibleTypes[Ctx, Abstract](objectTypes: PossibleObject[Ctx, Abstract]*): List[PossibleObject[Ctx, Abstract]] =
objectTypes.toList
}
| narahari92/sangria | src/main/scala/sangria/schema/package.scala | Scala | apache-2.0 | 5,984 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.rpc.RpcEndpointRef
/**
* :: DeveloperApi ::
* Class having information about a receiver
*/
@DeveloperApi
case class ReceiverInfo(
streamId: Int,
name: String,
active: Boolean,
location: String,
lastErrorMessage: String = "",
lastError: String = "",
lastErrorTime: Long = -1L
) {
}
| practice-vishnoi/dev-spark-1 | streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverInfo.scala | Scala | apache-2.0 | 1,236 |
package com.arcusys.learn.liferay.services
import com.arcusys.learn.liferay.LiferayClasses.{LDynamicQuery, LRatingsEntry}
import com.liferay.portal.kernel.service.ServiceContextThreadLocal
import com.liferay.ratings.kernel.service.RatingsEntryLocalServiceUtil
import scala.collection.JavaConverters._
object RatingsEntryLocalServiceHelper {
// Used to convert rating values for Liferay 7.
  // Liferay 7 stores scores in the 0.0 - 1.0 range.
  // Liferay 6 uses MIN (1) and MAX (5) rating constants (not in the 0.0 - 1.0 interval).
  // Valamis uses a 1-5 rating.
private val RatingMaxScore = 5
def updateRatingsEntry(rating: LRatingsEntry): LRatingsEntry = {
RatingsEntryLocalServiceUtil.updateRatingsEntry(toLiferay(rating))
}
def dynamicQuery(query: LDynamicQuery): Seq[LRatingsEntry] = {
RatingsEntryLocalServiceUtil.dynamicQuery[LRatingsEntry](query).asScala
.map(toValamis)
}
def dynamicQuery(): LDynamicQuery = {
RatingsEntryLocalServiceUtil.dynamicQuery()
}
def getRatingEntry(userId: Long, className: String, classPK: Long): LRatingsEntry = {
val entity = RatingsEntryLocalServiceUtil.getEntry(userId, className, classPK)
toValamis(entity)
}
def getEntries(className: String, classPK: Long):Seq[LRatingsEntry] = {
RatingsEntryLocalServiceUtil.getEntries(className, classPK).asScala
.map(toValamis)
}
def deleteEntry(userId: Long, className: String, classPK: Long): Unit = {
RatingsEntryLocalServiceUtil.deleteEntry(userId, className, classPK)
}
def updateEntry(userId: Long, className: String, classPK: Long, score: Double): LRatingsEntry = {
val serviceContext = ServiceContextThreadLocal.getServiceContext
RatingsEntryLocalServiceUtil.updateEntry(userId, className, classPK, toLiferay(score), serviceContext)
}
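  // Conversion examples: a Valamis score of 5.0 is stored as 1.0 in Liferay 7,
  // and a stored 0.6 reads back as a Valamis score of 3.0.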
private def toLiferay(value: Double): Double = {
value / RatingMaxScore
}
private def toLiferay(rating: LRatingsEntry): LRatingsEntry = {
rating.setScore(toLiferay(rating.getScore))
rating
}
private def toValamis(rating: LRatingsEntry): LRatingsEntry = {
rating.setScore(rating.getScore * RatingMaxScore)
rating
}
}
| arcusys/Valamis | learn-liferay700-services/src/main/scala/com/arcusys/learn/liferay/services/RatingsEntryLocalServiceHelper.scala | Scala | gpl-3.0 | 2,152 |
package mesosphere.marathon
package api
object MarathonMediaType {
/**
* JSON media type plus a weight which prefers this media type over alternatives if there are multiple
* matches and no other has a qs value of >= 2.
*
* Related issue: https://github.com/mesosphere/marathon/issues/1647
*
* Further information: http://scribbles.fried.se/2011/04/browser-views-in-jersey-and-fed-up.html
* A higher "qs" value indicates a higher precedence if there are multiple handlers
* with a matching @Produces value.
*/
final val PREFERRED_APPLICATION_JSON = "application/json;qs=2"
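  // Typical use (a sketch): annotating a Jersey resource method with
  // @Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON)) lets it win over
  // handlers that only declare plain application/json.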
}
| guenter/marathon | src/main/scala/mesosphere/marathon/api/MarathonMediaType.scala | Scala | apache-2.0 | 616 |
package com.salcedo.rapbot.sense
import akka.actor.{Actor, ActorLogging, Props}
import akka.dispatch.Futures
import com.salcedo.rapbot.sense.SenseActor._
import com.salcedo.rapbot.snapshot.RemoteSnapshot
import com.salcedo.rapbot.snapshot.SnapshotActor.TakeSubSystemSnapshot
import scala.concurrent.Future
object SenseActor {
case class OrientationRequest()
case class Orientation(yaw: Double, pitch: Double, roll: Double)
case class AccelerationRequest()
case class Acceleration(x: Double, y: Double, z: Double)
case class GyroscopeRequest()
case class Gyroscope(x: Double, y: Double, z: Double)
case class CompassRequest()
case class Compass(north: Double)
case class TemperatureRequest()
case class Temperature(celsius: Double)
case class HumidityRequest()
case class Humidity(atmospheres: Double)
case class Environment(
orientation: Orientation,
acceleration: Acceleration,
gyroscope: Gyroscope,
compass: Compass,
temperature: Temperature,
humidity: Humidity
)
def props(): Props = Props(new SenseActor())
}
class SenseActor() extends Actor with RemoteSnapshot with ActorLogging {
override def receive: PartialFunction[Any, Unit] = {
case _: OrientationRequest => this.orientation()
case _: AccelerationRequest => this.acceleration()
case _: GyroscopeRequest => this.gyroscope()
case _: CompassRequest => this.compass()
case _: TemperatureRequest => this.temperature()
case _: HumidityRequest => this.humidity()
case _: TakeSubSystemSnapshot => this.snapshot()
}
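  // The handlers below are placeholders: no sensor hardware is queried yet, and the
  // remote snapshot completes with a null Environment.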
def orientation(): Unit = {
}
def acceleration(): Unit = {
}
def gyroscope(): Unit = {
}
def compass(): Unit = {
}
def temperature(): Unit = {
}
def humidity(): Unit = {
}
override def remoteSnapshot: Future[Environment] = Futures.successful(null)
}
| misalcedo/RapBot | Hub/src/main/scala/com/salcedo/rapbot/sense/SenseActor.scala | Scala | mit | 2,014 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.collection.parallel.immutable.ParRange
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
import org.apache.spark.sql.SparkSession
class SQLExecutionSuite extends SparkFunSuite {
test("concurrent query execution (SPARK-10548)") {
val conf = new SparkConf()
.setMaster("local[*]")
.setAppName("test")
val goodSparkContext = new SparkContext(conf)
try {
testConcurrentQueryExecution(goodSparkContext)
} finally {
goodSparkContext.stop()
}
}
test("concurrent query execution with fork-join pool (SPARK-13747)") {
val spark = SparkSession.builder
.master("local[*]")
.appName("test")
.getOrCreate()
import spark.implicits._
try {
// Should not throw IllegalArgumentException
new ParRange(1 to 100).foreach { _ =>
spark.sparkContext.parallelize(1 to 5).map { i => (i, i) }.toDF("a", "b").count()
}
} finally {
spark.sparkContext.stop()
}
}
/**
* Trigger SPARK-10548 by mocking a parent and its child thread executing queries concurrently.
*/
private def testConcurrentQueryExecution(sc: SparkContext): Unit = {
val spark = SparkSession.builder.getOrCreate()
import spark.implicits._
// Initialize local properties. This is necessary for the test to pass.
sc.getLocalProperties
// Set up a thread that runs executes a simple SQL query.
// Before starting the thread, mutate the execution ID in the parent.
// The child thread should not see the effect of this change.
var throwable: Option[Throwable] = None
val child = new Thread {
override def run(): Unit = {
try {
sc.parallelize(1 to 100).map { i => (i, i) }.toDF("a", "b").collect()
} catch {
case t: Throwable =>
throwable = Some(t)
}
}
}
sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, "anything")
child.start()
child.join()
// The throwable is thrown from the child thread so it doesn't have a helpful stack trace
throwable.foreach { t =>
t.setStackTrace(t.getStackTrace ++ Thread.currentThread.getStackTrace)
throw t
}
}
test("Finding QueryExecution for given executionId") {
val spark = SparkSession.builder.master("local[*]").appName("test").getOrCreate()
import spark.implicits._
var queryExecution: QueryExecution = null
spark.sparkContext.addSparkListener(new SparkListener {
override def onJobStart(jobStart: SparkListenerJobStart): Unit = {
val executionIdStr = jobStart.properties.getProperty(SQLExecution.EXECUTION_ID_KEY)
if (executionIdStr != null) {
queryExecution = SQLExecution.getQueryExecution(executionIdStr.toLong)
}
SQLExecutionSuite.canProgress = true
}
})
val df = spark.range(1).map { x =>
while (!SQLExecutionSuite.canProgress) {
Thread.sleep(1)
}
x
}
df.collect()
assert(df.queryExecution === queryExecution)
spark.stop()
}
}
object SQLExecutionSuite {
@volatile var canProgress = false
}
| jkbradley/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala | Scala | apache-2.0 | 4,033 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.businessmatching.updateservice
import controllers.actions.SuccessfulAuthAction
import models.businessmatching.{ForeignExchange, _}
import models.businessmatching.updateservice.ServiceChangeRegister
import models.flowmanagement.ChangeSubSectorFlowModel
import models.moneyservicebusiness.{MoneyServiceBusiness => MSB, _}
import org.mockito.Mockito.{never, verify}
import org.mockito.Matchers.{any, eq => eqTo}
import models.tradingpremises.{ChequeCashingScrapMetal => TPChequeCashingScrapMetal, CurrencyExchange => TPCurrencyExchange, TransmittingMoney => TPTransmittingMoney, ChequeCashingNotScrapMetal => _, _}
import org.scalatest.concurrent.ScalaFutures
import play.api.test.Helpers._
import utils.{AmlsSpec, DependencyMocks}
class ChangeSubSectorHelperSpec extends AmlsSpec with ScalaFutures {
trait Fixture extends DependencyMocks {
self =>
val helper = new ChangeSubSectorHelper(
SuccessfulAuthAction,
mockCacheConnector)
}
"requires a PSR Number" when {
"Transmitting money is selected and there is no PSR number" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney, ChequeCashingNotScrapMetal)), None)
helper.requiresPSRNumber(model) mustBe true
}
}
"does not require a PSR Number" when {
"Transmitting money is selected and there is a PSR number" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney, ChequeCashingNotScrapMetal)), Some(BusinessAppliedForPSRNumberYes("XXXX")))
helper.requiresPSRNumber(model) mustBe false
}
"Transmitting money is not selected" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal, ChequeCashingNotScrapMetal)), None)
helper.requiresPSRNumber(model) mustBe false
}
}
"creating the flow model" must {
"populate the sub sectors from the data cache" in new Fixture {
val expectedModel = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney, ChequeCashingNotScrapMetal)), Some(BusinessAppliedForPSRNumberYes("XXXX")))
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
await {
helper.createFlowModel("internalId")
} mustBe expectedModel
}
}
"get or create the flow model" must {
"create and populate a new one when it doesn't exist" in new Fixture {
val expectedModel = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney, ChequeCashingNotScrapMetal)), Some(BusinessAppliedForPSRNumberYes("XXXX")))
mockCacheFetch[ChangeSubSectorFlowModel](None, Some(ChangeSubSectorFlowModel.key))
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
await {
helper.getOrCreateFlowModel("internalId")
} mustBe expectedModel
}
"return an existing one whe it does exist" in new Fixture {
val expectedModel = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney, ChequeCashingNotScrapMetal)), Some(BusinessAppliedForPSRNumberYes("XXXX")))
mockCacheFetch[ChangeSubSectorFlowModel](Some(expectedModel), Some(ChangeSubSectorFlowModel.key))
await {
helper.getOrCreateFlowModel("internalId")
} mustBe expectedModel
}
}
"updating the sub sectors" must {
"return an empty entity where no msb exists in cache" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney)), Some(BusinessAppliedForPSRNumberYes("XXXX")))
mockCacheFetch[MSB](None, Some(MSB.key))
await {
helper.updateMsb("internalId", model)
} mustBe MSB()
}
"wipe the currency exchange questions when it isn't set" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney)), Some(BusinessAppliedForPSRNumberYes("XXXX")))
mockCacheFetch[MSB](
Some(MSB(
ceTransactionsInNext12Months = Some(mock[CETransactionsInNext12Months]),
whichCurrencies = Some(mock[WhichCurrencies]),
hasAccepted = true)),
Some(MSB.key))
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
mockCacheSave[MSB]
val updatedMsb = await(helper.updateMsb("internalId", model))
updatedMsb.ceTransactionsInNext12Months mustBe None
updatedMsb.whichCurrencies mustBe None
updatedMsb.hasAccepted mustBe true
}
"wipe the transmitting money questions when it isn't set" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingNotScrapMetal)), None)
mockCacheFetch[MSB](
Some(MSB(
businessUseAnIPSP = Some(mock[BusinessUseAnIPSP]),
fundsTransfer = Some(mock[FundsTransfer]),
transactionsInNext12Months = Some(mock[TransactionsInNext12Months]),
sendMoneyToOtherCountry = Some(mock[SendMoneyToOtherCountry]),
sendTheLargestAmountsOfMoney = Some(mock[SendTheLargestAmountsOfMoney]),
mostTransactions = Some(mock[MostTransactions]),
hasAccepted = true)),
Some(MSB.key))
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
mockCacheSave[MSB]
val updatedMsb = await(helper.updateMsb("internalId", model))
updatedMsb.businessUseAnIPSP mustBe None
updatedMsb.fundsTransfer mustBe None
updatedMsb.transactionsInNext12Months mustBe None
updatedMsb.sendMoneyToOtherCountry mustBe None
updatedMsb.sendTheLargestAmountsOfMoney mustBe None
updatedMsb.mostTransactions mustBe None
updatedMsb.hasChanged mustBe false
updatedMsb.hasAccepted mustBe true
}
"leave MSB alone if there are no sub-sectors to add" in new Fixture {
val model = ChangeSubSectorFlowModel(None, None)
val msb = MSB(
businessUseAnIPSP = Some(mock[BusinessUseAnIPSP]),
fundsTransfer = Some(mock[FundsTransfer]),
transactionsInNext12Months = Some(mock[TransactionsInNext12Months]),
sendMoneyToOtherCountry = Some(mock[SendMoneyToOtherCountry]),
sendTheLargestAmountsOfMoney = Some(mock[SendTheLargestAmountsOfMoney]),
mostTransactions = Some(mock[MostTransactions]),
hasAccepted = true)
mockCacheFetch[MSB](Some(msb), Some(MSB.key))
await(helper.updateMsb("internalId", model)) mustEqual msb
verify(mockCacheConnector, never).save(any(), eqTo(MSB.key), any[MSB])(any(), any())
}
"wipe the psr number when transmitting money isn't set" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingNotScrapMetal)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")),
hasAccepted = true)),
Some(BusinessMatching.key))
mockCacheSave[BusinessMatching]
val updatedBm = await(helper.updateBusinessMatching("internalId", model))
updatedBm.businessAppliedForPSRNumber mustBe None
updatedBm.hasAccepted mustBe true
}
"apply the PSR number when one is given, and transmitting money is given" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(TransmittingMoney)), Some(BusinessAppliedForPSRNumberYes("12345678")))
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(ChequeCashingScrapMetal))))),
Some(BusinessMatching.key))
mockCacheSave[BusinessMatching]
val updatedBm = await(helper.updateBusinessMatching("internalId", model))
updatedBm.businessAppliedForPSRNumber mustBe Some(BusinessAppliedForPSRNumberYes("12345678"))
updatedBm.hasChanged mustBe true
}
"update the business matching sub sectors" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")),
hasAccepted = true)),
Some(BusinessMatching.key))
mockCacheSave[BusinessMatching]
val updatedBm = await(helper.updateBusinessMatching("internalId", model))
updatedBm.msbServices.get.msbServices mustBe Set(ChequeCashingScrapMetal)
updatedBm.hasAccepted mustBe true
}
"leave Business Matching alone if there are no sub-sectors to add" in new Fixture {
val model = ChangeSubSectorFlowModel(None, None)
val bm = BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")),
hasAccepted = true)
mockCacheFetch[BusinessMatching](
Some(bm),
Some(BusinessMatching.key))
await(helper.updateBusinessMatching("internalId", model)) mustBe bm
verify(mockCacheConnector, never).save(any(), eqTo(BusinessMatching.key), any[BusinessMatching])(any(), any())
}
"update the business matching sub sectors when it has transmitting money" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal, TransmittingMoney)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingNotScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")),
hasAccepted = true)),
Some(BusinessMatching.key))
mockCacheSave[BusinessMatching]
val updatedBm = await(helper.updateBusinessMatching("internalId", model))
updatedBm.msbServices.get.msbServices mustBe Set(ChequeCashingScrapMetal, TransmittingMoney)
updatedBm.hasAccepted mustBe true
}
"remove the sub sector from trading premises when it has been removed" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal)), None)
mockCacheUpdate(Some(TradingPremises.key), Seq(
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(MoneyServiceBusiness))),
msbServices = Some(TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal, TPTransmittingMoney))),
hasAccepted = true),
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(MoneyServiceBusiness))),
msbServices = Some(TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal, TPCurrencyExchange))),
hasAccepted = true)
))
val updatedTps = await(helper.updateTradingPremises("internalId", model))
updatedTps.size mustBe 2
updatedTps.head.msbServices.get mustBe TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal))
updatedTps.last.msbServices.get mustBe TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal))
updatedTps.head.hasAccepted mustBe true
updatedTps.last.hasAccepted mustBe true
}
"leave non-MSB trading premises alone" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal)), None)
mockCacheUpdate(Some(TradingPremises.key), Seq(
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(HighValueDealing))), hasAccepted = true),
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(MoneyServiceBusiness))),
msbServices = Some(TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal, TPCurrencyExchange))),
hasAccepted = true)
))
val updatedTps = await(helper.updateTradingPremises("internalId", model))
updatedTps.size mustBe 2
updatedTps.head.msbServices must not be defined
updatedTps.last.msbServices.get mustBe TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal))
updatedTps.head.hasAccepted mustBe true
updatedTps.last.hasAccepted mustBe true
}
"update trading premises with empty sub-sectors with the one remaining sub-sector" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal)), None)
mockCacheUpdate(Some(TradingPremises.key), Seq(
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(MoneyServiceBusiness))),
msbServices = Some(TradingPremisesMsbServices(Set(TPCurrencyExchange))),
hasAccepted = true)
))
val updatedTps = await(helper.updateTradingPremises("internalId", model))
updatedTps.head.msbServices.get mustBe TradingPremisesMsbServices(Set(TPChequeCashingScrapMetal))
updatedTps.head.hasAccepted mustBe true
}
"leave trading premises alone when there are no sub-sectors to change" in new Fixture {
val model = ChangeSubSectorFlowModel(None, None)
val tradingPremises = Seq(
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(MoneyServiceBusiness))),
msbServices = Some(TradingPremisesMsbServices(Set(TPCurrencyExchange))),
hasAccepted = true)
)
mockCacheUpdate(Some(TradingPremises.key), tradingPremises)
await(helper.updateTradingPremises("internalId", model)) mustEqual Seq.empty
}
"handle when there is no trading presmises" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal)), None)
mockCacheUpdate(Some(TradingPremises.key), Seq.empty)
val updatedTps = await(helper.updateTradingPremises("internalId", model))
updatedTps.size mustBe 0
}
"leave trading premises with an empty list if we are adding more than one sub sector" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal, TransmittingMoney)), None)
mockCacheUpdate(Some(TradingPremises.key), Seq(
TradingPremises(whatDoesYourBusinessDoAtThisAddress = Some(WhatDoesYourBusinessDo(Set(MoneyServiceBusiness))),
msbServices = Some(TradingPremisesMsbServices(Set(TPCurrencyExchange))),
hasAccepted = true
)
))
val updatedTps = await(helper.updateTradingPremises("internalId", model))
updatedTps.head.msbServices.get mustBe TradingPremisesMsbServices(Set.empty)
updatedTps.head.hasAccepted mustBe true
}
"update the service change register" when {
"something already exists in the register" in new Fixture {
mockCacheFetch[BusinessMatching](Some(
BusinessMatching(msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney))))),
Some(BusinessMatching.key))
val model = ServiceChangeRegister(Some(Set(MoneyServiceBusiness)))
mockCacheUpdate(Some(ServiceChangeRegister.key), model)
val result = await(helper.updateServiceRegister("internalId", ChangeSubSectorFlowModel(Some(Set(TransmittingMoney, CurrencyExchange)))))
result mustBe model.copy(addedSubSectors = Some(Set(CurrencyExchange)))
}
}
}
"needs updateChangeFlag method which" when {
"called with the same msb subsectors in BM and ChangeSubsectorFlowModel" must {
"return false" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal, TransmittingMoney)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
val result = await(helper.updateChangeFlag("internalId", model))
result mustBe false
}
}
"called when added CurrencyExchange to msb subsectors in ChangeSubsectorFlowModel" must {
"return true" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal, TransmittingMoney, CurrencyExchange)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
val result = await(helper.updateChangeFlag("internalId", model))
result mustBe true
}
}
"called when removed TransmittingMoney from msb subsectors in ChangeSubsectorFlowModel" must {
"return true" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(TransmittingMoney, ChequeCashingScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
val result = await(helper.updateChangeFlag("internalId", model))
result mustBe true
}
}
"called when added ForeignExchange and TransmittingMoney from msb subsectors in ChangeSubsectorFlowModel" must {
"return true" in new Fixture {
val model = ChangeSubSectorFlowModel(Some(Set(ChequeCashingScrapMetal, ForeignExchange, TransmittingMoney)), None)
mockCacheFetch[BusinessMatching](
Some(BusinessMatching(
msbServices = Some(BusinessMatchingMsbServices(Set(ChequeCashingScrapMetal))),
businessAppliedForPSRNumber = Some(BusinessAppliedForPSRNumberYes("XXXX")))),
Some(BusinessMatching.key))
val result = await(helper.updateChangeFlag("internalId", model))
result mustBe true
}
}
}
}
| hmrc/amls-frontend | test/controllers/businessmatching/updateservice/ChangeSubSectorHelperSpec.scala | Scala | apache-2.0 | 19,481 |
package models
import java.util.UUID
import org.joda.time.{LocalDate, LocalTime}
import play.api.libs.functional.syntax._
import play.api.libs.json._
import utils.Ops.JsPathX
import utils.date.DateTimeJsonFormatter._
sealed trait ScheduleEntryLike extends UniqueEntity {
def labworkId: UUID
def start: LocalTime
def end: LocalTime
def date: LocalDate
def groupId: UUID
def roomId: UUID
}
case class ScheduleEntry(labwork: UUID, start: LocalTime, end: LocalTime, date: LocalDate, room: UUID, supervisor: Set[UUID], group: UUID, id: UUID = UUID.randomUUID) extends ScheduleEntryLike {
override def labworkId = labwork
override def groupId = group
override def roomId = room
}
case class ScheduleEntryAtom(labwork: LabworkAtom, start: LocalTime, end: LocalTime, date: LocalDate, room: Room, supervisor: Set[User], group: Group, id: UUID) extends ScheduleEntryLike {
override def labworkId = labwork.id
override def groupId = group.id
override def roomId = room.id
}
case class ScheduleEntryProtocol(labwork: UUID, start: LocalTime, end: LocalTime, date: LocalDate, room: UUID, supervisor: Set[UUID], group: UUID)
object ScheduleEntry {
implicit val writes: Writes[ScheduleEntry] = Json.writes[ScheduleEntry]
implicit val reads: Reads[ScheduleEntry] = Json.reads[ScheduleEntry]
}
object ScheduleEntryProtocol {
implicit val reads: Reads[ScheduleEntryProtocol] = Json.reads[ScheduleEntryProtocol]
implicit val writes: Writes[ScheduleEntryProtocol] = Json.writes[ScheduleEntryProtocol]
}
object ScheduleEntryAtom {
implicit val writes: Writes[ScheduleEntryAtom] = (
(JsPath \\ "labwork").write[LabworkAtom](LabworkAtom.writes) and
(JsPath \\ "start").write[LocalTime] and
(JsPath \\ "end").write[LocalTime] and
(JsPath \\ "date").write[LocalDate] and
(JsPath \\ "room").write[Room](Room.writes) and
(JsPath \\ "supervisor").writeSet[User] and
(JsPath \\ "group").write[Group](Group.writes) and
(JsPath \\ "id").write[UUID]
) (unlift(ScheduleEntryAtom.unapply))
}
object ScheduleEntryLike {
implicit val writes: Writes[ScheduleEntryLike] = {
case scheduleEntry: ScheduleEntry => Json.toJson(scheduleEntry)(ScheduleEntry.writes)
case scheduleEntryAtom: ScheduleEntryAtom => Json.toJson(scheduleEntryAtom)(ScheduleEntryAtom.writes)
}
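  // Illustrative usage (the value name is hypothetical): serialising through the sealed trait
  // dispatches on the runtime subtype, so callers never pick a Writes instance explicitly, e.g.
  //   val json = Json.toJson(entry: ScheduleEntryLike) // plain entry -> UUIDs, atom -> nested docs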
} | THK-ADV/lwm-reloaded | app/models/ScheduleEntry.scala | Scala | mit | 2,330 |
package org.ocular.utils
import android.util.Log
trait Logger {
def log(string: String) = if (Logger.enabled) Log.d("OCULAR", string)
}
object Logger {
val enabled = false
}
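// Illustrative usage (the class name is hypothetical): mix the trait into any component and call
// `log`; output only reaches logcat when `Logger.enabled` is switched to true, which is a
// compile-time toggle since it is a plain val.
//   class CameraController extends Logger { def open(): Unit = log("opening camera") }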
| dant3/ocular | core/src/main/scala/org/ocular/utils/Logger.scala | Scala | apache-2.0 | 181 |
package sampler.abc.actor.root.state.task.egen
import sampler.abc.UUID
import scala.collection.immutable.Queue
class ObservedIdsTrimmer(maxParticleMemory: Int) {
def apply(queuedIds: Queue[UUID]) = {
val queueSize = queuedIds.size
if (queueSize >= maxParticleMemory) {
val reducedNum: Int = (maxParticleMemory * 0.9).toInt
val toDrop = queueSize - reducedNum
queuedIds.drop(toDrop)
}
else queuedIds
}
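  // Worked example with illustrative numbers: for maxParticleMemory = 10, a queue of 12 ids
  // exceeds the limit, so it is trimmed to the newest (10 * 0.9).toInt = 9 ids by dropping the
  // 3 oldest entries.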
} | tearne/Sampler | sampler-abc/src/main/scala/sampler/abc/actor/root/state/task/egen/ObservedIdsTrimmer.scala | Scala | apache-2.0 | 444 |
package filodb.core.store
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._
import scala.util.Try
import filodb.core.metadata.RichProjection
import filodb.core.Types._
case class Histogram(min: Int, max: Int, sum: Int, numElems: Int, buckets: Map[Int, Int]) {
/** Adds an element to the Histogram, adding it to histogram buckets according to the
* bucket keys
*/
def add(elem: Int, bucketKeys: Array[Int]): Histogram = {
    // binarySearch returns the matching index, or -(insertionPoint) - 1 when elem is absent; both
    // cases are mapped below to the bucket whose key is the largest bucket key <= elem.
val bucket = java.util.Arrays.binarySearch(bucketKeys, elem) match {
case x: Int if x < 0 => -x - 2
case x: Int if x >= bucketKeys.size => bucketKeys.size - 1
case x: Int => x
}
val key = bucketKeys(bucket)
copy(min = Math.min(elem, this.min),
max = Math.max(elem, this.max),
sum = this.sum + elem,
numElems = this.numElems + 1,
buckets = buckets + (key -> (buckets.getOrElse(key, 0) + 1)))
}
def prettify(name: String, maxBarLen: Int = 60): String = {
val head = s"===== $name =====\\n Min: $min\\n Max: $max\\n Average: ${sum/numElems.toDouble} ($numElems)"
val maxQuantity = Try(buckets.values.max).getOrElse(0)
buckets.toSeq.sortBy(_._1).foldLeft(head + "\\n") { case (str, (bucket, num)) =>
val numChars = (num * maxBarLen) / maxQuantity
str + "| %08d: %08d %s".format(bucket, num, "X" * numChars) + "\\n"
}
}
}
object Histogram {
val empty: Histogram = Histogram(Int.MaxValue, Int.MinValue, 0, 0, Map.empty)
}
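// Minimal sketch (not part of the original source) of how Histogram.add buckets elements: each
// element is counted under the largest bucket key that is <= the element. The keys below are
// hypothetical; real callers use the bucket-key arrays defined on Analyzer further down.
private[store] object HistogramExample {
  val exampleBucketKeys: Array[Int] = Array(0, 10, 100)
  // 5 and 7 land in bucket 0, 250 lands in bucket 100; min/max/sum/numElems cover all three.
  val exampleHistogram: Histogram =
    Seq(5, 7, 250).foldLeft(Histogram.empty)(_.add(_, exampleBucketKeys))
}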
case class ColumnStoreAnalysis(numSegments: Int,
numPartitions: Int,
rowsInSegment: Histogram,
chunksInSegment: Histogram,
segmentsInPartition: Histogram) {
def prettify(): String = {
s"ColumnStoreAnalysis\\n numSegments: $numSegments\\n numPartitions: $numPartitions\\n" +
rowsInSegment.prettify("# Rows in a segment") +
chunksInSegment.prettify("# Chunks in a segment") +
segmentsInPartition.prettify("# Segments in a partition")
}
}
/**
* Analyzes the segments and chunks for a given dataset/version. Gives useful information
* about distribution of segments and chunks within segments. Should be run offline, as could take a while.
*/
object Analyzer {
val NumSegmentsBucketKeys = Array(0, 10, 50, 100, 500)
val NumChunksPerSegmentBucketKeys = Array(0, 5, 10, 25, 50, 100)
val NumRowsPerSegmentBucketKeys = Array(0, 10, 100, 1000, 5000, 10000, 50000)
import scala.concurrent.ExecutionContext.Implicits.global
def analyze(cs: CachedMergingColumnStore,
metaStore: MetaStore,
dataset: TableName,
version: Int): ColumnStoreAnalysis = {
var numSegments = 0
var rowsInSegment: Histogram = Histogram.empty
var chunksInSegment: Histogram = Histogram.empty
var segmentsInPartition: Histogram = Histogram.empty
val partitionSegments = (new collection.mutable.HashMap[BinaryPartition, Int]).withDefaultValue(0)
val datasetObj = Await.result(metaStore.getDataset(dataset), 1.minutes)
val schema = Await.result(metaStore.getSchema(dataset, version), 1.minutes)
val projection = RichProjection(datasetObj, schema.values.toSeq)
val splits = cs.getScanSplits(dataset, 1)
val indexes = Await.result(cs.scanChunkRowMaps(projection, version,
FilteredPartitionScan(splits.head)), 1.minutes)
indexes.foreach { case SegmentIndex(partKey, _, _, _, rowmap) =>
// Figure out # chunks and rows per segment
val numRows = rowmap.chunkIds.length
val numChunks = rowmap.nextChunkId
numSegments = numSegments + 1
rowsInSegment = rowsInSegment.add(numRows, NumRowsPerSegmentBucketKeys)
chunksInSegment = chunksInSegment.add(numChunks, NumChunksPerSegmentBucketKeys)
partitionSegments(partKey) += 1
}
for { (partKey, numSegments) <- partitionSegments } {
segmentsInPartition = segmentsInPartition.add(numSegments, NumSegmentsBucketKeys)
}
ColumnStoreAnalysis(numSegments, partitionSegments.size,
rowsInSegment, chunksInSegment, segmentsInPartition)
}
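  // Illustrative invocation (the `columnStore`/`metaStore` values and the dataset name are
  // assumptions, not defined in this file); intended to be run offline:
  //   val analysis = Analyzer.analyze(columnStore, metaStore, "gdelt", version = 0)
  //   println(analysis.prettify())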
} | adeandrade/FiloDB | core/src/main/scala/filodb.core/store/Analyzer.scala | Scala | apache-2.0 | 4,263 |
import java.text.DecimalFormat
package object viewsupport {
val level = new DecimalFormat("#.##")
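  // e.g. level.format(1.0) == "1" and level.format(1.239) == "1.24" (at most two fraction digits)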
}
| kristiankime/web-education-games | app/viewsupport/package.scala | Scala | mit | 103 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.io.File
import java.nio.charset.StandardCharsets.UTF_8
import java.util.Collections
import java.util.concurrent.CountDownLatch
import scala.collection.mutable
import scala.util.{Success, Try}
import org.apache.commons.io.FileUtils
import org.apache.commons.lang3.RandomStringUtils
import org.apache.hadoop.fs.Path
import org.mockito.Mockito.when
import org.scalactic.TolerantNumerics
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatestplus.mockito.MockitoSugar
import org.apache.spark.{SparkException, TestUtils}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, Column, DataFrame, Dataset, Row}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Literal, Rand, Randn, Shuffle, Uuid}
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes.Complete
import org.apache.spark.sql.connector.read.InputPartition
import org.apache.spark.sql.connector.read.streaming.{Offset => OffsetV2}
import org.apache.spark.sql.execution.exchange.ReusedExchangeExec
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.execution.streaming.sources.{MemorySink, TestForeachWriter}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.{BlockingSource, MockSourceProvider, StreamManualClock}
import org.apache.spark.sql.types.StructType
class StreamingQuerySuite extends StreamTest with BeforeAndAfter with Logging with MockitoSugar {
import AwaitTerminationTester._
import testImplicits._
// To make === between double tolerate inexact values
implicit val doubleEquality = TolerantNumerics.tolerantDoubleEquality(0.01)
after {
sqlContext.streams.active.foreach(_.stop())
}
test("name unique in active queries") {
withTempDir { dir =>
def startQuery(name: Option[String]): StreamingQuery = {
val writer = MemoryStream[Int].toDS.writeStream
name.foreach(writer.queryName)
writer
.foreach(new TestForeachWriter)
.start()
}
// No name by default, multiple active queries can have no name
val q1 = startQuery(name = None)
assert(q1.name === null)
val q2 = startQuery(name = None)
assert(q2.name === null)
// Can be set by user
val q3 = startQuery(name = Some("q3"))
assert(q3.name === "q3")
// Multiple active queries cannot have same name
val e = intercept[IllegalArgumentException] {
startQuery(name = Some("q3"))
}
q1.stop()
q2.stop()
q3.stop()
}
}
test(
"id unique in active queries + persists across restarts, runId unique across start/restarts") {
val inputData = MemoryStream[Int]
withTempDir { dir =>
var cpDir: String = null
def startQuery(restart: Boolean): StreamingQuery = {
if (cpDir == null || !restart) cpDir = s"$dir/${RandomStringUtils.randomAlphabetic(10)}"
MemoryStream[Int].toDS().groupBy().count()
.writeStream
.format("memory")
.outputMode("complete")
.queryName(s"name${RandomStringUtils.randomAlphabetic(10)}")
.option("checkpointLocation", cpDir)
.start()
}
// id and runId unique for new queries
val q1 = startQuery(restart = false)
val q2 = startQuery(restart = false)
assert(q1.id !== q2.id)
assert(q1.runId !== q2.runId)
q1.stop()
q2.stop()
// id persists across restarts, runId unique across restarts
val q3 = startQuery(restart = false)
q3.stop()
val q4 = startQuery(restart = true)
q4.stop()
    assert(q3.id === q4.id)
assert(q3.runId !== q4.runId)
// Only one query with same id can be active
withSQLConf(SQLConf.STREAMING_STOP_ACTIVE_RUN_ON_RESTART.key -> "false") {
val q5 = startQuery(restart = false)
val e = intercept[IllegalStateException] {
startQuery(restart = true)
}
}
}
}
testQuietly("isActive, exception, and awaitTermination") {
val inputData = MemoryStream[Int]
val mapped = inputData.toDS().map { 6 / _}
testStream(mapped)(
AssertOnQuery(_.isActive),
AssertOnQuery(_.exception.isEmpty),
AddData(inputData, 1, 2),
CheckAnswer(6, 3),
TestAwaitTermination(ExpectBlocked),
TestAwaitTermination(ExpectBlocked, timeoutMs = 2000),
TestAwaitTermination(ExpectNotBlocked, timeoutMs = 10, expectedReturnValue = false),
StopStream,
AssertOnQuery(_.isActive === false),
AssertOnQuery(_.exception.isEmpty),
TestAwaitTermination(ExpectNotBlocked),
TestAwaitTermination(ExpectNotBlocked, timeoutMs = 2000, expectedReturnValue = true),
TestAwaitTermination(ExpectNotBlocked, timeoutMs = 10, expectedReturnValue = true),
StartStream(),
AssertOnQuery(_.isActive),
AddData(inputData, 0),
ExpectFailure[SparkException](),
AssertOnQuery(_.isActive === false),
TestAwaitTermination(ExpectException[SparkException]),
TestAwaitTermination(ExpectException[SparkException], timeoutMs = 2000),
TestAwaitTermination(ExpectException[SparkException], timeoutMs = 10),
AssertOnQuery(q => {
q.exception.get.startOffset ===
q.committedOffsets.toOffsetSeq(Seq(inputData), OffsetSeqMetadata()).toString &&
q.exception.get.endOffset ===
q.availableOffsets.toOffsetSeq(Seq(inputData), OffsetSeqMetadata()).toString
}, "incorrect start offset or end offset on exception")
)
}
testQuietly("OneTime trigger, commit log, and exception") {
import Trigger.Once
val inputData = MemoryStream[Int]
val mapped = inputData.toDS().map { 6 / _}
testStream(mapped)(
AssertOnQuery(_.isActive),
StopStream,
AddData(inputData, 1, 2),
StartStream(trigger = Once),
CheckAnswer(6, 3),
StopStream, // clears out StreamTest state
AssertOnQuery { q =>
// both commit log and offset log contain the same (latest) batch id
q.commitLog.getLatest().map(_._1).getOrElse(-1L) ==
q.offsetLog.getLatest().map(_._1).getOrElse(-2L)
},
AssertOnQuery { q =>
// blow away commit log and sink result
q.commitLog.purge(1)
q.sink.asInstanceOf[MemorySink].clear()
true
},
StartStream(trigger = Once),
CheckAnswer(6, 3), // ensure we fall back to offset log and reprocess batch
StopStream,
AddData(inputData, 3),
StartStream(trigger = Once),
CheckLastBatch(2), // commit log should be back in place
StopStream,
AddData(inputData, 0),
StartStream(trigger = Once),
ExpectFailure[SparkException](),
AssertOnQuery(_.isActive === false),
AssertOnQuery(q => {
q.exception.get.startOffset ===
q.committedOffsets.toOffsetSeq(Seq(inputData), OffsetSeqMetadata()).toString &&
q.exception.get.endOffset ===
q.availableOffsets.toOffsetSeq(Seq(inputData), OffsetSeqMetadata()).toString
}, "incorrect start offset or end offset on exception")
)
}
testQuietly("status, lastProgress, and recentProgress") {
import StreamingQuerySuite._
clock = new StreamManualClock
/** Custom MemoryStream that waits for manual clock to reach a time */
val inputData = new MemoryStream[Int](0, sqlContext) {
private def dataAdded: Boolean = currentOffset.offset != -1
// latestOffset should take 50 ms the first time it is called after data is added
override def latestOffset(): OffsetV2 = synchronized {
if (dataAdded) clock.waitTillTime(1050)
super.latestOffset()
}
// getBatch should take 100 ms the first time it is called
override def planInputPartitions(start: OffsetV2, end: OffsetV2): Array[InputPartition] = {
synchronized {
clock.waitTillTime(1150)
super.planInputPartitions(start, end)
}
}
}
// query execution should take 350 ms the first time it is called
val mapped = inputData.toDS.coalesce(1).as[Long].map { x =>
clock.waitTillTime(1500) // this will only wait the first time when clock < 1500
10 / x
}.agg(count("*")).as[Long]
case class AssertStreamExecThreadIsWaitingForTime(targetTime: Long)
extends AssertOnQuery(q => {
eventually(Timeout(streamingTimeout)) {
if (q.exception.isEmpty) {
assert(clock.isStreamWaitingFor(targetTime))
}
}
if (q.exception.isDefined) {
throw q.exception.get
}
true
}, "") {
override def toString: String = s"AssertStreamExecThreadIsWaitingForTime($targetTime)"
}
case class AssertClockTime(time: Long)
extends AssertOnQuery(q => clock.getTimeMillis() === time, "") {
override def toString: String = s"AssertClockTime($time)"
}
var lastProgressBeforeStop: StreamingQueryProgress = null
testStream(mapped, OutputMode.Complete)(
StartStream(Trigger.ProcessingTime(1000), triggerClock = clock),
AssertStreamExecThreadIsWaitingForTime(1000),
AssertOnQuery(_.status.isDataAvailable === false),
AssertOnQuery(_.status.isTriggerActive === false),
AssertOnQuery(_.status.message === "Waiting for next trigger"),
AssertOnQuery(_.recentProgress.count(_.numInputRows > 0) === 0),
// Test status and progress when `latestOffset` is being called
AddData(inputData, 1, 2),
AdvanceManualClock(1000), // time = 1000 to start new trigger, will block on `latestOffset`
AssertStreamExecThreadIsWaitingForTime(1050),
AssertOnQuery(_.status.isDataAvailable === false),
AssertOnQuery(_.status.isTriggerActive),
AssertOnQuery(_.status.message.startsWith("Getting offsets from")),
AssertOnQuery(_.recentProgress.count(_.numInputRows > 0) === 0),
AdvanceManualClock(50), // time = 1050 to unblock `latestOffset`
AssertClockTime(1050),
      // will block on `planInputPartitions` that needs 1150
AssertStreamExecThreadIsWaitingForTime(1150),
AssertOnQuery(_.status.isDataAvailable),
AssertOnQuery(_.status.isTriggerActive),
AssertOnQuery(_.status.message === "Processing new data"),
AssertOnQuery(_.recentProgress.count(_.numInputRows > 0) === 0),
AdvanceManualClock(100), // time = 1150 to unblock `planInputPartitions`
AssertClockTime(1150),
AssertStreamExecThreadIsWaitingForTime(1500), // will block on map task that needs 1500
AssertOnQuery(_.status.isDataAvailable),
AssertOnQuery(_.status.isTriggerActive),
AssertOnQuery(_.status.message === "Processing new data"),
AssertOnQuery(_.recentProgress.count(_.numInputRows > 0) === 0),
// Test status and progress while batch processing has completed
AdvanceManualClock(350), // time = 1500 to unblock map task
AssertClockTime(1500),
CheckAnswer(2),
AssertStreamExecThreadIsWaitingForTime(2000), // will block until the next trigger
AssertOnQuery(_.status.isDataAvailable),
AssertOnQuery(_.status.isTriggerActive === false),
AssertOnQuery(_.status.message === "Waiting for next trigger"),
AssertOnQuery { query =>
assert(query.lastProgress != null)
assert(query.recentProgress.exists(_.numInputRows > 0))
assert(query.recentProgress.last.eq(query.lastProgress))
val progress = query.lastProgress
assert(progress.id === query.id)
assert(progress.name === query.name)
assert(progress.batchId === 0)
      assert(progress.timestamp === "1970-01-01T00:00:01.000Z") // 1000 ms in UTC
assert(progress.numInputRows === 2)
assert(progress.processedRowsPerSecond === 4.0)
assert(progress.durationMs.get("latestOffset") === 50)
assert(progress.durationMs.get("queryPlanning") === 100)
assert(progress.durationMs.get("walCommit") === 0)
assert(progress.durationMs.get("addBatch") === 350)
assert(progress.durationMs.get("triggerExecution") === 500)
assert(progress.sources.length === 1)
assert(progress.sources(0).description contains "MemoryStream")
assert(progress.sources(0).startOffset === null) // no prior offset
assert(progress.sources(0).endOffset === "0")
assert(progress.sources(0).processedRowsPerSecond === 4.0) // 2 rows processed in 500 ms
assert(progress.stateOperators.length === 1)
assert(progress.stateOperators(0).numRowsUpdated === 1)
assert(progress.stateOperators(0).numRowsTotal === 1)
assert(progress.sink.description contains "MemorySink")
true
},
// Test whether input rate is updated after two batches
AssertStreamExecThreadIsWaitingForTime(2000), // blocked waiting for next trigger time
AddData(inputData, 1, 2),
AdvanceManualClock(500), // allow another trigger
AssertClockTime(2000),
AssertStreamExecThreadIsWaitingForTime(3000), // will block waiting for next trigger time
CheckAnswer(4),
AssertOnQuery(_.status.isDataAvailable),
AssertOnQuery(_.status.isTriggerActive === false),
AssertOnQuery(_.status.message === "Waiting for next trigger"),
AssertOnQuery { query =>
assert(query.recentProgress.last.eq(query.lastProgress))
assert(query.lastProgress.batchId === 1)
assert(query.lastProgress.inputRowsPerSecond === 2.0)
assert(query.lastProgress.sources(0).inputRowsPerSecond === 2.0)
assert(query.lastProgress.sources(0).startOffset === "0")
assert(query.lastProgress.sources(0).endOffset === "1")
true
},
// Test status and progress after data is not available for a trigger
AdvanceManualClock(1000), // allow another trigger
AssertStreamExecThreadIsWaitingForTime(4000),
AssertOnQuery(_.status.isDataAvailable === false),
AssertOnQuery(_.status.isTriggerActive === false),
AssertOnQuery(_.status.message === "Waiting for next trigger"),
// Test status and progress after query stopped
AssertOnQuery { query =>
lastProgressBeforeStop = query.lastProgress
true
},
StopStream,
AssertOnQuery(_.lastProgress.json === lastProgressBeforeStop.json),
AssertOnQuery(_.status.isDataAvailable === false),
AssertOnQuery(_.status.isTriggerActive === false),
AssertOnQuery(_.status.message === "Stopped"),
// Test status and progress after query terminated with error
StartStream(Trigger.ProcessingTime(1000), triggerClock = clock),
AdvanceManualClock(1000), // ensure initial trigger completes before AddData
AddData(inputData, 0),
AdvanceManualClock(1000), // allow another trigger
ExpectFailure[SparkException](),
AssertOnQuery(_.status.isDataAvailable === false),
AssertOnQuery(_.status.isTriggerActive === false),
AssertOnQuery(_.status.message.startsWith("Terminated with exception"))
)
}
test("lastProgress should be null when recentProgress is empty") {
BlockingSource.latch = new CountDownLatch(1)
withTempDir { tempDir =>
val sq = spark.readStream
.format("org.apache.spark.sql.streaming.util.BlockingSource")
.load()
.writeStream
.format("org.apache.spark.sql.streaming.util.BlockingSource")
.option("checkpointLocation", tempDir.toString)
.start()
// Creating source is blocked so recentProgress is empty and lastProgress should be null
assert(sq.lastProgress === null)
// Release the latch and stop the query
BlockingSource.latch.countDown()
sq.stop()
}
}
test("codahale metrics") {
val inputData = MemoryStream[Int]
    /** Whether metrics of a query are registered for reporting */
def isMetricsRegistered(query: StreamingQuery): Boolean = {
val sourceName = s"spark.streaming.${query.id}"
val sources = spark.sparkContext.env.metricsSystem.getSourcesByName(sourceName)
require(sources.size <= 1)
sources.nonEmpty
}
// Disabled by default
assert(spark.conf.get(SQLConf.STREAMING_METRICS_ENABLED.key).toBoolean === false)
withSQLConf(SQLConf.STREAMING_METRICS_ENABLED.key -> "false") {
testStream(inputData.toDF)(
AssertOnQuery { q => !isMetricsRegistered(q) },
StopStream,
AssertOnQuery { q => !isMetricsRegistered(q) }
)
}
// Registered when enabled
withSQLConf(SQLConf.STREAMING_METRICS_ENABLED.key -> "true") {
testStream(inputData.toDF)(
AssertOnQuery { q => isMetricsRegistered(q) },
StopStream,
AssertOnQuery { q => !isMetricsRegistered(q) }
)
}
}
test("SPARK-22975: MetricsReporter defaults when there was no progress reported") {
withSQLConf(SQLConf.STREAMING_METRICS_ENABLED.key -> "true") {
BlockingSource.latch = new CountDownLatch(1)
withTempDir { tempDir =>
val sq = spark.readStream
.format("org.apache.spark.sql.streaming.util.BlockingSource")
.load()
.writeStream
.format("org.apache.spark.sql.streaming.util.BlockingSource")
.option("checkpointLocation", tempDir.toString)
.start()
.asInstanceOf[StreamingQueryWrapper]
.streamingQuery
val gauges = sq.streamMetrics.metricRegistry.getGauges
assert(gauges.get("latency").getValue.asInstanceOf[Long] == 0)
assert(gauges.get("processingRate-total").getValue.asInstanceOf[Double] == 0.0)
assert(gauges.get("inputRate-total").getValue.asInstanceOf[Double] == 0.0)
assert(gauges.get("eventTime-watermark").getValue.asInstanceOf[Long] == 0)
assert(gauges.get("states-rowsTotal").getValue.asInstanceOf[Long] == 0)
assert(gauges.get("states-usedBytes").getValue.asInstanceOf[Long] == 0)
sq.stop()
}
}
}
test("SPARK-37147: MetricsReporter does not fail when durationMs is empty") {
val stateOpProgressMock = mock[StreamingQueryProgress]
when(stateOpProgressMock.durationMs).thenReturn(Collections.emptyMap[String, java.lang.Long]())
val streamExecMock = mock[StreamExecution]
when(streamExecMock.lastProgress).thenReturn(stateOpProgressMock)
val gauges = new MetricsReporter(streamExecMock, "").metricRegistry.getGauges()
assert(Try(gauges.get("latency").getValue) == Success(0L))
}
test("input row calculation with same V1 source used twice in self-join") {
val streamingTriggerDF = spark.createDataset(1 to 10).toDF
val streamingInputDF = createSingleTriggerStreamingDF(streamingTriggerDF).toDF("value")
val progress = getStreamingQuery(streamingInputDF.join(streamingInputDF, "value"))
.recentProgress.head
assert(progress.numInputRows === 20) // data is read multiple times in self-joins
assert(progress.sources.size === 1)
assert(progress.sources(0).numInputRows === 20)
}
test("input row calculation with mixed batch and streaming V1 sources") {
val streamingTriggerDF = spark.createDataset(1 to 10).toDF
val streamingInputDF = createSingleTriggerStreamingDF(streamingTriggerDF).toDF("value")
val staticInputDF = spark.createDataFrame(Seq(1 -> "1", 2 -> "2")).toDF("value", "anotherValue")
// Trigger input has 10 rows, static input has 2 rows,
// therefore after the first trigger, the calculated input rows should be 10
val progress = getStreamingQuery(streamingInputDF.join(staticInputDF, "value"))
.recentProgress.head
assert(progress.numInputRows === 10)
assert(progress.sources.size === 1)
assert(progress.sources(0).numInputRows === 10)
}
test("input row calculation with trigger input DF having multiple leaves in V1 source") {
val streamingTriggerDF =
spark.createDataset(1 to 5).toDF.union(spark.createDataset(6 to 10).toDF)
require(streamingTriggerDF.logicalPlan.collectLeaves().size > 1)
val streamingInputDF = createSingleTriggerStreamingDF(streamingTriggerDF)
// After the first trigger, the calculated input rows should be 10
val progress = getStreamingQuery(streamingInputDF).recentProgress.head
assert(progress.numInputRows === 10)
assert(progress.sources.size === 1)
assert(progress.sources(0).numInputRows === 10)
}
test("input row calculation with same V2 source used twice in self-union") {
val streamInput = MemoryStream[Int]
testStream(streamInput.toDF().union(streamInput.toDF()))(
AddData(streamInput, 1, 2, 3),
CheckAnswer(1, 1, 2, 2, 3, 3),
AssertOnQuery { q =>
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.sources.length == 1)
// The source is scanned twice because of self-union
assert(lastProgress.get.numInputRows == 6)
true
}
)
}
test("input row calculation with same V2 source used twice in self-join") {
def checkQuery(check: AssertOnQuery): Unit = {
val memoryStream = MemoryStream[Int]
      // TODO: currently the streaming framework always adds a dummy Project above the streaming source
// relation, which breaks exchange reuse, as the optimizer will remove Project from one side.
// Here we manually add a useful Project, to trigger exchange reuse.
val streamDF = memoryStream.toDF().select('value + 0 as "v")
testStream(streamDF.join(streamDF, "v"))(
AddData(memoryStream, 1, 2, 3),
CheckAnswer(1, 2, 3),
check
)
}
withSQLConf(SQLConf.EXCHANGE_REUSE_ENABLED.key -> "false") {
checkQuery(AssertOnQuery { q =>
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.sources.length == 1)
// The source is scanned twice because of self-join
assert(lastProgress.get.numInputRows == 6)
true
})
}
withSQLConf(SQLConf.EXCHANGE_REUSE_ENABLED.key -> "true") {
checkQuery(AssertOnQuery { q =>
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.sources.length == 1)
assert(q.lastExecution.executedPlan.collect {
case r: ReusedExchangeExec => r
}.length == 1)
// The source is scanned only once because of exchange reuse
assert(lastProgress.get.numInputRows == 3)
true
})
}
}
test("input row calculation with trigger having data for only one of two V2 sources") {
val streamInput1 = MemoryStream[Int]
val streamInput2 = MemoryStream[Int]
testStream(streamInput1.toDF().union(streamInput2.toDF()))(
AddData(streamInput1, 1, 2, 3),
CheckLastBatch(1, 2, 3),
AssertOnQuery { q =>
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.numInputRows == 3)
assert(lastProgress.get.sources.length == 2)
assert(lastProgress.get.sources(0).numInputRows == 3)
assert(lastProgress.get.sources(1).numInputRows == 0)
true
},
AddData(streamInput2, 4, 5),
CheckLastBatch(4, 5),
AssertOnQuery { q =>
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.numInputRows == 2)
assert(lastProgress.get.sources.length == 2)
assert(lastProgress.get.sources(0).numInputRows == 0)
assert(lastProgress.get.sources(1).numInputRows == 2)
true
}
)
}
test("input row calculation with mixed batch and streaming V2 sources") {
val streamInput = MemoryStream[Int]
val staticInputDF = spark.createDataFrame(Seq(1 -> "1", 2 -> "2")).toDF("value", "anotherValue")
testStream(streamInput.toDF().join(staticInputDF, "value"))(
AddData(streamInput, 1, 2, 3),
AssertOnQuery { q =>
q.processAllAvailable()
// The number of leaves in the trigger's logical plan should be same as the executed plan.
require(
q.lastExecution.logical.collectLeaves().length ==
q.lastExecution.executedPlan.collectLeaves().length)
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.numInputRows == 3)
assert(lastProgress.get.sources.length == 1)
assert(lastProgress.get.sources(0).numInputRows == 3)
true
}
)
val streamInput2 = MemoryStream[Int]
val staticInputDF2 = staticInputDF.union(staticInputDF).cache()
testStream(streamInput2.toDF().join(staticInputDF2, "value"))(
AddData(streamInput2, 1, 2, 3),
AssertOnQuery { q =>
q.processAllAvailable()
// The number of leaves in the trigger's logical plan should be different from
// the executed plan. The static input will have two leaves in the logical plan
// (due to the union), but will be converted to a single leaf in the executed plan
// (due to the caching, the cached subplan is replaced by a single InMemoryTableScanExec).
require(
q.lastExecution.logical.collectLeaves().length !=
q.lastExecution.executedPlan.collectLeaves().length)
// Despite the mismatch in total number of leaves in the logical and executed plans,
// we should be able to attribute streaming input metrics to the streaming sources.
val lastProgress = getLastProgressWithData(q)
assert(lastProgress.nonEmpty)
assert(lastProgress.get.numInputRows == 3)
assert(lastProgress.get.sources.length == 1)
assert(lastProgress.get.sources(0).numInputRows == 3)
true
}
)
}
testQuietly("StreamExecution metadata garbage collection") {
val inputData = MemoryStream[Int]
val mapped = inputData.toDS().map(6 / _)
withSQLConf(SQLConf.MIN_BATCHES_TO_RETAIN.key -> "1") {
      // Run 3 batches, and then assert that only 2 metadata files are at the end
// since the first should have been purged.
testStream(mapped)(
AddData(inputData, 1, 2),
CheckAnswer(6, 3),
AddData(inputData, 1, 2),
CheckAnswer(6, 3, 6, 3),
AddData(inputData, 4, 6),
CheckAnswer(6, 3, 6, 3, 1, 1),
AssertOnQuery("metadata log should contain only two files") { q =>
val metadataLogDir = new java.io.File(q.offsetLog.metadataPath.toUri)
val logFileNames = metadataLogDir.listFiles().toSeq.map(_.getName())
val toTest = logFileNames.filter(!_.endsWith(".crc")).sorted // Workaround for SPARK-17475
assert(toTest.size == 2 && toTest.head == "1")
true
}
)
}
val inputData2 = MemoryStream[Int]
withSQLConf(SQLConf.MIN_BATCHES_TO_RETAIN.key -> "2") {
      // Run 5 batches, and then assert that 3 metadata files are at the end
      // since the first two should have been purged.
testStream(inputData2.toDS())(
AddData(inputData2, 1, 2),
CheckAnswer(1, 2),
AddData(inputData2, 1, 2),
CheckAnswer(1, 2, 1, 2),
AddData(inputData2, 3, 4),
CheckAnswer(1, 2, 1, 2, 3, 4),
AddData(inputData2, 5, 6),
CheckAnswer(1, 2, 1, 2, 3, 4, 5, 6),
AddData(inputData2, 7, 8),
CheckAnswer(1, 2, 1, 2, 3, 4, 5, 6, 7, 8),
AssertOnQuery("metadata log should contain three files") { q =>
val metadataLogDir = new java.io.File(q.offsetLog.metadataPath.toUri)
val logFileNames = metadataLogDir.listFiles().toSeq.map(_.getName())
val toTest = logFileNames.filter(!_.endsWith(".crc")).sorted // Workaround for SPARK-17475
assert(toTest.size == 3 && toTest.head == "2")
true
}
)
}
}
testQuietly("StreamingQuery should be Serializable but cannot be used in executors") {
def startQuery(ds: Dataset[Int], queryName: String): StreamingQuery = {
ds.writeStream
.queryName(queryName)
.format("memory")
.start()
}
val input = MemoryStream[Int] :: MemoryStream[Int] :: MemoryStream[Int] :: Nil
val q1 = startQuery(input(0).toDS, "stream_serializable_test_1")
val q2 = startQuery(input(1).toDS.map { i =>
      // Emulate that `StreamingQuery` gets captured with normal usage unintentionally.
// It should not fail the query.
val q = q1
i
}, "stream_serializable_test_2")
val q3 = startQuery(input(2).toDS.map { i =>
// Emulate that `StreamingQuery` is used in executors. We should fail the query with a clear
// error message.
q1.explain()
i
}, "stream_serializable_test_3")
try {
input.foreach(_.addData(1))
// q2 should not fail since it doesn't use `q1` in the closure
q2.processAllAvailable()
// The user calls `StreamingQuery` in the closure and it should fail
val e = intercept[StreamingQueryException] {
q3.processAllAvailable()
}
assert(e.getCause.isInstanceOf[SparkException])
assert(e.getCause.getCause.getCause.isInstanceOf[IllegalStateException])
TestUtils.assertExceptionMsg(e, "StreamingQuery cannot be used in executors")
} finally {
q1.stop()
q2.stop()
q3.stop()
}
}
test("StreamExecution should call stop() on sources when a stream is stopped") {
var calledStop = false
val source = new Source {
override def stop(): Unit = {
calledStop = true
}
override def getOffset: Option[Offset] = None
override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
spark.emptyDataFrame
}
override def schema: StructType = MockSourceProvider.fakeSchema
}
MockSourceProvider.withMockSources(source) {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.util.MockSourceProvider")
.load()
testStream(df)(StopStream)
assert(calledStop, "Did not call stop on source for stopped stream")
}
}
testQuietly("SPARK-19774: StreamExecution should call stop() on sources when a stream fails") {
var calledStop = false
val source1 = new Source {
override def stop(): Unit = {
throw new RuntimeException("Oh no!")
}
override def getOffset: Option[Offset] = Some(LongOffset(1))
override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
spark.range(2).toDF(MockSourceProvider.fakeSchema.fieldNames: _*)
}
override def schema: StructType = MockSourceProvider.fakeSchema
}
val source2 = new Source {
override def stop(): Unit = {
calledStop = true
}
override def getOffset: Option[Offset] = None
override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
spark.emptyDataFrame
}
override def schema: StructType = MockSourceProvider.fakeSchema
}
MockSourceProvider.withMockSources(source1, source2) {
val df1 = spark.readStream
.format("org.apache.spark.sql.streaming.util.MockSourceProvider")
.load()
.as[Int]
val df2 = spark.readStream
.format("org.apache.spark.sql.streaming.util.MockSourceProvider")
.load()
.as[Int]
testStream(df1.union(df2).map(i => i / 0))(
AssertOnQuery { sq =>
intercept[StreamingQueryException](sq.processAllAvailable())
sq.exception.isDefined && !sq.isActive
}
)
assert(calledStop, "Did not call stop on source for stopped stream")
}
}
test("get the query id in source") {
@volatile var queryId: String = null
val source = new Source {
override def stop(): Unit = {}
override def getOffset: Option[Offset] = {
queryId = spark.sparkContext.getLocalProperty(StreamExecution.QUERY_ID_KEY)
None
}
override def getBatch(start: Option[Offset], end: Offset): DataFrame = spark.emptyDataFrame
override def schema: StructType = MockSourceProvider.fakeSchema
}
MockSourceProvider.withMockSources(source) {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.util.MockSourceProvider")
.load()
testStream(df)(
AssertOnQuery { sq =>
sq.processAllAvailable()
assert(sq.id.toString === queryId)
assert(sq.runId.toString !== queryId)
true
}
)
}
}
test("processAllAvailable should not block forever when a query is stopped") {
val input = MemoryStream[Int]
input.addData(1)
val query = input.toDF().writeStream
.trigger(Trigger.Once())
.format("console")
.start()
failAfter(streamingTimeout) {
query.processAllAvailable()
}
}
test("SPARK-22238: don't check for RDD partitions during streaming aggregation preparation") {
val stream = MemoryStream[(Int, Int)]
val baseDf = Seq((1, "A"), (2, "b")).toDF("num", "char").where("char = 'A'")
val otherDf = stream.toDF().toDF("num", "numSq")
.join(broadcast(baseDf), "num")
.groupBy('char)
.agg(sum('numSq))
testStream(otherDf, OutputMode.Complete())(
AddData(stream, (1, 1), (2, 4)),
CheckLastBatch(("A", 1)))
}
test("Uuid in streaming query should not produce same uuids in each execution") {
val uuids = mutable.ArrayBuffer[String]()
def collectUuid: Seq[Row] => Unit = { rows: Seq[Row] =>
rows.foreach(r => uuids += r.getString(0))
}
val stream = MemoryStream[Int]
val df = stream.toDF().select(new Column(Uuid()))
testStream(df)(
AddData(stream, 1),
CheckAnswer(collectUuid),
AddData(stream, 2),
CheckAnswer(collectUuid)
)
assert(uuids.distinct.size == 2)
}
test("Rand/Randn in streaming query should not produce same results in each execution") {
val rands = mutable.ArrayBuffer[Double]()
def collectRand: Seq[Row] => Unit = { rows: Seq[Row] =>
rows.foreach { r =>
rands += r.getDouble(0)
rands += r.getDouble(1)
}
}
val stream = MemoryStream[Int]
val df = stream.toDF().select(new Column(new Rand()), new Column(new Randn()))
testStream(df)(
AddData(stream, 1),
CheckAnswer(collectRand),
AddData(stream, 2),
CheckAnswer(collectRand)
)
assert(rands.distinct.size == 4)
}
test("Shuffle in streaming query should not produce same results in each execution") {
val rands = mutable.ArrayBuffer[Seq[Int]]()
def collectShuffle: Seq[Row] => Unit = { rows: Seq[Row] =>
rows.foreach { r =>
rands += r.getSeq[Int](0)
}
}
val stream = MemoryStream[Int]
val df = stream.toDF().select(new Column(new Shuffle(Literal.create[Seq[Int]](0 until 100))))
testStream(df)(
AddData(stream, 1),
CheckAnswer(collectShuffle),
AddData(stream, 2),
CheckAnswer(collectShuffle)
)
assert(rands.distinct.size == 2)
}
test("StreamingRelationV2/StreamingExecutionRelation/ContinuousExecutionRelation.toJSON " +
"should not fail") {
val df = spark.readStream.format("rate").load()
assert(df.logicalPlan.toJSON.contains("StreamingRelationV2"))
testStream(df)(
AssertOnQuery(_.logicalPlan.toJSON.contains("StreamingDataSourceV2Relation"))
)
testStream(df)(
StartStream(trigger = Trigger.Continuous(100)),
AssertOnQuery(_.logicalPlan.toJSON.contains("StreamingDataSourceV2Relation"))
)
}
test("special characters in checkpoint path") {
withTempDir { tempDir =>
val checkpointDir = new File(tempDir, "chk @#chk")
val inputData = MemoryStream[Int]
inputData.addData(1)
val q = inputData.toDF()
.writeStream
.format("noop")
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.start()
try {
q.processAllAvailable()
assert(checkpointDir.listFiles().toList.nonEmpty)
} finally {
q.stop()
}
}
}
/**
* Copy the checkpoint generated by Spark 2.4.0 from test resource to `dir` to set up a legacy
* streaming checkpoint.
*/
private def setUp2dot4dot0Checkpoint(dir: File): Unit = {
val input = getClass.getResource("/structured-streaming/escaped-path-2.4.0")
assert(input != null, "cannot find test resource '/structured-streaming/escaped-path-2.4.0'")
val inputDir = new File(input.toURI)
// Copy test files to tempDir so that we won't modify the original data.
FileUtils.copyDirectory(inputDir, dir)
// Spark 2.4 and earlier escaped the _spark_metadata path once
val legacySparkMetadataDir = new File(
dir,
new Path("output %@#output/_spark_metadata").toUri.toString)
// Migrate from legacy _spark_metadata directory to the new _spark_metadata directory.
// Ideally we should copy "_spark_metadata" directly like what the user is supposed to do to
// migrate to new version. However, in our test, "tempDir" will be different in each run and
// we need to fix the absolute path in the metadata to match "tempDir".
val sparkMetadata = FileUtils.readFileToString(new File(legacySparkMetadataDir, "0"), UTF_8)
FileUtils.write(
new File(legacySparkMetadataDir, "0"),
sparkMetadata.replaceAll("TEMPDIR", dir.getCanonicalPath), UTF_8)
}
test("detect escaped path and report the migration guide") {
// Assert that the error message contains the migration conf, path and the legacy path.
def assertMigrationError(errorMessage: String, path: File, legacyPath: File): Unit = {
Seq(SQLConf.STREAMING_CHECKPOINT_ESCAPED_PATH_CHECK_ENABLED.key,
path.getCanonicalPath,
legacyPath.getCanonicalPath).foreach { msg =>
assert(errorMessage.contains(msg))
}
}
withTempDir { tempDir =>
setUp2dot4dot0Checkpoint(tempDir)
// Here are the paths we will use to create the query
val outputDir = new File(tempDir, "output %@#output")
val checkpointDir = new File(tempDir, "chk %@#chk")
val sparkMetadataDir = new File(tempDir, "output %@#output/_spark_metadata")
// The escaped paths used by Spark 2.4 and earlier.
// Spark 2.4 and earlier escaped the checkpoint path three times
val legacyCheckpointDir = new File(
tempDir,
new Path(new Path(new Path("chk %@#chk").toUri.toString).toUri.toString).toUri.toString)
// Spark 2.4 and earlier escaped the _spark_metadata path once
val legacySparkMetadataDir = new File(
tempDir,
new Path("output %@#output/_spark_metadata").toUri.toString)
// Reading a file sink output in a batch query should detect the legacy _spark_metadata
// directory and throw an error
val e = intercept[SparkException] {
spark.read.load(outputDir.getCanonicalPath).as[Int]
}
assertMigrationError(e.getMessage, sparkMetadataDir, legacySparkMetadataDir)
// Restarting the streaming query should detect the legacy _spark_metadata directory and
// throw an error
val inputData = MemoryStream[Int]
val e2 = intercept[SparkException] {
inputData.toDF()
.writeStream
.format("parquet")
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.start(outputDir.getCanonicalPath)
}
assertMigrationError(e2.getMessage, sparkMetadataDir, legacySparkMetadataDir)
// Move "_spark_metadata" to fix the file sink and test the checkpoint path.
FileUtils.moveDirectory(legacySparkMetadataDir, sparkMetadataDir)
// Restarting the streaming query should detect the legacy
// checkpoint path and throw an error.
val e3 = intercept[SparkException] {
inputData.toDF()
.writeStream
.format("parquet")
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.start(outputDir.getCanonicalPath)
}
assertMigrationError(e3.getMessage, checkpointDir, legacyCheckpointDir)
// Fix the checkpoint path and verify that the user can migrate the issue by moving files.
FileUtils.moveDirectory(legacyCheckpointDir, checkpointDir)
val q = inputData.toDF()
.writeStream
.format("parquet")
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.start(outputDir.getCanonicalPath)
try {
q.processAllAvailable()
        // Check the query id to make sure it did use the checkpoint
assert(q.id.toString == "09be7fb3-49d8-48a6-840d-e9c2ad92a898")
// Verify that the batch query can read "_spark_metadata" correctly after migration.
val df = spark.read.load(outputDir.getCanonicalPath)
assert(df.queryExecution.executedPlan.toString contains "MetadataLogFileIndex")
checkDatasetUnorderly(df.as[Int], 1, 2, 3)
} finally {
q.stop()
}
}
}
test("ignore the escaped path check when the flag is off") {
withTempDir { tempDir =>
setUp2dot4dot0Checkpoint(tempDir)
val outputDir = new File(tempDir, "output %@#output")
val checkpointDir = new File(tempDir, "chk %@#chk")
withSQLConf(SQLConf.STREAMING_CHECKPOINT_ESCAPED_PATH_CHECK_ENABLED.key -> "false") {
// Verify that the batch query ignores the legacy "_spark_metadata"
val df = spark.read.load(outputDir.getCanonicalPath)
assert(!(df.queryExecution.executedPlan.toString contains "MetadataLogFileIndex"))
checkDatasetUnorderly(df.as[Int], 1, 2, 3)
val inputData = MemoryStream[Int]
val q = inputData.toDF()
.writeStream
.format("parquet")
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.start(outputDir.getCanonicalPath)
try {
q.processAllAvailable()
// Check the query id to make sure it ignores the legacy checkpoint
assert(q.id.toString != "09be7fb3-49d8-48a6-840d-e9c2ad92a898")
} finally {
q.stop()
}
}
}
}
test("containsSpecialCharsInPath") {
Seq("foo/b ar",
"/foo/b ar",
"file:/foo/b ar",
"file://foo/b ar",
"file:///foo/b ar",
"file://foo:bar@bar/foo/b ar").foreach { p =>
assert(StreamExecution.containsSpecialCharsInPath(new Path(p)), s"failed to check $p")
}
Seq("foo/bar",
"/foo/bar",
"file:/foo/bar",
"file://foo/bar",
"file:///foo/bar",
"file://foo:bar@bar/foo/bar",
      // Special chars not in the path should not be considered, as such URLs won't hit the
      // escaped path issue.
"file://foo:b ar@bar/foo/bar",
"file://foo:bar@b ar/foo/bar",
"file://f oo:bar@bar/foo/bar").foreach { p =>
assert(!StreamExecution.containsSpecialCharsInPath(new Path(p)), s"failed to check $p")
}
}
test("SPARK-32456: SQL union in streaming query of append mode without watermark") {
val inputData1 = MemoryStream[Int]
val inputData2 = MemoryStream[Int]
withTempView("s1", "s2") {
inputData1.toDF().createOrReplaceTempView("s1")
inputData2.toDF().createOrReplaceTempView("s2")
val unioned = spark.sql(
"select s1.value from s1 union select s2.value from s2")
checkExceptionMessage(unioned)
}
}
test("SPARK-32456: distinct in streaming query of append mode without watermark") {
val inputData = MemoryStream[Int]
withTempView("deduptest") {
inputData.toDF().toDF("value").createOrReplaceTempView("deduptest")
val distinct = spark.sql("select distinct value from deduptest")
checkExceptionMessage(distinct)
}
}
test("SPARK-32456: distinct in streaming query of complete mode") {
val inputData = MemoryStream[Int]
withTempView("deduptest") {
inputData.toDF().toDF("value").createOrReplaceTempView("deduptest")
val distinct = spark.sql("select distinct value from deduptest")
testStream(distinct, Complete)(
AddData(inputData, 1, 2, 3, 3, 4),
CheckAnswer(Row(1), Row(2), Row(3), Row(4))
)
}
}
testQuietly("limit on empty batch should not cause state store error") {
    // The source only produces two batches: the first batch is empty and the second batch has data.
val source = new Source {
var batchId = 0
override def stop(): Unit = {}
override def getOffset: Option[Offset] = {
Some(LongOffset(batchId + 1))
}
override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
if (batchId == 0) {
batchId += 1
Dataset.ofRows(spark, LocalRelation(schema.toAttributes, Nil, isStreaming = true))
} else {
Dataset.ofRows(spark,
LocalRelation(schema.toAttributes, InternalRow(10) :: Nil, isStreaming = true))
}
}
override def schema: StructType = MockSourceProvider.fakeSchema
}
MockSourceProvider.withMockSources(source) {
val df = spark.readStream
.format("org.apache.spark.sql.streaming.util.MockSourceProvider")
.load()
.limit(1)
testStream(df)(
StartStream(),
AssertOnQuery { q =>
q.processAllAvailable()
true
},
CheckAnswer(10))
}
}
private def checkExceptionMessage(df: DataFrame): Unit = {
withTempDir { outputDir =>
withTempDir { checkpointDir =>
val exception = intercept[AnalysisException](
df.writeStream
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.start(outputDir.getCanonicalPath))
assert(exception.getMessage.contains(
"Append output mode not supported when there are streaming aggregations on streaming " +
"DataFrames/DataSets without watermark"))
}
}
}
  /** Creates a streaming DF that executes only one batch, in which it returns the given static DF. */
private def createSingleTriggerStreamingDF(triggerDF: DataFrame): DataFrame = {
require(!triggerDF.isStreaming)
    // A streaming Source that generates data for only one trigger and returns the given DataFrame as its batch
val source = new Source() {
override def schema: StructType = triggerDF.schema
override def getOffset: Option[Offset] = Some(LongOffset(0))
override def getBatch(start: Option[Offset], end: Offset): DataFrame = {
sqlContext.internalCreateDataFrame(
triggerDF.queryExecution.toRdd, triggerDF.schema, isStreaming = true)
}
override def stop(): Unit = {}
}
StreamingExecutionRelation(source, spark)
}
  /** Returns the query after the first trigger of the streaming DF has completed. */
private def getStreamingQuery(streamingDF: DataFrame): StreamingQuery = {
try {
val q = streamingDF.writeStream.format("memory").queryName("test").start()
q.processAllAvailable()
q
} finally {
spark.streams.active.map(_.stop())
}
}
/** Returns the last query progress from query.recentProgress where numInputRows is positive */
def getLastProgressWithData(q: StreamingQuery): Option[StreamingQueryProgress] = {
q.recentProgress.filter(_.numInputRows > 0).lastOption
}
/**
* A [[StreamAction]] to test the behavior of `StreamingQuery.awaitTermination()`.
*
* @param expectedBehavior Expected behavior (not blocked, blocked, or exception thrown)
* @param timeoutMs Timeout in milliseconds
* When timeoutMs is less than or equal to 0, awaitTermination() is
* tested (i.e. w/o timeout)
* When timeoutMs is greater than 0, awaitTermination(timeoutMs) is
* tested
* @param expectedReturnValue Expected return value when awaitTermination(timeoutMs) is used
*/
case class TestAwaitTermination(
expectedBehavior: ExpectedBehavior,
timeoutMs: Int = -1,
expectedReturnValue: Boolean = false
) extends AssertOnQuery(
TestAwaitTermination.assertOnQueryCondition(expectedBehavior, timeoutMs, expectedReturnValue),
"Error testing awaitTermination behavior"
) {
override def toString(): String = {
s"TestAwaitTermination($expectedBehavior, timeoutMs = $timeoutMs, " +
s"expectedReturnValue = $expectedReturnValue)"
}
}
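  // Usage sketch: instances such as TestAwaitTermination(expectedBehavior) (no timeout) or
  // TestAwaitTermination(expectedBehavior, timeoutMs = 2000, expectedReturnValue = true)
  // can be mixed into testStream(...) action sequences like any other AssertOnQuery.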
object TestAwaitTermination {
/**
* Tests the behavior of `StreamingQuery.awaitTermination`.
*
* @param expectedBehavior Expected behavior (not blocked, blocked, or exception thrown)
* @param timeoutMs Timeout in milliseconds
* When timeoutMs is less than or equal to 0, awaitTermination() is
* tested (i.e. w/o timeout)
* When timeoutMs is greater than 0, awaitTermination(timeoutMs) is
* tested
* @param expectedReturnValue Expected return value when awaitTermination(timeoutMs) is used
*/
def assertOnQueryCondition(
expectedBehavior: ExpectedBehavior,
timeoutMs: Int,
expectedReturnValue: Boolean
)(q: StreamExecution): Boolean = {
def awaitTermFunc(): Unit = {
if (timeoutMs <= 0) {
q.awaitTermination()
} else {
val returnedValue = q.awaitTermination(timeoutMs)
assert(returnedValue === expectedReturnValue, "Returned value does not match expected")
}
}
AwaitTerminationTester.test(expectedBehavior, () => awaitTermFunc())
true // If the control reached here, then everything worked as expected
}
}
}
object StreamingQuerySuite {
// Singleton reference to clock that does not get serialized in task closures
var clock: StreamManualClock = null
}
| nchammas/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala | Scala | apache-2.0 | 50,892 |
//
// author: Cosmin Basca
//
// Copyright 2010 University of Zurich
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.sparqlclient
import com.ning.http.client.Response
/**
* Created by basca on 25/07/14.
*/
class HttpException(msg: String) extends RuntimeException(msg)
object HttpException {
def apply(code: Int) : HttpException = new HttpException(s"HTTP Request failed with status code: $code")
def apply(response: Response): HttpException = new HttpException(s"HTTP Request failed with status code: ${response.getStatusCode} (content: ${response.getResponseBody}, headers: ${response.getHeaders})")
def apply(code: Int, cause: Throwable): Throwable = HttpException(code).initCause(cause)
def apply(response: Response, cause: Throwable): Throwable = HttpException(response).initCause(cause)
}
| cosminbasca/sparqlclient | src/main/scala/com/sparqlclient/HttpException.scala | Scala | apache-2.0 | 1,339 |
package db
import java.io.File
import scala.collection.JavaConverters._
import org.biojava3.core.sequence.{DNASequence, ProteinSequence}
import org.biojava3.core.sequence.io.FastaReaderHelper
import scala.slick.driver.JdbcProfile
/**
* Created by preecha on 10/10/14 AD.
*/
class DAL(val driver:JdbcProfile) extends GeneComponent with DriverComponent with ProteinComponent {
import driver.simple._
def create(implicit session: Session) = (genes.ddl ++ proteins.ddl).create
  def insert(protein: Protein)(implicit session: Session): Unit = proteins.insert(protein)
// Query
def batchProteinQuery(list: List[String])(implicit session: Session): List[Protein] = {
val results = for {
protein <- proteins if protein.symbol inSet list
} yield protein
results.list
}
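  // Prefix search: issues one LIKE "<symbol>%" query per requested symbol and flattens the results.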
def batchProteinQueryLike(list: List[String])(implicit session: Session): List[Protein] = {
val result = list.map { s: String =>
(for {
protein <- proteins if protein.symbol like s"$s%"
} yield protein).list
}
result.flatten
}
def batchGeneQuery(list: List[String])(implicit session: Session): List[Gene] = {
val results = for {
gene <- genes if gene.symbol inSet list
} yield gene
results.list
}
def batchGeneQueryLike(list: List[String])(implicit session: Session): List[Gene] = {
val result = list.map { s: String =>
(for {
gene <- genes if gene.symbol like s"$s%"
} yield gene).list
}
result.flatten
}
  // Import
def insertAAFASTA(fname: String)(implicit session: Session): Unit = {
FastaReaderHelper.readFastaProteinSequence(new File(fname)).asScala foreach { entry: (String, ProteinSequence) =>
val protein = Protein(entry._2.getOriginalHeader, entry._2.getSequenceAsString)
proteins.insert(protein)
}
}
def insertNAFASTA(fname: String)(implicit session: Session): Unit = {
FastaReaderHelper.readFastaDNASequence(new File(fname)).asScala foreach { entry: (String, DNASequence) =>
val gene = Gene(entry._2.getOriginalHeader, entry._2.getSequenceAsString)
genes.insert(gene)
}
}
}
| yumyai/fasta-search | src/main/scala/db/DAL.scala | Scala | apache-2.0 | 2,129
def unit[A]: A => Unit = _ => ()
| hmemcpy/milewski-ctfp-pdf | src/content/1.5/code/scala/snippet02.scala | Scala | gpl-3.0 | 32
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.util
import org.openjdk.jol.info.ClassLayout
import org.openjdk.jol.info.GraphLayout
import munit.FunSuite
import scala.util.Random
class IntHashSetSuite extends FunSuite {
test("add") {
val s = new IntHashSet(-1, 10)
s.add(11)
assertEquals(List(11), s.toList)
assertEquals(1, s.size)
}
test("dedup") {
val s = new IntHashSet(-1, 10)
s.add(42)
assertEquals(List(42), s.toList)
assertEquals(1, s.size)
s.add(42)
assertEquals(List(42), s.toList)
assertEquals(1, s.size)
}
test("resize") {
val s = new IntHashSet(-1, 10)
(0 until 10000).foreach(s.add)
assertEquals((0 until 10000).toSet, s.toList.toSet)
assertEquals(s.size, 10000)
}
test("random") {
val jset = new scala.collection.mutable.HashSet[Int]
val iset = new IntHashSet(-1, 10)
(0 until 10000).foreach { i =>
val v = Random.nextInt()
iset.add(v)
jset.add(v)
}
assertEquals(jset.toSet, iset.toList.toSet)
}
private def arrayCompare(a1: Array[Int], a2: Array[Int]): Unit = {
// Need to sort as traversal order could be different when generating the arrays
java.util.Arrays.sort(a1)
java.util.Arrays.sort(a2)
assertEquals(a1.toSeq, a2.toSeq)
}
test("toArray") {
val jset = new scala.collection.mutable.HashSet[Int]
val iset = new IntHashSet(-1, 10)
(0 until 10000).foreach { i =>
val v = Random.nextInt()
iset.add(v)
jset.add(v)
}
arrayCompare(jset.toArray, iset.toArray)
}
test("memory per set") {
// Sanity check to verify if some change introduces more overhead per set
val bytes = ClassLayout.parseClass(classOf[IntHashSet]).instanceSize()
assertEquals(bytes, 32L)
}
test("memory - 5 items") {
val iset = new IntHashSet(-1, 10)
val jset = new java.util.HashSet[Int](10)
(0 until 5).foreach { i =>
iset.add(i)
jset.add(i)
}
val igraph = GraphLayout.parseInstance(iset)
//val jgraph = GraphLayout.parseInstance(jset)
//println(igraph.toFootprint)
//println(jgraph.toFootprint)
// Only objects should be the array and the set itself
assertEquals(igraph.totalCount(), 2L)
// Sanity check size is < 100 bytes
assert(igraph.totalSize() <= 100)
}
test("memory - 10k items") {
val iset = new IntHashSet(-1, 10)
val jset = new java.util.HashSet[Int](10)
(0 until 10000).foreach { i =>
iset.add(i)
jset.add(i)
}
val igraph = GraphLayout.parseInstance(iset)
//val jgraph = GraphLayout.parseInstance(jset)
//println(igraph.toFootprint)
//println(jgraph.toFootprint)
// Only objects should be the array and the set itself
assertEquals(igraph.totalCount(), 2L)
// Sanity check size is < 110kb
assert(igraph.totalSize() <= 110000)
}
test("negative absolute value") {
val s = new IntHashSet(-1, 10)
s.add(Integer.MIN_VALUE)
}
}
| Netflix/atlas | atlas-core/src/test/scala/com/netflix/atlas/core/util/IntHashSetSuite.scala | Scala | apache-2.0 | 3,557 |
import javax.jcr.{Node, PropertyType, Value}
import net.devkat.ocm.OcmException
import java.util.Calendar
import org.apache.commons.io.IOUtils
import scala.reflect.runtime.universe._
sealed trait JcrValue {
def jcrValue: Value
}
abstract class AbstractJcrValue[T](value: Value, v: T) extends JcrValue {
def jcrValue = value
}
object JcrValue {
import PropertyType._
/*
// http://stackoverflow.com/questions/18499384/polymorphic-instantiation-in-scala-using-typetag-and-classtag
def newInstance[T <: JcrValue : TypeTag](v: Value): T = {
val tpe = typeOf[T]
def fail = throw new IllegalArgumentException(s"Cannot instantiate $tpe")
val ctor = tpe.member(nme.CONSTRUCTOR) match {
case symbol: TermSymbol =>
symbol.alternatives.collectFirst {
case constr: MethodSymbol if (constr.paramss match {
case List(List(param)) if (param.typeSignature =:= typeOf[Value]) => true
case _ => false
}) => constr
} getOrElse fail
case NoSymbol => fail
}
val classMirror = typeTag[T].mirror.reflectClass(tpe.typeSymbol.asClass)
classMirror.reflectConstructor(ctor).apply(v).asInstanceOf[T]
}
implicit def extract[T <: JcrValue : TypeTag](v: Value): T = newInstance[T](v)
*/
protected def typeName(jcrPropertyType: Int) =
javax.jcr.PropertyType.nameFromValue(jcrPropertyType)
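  // Wraps a raw JCR Value in the JcrValue case class matching its property type, failing
  // fast with an OcmException when that type differs from the requested T.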
implicit def value2jcrValue[T <: JcrValue : TypeTag](v: Value): T = {
val reqType = jcrType[T]
if (v.getType != reqType) {
throw new OcmException(s"Found property type ${typeName(v.getType)}, required type ${typeName(reqType)}.")
} else {
(v.getType match {
case BINARY => binary(v)
case BOOLEAN => boolean(v)
case DATE => date(v)
case DECIMAL => decimal(v)
case DOUBLE => double(v)
case LONG => long(v)
case NAME => name(v)
case PATH => path(v)
case REFERENCE => reference(v)
case STRING => string(v)
case URI => uri(v)
case WEAKREFERENCE => weakReference(v)
}).asInstanceOf[T]
}
}
implicit def jcrValue2value(t: JcrValue) = t.jcrValue
case class binary(v: Value) extends AbstractJcrValue[Array[Byte]](v, IOUtils.toByteArray(v.getBinary.getStream))
case class boolean(v: Value) extends AbstractJcrValue[Boolean](v, v.getBoolean)
case class date(v: Value) extends AbstractJcrValue[Calendar](v, v.getDate)
case class decimal(v: Value) extends AbstractJcrValue[BigDecimal](v, v.getDecimal)
case class double(v: Value) extends AbstractJcrValue[Double](v, v.getDouble)
case class long(v: Value) extends AbstractJcrValue[Long](v, v.getLong)
case class name(v: Value) extends AbstractJcrValue[String](v, v.getString)
case class path(v: Value) extends AbstractJcrValue[String](v, v.getString)
case class reference(v: Value) extends AbstractJcrValue[String](v, v.getString)
case class string(v: Value) extends AbstractJcrValue[String](v, v.getString)
case class uri(v: Value) extends AbstractJcrValue[String](v, v.getString)
case class weakReference(v: Value) extends AbstractJcrValue[String](v, v.getString)
def jcrType[T <: JcrValue: TypeTag]: Int = {
val t = typeOf[T]
    if (t =:= typeOf[binary]) BINARY
    else if (t =:= typeOf[boolean]) BOOLEAN
    else if (t =:= typeOf[date]) DATE
    else if (t =:= typeOf[decimal]) DECIMAL
    else if (t =:= typeOf[double]) DOUBLE
    else if (t =:= typeOf[long]) LONG
    else if (t =:= typeOf[name]) NAME
    else if (t =:= typeOf[path]) PATH
    else if (t =:= typeOf[reference]) REFERENCE
    else if (t =:= typeOf[string]) STRING
    else if (t =:= typeOf[uri]) URI
    else if (t =:= typeOf[weakReference]) WEAKREFERENCE
    else throw new OcmException(s"Unsupported type ${t}")
}
}
sealed trait ValueExtractor
object ValueExtractor {
case object missing extends ValueExtractor
case class single(v: Value) extends ValueExtractor
case class multiple(v: Iterable[Value]) extends ValueExtractor
def apply(node: Node, name: String): ValueExtractor = {
if (node.hasProperty(name)) {
val p = node.getProperty(name)
if (p.isMultiple) multiple(p.getValues)
else single(p.getValue)
} else missing
}
}
case class Simple[T](v: T) {
def get: T = v
}
sealed trait PropertyAccessor[C[_]] {
import javax.jcr.PropertyType._
import JcrValue._
def read[T <: JcrValue: TypeTag](node: Node, name: String): C[T]
def write[T <: JcrValue: TypeTag](node: Node, name: String, v: C[T]): Unit
protected def extractValue[T <: JcrValue : TypeTag](value: Value): T = value2jcrValue(value)
}
object PropertyAccessor {
import ValueExtractor._
import JcrValue._
import Simple._
implicit val simple = new PropertyAccessor[Simple] {
def read[T <: JcrValue: TypeTag](node: Node, name: String): Simple[T] = {
val extract = extractValue[T] _
ValueExtractor(node, name) match {
case single(v) => Simple(extract(v))
case multiple(_) => throw new OcmException(s"Property ${name} is multiple.")
case missing => throw new OcmException(s"Property ${name} missing.")
}
}
def write[T <: JcrValue: TypeTag](node: Node, name: String, v: Simple[T]) {
node.setProperty(name, v.get)
}
}
implicit val optional = new PropertyAccessor[Option] {
def read[T <: JcrValue: TypeTag](node: Node, name: String): Option[T] = {
val extract = extractValue[T] _
ValueExtractor(node, name) match {
case single(v) => Some(extract(v))
case multiple(_) => throw new OcmException(s"Property ${name} is multiple.")
case missing => None.asInstanceOf[Option[T]]
}
}
def write[T <: JcrValue: TypeTag](node: Node, name: String, v: Option[T]) {
def clear = if (node.hasProperty(name)) node.getProperty(name).remove()
v match {
case Some(w) => node.setProperty(name, w)
case None => clear
}
}
}
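  // The helpers below dispatch to whichever PropertyAccessor instance matches the container
  // type. Usage sketch (hypothetical node and property name):
  //   val title = PropertyAccessor.read[Simple, JcrValue.string](node, "jcr:title")
  //   PropertyAccessor.write(node, "jcr:title", Option.empty[JcrValue.string])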
def read[C[_]: PropertyAccessor, T <: JcrValue: TypeTag](node: Node, name: String): C[T] =
implicitly[PropertyAccessor[C]].read[T](node, name)
def write[C[_]: PropertyAccessor, T <: JcrValue: TypeTag](node: Node, name: String, v: C[T]): Unit =
implicitly[PropertyAccessor[C]].write[T](node, name, v)
}
| devkat/scala-ocm | common/src/main/scala/net/devkat/ocm/JcrValue.scala | Scala | apache-2.0 | 6,228 |
package org.jetbrains.plugins.scala.lang.typeInference
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
/**
* @author anton.yalyshev
* @since 07.09.18.
*/
class ApplyConformanceTest extends ScalaLightCodeInsightFixtureTestAdapter {
def testSCL13654(): Unit = {
checkTextHasNoErrors(
s"""
|class Id {
| def apply(param: Int): Int =
| param
| }
|
| implicit def id2function(clz: Id): String => String =
| str => clz(str.toInt).toString
|
| val id = new Id
|
| id { "1" }
""".stripMargin)
}
def testSCL11912(): Unit = {
checkTextHasNoErrors(
s"""
|object test {
| final case class Kleisli[F[_], A, B](run: A => F[B])
| val f = Kleisli { (x: Int) => Some(x + 1) }
|}
""".stripMargin)
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/typeInference/ApplyConformanceTest.scala | Scala | apache-2.0 | 917 |
/*
* Copyright (c) 2014-2016
* nonblocking.at gmbh [http://www.nonblocking.at]
*
* This file is part of Cliwix.
*
* Cliwix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.nonblocking.cliwix.core.liferay62.util
import at.nonblocking.cliwix.core.util.TrashFilter
import com.liferay.portal.model.{TrashedModel, BaseModel}
/**
* Trash filter impl for Liferay 62.
*/
class TrashFilter62 extends TrashFilter {
override def isInTrash(entity: BaseModel[_]) = {
assert(entity.isInstanceOf[TrashedModel], s"${entity.getClass.getName} must be a TrashedModel")
entity.asInstanceOf[TrashedModel].isInTrash
}
}
| nonblocking/cliwix | cliwix-core-handlers-6-2/src/main/scala/at/nonblocking/cliwix/core/liferay62/util/TrashFilter62.scala | Scala | agpl-3.0 | 1,239 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.exhibitor
import java.io.{IOException, PrintStream}
import java.net.{HttpURLConnection, URL, URLEncoder}
import play.api.libs.json.{JsValue, Json}
import scopt.OptionParser
import scala.concurrent.duration.Duration
import scala.io.Source
import scala.util.{Failure, Success, Try}
object Cli {
private[exhibitor] var out: PrintStream = System.out
def main(args: Array[String]) {
try {
exec(args)
} catch {
case e: CliError =>
System.err.println("Error: " + e.getMessage)
sys.exit(1)
}
}
def exec(args: Array[String]) {
if (args.length == 0) {
handleHelp()
printLine()
throw CliError("No command supplied")
}
val command = args.head
val commandArgs = args.tail
command match {
case "help" => if (commandArgs.isEmpty) handleHelp() else handleHelp(commandArgs.head)
case "scheduler" => handleScheduler(commandArgs)
case "add" => handleAdd(commandArgs)
case "start" => handleStart(commandArgs)
case "stop" => handleStop(commandArgs)
case "remove" => handleRemove(commandArgs)
case "status" => handleStatus(commandArgs)
case "config" => handleConfig(commandArgs)
      case _ => throw CliError(s"Unknown command: $command\n")
}
}
def handleHelp(command: String = "") {
command match {
case "" =>
printLine("Usage: <command>\\n")
printGenericHelp()
case "scheduler" => Parsers.scheduler.showUsage
case "add" => Parsers.add.showUsage
case "start" => Parsers.start.showUsage
case "stop" => Parsers.stop.showUsage
case "remove" => Parsers.remove.showUsage
case "status" => Parsers.status.showUsage
case "config" => Parsers.config.showUsage
case _ =>
printLine(s"Unknown command: $command\\n")
printGenericHelp()
}
}
def handleScheduler(args: Array[String]) {
Parsers.scheduler.parse(args, Map()) match {
case Some(config) =>
resolveApi(config.get("api"))
Config.master = config("master")
Config.user = config("user")
config.get("frameworkname").foreach(name => Config.frameworkName = name)
config.get("frameworktimeout").foreach(timeout => Config.frameworkTimeout = Duration(timeout))
config.get("ensemblemodifyretries").foreach(retries => Config.ensembleModifyRetries = retries.toInt)
config.get("ensemblemodifybackoff").foreach(backoff => Config.ensembleModifyBackoff = backoff.toLong)
config.get("debug").foreach(debug => Config.debug = debug.toBoolean)
Scheduler.start()
case None => throw CliError("Invalid arguments")
}
}
def handleAdd(args: Array[String]) {
val id = getID(args, () => Parsers.add.showUsage)
Parsers.add.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val response = sendRequest("/add", config).as[ApiResponse]
printLine(response.message)
printLine()
response.value.foreach(printCluster)
case None => throw CliError("Invalid arguments")
}
}
def handleStart(args: Array[String]) {
val id = getID(args, () => Parsers.start.showUsage)
Parsers.start.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val response = sendRequest("/start", config).as[ApiResponse]
printLine(response.message)
printLine()
response.value.foreach(printCluster)
case None => throw CliError("Invalid arguments")
}
}
def handleStop(args: Array[String]) {
val id = getID(args, () => Parsers.stop.showUsage)
Parsers.stop.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val response = sendRequest("/stop", config).as[ApiResponse]
printLine(response.message)
printLine()
case None => throw CliError("Invalid arguments")
}
}
def handleRemove(args: Array[String]) {
val id = getID(args, () => Parsers.remove.showUsage)
Parsers.remove.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val response = sendRequest("/remove", config).as[ApiResponse]
printLine(response.message)
printLine()
case None => throw CliError("Invalid arguments")
}
}
def handleStatus(args: Array[String]) {
Parsers.status.parse(args, Map()) match {
case Some(config) =>
resolveApi(config.get("api"))
val cluster = sendRequest("/status", config).as[ApiResponse]
printCluster(cluster.value.get)
case None => throw CliError("Invalid arguments")
}
}
def handleConfig(args: Array[String]) {
val id = getID(args, () => Parsers.config.showUsage)
Parsers.config.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val response = sendRequest("/config", config).as[ApiResponse]
printLine(response.message)
printLine()
response.value.foreach(printCluster)
case None => throw CliError("Invalid arguments")
}
}
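  // Expects the first argument to be a comma-separated list of server id ranges; shows the
  // command usage and fails when the argument is missing or malformed.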
private def getID(args: Array[String], usage: () => Unit): String = {
args.headOption match {
case Some(ids) => Try(ids.split(",").map(Util.Range(_))) match {
case Success(_) => ids
case Failure(e) => throw CliError(s"Invalid id range: ${e.getMessage}")
}
case None =>
usage()
throw CliError("Argument required")
}
}
private def resolveApi(apiOption: Option[String]) {
if (Config.api != null) return
if (apiOption.isDefined) {
Config.api = apiOption.get
return
}
if (System.getenv("EM_API") != null) {
Config.api = System.getenv("EM_API")
return
}
throw CliError("Undefined API url. Please provide either a CLI --api option or EM_API env.")
}
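  // Builds "<api>/api<uri>?<query string>" and performs a blocking HTTP GET, returning the
  // parsed JSON body; non-200 responses surface as IOExceptions carrying the status code.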
private[exhibitor] def sendRequest(uri: String, params: Map[String, String]): JsValue = {
def queryString(params: Map[String, String]): String = {
var s = ""
params.foreach { case (name, value) =>
if (!s.isEmpty) s += "&"
s += URLEncoder.encode(name, "utf-8")
if (value != null) s += "=" + URLEncoder.encode(value, "utf-8")
}
s
}
val qs: String = queryString(params)
val url: String = Config.api + (if (Config.api.endsWith("/")) "" else "/") + "api" + uri + "?" + qs
val connection: HttpURLConnection = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
var response: String = null
try {
try {
response = Source.fromInputStream(connection.getInputStream).getLines().mkString
}
catch {
case e: IOException =>
if (connection.getResponseCode != 200) throw new IOException(connection.getResponseCode + " - " + connection.getResponseMessage)
else throw e
}
} finally {
connection.disconnect()
}
Json.parse(response)
}
private def printLine(s: AnyRef = "", indent: Int = 0) = out.println(" " * indent + s)
private def printGenericHelp() {
printLine("Commands:")
printLine("help - print this message.", 1)
printLine("help [cmd] - print command-specific help.", 1)
printLine("scheduler - start scheduler.", 1)
printLine("status - print cluster status.", 1)
printLine("add - add servers to cluster.", 1)
printLine("config - configure servers in cluster.", 1)
printLine("start - start servers in cluster.", 1)
printLine("stop - stop servers in cluster.", 1)
printLine("remove - remove servers in cluster.", 1)
}
private def printConstraintExamples() {
printLine("constraint examples:")
printLine("like:slave0 - value equals 'slave0'", 1)
printLine("unlike:slave0 - value is not equal to 'slave0'", 1)
printLine("like:slave.* - value starts with 'slave'", 1)
printLine("unique - all values are unique", 1)
printLine("cluster - all values are the same", 1)
printLine("cluster:slave0 - value equals 'slave0'", 1)
printLine("groupBy - all values are the same", 1)
printLine("groupBy:3 - all values are within 3 different groups", 1)
}
private def printCluster(cluster: Cluster) {
printLine("cluster:")
cluster.servers.foreach(printExhibitorServer(_, 1))
}
private def printExhibitorServer(server: ExhibitorServer, indent: Int = 0) {
printLine("server:", indent)
printLine(s"id: ${server.id}", indent + 1)
printLine(s"state: ${server.state}", indent + 1)
if (!server.config.hostname.isEmpty && server.config.exhibitorConfig.get("port").isDefined) {
printLine(s"endpoint: ${server.url}/exhibitor/v1/ui/index.html", indent + 1)
}
if (server.constraints.nonEmpty)
printLine(s"constraints: ${Util.formatConstraints(server.constraints)}", indent + 1)
printTaskConfig(server.config, indent + 1)
printLine()
}
private def printTaskConfig(config: TaskConfig, indent: Int) {
printLine("exhibitor config:", indent)
config.exhibitorConfig.foreach { case (k, v) =>
printLine(s"$k: $v", indent + 1)
}
printLine("shared config overrides:", indent)
config.sharedConfigOverride.foreach { case (k, v) =>
printLine(s"$k: $v", indent + 1)
}
printLine(s"cpu: ${config.cpus}", indent)
printLine(s"mem: ${config.mem}", indent)
printLine(s"sharedConfigChangeBackoff: ${config.sharedConfigChangeBackoff}", indent)
val ports = config.ports match {
case Nil => "auto"
case _ => config.ports.mkString(",")
}
printLine(s"port: $ports", indent)
}
private object Parsers {
val scheduler = new CliOptionParser("scheduler") {
opt[String]('m', "master").required().text("Mesos Master addresses. Required.").action { (value, config) =>
config.updated("master", value)
}
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
opt[String]('u', "user").required().text("Mesos user. Required.").action { (value, config) =>
config.updated("user", value)
}
opt[String]("frameworkname").optional().text("Mesos framework name. Defaults to Exhibitor. Optional").action { (value, config) =>
config.updated("frameworkname", value)
}
opt[String]("frameworktimeout").optional().text("Mesos framework failover timeout. Allows to recover from failure before killing running tasks. Should be a parsable Scala Duration value. Defaults to 30 days. Optional").action { (value, config) =>
Duration(value)
config.updated("frameworktimeout", value)
}
opt[Int]("ensemblemodifyretries").optional().text("Number of retries to modify (add/remove server) ensemble. Defaults to 60. Optional.").action { (value, config) =>
config.updated("ensemblemodifyretries", value.toString)
}
opt[Long]("ensemblemodifybackoff").optional().text("Backoff between retries to modify (add/remove server) ensemble in milliseconds. Defaults to 1000. Optional.").action { (value, config) =>
config.updated("ensemblemodifybackoff", value.toString)
}
opt[Boolean]('d', "debug").optional().text("Debug mode. Optional. Defaults to false.").action { (value, config) =>
config.updated("debug", value.toString)
}
}
val add = new CliOptionParser("add <id>") {
override def showUsage {
super.showUsage
printLine()
printConstraintExamples()
}
opt[String]('c', "cpu").optional().text(s"CPUs for server. Optional.").action { (value, config) =>
config.updated("cpu", value)
}
opt[String]('m', "mem").optional().text("Memory for server. Optional.").action { (value, config) =>
config.updated("mem", value)
}
opt[String]("constraints").optional().text("Constraints (hostname=like:master,rack=like:1.*). See below. Defaults to 'hostname=unique'. Optional.").action { (value, config) =>
config.updated("constraints", value)
}
opt[Long]('b', "configchangebackoff").optional().text("Backoff between checks whether the shared configuration changed in milliseconds. Defaults to 10000. Optional.").action { (value, config) =>
config.updated("configchangebackoff", value.toString)
}
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
opt[String]("port").optional().text("Port ranges to accept, when offer is issued. Optional").action { (value, config) =>
config.updated("port", value)
}
}
val start = defaultParser("start <id>")
val stop = defaultParser("stop <id>")
val remove = defaultParser("remove <id>")
val status = defaultParser("status")
val config = new CliOptionParser("config <id>") {
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
// Exhibitor configs
opt[String]("configtype").optional().text("Config type to use: s3 or zookeeper. Optional.").action { (value, config) =>
config.updated("configtype", value)
}
opt[String]("configcheckms").optional().text("Period (ms) to check for shared config updates. Optional.").action { (value, config) =>
config.updated("configcheckms", value)
}
opt[String]("defaultconfig").optional().text("Full path to a file that contains initial/default values for Exhibitor/ZooKeeper config values. The file is a standard property file. Optional.").action { (value, config) =>
config.updated("defaultconfig", value)
}
opt[String]("headingtext").optional().text("Extra text to display in UI header. Optional.").action { (value, config) =>
config.updated("headingtext", value)
}
opt[String]("hostname").optional().text("Hostname to use for this JVM. Optional.").action { (value, config) =>
config.updated("hostname", value)
}
opt[String]("jquerystyle").optional().text("Styling used for the JQuery-based UI. Optional.").action { (value, config) =>
config.updated("jquerystyle", value)
}
opt[String]("loglines").optional().text("Max lines of logging to keep in memory for display. Default is 1000. Optional.").action { (value, config) =>
config.updated("loglines", value)
}
opt[String]("nodemodification").optional().text("If true, the Explorer UI will allow nodes to be modified (use with caution). Default is true. Optional.").action { (value, config) =>
config.updated("nodemodification", value)
}
opt[String]("prefspath").optional().text("Certain values (such as Control Panel values) are stored in a preferences file. By default, Preferences.userRoot() is used. Optional.").action { (value, config) =>
config.updated("prefspath", value)
}
opt[String]("servo").optional().text("true/false (default is false). If enabled, ZooKeeper will be queried once a minute for its state via the 'mntr' four letter word (this requires ZooKeeper 3.4.x+). Servo will be used to publish this data via JMX. Optional.").action { (value, config) =>
config.updated("servo", value)
}
opt[String]("timeout").optional().text("Connection timeout (ms) for ZK connections. Default is 30000. Optional.").action { (value, config) =>
config.updated("timeout", value)
}
// S3 options
opt[String]("s3credentials").optional().text("Credentials to use for s3backup or s3config. Optional.").action { (value, config) =>
config.updated("s3credentials", value)
}
opt[String]("s3region").optional().text("Region for S3 calls (e.g. \\"eu-west-1\\"). Optional.").action { (value, config) =>
config.updated("s3region", value)
}
// Configuration Options for Type "s3"
opt[String]("s3config").optional().text("The bucket name and key to store the config (s3credentials may be provided as well). Argument is [bucket name]:[key]. Optional.").action { (value, config) =>
config.updated("s3config", value)
}
opt[String]("s3configprefix").optional().text("When using AWS S3 shared config files, the prefix to use for values such as locks. Optional.").action { (value, config) =>
config.updated("s3configprefix", value)
}
// Configuration Options for Type "zookeeper"
opt[String]("zkconfigconnect").optional().text("The initial connection string for ZooKeeper shared config storage. E.g: host1:2181,host2:2181... Optional.").action { (value, config) =>
config.updated("zkconfigconnect", value)
}
opt[String]("zkconfigexhibitorpath").optional().text("Used if the ZooKeeper shared config is also running Exhibitor. This is the URI path for the REST call. The default is: /. Optional.").action { (value, config) =>
config.updated("zkconfigexhibitorpath", value)
}
opt[String]("zkconfigexhibitorport").optional().text("Used if the ZooKeeper shared config is also running Exhibitor. This is the port that Exhibitor is listening on. IMPORTANT: if this value is not set it implies that Exhibitor is not being used on the ZooKeeper shared config. Optional.").action { (value, config) =>
config.updated("zkconfigexhibitorport", value)
}
opt[String]("zkconfigpollms").optional().text("The period in ms to check for changes in the config ensemble. The default is: 10000. Optional.").action { (value, config) =>
config.updated("zkconfigpollms", value)
}
opt[String]("zkconfigretry").optional().text("The retry values to use in the form sleep-ms:retry-qty. The default is: 1000:3. Optional.").action { (value, config) =>
config.updated("zkconfigretry", value)
}
opt[String]("zkconfigzpath").optional().text("The base ZPath that Exhibitor should use. E.g: /exhibitor/config. Optional.").action { (value, config) =>
config.updated("zkconfigzpath", value)
}
// Backup Options
opt[String]("filesystembackup").optional().text("If true, enables file system backup of ZooKeeper log files. Optional.").action { (value, config) =>
config.updated("filesystembackup", value)
}
opt[String]("s3backup").optional().text("If true, enables AWS S3 backup of ZooKeeper log files (s3credentials may be provided as well). Optional.").action { (value, config) =>
config.updated("s3backup", value)
}
// ACL Options
opt[String]("aclid").optional().text("Enable ACL for Exhibitor's internal ZooKeeper connection. This sets the ACL's ID. Optional.").action { (value, config) =>
config.updated("aclid", value)
}
opt[String]("aclperms").optional().text("Enable ACL for Exhibitor's internal ZooKeeper connection. This sets the ACL's Permissions - a comma list of possible permissions. If this isn't specified the permission is set to ALL. Values: read, write, create, delete, admin. Optional.").action { (value, config) =>
config.updated("aclperms", value)
}
opt[String]("aclscheme").optional().text("Enable ACL for Exhibitor's internal ZooKeeper connection. This sets the ACL's Scheme. Optional.").action { (value, config) =>
config.updated("aclscheme", value)
}
// shared configs
opt[String]("log-index-directory").optional().text("The directory where indexed Zookeeper logs should be kept. Optional.").action { (value, config) =>
config.updated("log-index-directory", value)
}
opt[String]("zookeeper-install-directory").optional().text("The directory where the Zookeeper server is installed. Optional.").action { (value, config) =>
config.updated("zookeeper-install-directory", value)
}
opt[String]("zookeeper-data-directory").optional().text("The directory where Zookeeper snapshot data is stored. Optional.").action { (value, config) =>
config.updated("zookeeper-data-directory", value)
}
opt[String]("zookeeper-log-directory").optional().text("The directory where Zookeeper transaction log data is stored. Optional.").action { (value, config) =>
config.updated("zookeeper-log-directory", value)
}
opt[String]("backup-extra").optional().text("Backup extra shared config. Optional.").action { (value, config) =>
config.updated("backup-extra", value)
}
opt[String]("zoo-cfg-extra").optional().text("Any additional properties to be added to the zoo.cfg file in form: key1\\\\\\\\=value1&key2\\\\\\\\=value2. Optional.").action { (value, config) =>
config.updated("zoo-cfg-extra", value)
}
opt[String]("java-environment").optional().text("Script to write as the 'java.env' file which gets executed as a part of Zookeeper start script. Optional.").action { (value, config) =>
config.updated("java-environment", value)
}
opt[String]("log4j-properties").optional().text("Contents of the log4j.properties file. Optional.").action { (value, config) =>
config.updated("log4j-properties", value)
}
opt[String]("client-port").optional().text("The port that clients use to connect to Zookeeper. Defaults to 2181. Optional.").action { (value, config) =>
config.updated("client-port", value)
}
opt[String]("connect-port").optional().text("The port that other Zookeeper instances use to connect to Zookeeper. Defaults to 2888. Optional.").action { (value, config) =>
config.updated("connect-port", value)
}
opt[String]("election-port").optional().text("The port that other Zookeeper instances use for election. Defaults to 3888. Optional.").action { (value, config) =>
config.updated("election-port", value)
}
opt[String]("check-ms").optional().text("The number of milliseconds between live-ness checks on Zookeeper server. Defaults to 30000. Optional.").action { (value, config) =>
config.updated("check-ms", value)
}
opt[String]("cleanup-period-ms").optional().text("The number of milliseconds between Zookeeper log file cleanups. Defaults to 43200000. Optional.").action { (value, config) =>
config.updated("cleanup-period-ms", value)
}
opt[String]("cleanup-max-files").optional().text("The max number of Zookeeper log files to keep when cleaning up. Defaults to 3. Optional.").action { (value, config) =>
config.updated("cleanup-max-files", value)
}
opt[String]("backup-max-store-ms").optional().text("Backup max store ms shared config. Optional.").action { (value, config) =>
config.updated("backup-max-store-ms", value)
}
opt[String]("backup-period-ms").optional().text("Backup period ms shared config. Optional.").action { (value, config) =>
config.updated("backup-period-ms", value)
}
opt[String]("port").optional().text("Port ranges to accept, when offer is issued. Optional").action { (value, config) =>
config.updated("port", value)
}
}
private def defaultParser(descr: String): OptionParser[Map[String, String]] = new CliOptionParser(descr) {
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
}
}
class CliOptionParser(descr: String) extends OptionParser[Map[String, String]](descr) {
override def showUsage {
Cli.out.println(usage)
}
}
case class CliError(message: String) extends RuntimeException(message)
}
| CiscoCloud/exhibitor-mesos-framework | src/main/scala/ly/stealth/mesos/exhibitor/Cli.scala | Scala | apache-2.0 | 24,796 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.jdk
import java.util.{Optional, OptionalDouble, OptionalInt, OptionalLong}
import java.{lang => jl}
import scala.annotation.implicitNotFound
/** A type class implementing conversions from a generic Scala `Option` or Java `Optional` to
* a specialized Java variant (for `Double`, `Int` and `Long`).
*
* @tparam A the primitive type wrapped in an option
* @tparam O the specialized Java `Optional` wrapping an element of type `A`
*/
@implicitNotFound("No specialized Optional type exists for elements of type ${A}")
sealed abstract class OptionShape[A, O] {
/** Converts from `Optional` to the specialized variant `O` */
def fromJava(o: Optional[A]): O
/** Converts from `Option` to the specialized variant `O` */
def fromScala(o: Option[A]): O
}
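// A small illustration of how the type class is meant to be used (a sketch, not an exhaustive
// contract): implicitly[OptionShape[Int, OptionalInt]].fromScala(Some(1)) yields
// OptionalInt.of(1), via the intOptionShape instance defined in the companion below.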
object OptionShape {
implicit val doubleOptionShape: OptionShape[Double, OptionalDouble] = new OptionShape[Double, OptionalDouble] {
def fromJava(o: Optional[Double]): OptionalDouble =
if (o.isPresent) OptionalDouble.of(o.get) else OptionalDouble.empty
def fromScala(o: Option[Double]): OptionalDouble = o match {
case Some(d) => OptionalDouble.of(d)
case _ => OptionalDouble.empty
}
}
implicit val jDoubleOptionShape: OptionShape[jl.Double, OptionalDouble] = doubleOptionShape.asInstanceOf[OptionShape[jl.Double, OptionalDouble]]
implicit val intOptionShape: OptionShape[Int, OptionalInt] = new OptionShape[Int, OptionalInt] {
def fromJava(o: Optional[Int]): OptionalInt =
if (o.isPresent) OptionalInt.of(o.get) else OptionalInt.empty
def fromScala(o: Option[Int]): OptionalInt = o match {
case Some(d) => OptionalInt.of(d)
case _ => OptionalInt.empty
}
}
implicit val jIntegerOptionShape: OptionShape[jl.Integer, OptionalInt] = intOptionShape.asInstanceOf[OptionShape[jl.Integer, OptionalInt]]
implicit val longOptionShape: OptionShape[Long, OptionalLong] = new OptionShape[Long, OptionalLong] {
def fromJava(o: Optional[Long]): OptionalLong =
if (o.isPresent) OptionalLong.of(o.get) else OptionalLong.empty
def fromScala(o: Option[Long]): OptionalLong = o match {
case Some(d) => OptionalLong.of(d)
case _ => OptionalLong.empty
}
}
implicit val jLongOptionShape: OptionShape[jl.Long, OptionalLong] = longOptionShape.asInstanceOf[OptionShape[jl.Long, OptionalLong]]
}
| lrytz/scala | src/library/scala/jdk/OptionShape.scala | Scala | apache-2.0 | 2,654 |
// timber -- Copyright 2012-2021 -- Justin Patterson
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalawag.timber.backend.dispatcher.configuration.dsl
import org.scalawag.timber.backend.dispatcher.EntryFacets
trait Condition {
/** Tests the condition against the EntryFacets specified. It should always return the same thing given the
* same input EntryFacets.
*
* If the condition returns a definitive answer when passed an empty EntryFacets, it means that this is the
* answer regardless of the input and the condition may be optimized out of the configuration graph as a constant.
*
* @return Some(true) or Some(false) if the test is decisive and None if it can't be sure given the EntryFacets.
* None will be treated as 'false' for evaluation purposes.
*/
def accepts(entryFacets: EntryFacets): Option[Boolean]
def and(that: Condition) = Condition.AndCondition(this, that)
def or(that: Condition) = Condition.OrCondition(this, that)
def &&(that: Condition) = Condition.AndCondition(this, that)
def ||(that: Condition) = Condition.OrCondition(this, that)
def unary_!() = Condition.NotCondition(this)
}
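// Conditions compose like booleans, e.g. (c1 && c2) || !c3. Note that in this implementation
// a combined condition is only decisive (Some(...)) when every sub-condition it wraps is;
// any None vote makes the whole combination indecisive.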
object Condition {
def apply(accepts: Boolean) = if (accepts) AcceptAll else RejectAll
trait ConstantCondition extends Condition
case object AcceptAll extends ConstantCondition {
override def accepts(entryFacets: EntryFacets): Option[Boolean] = Some(true)
override lazy val toString = "true"
}
case object RejectAll extends ConstantCondition {
override def accepts(entryFacets: EntryFacets): Option[Boolean] = Some(false)
override lazy val toString = "false"
}
trait LogicalOperationCondition extends Condition
case class NotCondition(val condition: Condition) extends LogicalOperationCondition {
override def accepts(entryFacets: EntryFacets): Option[Boolean] = condition.accepts(entryFacets).map(!_)
override val toString = "not(" + condition + ")"
}
case class AndCondition(val conditions: Condition*) extends LogicalOperationCondition {
override def accepts(entryFacets: EntryFacets): Option[Boolean] = {
val votes = conditions.map(_.accepts(entryFacets))
if (votes.forall(_.isDefined))
Some(votes.map(_.get).forall(identity))
else
None
}
override val toString = conditions.map(_.toString).mkString("(", ") and (", ")")
}
case class OrCondition(val conditions: Condition*) extends LogicalOperationCondition {
override def accepts(entryFacets: EntryFacets): Option[Boolean] = {
val votes = conditions.map(_.accepts(entryFacets))
if (votes.forall(_.isDefined))
Some(votes.map(_.get).exists(identity))
else
None
}
override val toString = conditions.map(_.toString).mkString("(", ") or (", ")")
}
}
| scalawag/timber | timber-backend/src/main/scala/org/scalawag/timber/backend/dispatcher/configuration/dsl/Condition.scala | Scala | apache-2.0 | 3,338 |
package services.alertwatcherxr
import javax.inject._
import scala.concurrent.Future
import utils.Awaits
import models.alertwatcherxr.Alert
import dao.alertwatcherxr.AlertWatcherXRDao
import dao.alertwatcherxr.IAlertWatcherXRDao
import org.joda.time.DateTime
import org.joda.time.LocalDate
import java.sql.Timestamp
import com.github.tototoshi.slick.PostgresJodaSupport._
trait IAlertWatcherXRService extends BaseService[Alert]{
def insert(alert: Alert): Future[Unit]
def update(id: Long, alert: Alert): Future[Unit]
def remove(id: Long): Future[Int]
def findById(id: Long): Future[Option[Alert]]
def findBySiteId(siteid: String): Future[Option[Seq[Alert]]]
def findAll(): Future[Option[Seq[Alert]]]
def findAllAlerts(): Seq[(Long, String)]
}
@Singleton
class AlertWatcherXRService @Inject() (dao:IAlertWatcherXRDao) extends IAlertWatcherXRService{
import play.api.libs.concurrent.Execution.Implicits.defaultContext
def insert(alert: Alert): Future[Unit] = {
dao.insert(alert);
}
def update(id: Long, alert: Alert): Future[Unit] = {
// alert.id = Option(id.toInt)
// alert.id = id
dao.update(alert)
}
def remove(id: Long): Future[Int] = {
dao.remove(id)
}
def findById(id: Long): Future[Option[Alert]] = {
dao.findById(id)
}
def findBySiteId(siteid: String): Future[Option[Seq[Alert]]] = {
dao.findBySiteId(siteid).map { x => Option(x) }
}
def findAll(): Future[Option[Seq[Alert]]] = {
dao.findAll().map { x => Option(x) }
}
private def validateId(id: Long): Unit = {
val future = findById(id)
val entry = Awaits.get(5, future)
    if (entry == null || entry.equals(None)) throw new RuntimeException("Could not find Alert: " + id)
}
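  // Returns (id, sitename) pairs for every alert; when the lookup yields no result, a single
  // placeholder Alert with id 0 and empty fields is used as the fallback.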
def findAllAlerts(): Seq[(Long, String)] = {
val future = this.findAll()
val result = Awaits.get(5, future)
val alerts: Seq[(Long, String)] = result
.getOrElse(Seq(Alert(0, "", "", 0, new LocalDate(), new DateTime(), "", "", "",
Some(""), Some(""), Some(""),
0, 0, 0, "", "", "", "")))
.toSeq
// .map { alert => (alert.id.get.toString,alert.name) }
.map { alert => (alert.id, alert.sitename) }
return alerts
}
}
| tnddn/iv-web | portal/rest-portal/app/services/alertwatcherxr/AlertWatcherXRService.scala | Scala | apache-2.0 | 2,272 |
package routes
import java.util.concurrent.TimeUnit
import akka.actor.ActorRef
import akka.pattern._
import akka.util.Timeout
import spray.http.HttpHeaders.`Content-Type`
import spray.http.MediaTypes._
import spray.httpx.marshalling._
import spray.json._
import spray.json.DefaultJsonProtocol._
import spray.routing._
import cattamer.CatMasterGeneral._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.Success
import spray.httpx.SprayJsonSupport._
import spray.util._
import spray.httpx.marshalling.MetaMarshallers._
/**
* Created by ruguer
* 3/25/15.
*/
trait CatListRoute extends HttpService with MetaMarshallers {
self : RequireMasterGeneral =>
implicit val timeout = Timeout(10.seconds)
import scala.concurrent.ExecutionContext.Implicits.global
val allCats : Route =
path("listCats") {
get {
respondWithMediaType(`application/json`) {
complete {
(catMasterGeneral ? CatList).mapTo[AllCats].map(t => JsObject("cats" -> JsArray(t.catNames.map(JsString.apply).toVector)))
}
}
}
}
}
| raymondpoling/CatsOfUlthar | src/main/scala/routes/CatListRoute.scala | Scala | apache-2.0 | 1,092 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.bigtable
import java.util.UUID
import com.google.bigtable.admin.v2.{DeleteTableRequest, GetTableRequest, ListTablesRequest}
import com.google.bigtable.v2.{Mutation, Row, RowFilter}
import com.google.cloud.bigtable.config.BigtableOptions
import com.google.cloud.bigtable.grpc._
import com.google.protobuf.ByteString
import com.spotify.scio._
import com.spotify.scio.testing._
import org.joda.time.Duration
import scala.jdk.CollectionConverters._
object BigtableIT {
val projectId = "data-integration-test"
val instanceId = "scio-bigtable-it"
val clusterId = "scio-bigtable-it-cluster"
val zoneId = "us-central1-f"
val tableId = "scio-bigtable-it-counts"
val uuid: String = UUID.randomUUID().toString.substring(0, 8)
val testData: Seq[(String, Long)] =
Seq((s"$uuid-key1", 1L), (s"$uuid-key2", 2L), (s"$uuid-key3", 3L))
val bigtableOptions: BigtableOptions = BigtableOptions
.builder()
.setProjectId(projectId)
.setInstanceId(instanceId)
.build
val FAMILY_NAME: String = "count"
val COLUMN_QUALIFIER: ByteString = ByteString.copyFromUtf8("long")
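  // Encodes a (key, count) pair as a SetCell mutation on the count:long column; the numeric
  // value is stored as its UTF-8 string representation.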
def toWriteMutation(key: String, value: Long): (ByteString, Iterable[Mutation]) = {
val m = Mutations.newSetCell(
FAMILY_NAME,
COLUMN_QUALIFIER,
ByteString.copyFromUtf8(value.toString),
0L
)
(ByteString.copyFromUtf8(key), Iterable(m))
}
def toDeleteMutation(key: String): (ByteString, Iterable[Mutation]) = {
val m = Mutations.newDeleteFromRow
(ByteString.copyFromUtf8(key), Iterable(m))
}
def fromRow(r: Row): (String, Long) =
(r.getKey.toStringUtf8, r.getValue(FAMILY_NAME, COLUMN_QUALIFIER).get.toStringUtf8.toLong)
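// Lists the table ids currently present in the test instance via the table-admin client.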
def listTables(client: BigtableTableAdminGrpcClient): Set[String] = {
val instancePath = s"projects/$projectId/instances/$instanceId"
val tables = client.listTables(ListTablesRequest.newBuilder().setParent(instancePath).build)
tables.getTablesList.asScala.map(t => new BigtableTableName(t.getName).getTableId).toSet
}
}
class BigtableIT extends PipelineSpec {
import BigtableIT._
// "Update number of bigtable nodes" should "work" in {
ignore should "update number of bigtable nodes" in {
val bt = new BigtableClusterUtilities(bigtableOptions)
val sc = ScioContext()
sc.updateNumberOfBigtableNodes(projectId, instanceId, 4, Duration.standardSeconds(10))
sc.getBigtableClusterSizes(projectId, instanceId)(clusterId) shouldBe 4
bt.getClusterNodeCount(clusterId, zoneId) shouldBe 4
sc.updateNumberOfBigtableNodes(projectId, instanceId, 3, Duration.standardSeconds(10))
sc.getBigtableClusterSizes(projectId, instanceId)(clusterId) shouldBe 3
bt.getClusterNodeCount(clusterId, zoneId) shouldBe 3
}
"BigtableIO" should "work" in {
TableAdmin.ensureTables(bigtableOptions, Map(tableId -> List(FAMILY_NAME)))
try {
// Write rows to table
val sc1 = ScioContext()
sc1
.parallelize(testData.map(kv => toWriteMutation(kv._1, kv._2)))
.saveAsBigtable(projectId, instanceId, tableId)
sc1.run().waitUntilFinish()
// Read rows back
val sc2 = ScioContext()
// Filter rows in case there are other keys in the table
val rowFilter = RowFilter
.newBuilder()
.setRowKeyRegexFilter(ByteString.copyFromUtf8(s"$uuid-.*"))
.build()
sc2
.bigtable(projectId, instanceId, tableId, rowFilter = rowFilter)
.map(fromRow) should containInAnyOrder(testData)
sc2.run().waitUntilFinish()
} finally {
// Delete rows afterwards
val sc = ScioContext()
sc.parallelize(testData.map(kv => toDeleteMutation(kv._1)))
.saveAsBigtable(projectId, instanceId, tableId)
sc.run().waitUntilFinish()
()
}
}
"TableAdmin" should "work" in {
val tables = Map(
s"scio-bigtable-empty-table-$uuid" -> List(),
s"scio-bigtable-one-cf-table-$uuid" -> List("colfam1"),
s"scio-bigtable-two-cf-table-$uuid" -> List("colfam1", "colfam2")
)
val channel = ChannelPoolCreator.createPool(bigtableOptions)
val executorService = BigtableSessionSharedThreadPools.getInstance().getRetryExecutor
val client = new BigtableTableAdminGrpcClient(channel, executorService, bigtableOptions)
val instancePath = s"projects/$projectId/instances/$instanceId"
val tableIds = tables.keys.toSet
def tablePath(table: String): String = s"$instancePath/tables/$table"
def deleteTable(table: String): Unit =
client.deleteTable(DeleteTableRequest.newBuilder().setName(tablePath(table)).build)
// Delete any tables that could be left around from previous IT run.
val oldTables = listTables(client).intersect(tableIds)
oldTables.foreach(deleteTable)
// Ensure that the tables don't exist now
listTables(client).intersect(tableIds) shouldBe empty
// Run UUT
TableAdmin.ensureTables(bigtableOptions, tables)
// Tables must exist
listTables(client).intersect(tableIds) shouldEqual tableIds
// Assert Column families exist
for ((table, columnFamilies) <- tables) {
val tableInfo = client.getTable(
GetTableRequest
.newBuilder()
.setName(tablePath(table))
.build
)
val actualColumnFamilies = tableInfo.getColumnFamiliesMap.asScala.keys
actualColumnFamilies should contain theSameElementsAs columnFamilies
}
// Clean up and delete
tables.keys.foreach(deleteTable)
}
}
| spotify/scio | scio-google-cloud-platform/src/it/scala/com/spotify/scio/bigtable/BigtableIT.scala | Scala | apache-2.0 | 6,136 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.batch
import org.apache.flink.api.dag.Transformation
import org.apache.flink.runtime.operators.DamBehavior
import org.apache.flink.streaming.api.operators.SimpleOperatorFactory
import org.apache.flink.table.data.RowData
import org.apache.flink.table.planner.delegation.BatchPlanner
import org.apache.flink.table.planner.plan.cost.FlinkCost._
import org.apache.flink.table.planner.plan.cost.FlinkCostFactory
import org.apache.flink.table.planner.plan.nodes.exec.{BatchExecNode, ExecNode}
import org.apache.flink.table.planner.plan.utils.RelExplainUtil.fetchToString
import org.apache.flink.table.planner.plan.utils.SortUtil
import org.apache.flink.table.runtime.operators.sort.LimitOperator
import org.apache.calcite.plan.{RelOptCluster, RelOptCost, RelOptPlanner, RelTraitSet}
import org.apache.calcite.rel._
import org.apache.calcite.rel.core.Sort
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.calcite.rex.RexNode
import java.util
import scala.collection.JavaConversions._
/**
* Batch physical RelNode for [[Sort]].
*
* This node outputs at most `limit` records, skipping the first `offset` records, without sorting.
*/
class BatchExecLimit(
cluster: RelOptCluster,
traitSet: RelTraitSet,
inputRel: RelNode,
offset: RexNode,
fetch: RexNode,
val isGlobal: Boolean)
extends Sort(
cluster,
traitSet,
inputRel,
traitSet.getTrait(RelCollationTraitDef.INSTANCE),
offset,
fetch)
with BatchPhysicalRel
with BatchExecNode[RowData] {
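// offset/fetch expressed as the row-index range [limitStart, limitEnd) handed to the LimitOperator.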
private lazy val limitStart: Long = SortUtil.getLimitStart(offset)
private lazy val limitEnd: Long = SortUtil.getLimitEnd(offset, fetch)
override def copy(
traitSet: RelTraitSet,
newInput: RelNode,
newCollation: RelCollation,
offset: RexNode,
fetch: RexNode): Sort = {
new BatchExecLimit(cluster, traitSet, newInput, offset, fetch, isGlobal)
}
override def explainTerms(pw: RelWriter): RelWriter = {
pw.input("input", getInput)
.item("offset", limitStart)
.item("fetch", fetchToString(fetch))
.item("global", isGlobal)
}
override def computeSelfCost(planner: RelOptPlanner, mq: RelMetadataQuery): RelOptCost = {
val rowCount = mq.getRowCount(this)
val cpuCost = COMPARE_CPU_COST * rowCount
val costFactory = planner.getCostFactory.asInstanceOf[FlinkCostFactory]
costFactory.makeCost(rowCount, cpuCost, 0, 0, 0)
}
//~ ExecNode methods -----------------------------------------------------------
override def getDamBehavior: DamBehavior = DamBehavior.PIPELINED
override def getInputNodes: util.List[ExecNode[BatchPlanner, _]] =
List(getInput.asInstanceOf[ExecNode[BatchPlanner, _]])
override def replaceInputNode(
ordinalInParent: Int,
newInputNode: ExecNode[BatchPlanner, _]): Unit = {
replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
}
override protected def translateToPlanInternal(
planner: BatchPlanner): Transformation[RowData] = {
val input = getInputNodes.get(0).translateToPlan(planner)
.asInstanceOf[Transformation[RowData]]
val inputType = input.getOutputType
val operator = new LimitOperator(isGlobal, limitStart, limitEnd)
ExecNode.createOneInputTransformation(
input,
getRelDetailedDescription,
SimpleOperatorFactory.of(operator),
inputType,
input.getParallelism)
}
}
| jinglining/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchExecLimit.scala | Scala | apache-2.0 | 4,270 |
package org.zouzias.spray.actors
import akka.actor.{ActorRefFactory, ActorLogging}
import org.zouzias.spray.httpservices.{UserHttpService}
import spray.routing.HttpServiceActor
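// Thin routing actor: all route definitions live in UserHttpService; this actor only runs them.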
class UserActor extends HttpServiceActor with ActorLogging {
lazy val service = new UserHttpService {
override implicit def actorRefFactory: ActorRefFactory = context
}
def receive = runRoute(service.routes)
} | zouzias/spray-swagger-template | src/main/scala/org/zouzias/spray/actors/UserActor.scala | Scala | apache-2.0 | 400 |
package com.example.http4s
package blaze
import cats.effect._
import com.example.http4s.ssl.SslExample
import org.http4s.server.blaze.BlazeBuilder
object BlazeHttp2Example extends SslExample[IO] {
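// Enables HTTP/2 on the blaze backend; the TLS setup needed for ALPN negotiation is assumed to come from SslExample.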
def builder: BlazeBuilder[IO] = BlazeBuilder[IO].enableHttp2(true)
}
| reactormonk/http4s | examples/blaze/src/main/scala/com/example/http4s/blaze/BlazeHttp2Example.scala | Scala | apache-2.0 | 270 |
/*
* The MIT License
*
* Copyright (c) 2016 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.fulcrumgenomics.personal.nhomer
import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.bam.api.{SamSource, SamWriter}
import com.fulcrumgenomics.cmdline.{ClpGroups, FgBioTool}
import com.fulcrumgenomics.commons.io.Io
import com.fulcrumgenomics.sopt._
import htsjdk.samtools.util.CloserUtil
import scala.collection.compat._
@clp(
description = "Splits an optional tag in a SAM or BAM into multiple optional tags.",
group = ClpGroups.Personal
)
class SplitTag
( @arg(doc = "Input SAM or BAM.") val input: PathToBam,
@arg(doc = "Output SAM or BAM.") val output: PathToBam,
@arg(doc = "Tag to split.") val tagToSplit: String,
@arg(doc = "Tag(s) to output. There should be one per produced token.", minElements = 1) val tagsToOutput: List[String],
@arg(doc = "The delimiter used to split the string.") val delimiter: String = "-"
) extends FgBioTool {
Io.assertReadable(input)
Io.assertCanWriteFile(output)
validate(tagToSplit.length == 2, s"The tag to split must be of length two (was ${tagToSplit.length}).")
tagsToOutput.foreach { tag => validate(tag.length == 2, s"The tag to output '$tag' must be of length two (was ${tag.length}).") }
override def execute(): Unit = {
val in = SamSource(input)
val out = SamWriter(output, in.header)
in.foreach { record =>
record.get[String](tagToSplit) match {
case None =>
fail(s"Record '${record.name}' was missing the tag '$tagToSplit'")
case Some(value) =>
val tokens: Array[String] = value.split(delimiter)
if (tokens.length != tagsToOutput.size) fail(s"Record '${record.name}' did not have '${tagsToOutput.size}' tokens")
tagsToOutput.zip(tokens).foreach { case (tagToOutput, token) => record(tagToOutput) = token }
out += record
}
}
CloserUtil.close(in)
out.close()
}
}
| fulcrumgenomics/fgbio | src/main/scala/com/fulcrumgenomics/personal/nhomer/SplitTag.scala | Scala | mit | 3,040 |
package io.github.shogowada.scala.jsonrpc.example.e2e.websocket
object ElementIds {
val Ready = "ready"
val NewTodoDescription = "new-todo-description"
val AddTodo = "add-todo"
}
| shogowada/scala-json-rpc | examples/e2e-web-socket/shared/src/main/scala/io/github/shogowada/scala/jsonrpc/example/e2e/websocket/ElementIds.scala | Scala | mit | 186 |
package org.mitre.mandolin
/**
* This package includes functionality for model selection.
*/
package object mselect {
} | project-mandolin/mandolin | mandolin-core/src/main/scala/org/mitre/mandolin/mselect/package.scala | Scala | apache-2.0 | 123 |
// Solution-4a.scala
// Solution to Exercise 4 in "Imports & Packages"
// Solution 1: import 1 class
import com.atomicscala.trivia.Literature
val lit = new Literature
println("Imported 1 class")
/* OUTPUT_SHOULD_BE
Imported 1 class
*/
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/14_ImportsAndPackages-2ndEdition/Solution-4a.scala | Scala | apache-2.0 | 238 |
package play.api.templates
import play.api.mvc._
import play.templates._
import play.api.http.MimeTypes
import org.apache.commons.lang3.StringEscapeUtils
/**
* Appendable content using a StringBuilder.
* @param buffer StringBuilder to use
* @tparam A self-type
*/
abstract class BufferedContent[A <: BufferedContent[A]](private val buffer: StringBuilder) extends Appendable[A] with Content with play.mvc.Content { this: A =>
def +=(other: A) = {
buffer.append(other.buffer)
this
}
override def toString = buffer.toString()
def body = toString
}
/**
* Content type used in default HTML templates.
*/
class Html(buffer: StringBuilder) extends BufferedContent[Html](buffer) {
/**
* Content type of HTML.
*/
val contentType = MimeTypes.HTML
}
/**
* Helper for HTML utility methods.
*/
object Html {
/**
* Creates an HTML fragment with initial content specified.
*/
def apply(text: String): Html = {
new Html(new StringBuilder(text))
}
/**
* Creates an empty HTML fragment.
*/
def empty: Html = new Html(new StringBuilder)
}
/**
* Formatter for HTML content.
*/
object HtmlFormat extends Format[Html] {
/**
* Creates a raw (unescaped) HTML fragment.
*/
def raw(text: String): Html = Html(text)
/**
* Creates a safe (escaped) HTML fragment.
*/
def escape(text: String): Html = {
// Using our own algorithm here because commons lang escaping wasn't designed for protecting against XSS, and there
// don't seem to be any other good generic escaping tools out there.
val sb = new StringBuilder(text.length)
text.foreach {
case '<' => sb.append("&lt;")
case '>' => sb.append("&gt;")
case '"' => sb.append("&quot;")
case '\'' => sb.append("&#x27;")
case '&' => sb.append("&amp;")
case c => sb += c
}
new Html(sb)
}
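// Illustrative example (not part of the original source):
//   escape("""<a href="x">Bob & Alice</a>""").body
//   // => &lt;a href=&quot;x&quot;&gt;Bob &amp; Alice&lt;/a&gt;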
}
/**
* Content type used in default text templates.
*/
class Txt(buffer: StringBuilder) extends BufferedContent[Txt](buffer) {
/**
* Content type of text (`text/plain`).
*/
def contentType = MimeTypes.TEXT
}
/**
* Helper for utilities Txt methods.
*/
object Txt {
/**
* Creates a text fragment with initial content specified.
*/
def apply(text: String): Txt = {
new Txt(new StringBuilder(text))
}
/**
* Creates an empty text fragment.
*/
def empty = new Txt(new StringBuilder)
}
/**
* Formatter for text content.
*/
object TxtFormat extends Format[Txt] {
/**
* Create a text fragment.
*/
def raw(text: String) = Txt(text)
/**
* No need for a safe (escaped) text fragment.
*/
def escape(text: String) = Txt(text)
}
/**
* Content type used in default XML templates.
*/
class Xml(buffer: StringBuilder) extends BufferedContent[Xml](buffer) {
/**
* Content type of XML (`application/xml`).
*/
def contentType = MimeTypes.XML
}
/**
* Helper for XML utility methods.
*/
object Xml {
/**
* Creates an XML fragment with initial content specified.
*/
def apply(text: String): Xml = {
new Xml(new StringBuilder(text))
}
/**
* Create an empty XML fragment.
*/
def empty = new Xml(new StringBuilder)
}
/**
* Formatter for XML content.
*/
object XmlFormat extends Format[Xml] {
/**
* Creates an XML fragment.
*/
def raw(text: String) = Xml(text)
/**
* Creates an escaped XML fragment.
*/
def escape(text: String) = Xml(StringEscapeUtils.escapeXml(text))
}
/**
* Type used in default JavaScript templates.
*/
class JavaScript(buffer: StringBuilder) extends BufferedContent[JavaScript](buffer) {
/**
* Content type of JavaScript
*/
val contentType = MimeTypes.JAVASCRIPT
}
/**
* Helper for JavaScript utility methods.
*/
object JavaScript {
/**
* Creates a JavaScript fragment with initial content specified
*/
def apply(content: String) = new JavaScript(new StringBuilder(content))
}
/**
* Formatter for JavaScript content.
*/
object JavaScriptFormat extends Format[JavaScript] {
/**
* Integrate `text` without performing any escaping process.
* @param text Text to integrate
*/
def raw(text: String): JavaScript = JavaScript(text)
/**
* Escapes `text` using JavaScript String rules.
* @param text Text to integrate
*/
def escape(text: String): JavaScript = JavaScript(StringEscapeUtils.escapeEcmaScript(text))
}
/** Defines a magic helper for Play templates. */
object PlayMagic {
/**
* Generates a set of valid HTML attributes.
*
* For example:
* {{{
* toHtmlArgs(Seq('id -> "item", 'style -> "color:red"))
* }}}
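* which renders as: `id="item" style="color:red"`.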
*/
def toHtmlArgs(args: Map[Symbol, Any]) = Html(args.map({
case (s, None) => s.name
case (s, v) => s.name + "=\"" + HtmlFormat.escape(v.toString).body + "\""
}).mkString(" "))
}
| michaelahlers/team-awesome-wedding | vendor/play-2.2.1/framework/src/play/src/main/scala/play/api/templates/Templates.scala | Scala | mit | 4,805 |
package fs2
import scala.reflect.ClassTag
import org.scalacheck.Arbitrary
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import TestUtil._
object ChunkProps
extends Matchers
with GeneratorDrivenPropertyChecks {
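// Shared ScalaCheck properties: each asserts that a Chunk operation agrees with the
// corresponding Vector operation on the chunk's elements.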
def propSize[A: Arbitrary, C <: Chunk[A]: Arbitrary] =
forAll { c: C =>
c.size shouldBe c.toVector.size
}
def propTake[A: Arbitrary, C <: Chunk[A]: Arbitrary] =
forAll { (c: C, n: SmallNonnegative) =>
c.take(n.get).toVector shouldBe c.toVector.take(n.get)
}
def propDrop[A: Arbitrary, C <: Chunk[A]: Arbitrary] =
forAll { (c: C, n: SmallNonnegative) =>
c.drop(n.get).toVector shouldBe c.toVector.drop(n.get)
}
def propIsEmpty[A: Arbitrary, C <: Chunk[A]: Arbitrary] =
forAll { c: C =>
c.isEmpty shouldBe c.toVector.isEmpty
}
def propToArray[A: ClassTag: Arbitrary, C <: Chunk[A]: Arbitrary] =
forAll { c: C =>
c.toArray.toVector shouldBe c.toVector
}
def propToByteBuffer[C <: Chunk[Byte]: Arbitrary] =
forAll { c: C =>
val arr = new Array[Byte](c.size)
c.toByteBuffer.get(arr, 0, c.size)
arr.toVector shouldBe c.toArray.toVector
}
}
| zaneli/fs2 | core/shared/src/test/scala/fs2/ChunkProps.scala | Scala | mit | 1,208 |
package net.gree.aurora.scala.domain.clustergroup
import net.gree.aurora.domain.clustergroup.{ClusterGroupId => JClusterGroupId}
private[domain]
class ClusterGroupIdImpl(val underlying: JClusterGroupId) extends ClusterGroupId {
def value: String = underlying.getValue
}
| gree/aurora | aurora-scala/src/main/scala/net/gree/aurora/scala/domain/clustergroup/ClusterGroupIdImpl.scala | Scala | mit | 276 |
package com.codeaspect.tern.fw
class ConvertableTest {
} | urvaksh/tern | src/test/scala/com/codeaspect/tern/fw/ConvertableTest.scala | Scala | lgpl-3.0 | 58 |
package com.codahale.jersey.inject
import javax.ws.rs.core.MultivaluedMap
import com.sun.jersey.server.impl.model.parameter.multivalued.MultivaluedParameterExtractor
/**
* Given a parameter name and a possibly-null default value, attempts to extract
* the first parameter values and return a Some instance, returning the default
* value if no parameter exists. If defaultValue is null and no parameter
* exists, returns None.
*/
class ScalaOptionStringExtractor(parameter: String, defaultValue: String)
extends MultivaluedParameterExtractor {
private val default = Option(defaultValue)
def getName = parameter
def getDefaultStringValue = defaultValue
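// Yields Some(first value) when the parameter is present; otherwise Some(default), or None if no default was given.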
def extract(parameters: MultivaluedMap[String, String]) =
Option(parameters.getFirst(parameter)).orElse(default)
}
| codahale/jersey-scala | src/main/scala/com/codahale/jersey/inject/ScalaOptionStringExtractor.scala | Scala | mit | 794 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen
import org.apache.flink.table.planner.codegen.CodeGenUtils.{boxedTypeTermForType, newName}
import org.apache.flink.table.runtime.typeutils.TypeCheckUtils
import org.apache.flink.table.types.logical.LogicalType
/**
* Describes a generated expression.
*
* @param resultTerm term to access the result of the expression
* @param nullTerm boolean term that indicates if expression is null
* @param code code necessary to produce resultTerm and nullTerm
* @param resultType type of the resultTerm
* @param literalValue None if the expression is not literal. Otherwise it represent the
* original object of the literal.
*/
case class GeneratedExpression(
resultTerm: String,
nullTerm: String,
code: String,
resultType: LogicalType,
literalValue: Option[Any] = None) {
/**
* Indicates a constant expression that does not reference the input and can thus be used
* in the member area (e.g. as a constructor parameter of a reusable instance).
*
* @return true if the expression is literal
*/
def literal: Boolean = literalValue.isDefined
/**
* Copy result term to target term if the reference is changed.
* Note: We must ensure that the target can only be copied out, so that its object is definitely
* a brand new reference, not the object being re-used.
* @param target the target term that cannot be assigned a reusable reference.
* @return code.
*/
def copyResultTermToTargetIfChanged(ctx: CodeGeneratorContext, target: String): String = {
if (TypeCheckUtils.isMutable(resultType)) {
val typeTerm = boxedTypeTermForType(resultType)
val serTerm = ctx.addReusableTypeSerializer(resultType)
s"""
|if ($target != $resultTerm) {
| $target = (($typeTerm) $serTerm.copy($resultTerm));
|}
""".stripMargin
} else {
s"$target = $resultTerm;"
}
}
/**
* Deep copy the generated expression.
*
* NOTE: Please use this method when the result will be buffered.
* This method makes sure a new object/data is created when the type is mutable.
*/
def deepCopy(ctx: CodeGeneratorContext): GeneratedExpression = {
// only copy when type is mutable
if (TypeCheckUtils.isMutable(resultType)) {
// if the type need copy, it must be a boxed type
val typeTerm = boxedTypeTermForType(resultType)
val serTerm = ctx.addReusableTypeSerializer(resultType)
val newResultTerm = ctx.addReusableLocalVariable(typeTerm, "field")
val newCode =
s"""
|$code
|$newResultTerm = $resultTerm;
|if (!$nullTerm) {
| $newResultTerm = ($typeTerm) ($serTerm.copy($newResultTerm));
|}
""".stripMargin
GeneratedExpression(newResultTerm, nullTerm, newCode, resultType, literalValue)
} else {
this
}
}
}
object GeneratedExpression {
val ALWAYS_NULL = "true"
val NEVER_NULL = "false"
val NO_CODE = ""
}
| bowenli86/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/GeneratedExpression.scala | Scala | apache-2.0 | 3,825 |
package com.jaroop.anorm
import anorm._
package object relational {
/** Implicitly convert `SimpleSql[T]` to `RelationalSQL[T]` for almost seamless integration
* @param sql
* @return A `RelationalSQL` wrapper of `SimpleSql`
*/
implicit def simple2Relational[T](sql: SimpleSql[T]): RelationalSQL[T] = RelationalSQL(sql)
/** Implicitly convert `SqlQuery` to `RelationalSQL[Row]`
* @param sql
* @return A `RelationalSQL` wrapper of the `SqlQuery` as `SimpleSql[Row]`
*/
implicit def query2Relational(sql: SqlQuery): RelationalSQL[Row] = RelationalSQL(sql.asSimple[Row]())
} | mhzajac/anorm-relational | src/main/scala/com/jaroop/anorm/relational/relational.scala | Scala | apache-2.0 | 585 |
package lv.ddgatve.math
import scala.collection.mutable.MutableList
object OutlineParser {
// Parse the <part> element - return list of contained chunks
def makePart(arg: scala.xml.Node): List[Chunk] = {
val result = (arg \\ "item") map (
itemNode => makeItem(itemNode))
result.toList
}
// Parse the <item> element
def makeItem(arg: scala.xml.Node): Chunk = {
val itemTstamp = arg.attribute("tstamp").get(0).text
val itemText = arg.head.text.trim.replaceAll("""(?m)\s+""", " ")
new Chunk(itemTstamp, itemText)
}
def parseXmlOutline(path: String): List[ProblemSlot] = {
val rootElem = scala.xml.XML.loadFile(path)
val languageCode = (rootElem \\ "problems").head.attribute("lang").get(0).text
var result = new MutableList[ProblemSlot]
// ignore those, which are not deployed ("youtube" tag empty),
// except those that are references ("youtube" tag does not exist at all).
for (
elt <- rootElem \\ "problems" \\ "problem" if ((elt \\ "youtube").size == 0 ||
(elt \\ "youtube").head.text.length > 0)
) {
if ((elt \\ "youtube").size > 0) {
val pVideo = new ProblemVideo
pVideo.id = elt.attribute("id").get(0).text
// println("Processing " + pVideo.id)
pVideo.languageCode = languageCode
pVideo.title = (elt \\ "title").head.text
pVideo.YouTubeId = (elt \\ "youtube").head.text
pVideo.topic = (elt \\ "topic").head.text
pVideo.description = (elt \\ "description").head.text.trim.replaceAll("""(?m)\s+""", " ")
// read part titles only
val partTitleSeq = (elt \\ "part") map (
partNode => partNode.attribute("name").get(0).text)
pVideo.chunkListTitles = partTitleSeq.toList
// read notes
val noteSeq = (elt \\ "notes" \\ "item") map (
noteNode => noteNode.head.text.trim().replaceAll("""(?m)\s+""", " "))
pVideo.notes = noteSeq.toList
// parse chunks in all parts
val chunkListSeq = (elt \\ "part") map {
partNode => makePart(partNode)
}
pVideo.chunkLists = chunkListSeq.toList
result += pVideo
}
else {
val pVideo = new ProblemReference
pVideo.id = elt.attribute("id").get(0).text
// println("Processing " + pVideo.id)
pVideo.languageCode = languageCode
pVideo.title = (elt \\ "title").head.text
pVideo.description = (elt \\ "description").head.text.trim.replaceAll("""(?m)\s+""", " ")
pVideo.linkHref = (elt \\ "problemlink").head.attribute("href").get(0).text
pVideo.linkText = (elt \\ "problemlink").head.text.trim()
result += pVideo
}
}
result.toList
}
} | kapsitis/ddgatve-stat | src/main/scala/lv/ddgatve/math/OutlineParser.scala | Scala | apache-2.0 | 2,815 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.shell.event
/**
*
* @author Akiyoshi Sugiki
*/
abstract class FailureObliviousEvent extends Event
case class FailureObliviousList(op: Symbol, func: List[Any], list: List[(Any, Any)]) extends FailureObliviousEvent
case class FailureObliviousObject(cl: Class[_], method: String, params: List[Class[_]], args: List[Any], reason: Any) extends FailureObliviousEvent
| axi-sugiki/kumoi | src/kumoi/shell/event/FailureObliviousEvent.scala | Scala | apache-2.0 | 1,004 |
package recfun
import scala.annotation.tailrec
object Main extends App {
override def main(args: Array[String]) {
println("Pascal's Triangle")
for (row <- 0 to 10) {
for (col <- 0 to row)
print(pascal(col, row) + " ")
println()
}
}
/**
* Exercise 1
*/
def pascal: PartialFunction[(Int, Int), Int] = {
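// Border entries of the triangle are 1; interior entries are the sum of the two entries above.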
case (col, row) if col == row => 1
case (0, _) => 1
case (col, row) => pascal(col - 1, row - 1) + pascal(col, row -1)
}
/**
* Exercise 2
*/
def balance(chars: List[Char]): Boolean = {
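// Scan left to right, counting currently open '('; the count must never go negative and must end at zero.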
@tailrec
def isBalanced(openedParenthesis: Int, chars: List[Char]): Boolean =
openedParenthesis match {
case x if x < 0 => false
case x =>
chars match {
case Nil => x == 0
case c :: cs => c match {
case '(' => isBalanced(x + 1, cs)
case ')' => isBalanced(x - 1, cs)
case _ => isBalanced(x, cs)
}
}
}
isBalanced(0, chars)
}
/**
* Exercise 3
*/
def countChange: PartialFunction[(Int, List[Int]), Int] = {
case (0, _) => 1
case (money, Nil) => 0
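// Ways to make change = ways without coin c + ways that use coin c at least once (when c fits in the remaining amount).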
case (money, coins @ c :: cs) =>
val changeWithRemainingCoins = countChange(money, cs)
if (c <= money) changeWithRemainingCoins + countChange(money - c, coins)
else changeWithRemainingCoins
}
}
| faloi/progfun-scala | recfun/src/main/scala/recfun/Main.scala | Scala | mit | 1,387 |