code | repo_name | path | language | license | size
---|---|---|---|---|---
package ch.epfl.lamp.slick.direct
import ch.epfl.directembedding.DirectEmbeddingUtils
import ch.epfl.yinyang.transformers.{ PostProcessing, PreProcessing }
import scala.reflect.macros.blackbox.Context
class ProjectionProcessing[C <: Context](ctx: C) extends PreProcessing(ctx)(Nil) {
import c.universe._
override val PreProcess = new (Tree => Tree) {
def apply(tree: Tree) = new FieldExtractor().transform(tree)
}
private final class FieldExtractor extends Transformer {
override def transform(tree: Tree): Tree = {
tree match {
case Function(lhs, rhs) =>
val args = tree.collect {
case ValDef(_, TermName(name), tpt, _) =>
name -> TypeTree(tpt.tpe.widen.dealias)
}.toMap
val result = new ColumnSelect(args).transform(tree)
result
case _ => super.transform(tree)
}
}
}
/**
* Convert all field accesses to liftColumnSelect
* @param ctx Map from argument to argument's type tree
*/
private final class ColumnSelect(ctx: Map[String, Tree]) extends Transformer {
override def transform(tree: Tree): Tree = {
tree match {
case Function(lhs, rhs) =>
val args = lhs.map { vd =>
// TODO: Can typeTransformer help here?
ValDef(vd.mods, vd.name, TypeTree(c.typeOf[AnyRef]), vd.rhs)
}
Function(args, transform(rhs))
case s @ Select(lhs @ Ident(TermName(obj)), TermName(field)) if ctx.contains(obj) =>
// TODO: Make configurable
q"liftColumnSelect[${ctx(obj)}, ${s.tpe.widen.dealias}]($lhs, ${Literal(Constant(field))}, ${Literal(Constant(s.tpe.widen.typeSymbol.fullName))})"
case _ => super.transform(tree)
}
}
}
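  // Illustrative sketch (types and field names are hypothetical, not from this repo):
  // given a lambda such as
  //   (p: Person) => p.name
  // the FieldExtractor/ColumnSelect passes above rewrite it to roughly
  //   (p: AnyRef) => liftColumnSelect[Person, String](p, "name", "java.lang.String")
  // widening each parameter's declared type to AnyRef along the way.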
}
| olafurpg/slick-direct | slick-direct/src/main/scala/ch/epfl/lamp/slick/direct/ProjectionProcessing.scala | Scala | bsd-3-clause | 1,762 |
package chandu0101.scalajs.react.components.util
import chandu0101.scalajs.react.components.models.{RElementPosition, RPoint}
import japgolly.scalajs.react.{ReactEventH, ReactEventI}
import org.scalajs.dom.Event
import scala.scalajs.js.Date
trait MTypes {
type REventIBooleanUnit = (ReactEventI, Boolean) => Unit
type REventIUnit = (ReactEventI) => Unit
type REventIIntUnit = (ReactEventI,Int) => Unit
type DateUnit = (Date) => Unit
type StringUnit = (String) => Unit
type DateString = (Date) => String
type EmptyFunc = () => Any
type StringIntUnit = (String,Int) => Unit
type StringStringIntAny = (String,String,Int) => Any
type REventIDateUnit = (ReactEventI, Date) => Unit
type REventHUnit = (ReactEventH) => Unit
type EventUnit = (Event) => Unit
type EventRElementPositionAny = (Event,RElementPosition) => Any
type REventIDoubleUnit = (ReactEventI, Double) => Unit
type REventIStringUnit = (ReactEventI, String) => Unit
type REventIStringAny = (ReactEventI, String) => _
type REventIIntStringUnit = (ReactEventI, Int, String) => Unit
type OnDropdownTap = (ReactEventI, String, String) => Unit
type REventIIntBooleanUnit = (ReactEventI, Int, Boolean) => Unit
type CssClassType = Map[String, Boolean]
type MapUnit = (Map[String,Any]) => Unit
type MapMapUnit = (Map[String,Any],Map[String,Any]) => Unit
type MapString = (Map[String,Any]) => String
type StringRPointAny = (String,RPoint) => Any
}
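// Illustrative use of one of the aliases above (the handler itself is hypothetical):
//   val onCheck: REventIBooleanUnit = (e, checked) => println(s"checked: $checked")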
| mproch/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/util/MTypes.scala | Scala | apache-2.0 | 1,474 |
package scalastudy.concurrent.wordstat
import akka.actor.Actor
import akka.event.Logging
import scala.collection.immutable.List
import scala.collection.mutable.{HashMap, Map}
import scalastudy.concurrent.ActorTerminationMsg
/**
* Created by lovesqcc on 16-4-2.
*/
object StatWordActor {
var stat:Map[String,Int] = new HashMap[String,Int]
  def add(newstat: Map[String, Int]) = {
    newstat.foreach { case (word, count) =>
      stat(word) = stat.getOrElse(word, 0) + count
    }
  }
def finalResult() = stat
private var recvCount = 0
private def inc() { recvCount +=1 }
private def count() = recvCount
}
class StatWordActor extends Actor {
val log = Logging(context.system, self)
override def receive: Actor.Receive = {
case WordListWrapper(wordlist: List[String]) =>
StatWordActor.inc()
val stat:Map[String,Int] = statWords(wordlist)
StatWordActor.add(stat)
case ActorTerminationMsg =>
log.info("received times: " + StatWordActor.count())
context.stop(self)
case _ =>
log.info("Unknown received.")
}
def statWords(words: List[String]):Map[String,Int] = {
val wordsMap = new HashMap[String,Int]
words.foreach { w =>
wordsMap(w) = wordsMap.getOrElse(w,0) + 1
}
    wordsMap
}
}
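// Hypothetical usage sketch (the actor system wiring is an assumption, not part of this file):
//   val system = ActorSystem("wordstat")
//   val statActor = system.actorOf(Props[StatWordActor])
//   statActor ! WordListWrapper(List("a", "b", "a")) // merges Map("a" -> 2, "b" -> 1) into stat
//   statActor ! ActorTerminationMsg                  // logs the receive count and stops the actor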
| shuqin/ALLIN | src/main/java/scalastudy/concurrent/wordstat/StatWordActor.scala | Scala | gpl-3.0 | 1,285 |
package unluac.decompile.target
import unluac.decompile.Output
case class UpvalueTarget(name:String) extends Target {
def print(out: Output) {
out.print(name)
}
def printMethod(out: Output) {
throw new IllegalStateException
}
}
| danielwegener/unluac-scala | shared/src/main/scala/unluac/decompile/target/UpvalueTarget.scala | Scala | mit | 247 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.types._
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr1, expr2, expr3) - If `expr1` evaluates to true, then returns `expr2`; otherwise returns `expr3`.",
extended = """
Examples:
> SELECT _FUNC_(1 < 2, 'a', 'b');
a
""")
// scalastyle:on line.size.limit
case class If(predicate: Expression, trueValue: Expression, falseValue: Expression)
extends Expression {
override def children: Seq[Expression] = predicate :: trueValue :: falseValue :: Nil
override def nullable: Boolean = trueValue.nullable || falseValue.nullable
override def checkInputDataTypes(): TypeCheckResult = {
if (predicate.dataType != BooleanType) {
TypeCheckResult.TypeCheckFailure(
s"type of predicate expression in If should be boolean, not ${predicate.dataType}")
} else if (!trueValue.dataType.sameType(falseValue.dataType)) {
TypeCheckResult.TypeCheckFailure(s"differing types in '$sql' " +
s"(${trueValue.dataType.simpleString} and ${falseValue.dataType.simpleString}).")
} else {
TypeCheckResult.TypeCheckSuccess
}
}
override def dataType: DataType = trueValue.dataType
override def eval(input: InternalRow): Any = {
if (java.lang.Boolean.TRUE.equals(predicate.eval(input))) {
trueValue.eval(input)
} else {
falseValue.eval(input)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val condEval = predicate.genCode(ctx)
val trueEval = trueValue.genCode(ctx)
val falseEval = falseValue.genCode(ctx)
// place generated code of condition, true value and false value in separate methods if
// their code combined is large
val combinedLength = condEval.code.length + trueEval.code.length + falseEval.code.length
val generatedCode = if (combinedLength > 1024 &&
// Split these expressions only if they are created from a row object
(ctx.INPUT_ROW != null && ctx.currentVars == null)) {
val (condFuncName, condGlobalIsNull, condGlobalValue) =
createAndAddFunction(ctx, condEval, predicate.dataType, "evalIfCondExpr")
val (trueFuncName, trueGlobalIsNull, trueGlobalValue) =
createAndAddFunction(ctx, trueEval, trueValue.dataType, "evalIfTrueExpr")
val (falseFuncName, falseGlobalIsNull, falseGlobalValue) =
createAndAddFunction(ctx, falseEval, falseValue.dataType, "evalIfFalseExpr")
s"""
$condFuncName(${ctx.INPUT_ROW});
boolean ${ev.isNull} = false;
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!$condGlobalIsNull && $condGlobalValue) {
$trueFuncName(${ctx.INPUT_ROW});
${ev.isNull} = $trueGlobalIsNull;
${ev.value} = $trueGlobalValue;
} else {
$falseFuncName(${ctx.INPUT_ROW});
${ev.isNull} = $falseGlobalIsNull;
${ev.value} = $falseGlobalValue;
}
"""
}
else {
s"""
${condEval.code}
boolean ${ev.isNull} = false;
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${condEval.isNull} && ${condEval.value}) {
${trueEval.code}
${ev.isNull} = ${trueEval.isNull};
${ev.value} = ${trueEval.value};
} else {
${falseEval.code}
${ev.isNull} = ${falseEval.isNull};
${ev.value} = ${falseEval.value};
}
"""
}
ev.copy(code = generatedCode)
}
private def createAndAddFunction(
ctx: CodegenContext,
ev: ExprCode,
dataType: DataType,
baseFuncName: String): (String, String, String) = {
val globalIsNull = ctx.freshName("isNull")
ctx.addMutableState("boolean", globalIsNull, s"$globalIsNull = false;")
val globalValue = ctx.freshName("value")
ctx.addMutableState(ctx.javaType(dataType), globalValue,
s"$globalValue = ${ctx.defaultValue(dataType)};")
val funcName = ctx.freshName(baseFuncName)
val funcBody =
s"""
|private void $funcName(InternalRow ${ctx.INPUT_ROW}) {
| ${ev.code.trim}
| $globalIsNull = ${ev.isNull};
| $globalValue = ${ev.value};
|}
""".stripMargin
val fullFuncName = ctx.addNewFunction(funcName, funcBody)
(fullFuncName, globalIsNull, globalValue)
}
override def toString: String = s"if ($predicate) $trueValue else $falseValue"
override def sql: String = s"(IF(${predicate.sql}, ${trueValue.sql}, ${falseValue.sql}))"
}
/**
* Abstract parent class for common logic in CaseWhen and CaseWhenCodegen.
*
* @param branches seq of (branch condition, branch value)
* @param elseValue optional value for the else branch
*/
abstract class CaseWhenBase(
branches: Seq[(Expression, Expression)],
elseValue: Option[Expression])
extends Expression with Serializable {
override def children: Seq[Expression] = branches.flatMap(b => b._1 :: b._2 :: Nil) ++ elseValue
// both then and else expressions should be considered.
def valueTypes: Seq[DataType] = branches.map(_._2.dataType) ++ elseValue.map(_.dataType)
def valueTypesEqual: Boolean = valueTypes.size <= 1 || valueTypes.sliding(2, 1).forall {
case Seq(dt1, dt2) => dt1.sameType(dt2)
}
override def dataType: DataType = branches.head._2.dataType
override def nullable: Boolean = {
// Result is nullable if any of the branch is nullable, or if the else value is nullable
branches.exists(_._2.nullable) || elseValue.map(_.nullable).getOrElse(true)
}
override def checkInputDataTypes(): TypeCheckResult = {
// Make sure all branch conditions are boolean types.
if (valueTypesEqual) {
if (branches.forall(_._1.dataType == BooleanType)) {
TypeCheckResult.TypeCheckSuccess
} else {
val index = branches.indexWhere(_._1.dataType != BooleanType)
TypeCheckResult.TypeCheckFailure(
s"WHEN expressions in CaseWhen should all be boolean type, " +
s"but the ${index + 1}th when expression's type is ${branches(index)._1}")
}
} else {
TypeCheckResult.TypeCheckFailure(
"THEN and ELSE expressions should all be same type or coercible to a common type")
}
}
override def eval(input: InternalRow): Any = {
var i = 0
val size = branches.size
while (i < size) {
if (java.lang.Boolean.TRUE.equals(branches(i)._1.eval(input))) {
return branches(i)._2.eval(input)
}
i += 1
}
if (elseValue.isDefined) {
return elseValue.get.eval(input)
} else {
return null
}
}
override def toString: String = {
val cases = branches.map { case (c, v) => s" WHEN $c THEN $v" }.mkString
val elseCase = elseValue.map(" ELSE " + _).getOrElse("")
"CASE" + cases + elseCase + " END"
}
override def sql: String = {
val cases = branches.map { case (c, v) => s" WHEN ${c.sql} THEN ${v.sql}" }.mkString
val elseCase = elseValue.map(" ELSE " + _.sql).getOrElse("")
"CASE" + cases + elseCase + " END"
}
}
/**
* Case statements of the form "CASE WHEN a THEN b [WHEN c THEN d]* [ELSE e] END".
* When a = true, returns b; when c = true, returns d; else returns e.
*
* @param branches seq of (branch condition, branch value)
* @param elseValue optional value for the else branch
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "CASE WHEN expr1 THEN expr2 [WHEN expr3 THEN expr4]* [ELSE expr5] END - When `expr1` = true, returns `expr2`; when `expr3` = true, return `expr4`; else return `expr5`.")
// scalastyle:on line.size.limit
case class CaseWhen(
val branches: Seq[(Expression, Expression)],
val elseValue: Option[Expression] = None)
extends CaseWhenBase(branches, elseValue) with CodegenFallback with Serializable {
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
super[CodegenFallback].doGenCode(ctx, ev)
}
def toCodegen(): CaseWhenCodegen = {
CaseWhenCodegen(branches, elseValue)
}
}
/**
* CaseWhen expression used when code generation condition is satisfied.
* OptimizeCodegen optimizer replaces CaseWhen into CaseWhenCodegen.
*
* @param branches seq of (branch condition, branch value)
* @param elseValue optional value for the else branch
*/
case class CaseWhenCodegen(
val branches: Seq[(Expression, Expression)],
val elseValue: Option[Expression] = None)
extends CaseWhenBase(branches, elseValue) with Serializable {
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Generate code that looks like:
//
// condA = ...
// if (condA) {
// valueA
// } else {
// condB = ...
// if (condB) {
// valueB
// } else {
// condC = ...
// if (condC) {
// valueC
// } else {
// elseValue
// }
// }
// }
val cases = branches.map { case (condExpr, valueExpr) =>
val cond = condExpr.genCode(ctx)
val res = valueExpr.genCode(ctx)
s"""
${cond.code}
if (!${cond.isNull} && ${cond.value}) {
${res.code}
${ev.isNull} = ${res.isNull};
${ev.value} = ${res.value};
}
"""
}
var generatedCode = cases.mkString("", "\nelse {\n", "\nelse {\n")
elseValue.foreach { elseExpr =>
val res = elseExpr.genCode(ctx)
generatedCode +=
s"""
${res.code}
${ev.isNull} = ${res.isNull};
${ev.value} = ${res.value};
"""
}
generatedCode += "}\n" * cases.size
ev.copy(code = s"""
boolean ${ev.isNull} = true;
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
$generatedCode""")
}
}
/** Factory methods for CaseWhen. */
object CaseWhen {
def apply(branches: Seq[(Expression, Expression)], elseValue: Expression): CaseWhen = {
CaseWhen(branches, Option(elseValue))
}
/**
* A factory method to facilitate the creation of this expression when used in parsers.
*
* @param branches Expressions at even position are the branch conditions, and expressions at odd
* position are branch values.
*/
def createFromParser(branches: Seq[Expression]): CaseWhen = {
val cases = branches.grouped(2).flatMap {
case cond :: value :: Nil => Some((cond, value))
case value :: Nil => None
}.toArray.toSeq // force materialization to make the seq serializable
val elseValue = if (branches.size % 2 == 1) Some(branches.last) else None
CaseWhen(cases, elseValue)
}
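  // Illustrative (hypothetical expressions): parsing
  //   CASE WHEN c1 THEN v1 WHEN c2 THEN v2 ELSE e END
  // passes Seq(c1, v1, c2, v2, e) here, yielding CaseWhen(Seq((c1, v1), (c2, v2)), Some(e)).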
}
/**
* Case statements of the form "CASE a WHEN b THEN c [WHEN d THEN e]* [ELSE f] END".
* When a = b, returns c; when a = d, returns e; else returns f.
*/
object CaseKeyWhen {
def apply(key: Expression, branches: Seq[Expression]): CaseWhen = {
val cases = branches.grouped(2).flatMap {
case Seq(cond, value) => Some((EqualTo(key, cond), value))
case Seq(value) => None
}.toArray.toSeq // force materialization to make the seq serializable
val elseValue = if (branches.size % 2 == 1) Some(branches.last) else None
CaseWhen(cases, elseValue)
}
}
| aokolnychyi/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala | Scala | apache-2.0 | 12,188 |
object Test extends App {
class A { class V }
abstract class B[S] {
def foo(t: S, a: A)(v: a.V): Unit
}
val b1 = new B[String] {
def foo(t: String, a: A)(v: a.V) = () // Bridge method required here!
}
b1.foo("", null)(null)
}
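// A sketch of why the bridge is needed: B[S].foo erases to foo(Object, A, V), while the
// override in b1 erases to foo(String, A, V); without a compiler-generated bridge, the
// b1.foo("", null)(null) call above (made through static type B[String]) would not
// dispatch to the override.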
| lampepfl/dotty | tests/pending/run/t6135.scala | Scala | apache-2.0 | 249 |
/**
* Copyright 2015 Zaradai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaradai.lattrac.analytics
import com.zaradai.lattrac.capture.TraceEvent
trait EventListener {
def onTraceEvent(traceEvent: TraceEvent): Unit
}
| zaradai/lattrac | src/main/scala/com/zaradai/lattrac/analytics/EventListener.scala | Scala | apache-2.0 | 747 |
/*
* Copyright (c) 2010-2011 Belmont Technology Pty Ltd. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sodatest
package runtime.data.results
import runtime.data.blocks.{ParameterValuesContainer, ReportExecution, Line, ReportBlock}
class ExecutionResult[P <: ParameterValuesContainer](val execution: P, val error: Option[ExecutionError])
trait ExecutionResultContainer[P <: ParameterValuesContainer, R <: ExecutionResult[P]] {
val executionResults: List[R]
}
class ReportExecutionResult(execution: ReportExecution, val matchResult: ReportMatchResult, error: Option[ExecutionError] = None)
extends ExecutionResult[ReportExecution](execution, error) {
def this(execution: ReportExecution, error: ExecutionError) = this(execution, new ReportMatchResult(Nil), Some(error))
override def toString = String.format("ReportExecutionResult(execution: %s, matchResult: %s, error: %s)", execution, matchResult, error)
}
class ReportMatchResult(val lineResults: List[ReportMatchLineResult]) {
  val passed: Boolean = lineResults.forall(_.isInstanceOf[ReportLineMatch])
override def toString = if (passed) "Passed" else lineResults.toString
}
object ReportMatchResult {
def allGood(lines: List[Line]): ReportMatchResult = {
new ReportMatchResult(lines.map(line => new ReportLineMatch(line, line.cells)))
}
}
abstract sealed class ReportMatchLineResult
case class ReportLineMatch(sourceLine: Line, cells: List[String]) extends ReportMatchLineResult
case class ReportLineMismatch(sourceLine: Line, cellResults: List[ReportMatchCellResult]) extends ReportMatchLineResult
case class ReportLineMissing(sourceLine: Line) extends ReportMatchLineResult
case class ReportLineExtra(cells: Seq[String]) extends ReportMatchLineResult
abstract sealed class ReportMatchCellResult
case class ReportCellMatch(value: String) extends ReportMatchCellResult
case class ReportCellMismatch(expectedValue: String, actualValue: String) extends ReportMatchCellResult
case class ReportCellMissing(expectedValue: String) extends ReportMatchCellResult
case class ReportCellExtra(actualValue: String) extends ReportMatchCellResult
class ReportBlockResult(val executionResults: List[ReportExecutionResult], error: Option[ExecutionError] = None)(implicit block: ReportBlock)
extends BlockResult[ReportBlock](
block,
    succeeded = (error.isEmpty && executionResults.forall(r => r.matchResult.passed && r.error.isEmpty)),
    executionErrorOccurred = error.isDefined || executionResults.exists(_.error.isDefined),
blockError = error)
with ExecutionResultContainer[ReportExecution, ReportExecutionResult] {
def this(error: ExecutionError)(implicit block: ReportBlock) = this(Nil, Some(error))
}
| GrahamLea/SodaTest | sodatest-runtime/src/main/scala/org/sodatest/runtime/data/results/ReportBlockResult.scala | Scala | apache-2.0 | 3,295 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.{TensorCriterion, TensorModule}
import com.intel.analytics.bigdl.tensor.Tensor
import org.scalatest.{FlatSpec, Matchers}
import com.intel.analytics.bigdl.utils.RandomGenerator._
@com.intel.analytics.bigdl.tags.Parallel
class AddSpec extends FlatSpec with Matchers {
"A Add with scaleB" should "work correctly" in {
val inputN = 5
val seed = 100
RNG.setSeed(seed)
val layer1 = new Add[Double](inputN)
val layer2 = layer1.cloneModule().asInstanceOf[Add[Double]]
.setScaleB(2.0)
val input = Tensor[Double](1, 5)
input(Array(1, 1)) = 1
input(Array(1, 2)) = 2
input(Array(1, 3)) = 3
input(Array(1, 4)) = 4
input(Array(1, 5)) = 5
val gradOutput = Tensor[Double](5)
gradOutput(Array(1)) = 2
gradOutput(Array(2)) = 5
gradOutput(Array(3)) = 10
gradOutput(Array(4)) = 17
gradOutput(Array(5)) = 26
val output1 = layer1.forward(input)
val gradInput1 = layer1.backward(input, gradOutput)
val output2 = layer2.forward(input)
val gradInput2 = layer2.backward(input, gradOutput)
output1 should be (output2)
gradInput1 should be (gradInput2)
layer2.gradBias should be (layer1.gradBias.mul(2))
}
}
| jenniew/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/AddSpec.scala | Scala | apache-2.0 | 1,880 |
package d2
import scala.language.{implicitConversions, higherKinds}
import scalaz.Monad
trait Directives1Interop[F[+_]] {
implicit def fromUnfilteredDirective[T, L, R](d1: unfiltered.directives.Directive[T, L, R])(implicit F: Monad[F]): d2.Directive[T, F, L, R] = {
import unfiltered.directives.{Result => Res}
Directive{ r =>
val res = d1(r)
res match {
case Res.Success(s) => F.point(Result.Success(s))
case Res.Failure(e) => F.point(Result.Failure(e))
case Res.Error(e) => F.point(Result.Error(e))
}
}
}
}
object Directives1Interop {
def apply[F[+_]](implicit M:Monad[F]): Directives[F] = new Directives[F] with Directives1Interop[F] {
implicit val F: Monad[F] = M
}
}
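// Hypothetical usage sketch (assumes a scalaz Monad instance exists for the chosen F):
//   val directives = Directives1Interop[scalaz.concurrent.Task]
//   import directives._
//   // any unfiltered.directives.Directive value now converts implicitly to a d2.Directive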
| shiplog/directives2 | src/main/scala/d2/Directives1Interop.scala | Scala | mit | 740 |
package pl.writeonly.son2.json.glue
import pl.writeonly.son2.apis.config.RWTConfig
import pl.writeonly.son2.funs.glue.CreatorConverterOr
class CreatorConverterOrJson
extends CreatorConverterOr(
new ChainNotationConfigJson().get,
new ChainNotationRWTJson()
) {
override def configOpt(s: String): Option[RWTConfig] =
chainNotationCreator.configOpt(s)
}
| writeonly/son2 | scallions-impl/scallions-json/src/main/scala/pl/writeonly/son2/json/glue/CreatorConverterOrJson.scala | Scala | apache-2.0 | 379 |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.markdown.ast
import laika.ast._
/** A top-level HTML block as defined by the Markdown syntax description. It is surrounded by blank lines
* and has a block-level element (one that is not classified as "phrasing content" in the HTML specification)
* as its root element. It may contain other nested HTML elements and tags, but no spans produced by standard
* Markdown markup.
*/
case class HTMLBlock (root: HTMLElement, options: Options = NoOpt) extends Block {
type Self = HTMLBlock
def withOptions (options: Options): HTMLBlock = copy(options = options)
}
/** Base class for all verbatim HTML span elements.
*/
abstract class HTMLSpan extends Span
/** Represents a full HTML element with matching start and end tags. The content of this span container
* may contain further nested HTML elements and tags as well as simple text elements.
*/
case class HTMLElement (startTag: HTMLStartTag, content: Seq[Span], options: Options = NoOpt) extends HTMLSpan with SpanContainer {
type Self = HTMLElement
def withContent (newContent: Seq[Span]): HTMLElement = copy(content = newContent)
def withOptions (options: Options): HTMLElement = copy(options = options)
}
/** Represents a start tag. When this element is part of a final document tree, it represents
* an orphaned start tag without matching end tag. In HTML this may be legal (some tags like the p
* tag are defined as "auto-closing" under certain circumstances). This library however does not
* implement the full logic of a proper HTML parser to distinguish between legal and faulty
* occurrences of unmatched start tags.
*/
case class HTMLStartTag (name: String, attributes: List[HTMLAttribute], options: Options = NoOpt) extends HTMLSpan with Block {
type Self = HTMLStartTag
def withOptions (options: Options): HTMLStartTag = copy(options = options)
}
/** Represents an empty element (like `<br/>` or `<hr/>`) in case it contains the explicit
* slash to mark it as closed. Otherwise it will be classified as a start tag.
*/
case class HTMLEmptyElement (name: String, attributes: List[HTMLAttribute], options: Options = NoOpt) extends HTMLSpan with Block {
type Self = HTMLEmptyElement
def withOptions (options: Options): HTMLEmptyElement = copy(options = options)
}
/** Represents an orphaned end tag without matching start tag.
*/
case class HTMLEndTag (name: String, options: Options = NoOpt) extends HTMLSpan {
type Self = HTMLEndTag
def withOptions (options: Options): HTMLEndTag = copy(options = options)
}
/** Represents a standard HTML comment.
*/
case class HTMLComment (content: String, options: Options = NoOpt) extends HTMLSpan with Block with TextContainer {
type Self = HTMLComment
def withOptions (options: Options): HTMLComment = copy(options = options)
}
/** Represents a script element.
*/
case class HTMLScriptElement (attributes: List[HTMLAttribute], content: String, options: Options = NoOpt) extends HTMLSpan with TextContainer {
type Self = HTMLScriptElement
def withOptions (options: Options): HTMLScriptElement = copy(options = options)
}
/** Represents a numerical or named character reference.
*/
case class HTMLCharacterReference (content: String, options: Options = NoOpt) extends HTMLSpan with TextContainer {
type Self = HTMLCharacterReference
def withOptions (options: Options): HTMLCharacterReference = copy(options = options)
}
/** Represents a single HTML attribute. The value is provided as a list of TextContainers
* as it may contain HTML character references alongside regular Text elements.
*/
case class HTMLAttribute (name: String, value: List[TextContainer], quotedWith: Option[Char])
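// Illustrative (hypothetical markup): <a href="http://example.com/"> would parse to roughly
//   HTMLStartTag("a", List(HTMLAttribute("href", List(Text("http://example.com/")), Some('"'))))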
| planet42/Laika | core/shared/src/main/scala/laika/markdown/ast/elements.scala | Scala | apache-2.0 | 4,325 |
package org.embulk.input.dynamodb
import java.io.File
import java.{util => JUtil}
import com.amazonaws.services.dynamodbv2.model.AttributeValue
import com.fasterxml.jackson.databind.ObjectMapper
import org.embulk.input.dynamodb.deprecated.AttributeValueHelper._
import org.hamcrest.CoreMatchers._
import org.hamcrest.MatcherAssert.assertThat
import org.junit.Assert._
import org.junit.Test
import org.msgpack.value.ValueFactory
import scala.jdk.CollectionConverters._
class AttributeValueHelperTest {
@Test
def decodeTest(): Unit = {
val stringValue = decodeToValue(new AttributeValue().withS("STR"))
assertEquals(stringValue.asStringValue.asString, "STR")
val intValue = decodeToValue(new AttributeValue().withN("123456789"))
assertEquals(intValue.asNumberValue().toInt, 123456789)
val doubleValue = decodeToValue(
new AttributeValue().withN("-98765432.00000001")
)
assertEquals(doubleValue.asNumberValue().toDouble, -98765432.00000001, 0.0)
val trueValue = decodeToValue(new AttributeValue().withBOOL(true))
assertEquals(trueValue.asBooleanValue().getBoolean, true)
val falseValue = decodeToValue(new AttributeValue().withBOOL(false))
assertEquals(falseValue.asBooleanValue().getBoolean, false)
val nilValue = decodeToValue(new AttributeValue().withNULL(true))
assertEquals(nilValue.isNilValue, true)
}
@Test
def listDecodeTest(): Unit = {
val stringListValue = decodeToValue(
new AttributeValue().withL(
new AttributeValue().withS("ValueA"),
new AttributeValue().withS("ValueB"),
new AttributeValue().withS("ValueC")
)
)
assertTrue(stringListValue.isArrayValue)
assertEquals(stringListValue.asArrayValue().size(), 3)
assertTrue(stringListValue.asArrayValue().get(0).isStringValue)
assertEquals(
stringListValue.asArrayValue().get(0).asStringValue().asString(),
"ValueA"
)
assertEquals(
stringListValue.asArrayValue().get(1).asStringValue().asString(),
"ValueB"
)
assertEquals(
stringListValue.asArrayValue().get(2).asStringValue().asString(),
"ValueC"
)
val numberListValue = decodeToValue(
new AttributeValue().withL(
new AttributeValue().withN("123"),
new AttributeValue().withN("-456"),
new AttributeValue().withN("0.0000045679"),
new AttributeValue().withN("-1234567890.123")
)
)
assertTrue(numberListValue.isArrayValue)
assertEquals(numberListValue.asArrayValue().size(), 4)
assertTrue(numberListValue.asArrayValue().get(0).isIntegerValue)
assertEquals(
numberListValue.asArrayValue().get(0).asNumberValue().toInt,
123
)
assertEquals(
numberListValue.asArrayValue().get(1).asNumberValue().toInt,
-456
)
assertTrue(numberListValue.asArrayValue().get(2).isFloatValue)
assertEquals(
numberListValue.asArrayValue().get(2).asNumberValue().toDouble,
0.0000045679,
0.0
)
assertEquals(
numberListValue.asArrayValue().get(3).asNumberValue().toDouble,
-1234567890.123,
0.0
)
val stringSetValue = decodeToValue(
new AttributeValue().withSS(new JUtil.HashSet[String]() {
add("ValueA")
add("ValueB")
add("ValueC")
})
)
assertTrue(stringSetValue.isArrayValue)
assertEquals(stringSetValue.asArrayValue().size(), 3)
assertThat(
List("ValueA", "ValueB", "ValueC").asJava,
hasItems(
equalTo(stringSetValue.asArrayValue().get(0).asStringValue().asString),
equalTo(stringSetValue.asArrayValue().get(1).asStringValue().asString),
equalTo(stringSetValue.asArrayValue().get(2).asStringValue().asString)
)
)
val numberSetValue = decodeToValue(
new AttributeValue().withNS(new JUtil.HashSet[String]() {
add("123")
add("-456")
add("0.0000045679")
add("-1234567890.123")
})
)
assertTrue(numberSetValue.isArrayValue)
assertEquals(numberSetValue.asArrayValue().size(), 4)
}
@Test
def mapDecodeTest(): Unit = {
val stringMap = decodeToValue(
new AttributeValue().withM(new JUtil.HashMap[String, AttributeValue]() {
put("KeyA", new AttributeValue().withS("ValueA"))
put("KeyB", new AttributeValue().withS("ValueB"))
put("KeyC", new AttributeValue().withS("ValueC"))
})
)
assertTrue(stringMap.isMapValue)
assertEquals(stringMap.asMapValue().size(), 3)
assertEquals(
stringMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyA"))
.asStringValue()
.asString(),
"ValueA"
)
assertEquals(
stringMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyB"))
.asStringValue()
.asString(),
"ValueB"
)
assertEquals(
stringMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyC"))
.asStringValue()
.asString(),
"ValueC"
)
val numberMap = decodeToValue(
new AttributeValue().withM(new JUtil.HashMap[String, AttributeValue]() {
put("KeyA", new AttributeValue().withN("123"))
put("KeyB", new AttributeValue().withN("-456"))
put("KeyC", new AttributeValue().withN("0.0000045679"))
put("KeyD", new AttributeValue().withN("-1234567890.123"))
})
)
assertTrue(numberMap.isMapValue)
assertEquals(numberMap.asMapValue().size(), 4)
assertTrue(
numberMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyA"))
.isIntegerValue
)
assertEquals(
numberMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyA"))
.asNumberValue()
.toInt,
123
)
assertEquals(
numberMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyB"))
.asNumberValue()
.toInt,
-456
)
assertTrue(
numberMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyC"))
.isFloatValue
)
assertEquals(
numberMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyC"))
.asFloatValue()
.toDouble,
0.0000045679,
0.0
)
assertEquals(
numberMap
.asMapValue()
.map()
.get(ValueFactory.newString("KeyD"))
.asFloatValue()
.toDouble,
-1234567890.123,
0.0
)
}
def attr[A](value: A)(implicit f: A => AttributeValue): AttributeValue =
f(value)
implicit def StringAttributeValue(value: String): AttributeValue =
new AttributeValue().withS(value)
implicit def IntegerAttributeValue(value: Int): AttributeValue =
new AttributeValue().withN(value.toString)
implicit def LongAttributeValue(value: Long): AttributeValue =
new AttributeValue().withN(value.toString)
implicit def FloatAttributeValue(value: Float): AttributeValue =
new AttributeValue().withN(value.toString)
implicit def DoubleAttributeValue(value: Double): AttributeValue =
new AttributeValue().withN(value.toString)
implicit def BooleanAttributeValue(value: Boolean): AttributeValue =
new AttributeValue().withBOOL(value)
implicit def MapAttributeValue(
value: Map[String, AttributeValue]
): AttributeValue = new AttributeValue().withM(value.asJava)
implicit def ListAttributeValue(value: List[AttributeValue]): AttributeValue =
new AttributeValue().withL(value.asJava)
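  // Illustrative: with the conversions above,
  //   attr(Map("id" -> attr(1), "tags" -> attr(List(attr("a")))))
  // builds a single AttributeValue with nested M (map) and L (list) payloads.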
@Test
def nestedDecodeTest(): Unit = {
// TODO: Json -> AttributeValue...
val testData = decodeToValue(
attr(
Map(
"_id" -> attr("56d8e1377a72374918f73bd2"),
"index" -> attr(0),
"guid" -> attr("5309640c-499a-43f6-801d-3076c810892b"),
"isActive" -> attr(true),
"age" -> attr(37),
"name" -> attr("Battle Lancaster"),
"email" -> attr("[email protected]"),
"registered" -> attr("2014-07-16T04:40:58 -09:00"),
"latitude" -> attr(45.574906),
"longitude" -> attr(36.596302),
"tags" -> attr(
List(
attr("veniam"),
attr("exercitation"),
attr("velit"),
attr("pariatur"),
attr("sit"),
attr("non"),
attr("dolore")
)
),
"friends" -> attr(
List(
attr(
Map(
"id" -> attr(0),
"name" -> attr("Mejia Montgomery"),
"tags" -> attr(
List(attr("duis"), attr("proident"), attr("et"))
)
)
),
attr(
Map(
"id" -> attr(1),
"name" -> attr("Carpenter Reed"),
"tags" -> attr(
List(attr("labore"), attr("nisi"), attr("ipsum"))
)
)
),
attr(
Map(
"id" -> attr(2),
"name" -> attr("Gamble Watts"),
"tags" -> attr(
List(attr("occaecat"), attr("voluptate"), attr("eu"))
)
)
)
)
)
)
)
)
val testA = new ObjectMapper()
.readValue(testData.toJson, classOf[JUtil.Map[String, Any]])
val testB = new ObjectMapper().readValue(
new File("src/test/resources/json/test.json"),
classOf[JUtil.Map[String, Any]]
)
assertThat(testA, is(testB))
}
}
| lulichn/embulk-input-dynamodb | src/test/scala/org/embulk/input/dynamodb/AttributeValueHelperTest.scala | Scala | mit | 9,725 |
package ui.scene
trait SceneModule {
def initSceneModule(scene: Scene): Unit = {}
def sceneDraw(scene: Scene): Unit = {}
def sceneItemDraw(scene: Scene, sceneItem: SceneItem): Unit = {}
}
| gvatn/play-scalajs-webgl-spark | client/src/main/scala/ui/scene/SceneModule.scala | Scala | mit | 202 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.v2.avro
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.datasources.PartitioningAwareFileIndex
import org.apache.spark.sql.execution.datasources.v2.FileScanBuilder
import org.apache.spark.sql.sources.v2.reader.Scan
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
class AvroScanBuilder (
sparkSession: SparkSession,
fileIndex: PartitioningAwareFileIndex,
schema: StructType,
dataSchema: StructType,
options: CaseInsensitiveStringMap)
extends FileScanBuilder(sparkSession, fileIndex, dataSchema) {
override def build(): Scan = {
AvroScan(sparkSession, fileIndex, dataSchema, readDataSchema(), readPartitionSchema(), options)
}
}
| techaddict/spark | external/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroScanBuilder.scala | Scala | apache-2.0 | 1,575 |
package com.twitter.finagle.toggle
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.core.JsonFactory
import com.fasterxml.jackson.core.util.{DefaultIndenter, DefaultPrettyPrinter}
import com.fasterxml.jackson.databind.{MappingJsonFactory, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.twitter.util.Try
import java.net.URL
import scala.collection.mutable
import scala.collection.{breakOut, immutable}
/**
* [[ToggleMap ToggleMaps]] in JSON format.
*
* @define jsonschema The [[http://json-schema.org/ JSON Schema]] used is:
* {{{
* {
* "\\$schema": "http://json-schema.org/draft-04/schema#",
* "type": "object",
* "required": [
* "toggles"
* ],
* "properties": {
* "toggles": {
* "type": "array",
* "items": {
* "type": "object",
* "properties": {
* "id": { "type": "string" },
* "description": { "type": "string" },
* "fraction": {
* "type": "number",
* "minimum": 0.0,
* "maximum": 1.0,
* "exclusiveMinimum": false,
* "exclusiveMaxmimum": false
* },
* "comment": { "type": "string" }
* },
* "required": [
* "id",
* "fraction"
* ]
* }
* }
* }
* }
* }}}
*
* @define example Here is an example of a JSON [[ToggleMap]] input:
* {{{
* {
* "toggles": [
* {
* "id": "com.game.of.thrones.WargingEnabled",
* "description": "Use warging for computing the response.",
* "fraction": 0.1
* },
* {
* "id": "com.game.of.thrones.IsWinterComing",
* "description": "Controls whether or not winter is coming.",
* "fraction": 1.0,
* "comment": "We've seen the white walkers, we know that winter is coming."
* }
* ]
* }
* }}}
*
* With the exception of "comment", the properties correspond to the various
* fields on [[Toggle.Metadata]].
 *  - '''id''': Corresponds to `Toggle.Metadata.id`.
* - '''fraction''': Corresponds to `Toggle.Metadata.fraction` and values must be
* between `0.0` and `1.0`, inclusive.
* - '''description''': Corresponds to `Toggle.Metadata.description`.
* - '''comment''': For documentation purposes only and is not used in the
* creation of the [[ToggleMap]].
*/
object JsonToggleMap {
private[this] val mapper: ObjectMapper =
new ObjectMapper().registerModule(DefaultScalaModule)
/**
* How to treat the "description" field on a toggle.
*
* @see [[DescriptionIgnored]] and [[DescriptionRequired]].
*/
sealed abstract class DescriptionMode
/**
* Requires toggles to have a "description" field.
*
* This is useful for the library owner's base definitions of [[Toggle]].
*/
object DescriptionRequired extends DescriptionMode
/**
* Transforms the Toggle's "description" field into being empty.
*
* This is useful for service owner overrides of a toggle where
* the developer making modifications is not the one who has defined
* the toggle itself.
*/
object DescriptionIgnored extends DescriptionMode
private[this] case class JsonToggle(
@JsonProperty(required = true) id: String,
@JsonProperty(required = true) fraction: Double,
description: Option[String],
comment: Option[String])
private[this] case class JsonToggles(
@JsonProperty(required = true) toggles: Seq[JsonToggle]) {
def toToggleMap(
source: String,
descriptionMode: DescriptionMode
): ToggleMap = {
val invalid = toggles.find { md =>
descriptionMode match {
case DescriptionRequired => md.description.isEmpty
case DescriptionIgnored => false
}
}
invalid match {
case None => ()
case Some(md) =>
throw new IllegalArgumentException(s"Mandatory description is missing for: $md")
}
val metadata: immutable.Seq[Toggle.Metadata] =
toggles.map { jsonToggle =>
val description = descriptionMode match {
case DescriptionRequired => jsonToggle.description
case DescriptionIgnored => None
}
Toggle.Metadata(
jsonToggle.id,
jsonToggle.fraction,
description,
source)
}(breakOut)
val ids = metadata.map(_.id)
val uniqueIds = ids.distinct
if (ids.size != uniqueIds.size) {
throw new IllegalArgumentException(
s"Duplicate Toggle ids found: ${ids.mkString(",")}")
}
new ToggleMap.Immutable(metadata)
}
}
/**
* Attempts to parse the given JSON `String` into a [[ToggleMap]].
*
* $jsonschema
*
* $example
*
* @param descriptionMode how to treat the "description" field for a toggle.
*/
def parse(json: String, descriptionMode: DescriptionMode): Try[ToggleMap] = Try {
val jsonToggles = mapper.readValue(json, classOf[JsonToggles])
jsonToggles.toToggleMap("JSON String", descriptionMode)
}
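  // Hypothetical usage (the toggle id and fraction are illustrative):
  //   val json = """{"toggles": [{"id": "com.example.UseNewPath", "fraction": 0.5}]}"""
  //   JsonToggleMap.parse(json, DescriptionIgnored)  // Return(ToggleMap)
  //   JsonToggleMap.parse(json, DescriptionRequired) // Throw: description is mandatory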
/**
* Attempts to parse the given JSON `URL` into a [[ToggleMap]].
*
* Useful for loading resource files via [[StandardToggleMap]].
*
* $jsonschema
*
* $example
*
* @param descriptionMode how to treat the "description" field for a toggle.
*/
def parse(url: URL, descriptionMode: DescriptionMode): Try[ToggleMap] = Try {
val jsonToggles = mapper.readValue(url, classOf[JsonToggles])
jsonToggles.toToggleMap(url.toString, descriptionMode)
}
private case class Component(source: String, fraction: Double)
private case class LibraryToggle(current: Current, components: Seq[Component])
private case class Library(libraryName: String, toggles: Seq[LibraryToggle])
private case class Libraries(libraries: Seq[Library])
private case class Current(
id: String,
fraction: Double,
lastValue: Option[Boolean],
description: Option[String])
private val factory = new MappingJsonFactory()
factory.disable(JsonFactory.Feature.USE_THREAD_LOCAL_FOR_BUFFER_RECYCLING)
private val printer = new DefaultPrettyPrinter
printer.indentArraysWith(DefaultIndenter.SYSTEM_LINEFEED_INSTANCE)
  private val writer = mapper.writer(printer)
private[this] def toLibraryToggles(toggleMap: ToggleMap): Seq[LibraryToggle] = {
// create a map of id to metadata for faster lookups
val idToMetadata = toggleMap.iterator.map { md => md.id -> md }.toMap
// create a mapping of id to a seq of its components.
val idToComponents = mutable.Map.empty[String, mutable.ArrayBuffer[Component]]
ToggleMap.components(toggleMap).foreach { tm =>
tm.iterator.foreach { md =>
val components: mutable.ArrayBuffer[Component] =
idToComponents.getOrElse(md.id, mutable.ArrayBuffer.empty[Component])
idToComponents.put(md.id, components += Component(md.source, md.fraction))
}
}
idToComponents.map { case (id, details) =>
val md = idToMetadata(id)
val lastApply = toggleMap(id) match {
case captured: Toggle.Captured => captured.lastApply
case _ => None
}
LibraryToggle(Current(id, md.fraction, lastApply, md.description), details)
}.toSeq
}
/**
* Serialize a [[ToggleMap]] to JSON format
*/
def toJson(registry: Map[String, ToggleMap]): String = {
val libs = registry.map { case (name, toggleMap) => Library(name, toLibraryToggles(toggleMap)) }
    writer.writeValueAsString(Libraries(libs.toSeq))
}
}
| koshelev/finagle | finagle-toggle/src/main/scala/com/twitter/finagle/toggle/JsonToggleMap.scala | Scala | apache-2.0 | 7,524 |
package io.zengin.telegrambot
import io.zengin.telegrambot.types.requests._
import io.zengin.telegrambot.types._
import io.zengin.telegrambot.types.requests.RequestsJsonSupport._
import scala.util.{ Success, Failure }
import scala.concurrent.Future
import akka.actor.ActorSystem
import spray.client.pipelining._
import scala.concurrent.ExecutionContext.Implicits.global
import spray.http.{MediaTypes, BodyPart, MultipartFormData, ContentTypes, FormData, HttpHeaders, FormFile, HttpData}
import spray.http.HttpEntity._
import spray.httpx.UnsuccessfulResponseException
import spray.httpx.unmarshalling._
import spray.http._
class TelegramApi(token: String, implicit val system: ActorSystem) {
  case class MarshallingException(message: String) extends Exception(message)
import io.zengin.telegrambot.types.TypesJsonSupport._
private val apiUrl = s"https://api.telegram.org/bot$token/"
private val fileUrl = s"https://api.telegram.org/file/bot$token/"
private def buildFileBodyPart(key: String, file: InputFile) = {
val httpData = HttpData(file.bytes)
val httpEntitiy = HttpEntity(MediaTypes.`multipart/form-data`, httpData).asInstanceOf[HttpEntity.NonEmpty]
BodyPart(FormFile(file.name, httpEntitiy), key)
}
private def buildParameterBodyPart(key: String, value: String) = {
BodyPart(value, Seq(HttpHeaders.`Content-Disposition`("form-data", Map("name" -> key)) ))
}
private def failureAwareUnmarshal[E: FromResponseUnmarshaller, R: FromResponseUnmarshaller]: HttpResponse => Either[E, R] = { response =>
response.status match {
case spray.http.StatusCodes.Success(_) => response.as[R] match {
case Right(value) => Right(value)
case Left(error) => throw new MarshallingException(error.toString)
}
case spray.http.StatusCodes.ClientError(_) => response.as[E] match {
case Right(value) => Left(value)
case Left(error) => throw new MarshallingException(error.toString)
}
case error => throw new MarshallingException(error.toString)
}
}
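  // Contract sketch (types illustrative): a 2xx response unmarshals to Right[R], a 4xx
  // response to Left[E], and any other status raises a MarshallingException.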
def getMe(): Future[Either[FailResult, User]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[User]]
pipeline (Get(apiUrl + "getMe")) map {
case Right(Result(true, user)) => Right(user)
case Left(failResult) => Left(failResult)
}
}
def getUpdates(offset: Option[Int] = None): Future[Option[List[Update]]] = {
val pipeline = sendReceive ~> unmarshal[Result[List[Update]]]
pipeline (Get(apiUrl + "getUpdates?offset=" + offset.getOrElse(0))) map {
case Result(true, result) => Some(result)
} recover {
case e => None
}
}
def sendMessage(request: SendMessageRequest): Future[Either[FailResult, Message]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[Message]]
pipeline (Post(apiUrl + "sendMessage", request)) map {
case Right(Result(true, message)) => Right(message)
case Left(failResult) => Left(failResult)
}
}
def sendChatAction(request: SendChatActionRequest): Future[Either[FailResult, Boolean]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[Boolean]]
pipeline (Post(apiUrl + "sendChatAction", request)) map {
case Right(Result(true, true)) => Right(true) // yes this is ugly
case Left(failResult) => Left(failResult)
}
}
def sendLocation(request: SendLocationRequest): Future[Either[FailResult, Message]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[Message]]
pipeline (Post(apiUrl + "sendLocation", request)) map {
case Right(Result(true, message)) => Right(message)
case Left(failResult) => Left(failResult)
}
}
def getFile(id: String): Future[Either[FailResult, File]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[File]]
pipeline(Get(apiUrl + "getFile?file_id=" + id)) map {
case Right(Result(true, file)) => Right(file)
case Left(failResult) => Left(failResult)
}
}
def getUserProfilePhotos(userId: Int): Future[Either[FailResult, UserProfilePhotos]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[UserProfilePhotos]]
pipeline(Get(apiUrl + s"getUserProfilePhotos?user_id=$userId")) map {
case Right(Result(true, userProfilePhotos)) => Right(userProfilePhotos)
case Left(failResult) => Left(failResult)
}
}
def forwardMessage(request: ForwardMessageRequest): Future[Either[FailResult, Message]] = {
val pipeline = sendReceive ~> failureAwareUnmarshal[FailResult, Result[Message]]
pipeline (Post(apiUrl + "forwardMessage", request)) map {
case Right(Result(true, message)) => Right(message)
case Left(failResult) => Left(failResult)
}
}
def sendAudio(request: SendAudioRequest): Future[Option[Message]] = {
val pipeline = sendReceive ~> unmarshal[Result[Message]]
request match {
case SendAudioRequest(chatId, Left(audio), duration, performer, title, replyTo, _) =>
val fileBodyPart = buildFileBodyPart("audio", audio)
var formData = Seq(fileBodyPart)
formData = formData ++ Seq(chatId match {
case Right(chatId) => buildParameterBodyPart("chat_id", chatId.toString)
case Left(chatId) => buildParameterBodyPart("chat_id", chatId)
})
performer match {
case Some(performer) => formData = formData ++ Seq(buildParameterBodyPart("performer", performer))
case None =>
}
title match {
case Some(title) => formData = formData ++ Seq(buildParameterBodyPart("title", title))
case None =>
}
replyTo match {
case Some(replyTo) => formData = formData ++ Seq(buildParameterBodyPart("reply_to_message_id", replyTo.toString))
case None =>
}
pipeline(Post(apiUrl + "sendAudio", MultipartFormData(formData))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
case SendAudioRequest(chatId, Right(fileId), _, _, _, _, _) =>
import io.zengin.telegrambot.types.requests.RequestsJsonSupport.sendAudioRequestFormat
pipeline(Post(apiUrl + "sendAudio", sendAudioRequestFormat.write(request))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
case _ => Future { None }
}
}
def sendPhoto(request: SendPhotoRequest): Future[Option[Message]] = {
val pipeline = sendReceive ~> unmarshal[Result[Message]]
request match {
case SendPhotoRequest(chatId, Left(photo), caption, replyTo, _) => // we need to upload file
val fileBodyPart = buildFileBodyPart("photo", photo)
var formData = Seq(
fileBodyPart,
chatId match {
case Right(chatId) => BodyPart(chatId.toString, Seq(HttpHeaders.`Content-Disposition`("form-data", Map("name" -> "chat_id")) ))
case Left(chatId) => BodyPart(chatId, Seq(HttpHeaders.`Content-Disposition`("form-data", Map("name" -> "chat_id")) ))
}
)
caption match {
case Some(caption) => formData = formData ++ Seq(BodyPart(caption, Seq(HttpHeaders.`Content-Disposition`("form-data", Map("name" -> "caption")) )))
case _ =>
}
replyTo match {
case Some(replyTo) => formData = formData ++ Seq(BodyPart(replyTo.toString, Seq(HttpHeaders.`Content-Disposition`("form-data", Map("name" -> "reply_to_message_id")) )))
case _ =>
}
pipeline(Post(apiUrl + "sendPhoto", MultipartFormData(formData))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
case SendPhotoRequest(_, Right(fileId), _, _, _) => // file must be already saved in telegram servers
pipeline(Post(apiUrl + "sendPhoto", sendPhotoRequestFormat.write(request))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
case _ => Future { None }
}
}
def sendDocument(request: SendDocumentRequest): Future[Option[Message]] = {
val pipeline = sendReceive ~> unmarshal[Result[Message]]
request match {
case SendDocumentRequest(chatId, Left(document), replyTo, _) =>
val fileBodyPart = buildFileBodyPart("document", document)
var formData = Seq(fileBodyPart)
formData = formData ++ Seq(chatId match {
case Right(chatId) => buildParameterBodyPart("chat_id", chatId.toString)
case Left(chatId) => buildParameterBodyPart("chat_id", chatId)
})
replyTo match {
case Some(replyTo) => formData = formData ++ Seq(buildParameterBodyPart("reply_to_message_id", replyTo.toString))
case None =>
}
pipeline(Post(apiUrl + "sendDocument", MultipartFormData(formData))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
case SendDocumentRequest(_, Right(fileId), _, _) =>
pipeline(Post(apiUrl + "sendDocument", sendDocumentRequestFormat.write(request))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
}
}
def sendSticker(request: SendStickerRequest): Future[Option[Message]] = {
val pipeline = sendReceive ~> unmarshal[Result[Message]]
request match {
case SendStickerRequest(chatId, Left(sticker), replyTo, _) =>
val fileBodyPart = buildFileBodyPart("sticker", sticker)
var formData = Seq(fileBodyPart)
formData = formData ++ Seq(chatId match {
case Right(chatId) => buildParameterBodyPart("chat_id", chatId.toString)
case Left(chatId) => buildParameterBodyPart("chat_id", chatId)
})
replyTo match {
case Some(replyTo) => formData = formData ++ Seq(buildParameterBodyPart("reply_to_message_id", replyTo.toString))
case None =>
}
pipeline(Post(apiUrl + "sendSticker", MultipartFormData(formData))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
case SendStickerRequest(_, Right(fileId), _, _) =>
pipeline(Post(apiUrl + "sendSticker", sendStickerRequestFormat.write(request))) map {
case Result(true, message) => Some(message)
} recover {
case e => None
}
}
}
}
| hzengin/telegrambot | src/main/scala/io/zengin/telegrambot/TelegramApi.scala | Scala | mit | 10,668 |
package org.jetbrains.plugins.scala
package annotator.createFromUsage
import com.intellij.codeInsight.template.TemplateBuilder
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.fileEditor.{FileEditorManager, OpenFileDescriptor}
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiClass, PsiElement}
import org.jetbrains.plugins.scala.codeInspection.collections.MethodRepr
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParameterizedTypeElement, ScSimpleTypeElement, ScTupleTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructor, ScReferenceElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScExpression, ScReferenceExpression}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.plugins.scala.lang.psi.types.{Any => scTypeAny, ScType}
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
/**
* Nikolay.Tropin
* 2014-07-31
*/
object CreateFromUsageUtil {
def uniqueNames(names: Seq[String]) = {
names.foldLeft(List[String]()) { (r, h) =>
(h #:: Stream.from(1).map(h + _)).find(!r.contains(_)).get :: r
}.reverse
}
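  // Illustrative: uniqueNames(Seq("x", "x", "y")) == List("x", "x1", "y")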
def nameByType(tp: ScType) = NameSuggester.suggestNamesByType(tp).headOption.getOrElse("value")
def nameAndTypeForArg(arg: PsiElement): (String, ScType) = arg match {
case ref: ScReferenceExpression => (ref.refName, ref.getType().getOrAny)
case expr: ScExpression =>
val tp = expr.getType().getOrAny
(nameByType(tp), tp)
case bp: ScBindingPattern if !bp.isWildcard => (bp.name, bp.getType(TypingContext.empty).getOrAny)
case p: ScPattern =>
val tp: ScType = p.getType(TypingContext.empty).getOrAny
(nameByType(tp), tp)
case _ => ("value", scTypeAny)
}
def paramsText(args: Seq[PsiElement]) = {
val (names, types) = args.map(nameAndTypeForArg).unzip
(uniqueNames(names), types).zipped.map((name, tpe) => s"$name: ${tpe.canonicalText}").mkString("(", ", ", ")")
}
def parametersText(ref: ScReferenceElement) = {
ref.getParent match {
case p: ScPattern =>
paramsText(patternArgs(p))
case MethodRepr(_, _, _, args) => paramsText(args) //for case class
case _ =>
val fromConstrArguments = PsiTreeUtil.getParentOfType(ref, classOf[ScConstructor]) match {
case ScConstructor(simple: ScSimpleTypeElement, args) if ref.getParent == simple => args
case ScConstructor(pt: ScParameterizedTypeElement, args) if ref.getParent == pt.typeElement => args
case _ => Seq.empty
}
fromConstrArguments.map(argList => paramsText(argList.exprs)).mkString
}
}
def patternArgs(pattern: ScPattern): Seq[ScPattern] = {
pattern match {
case cp: ScConstructorPattern => cp.args.patterns
case inf: ScInfixPattern => inf.leftPattern +: inf.rightPattern.toSeq
case _ => Seq.empty
}
}
def addParametersToTemplate(elem: PsiElement, builder: TemplateBuilder): Unit = {
elem.depthFirst.filterByType(classOf[ScParameter]).foreach { parameter =>
val id = parameter.getNameIdentifier
builder.replaceElement(id, id.getText)
parameter.paramType.foreach { it =>
builder.replaceElement(it, it.getText)
}
}
}
def addTypeParametersToTemplate(elem: PsiElement, builder: TemplateBuilder): Unit = {
elem.depthFirst.filterByType(classOf[ScTypeParam]).foreach { tp =>
builder.replaceElement(tp.nameId, tp.name)
}
}
def addQmarksToTemplate(elem: PsiElement, builder: TemplateBuilder): Unit = {
val Q_MARKS = "???"
elem.depthFirst.filterByType(classOf[ScReferenceExpression]).filter(_.getText == Q_MARKS)
.foreach { qmarks =>
builder.replaceElement(qmarks, Q_MARKS)
}
}
def addUnapplyResultTypesToTemplate(fun: ScFunction, builder: TemplateBuilder): Unit = {
fun.returnTypeElement match {
case Some(ScParameterizedTypeElement(_, Seq(tuple: ScTupleTypeElement))) => //Option[(A, B)]
tuple.components.foreach(te => builder.replaceElement(te, te.getText))
case Some(ScParameterizedTypeElement(_, args)) =>
args.foreach(te => builder.replaceElement(te, te.getText))
case _ =>
}
}
def positionCursor(element: PsiElement): Editor = {
val offset = element.getTextRange.getEndOffset
val project = element.getProject
val descriptor = new OpenFileDescriptor(project, element.getContainingFile.getVirtualFile, offset)
FileEditorManager.getInstance(project).openTextEditor(descriptor, true)
}
def unapplyMethodText(pattern: ScPattern) = {
val pType = pattern.expectedType.getOrElse(scTypeAny)
val pName = nameByType(pType)
s"def unapply($pName: ${pType.canonicalText}): ${unapplyMethodTypeText(pattern)} = ???"
}
def unapplyMethodTypeText(pattern: ScPattern) = {
val types = CreateFromUsageUtil.patternArgs(pattern).map(_.getType(TypingContext.empty).getOrAny)
val typesText = types.map(_.canonicalText).mkString(", ")
types.size match {
case 0 => "Boolean"
case 1 => s"Option[$typesText]"
case _ => s"Option[($typesText)]"
}
}
}
object InstanceOfClass {
def unapply(elem: PsiElement): Option[PsiClass] = elem match {
case ScExpression.Type(TypeAsClass(psiClass)) => Some(psiClass)
case ResolvesTo(typed: ScTypedDefinition) =>
typed.getType().toOption match {
case Some(TypeAsClass(psiClass)) => Some(psiClass)
case _ => None
}
case _ => None
}
}
object TypeAsClass {
def unapply(scType: ScType): Option[PsiClass] = scType match {
case ScType.ExtractClass(aClass) => Some(aClass)
case t: ScType => ScType.extractDesignatorSingletonType(t).flatMap(ScType.extractClass(_, None))
case _ => None
}
}
| LPTK/intellij-scala | src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateFromUsageUtil.scala | Scala | apache-2.0 | 6,157 |
package org.openmole.site
import java.util.UUID
import scalatags.Text.TypedTag
import scalatags.generic.StylePair
/*
* Copyright (C) 01/04/16 // [email protected]
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY, without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package object tools {
import scalatags.Text.all._
def listItem(content: Frag*): Frag = li(content)
def htmlList(items: Frag*): Frag = ul(items)
def paragraph(body: Frag*): Frag = Seq[Frag](body)
def aa = a(targetBlank)
val break = br(br)
object api {
def apiEntryTitle(entryName: String): Frag = Seq[Frag](b(entryName), ": ")
def newEntry(name: String, body: Frag*): Frag = Seq[Frag](apiEntryTitle(name), body)
}
object hl {
    def apply(content: String, lang: String, clazz: Option[String] = Some("doc-code")) = highlight(content, lang, clazz)
def highlight(string: String, lang: String, clazz: Option[String] = Some("doc-code")) = {
val lines = string.split("\\n", -1)
val modif: Seq[Modifier] = clazz.toSeq.map(c ⇒ cls := c)
if (lines.length == 1) {
scalatags.Text.all.code(
cls := lang + " " + "hljs",
modif,
display := "inline",
padding := 0,
margin := 0,
lines(0))
}
else {
val minIndent = lines.filter(_.trim != "").map(_.takeWhile(_ == ' ').length).min
val stripped = lines.map(_.drop(minIndent))
.dropWhile(_ == "")
.mkString("\\n")
pre(
modif,
scalatags.Text.all.code(
cls := lang + " " + "hljs",
stripped)
)
}
}
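    // Illustrative behaviour sketch (not part of the original source):
    // highlight("val x = 1", "scala") yields a single inline <code> element, while a
    // multi-line snippet is stripped to its minimal indentation and wrapped in <pre><code>.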
object OptionalName {
implicit def fromString(s: String) = OptionalName(Some(s))
}
case class OptionalName(name: Option[String])
def openmole(code: String, header: String = "", name: OptionalName = OptionalName(None)) = {
if (Test.testing) Test.allTests += Test(header + "\\n" + code, name.name)
apply(code, "scala")
}
def code(code: String) = openmoleNoTest(code)
def plain(code: String) = apply(code, "plain")
def openmoleNoTest(code: String) = apply(code, "scala")
def python(code: String) = apply(code, "python")
def json(code: String) = apply(code, "json")
}
def openmole(code: String, header: String = "", name: hl.OptionalName = hl.OptionalName(None)) = hl.openmole(code, header, name)
def code(code: String) = hl.code(code)
def plain(code: String) = hl.plain(code)
/** heavily inspired from Section.scala **/
object links {
def anchor(elements: Seq[Any]): Seq[Modifier] =
link(elements) match {
case Some(t) ⇒ Seq(a(id := s"${shared.anchor(t)}", top := -60, position := "relative", display := "block"))
case None ⇒ Seq()
}
def link(elements: Seq[Any]) = elements.collect { case x: String ⇒ x }.headOption
def linkIcon(elements: Seq[Any]): Seq[Modifier] =
link(elements) match {
case Some(t) ⇒ Seq(" ", a(href := s"#${shared.anchor(t)}", tag("font")(size := 4, opacity := 0.4)("\\uD83D\\uDD17")))
case None ⇒ Seq()
}
def toModifier(element: Any): Modifier =
element match {
case e: String ⇒ e
case e: TypedTag[String] ⇒ e
case e: scalatags.generic.StylePair[Any, String] ⇒ e.s := e.v
case e: AttrPair ⇒ e
case _ ⇒ throw new RuntimeException("Unknown element type " + element.getClass)
}
}
object sitemap {
def siteMapSection(docSection: Seq[Page]) = for {
page ← docSection
} yield li(a(page.title, href := page.file))
}
def h2(elements: Any*): Frag = Seq(div(links.anchor(elements): _*), scalatags.Text.all.h2(elements.map(links.toModifier) ++ links.linkIcon(elements): _*))
def h3(elements: Any*): Frag = Seq(div(links.anchor(elements): _*), scalatags.Text.all.h3(elements.map(links.toModifier) ++ links.linkIcon(elements): _*))
def anchor(title: String) = s"#${shared.anchor(title)}"
def img(xs: Modifier*) = scalatags.Text.all.img(Seq(cls := "doc-img") ++ xs: _*)
def br = scalatags.Text.all.br(cls := "doc-br")
case class Parameter(name: String, `type`: String, description: String)
def parameters(p: Parameter*) = {
def toRow(p: Parameter) = li(p.name + ": " + p.`type` + ": " + p.description)
ul(p.map(toRow))
}
  def tq = """"""""" // a literal triple quote (""") for embedding in code samples
def uuID: String = UUID.randomUUID.toString
implicit class ShortID(id: String) {
def short = id.split('-').head
}
// SCALATAGS METHODS
def classIs(s: String*): AttrPair = `class` := s.mkString(" ")
def to(page: Page): TypedTag[String] = to(Pages.file(page), otherTab = false)
def to(link: String, otherTab: Boolean = true, style: Seq[Modifier] = Seq()): TypedTag[String] = a(style, href := link)(if (otherTab) targetBlank else "")
def innerLink(page: Page, title: String) = to(page)(span(title))
def outerLink(linkName: String, link: String, otherTab: Boolean = true) = to(link, otherTab = otherTab)(span(linkName))
// CONVENIENT KEYS
implicit class SString(ss: String) {
def ++(s: String) = s"$ss $s"
}
def linkButton(title: String, link: String, buttonStyle: AttrPair = classIs(btn, btn_selected), openInOtherTab: Boolean = true) =
a(href := link)(if (openInOtherTab) targetBlank else "")(span(buttonStyle, `type` := "button", title))
def divLinkButton(content: TypedTag[_], link: String, buttonStyle: AttrPair = classIs(btn, btn_default), openInOtherTab: Boolean = true) =
a(href := link)(if (openInOtherTab) targetBlank else "")(span(content)(buttonStyle, `type` := "button"))
def pageLinkButton(title: String, page: Page, openInOtherTab: Boolean = true, buttonStyle: Seq[Modifier] = Seq(classIs(btn, btn_default))) =
to(page)(if (openInOtherTab) targetBlank else "")(span(buttonStyle, `type` := "button", title))
def glyphSpan(glyphicon: String, style: Seq[Modifier], page: Page, text: String = ""): TypedTag[_ <: String] =
to(page)(classIs(glyphicon), style, pointer, aria.hidden := "true")(text)
def leftGlyphButton(title: String, page: Page, glyph: String, openInOtherTab: Boolean = false, buttonStyle: Seq[Modifier] = Seq(classIs(btn, btn_default))) =
to(page)(if (openInOtherTab) targetBlank else "")(
span(buttonStyle, `type` := "button")(
span(classIs(glyph)),
span(s" $title")
)
)
def modificationLink(source: String) =
s"https://github.com/openmole/openmole/edit/${org.openmole.core.buildinfo.version.major}-dev/openmole/bin/org.openmole.site/jvm/src/main/scalatex/$source"
def rightGlyphButton(title: String, page: Page, glyph: String, openInOtherTab: Boolean = false, buttonStyle: Seq[Modifier] = Seq(classIs(btn, btn_default))) =
to(page)(if (openInOtherTab) targetBlank else "")(
span(buttonStyle, `type` := "button")(
span(s"$title "),
span(classIs(glyph))
)
)
def basicButton(title: String, buttonStyle: AttrPair = classIs(btn, btn_default)) =
span(buttonStyle, `type` := "button", title)
/*def getPageTitle(page: Page) = page.title match {
case None ⇒ page.name
case Some(x) ⇒ x
}*/
lazy val nav: String = "nav"
lazy val navbar: String = "navbar"
lazy val navbar_nav: String = "navbar-nav"
lazy val navbar_default: String = "navbar-default"
lazy val navbar_inverse: String = "navbar-inverse"
lazy val navbar_staticTop: String = "navbar-static-top"
lazy val navbar_staticBottom: String = "navbar-static-bottom"
lazy val navbar_fixedTop: String = "navbar-fixed-top"
lazy val navbar_fixedBottom: String = "navbar-fixed-bottom"
lazy val navbar_right: String = "navbar-right"
lazy val navbar_left: String = "navbar-left"
lazy val navbar_header: String = "navbar-header"
lazy val navbar_brand: String = "navbar-brand"
lazy val navbar_btn: String = "navbar-btn"
lazy val navbar_collapse: String = "navbar-collapse"
lazy val nav_pills: String = "nav-pills"
lazy val btn: String = "btn"
lazy val btn_default: String = "btn-default"
lazy val btn_selected: String = "btn-selected"
lazy val btn_primary: String = "btn-primary"
lazy val btn_danger: String = "btn-danger"
lazy val btn_mole: String = "btn-mole"
lazy val glyph_chevron_left: String = "glyphicon glyphicon-chevron-left"
lazy val glyph_chevron_right: String = "glyphicon glyphicon-chevron-right"
private def role(suffix: String): AttrPair = scalatags.Text.all.role := suffix
lazy val role_tablist = role("tablist")
lazy val role_presentation = role("presentation")
lazy val role_tab = role("tab")
lazy val tab_pane: String = "tab-pane"
lazy val tab_panel_role = role("tabpanel")
lazy val role_button = role("button")
lazy val container_fluid: String = "container-fluid"
lazy val pointer = cursor := "pointer"
lazy val fixedPosition = position := "fixed"
lazy val targetBlank = target := "_blank"
lazy val collapse: String = "collapse"
lazy val fade: String = "fade"
lazy val row: String = "row"
def colMD(nb: Int): String = s"col-md-$nb"
}
| openmole/openmole | openmole/bin/org.openmole.site/jvm/src/main/scala/org/openmole/site/tools.scala | Scala | agpl-3.0 | 9,618 |
package skinny.exception
/**
* Represents view template issue.
*
* @param message message
* @param cause cause
*/
case class ViewTemplateNotFoundException(message: String, cause: Throwable = null)
extends RuntimeException(message, cause)
| skinny-framework/skinny-framework | framework/src/main/scala/skinny/exception/ViewTemplateNotFoundException.scala | Scala | mit | 253 |
package com.maxmind.gatling.simulation
import ammonite.ops._
import io.gatling.app.GatlingStatusCodes._
import java.util.Date
import scala.collection.immutable.HashMap
import scala.sys.process._
import scalaz.Scalaz._
import scalaz._
import com.maxmind.gatling.simulation.RunnerConfig.{Quiet, Verbose, Verbosity}
/**
* A gatling simulation runner - launch in new process, as Gatling docs command.
*/
object Runner {
type CliArgs = Seq[String]
type CliEnv = Seq[(String, String)]
def apply(conf: RunnerConfig) = conf mkRunner ()
}
class Runner(conf: RunnerConfig) {
import Runner.CliArgs
def argsErr(args: CliArgs) = "Invalid args: " ++ args.mkString("(", ", ", ")")
def apply(): (Boolean, String) = {
conf() ▹ { case (args, env) ⇒
import io.gatling.app.GatlingStatusCodes
Process(args, None, env: _*).! match {
case GatlingStatusCodes.Success ⇒ (true, "OK")
case AssertionsFailed ⇒ (false, "Fail: simulation assertion")
case InvalidArguments ⇒ (false, argsErr(args))
}
}
}
}
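// Illustrative usage sketch (the simulation class name is hypothetical):
//   val (ok, msg) = RunnerConfig(simClassName = "mysims.BasicSimulation").mkRunner()()
//   if (!ok) sys.error(msg)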
object RunnerConfig {
sealed trait Verbosity
case object Quiet extends Verbosity
case object Verbose extends Verbosity
lazy val stubDirClassesRel: RelPath = 'target / "test-classes"
lazy val stubDirLibRel : RelPath = 'lib
lazy val pwd = cwd
lazy val propsDef = new HashMap[String, String]()
lazy val jarFileDef = pwd / 'target / "scala-2.11" / "gatlinggen.jar"
lazy val outDirSimResults = pwd / "sim-results"
lazy val outDirTmp = Path(Path.makeTmp)
lazy val outDirDef = (exists ! outDirSimResults) ? outDirSimResults | outDirTmp
lazy val runnerShDef = pwd / 'dev / "gatling.sh"
lazy val pathDate = new Date().toString replaceAll ("[ :]", "-")
lazy val simNameDef = s"gatlilng-sim-anon-$pathDate"
lazy val simDescDef = s"$simNameDef-description"
lazy val verbosityDef = Verbose
}
case class RunnerConfig(
simClassName: String,
props: Map[String, String] = RunnerConfig.propsDef,
jarFile: Path = RunnerConfig.jarFileDef,
outDir: Path = RunnerConfig.outDirDef,
runnerSh: Path = RunnerConfig.runnerShDef,
simName: String = RunnerConfig.simNameDef,
simDesc: String = RunnerConfig.simDescDef,
verbosity: Verbosity = RunnerConfig.verbosityDef) {
import Runner.{CliArgs, CliEnv}
lazy val isQuiet = verbosity match {
case Quiet ⇒ true
case Verbose ⇒ false
}
// gatling -h describes all CLI args listed here.
lazy val asArgs: CliArgs = Seq(
runnerSh.toString,
"--output-name", simName,
"--results-folder", outDir.toString,
"--run-description", simDesc,
"--simulation", simClassName.toString
)
lazy val asEnv: CliEnv = Seq(
"JAVA_CLASSPATH" → jarFile.toString,
"GATLING_HOME" → outDir.toString,
"JAVA_OPTS" → (props map { case (k: String, v: String) ⇒ s"-D$k=$v" }).mkString(" ")
)
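  // Illustrative: props = Map("users" -> "10") reaches the forked process as
  // JAVA_OPTS="-Dusers=10", alongside JAVA_CLASSPATH and GATLING_HOME above.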
def apply(): (CliArgs, CliEnv) = {
assume(
{ """^[0-9a-zA-Z._\\-]+$""".r findFirstIn simName }.isDefined,
s"Invalid sim-name '$simName'"
)
(asArgs, asEnv) ◃ { case (args, env) ⇒
if (!isQuiet) println(
"# Running gatling:\\n# \\t" ++ args.mkString(" ") ++ s"\\n# \\t" ++ env.mkString(" ")
)
}
}
def mkRunner(): Runner = new Runner(this ◃ { _ prepareOutDir () })
def prepareOutDir(): Unit =
for (d ← Seq(
RunnerConfig.stubDirClassesRel,
RunnerConfig.stubDirLibRel)
) (outDir / d) ◃ { mkdir ! _ } ◃ { d ⇒ assert((stat ! d).isDir, { s"No dir $d" }) }
}
| maxmind/gatling-gen | src/main/scala/com/maxmind/gatling/simulation/Runner.scala | Scala | apache-2.0 | 3,604 |
package com.mogproject.mogami.core.io.sfen
import com.mogproject.mogami.core.Square
import com.mogproject.mogami.core.game.Game.{HistoryHash, Position}
import com.mogproject.mogami.core.game.{Branch, Game}
import com.mogproject.mogami.core.io.RecordFormatException
import com.mogproject.mogami.core.move.{Move, MoveBuilderSfen, SpecialMove}
import com.mogproject.mogami.core.state.{State, StateCache}
import com.mogproject.mogami.core.state.StateHash.StateHash
import com.mogproject.mogami.util.Implicits._
import scala.util.{Success, Try}
/**
 * Parses branch descriptions written in SFEN/USEN notation into [[Branch]] instances.
 */
trait SfenBranchReader {
/**
* Parse trunk description
*
* @param s "{board} {turn} {hand} {offset} [{move}...]"
*/
def parseSfenString(s: String, isFreeMode: Boolean)(implicit stateCache: StateCache): Branch = {
val tokens = s.split(" ").toIndexedSeq
parseSfenStringHelper(tokens.drop(3), None, _ => Branch(State.parseSfenString(tokens.take(3).mkString(" "))).initialHash, isFreeMode)
}
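  // Illustrative input (HIRATE start position, offset 0, then one move in SFEN move notation):
  //   "lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 0 7g7f"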
/**
* Parse branch description
*
* @param trunk trunk
* @param s "{offset} [{move}...]"
*/
def parseSfenString(trunk: Branch, s: String, isFreeMode: Boolean)(implicit stateCache: StateCache): Branch =
parseSfenStringHelper(tokens = s.split(" ").toIndexedSeq, Some(Range(trunk.offset, trunk.offset + trunk.history.length)), i => trunk.history(i - trunk.offset), isFreeMode)
// helper functions
private[this] def parseSfenStringHelper(tokens: Seq[String], range: Option[Range], initialStateFunc: Int => StateHash, isFreeMode: Boolean)
(implicit stateCache: StateCache): Branch = {
val offset = tokens.headOption.map(parseOffset(_, range)).getOrElse(throw new RecordFormatException(1, s"cannot find offset"))
val moves = tokens.drop(1).map(MoveBuilderSfen.parseSfenString)
moves.foldLeft[Branch](Branch(initialStateFunc(offset), offset, isFreeMode = isFreeMode)) { (br, m) =>
br.makeMove(m).getOrElse(throw new RecordFormatException(1, s"invalid move: ${m.toSfenString}"))
}
}
private[this] def parseOffset(s: String, range: Option[Range] = None): Int = (Try(s.toInt), range) match {
case (Success(n), None) if 0 <= n => n
case (Success(n), Some(r)) if r.contains(n) => n
case (Success(_), _) => throw new RecordFormatException(1, s"offset is out of range: ${s}")
case _ => throw new RecordFormatException(1, s"offset must be number: ${s}")
}
/**
* Parse USEN string as a trunk
*/
def parseUsenStringAsTrunk(s: String, initialState: State, isFreeMode: Boolean)(implicit stateCache: StateCache): Branch =
parseUsenStringHelper(s, _ => stateCache.set(initialState), _ => None, _ => None, None, isFreeMode)
/**
* Parse USEN string as a branch
*/
def parseUsenStringAsBranch(s: String, trunk: Branch, isFreeMode: Boolean)(implicit stateCache: StateCache): Branch =
parseUsenStringHelper(
s,
pos => trunk.getStateHash(pos).get,
pos => trunk.getHistoryHash(pos),
pos => trunk.getMove(pos - 1).map(_.to),
Some(Range(trunk.offset, trunk.offset + trunk.history.length)),
isFreeMode
)
// helper function
private[this] def parseUsenStringHelper(s: String,
initialStateFunc: Position => StateHash,
initialHistoryHashFunc: Position => Option[HistoryHash],
lastMoveFunc: Position => Option[Square],
offsetRange: Option[Range],
isFreeMode: Boolean = false
)(implicit stateCache: StateCache): Branch = {
val tokens = s.split("[.]", 3)
if (tokens.length != 3) throw new RecordFormatException(1, s"branch description must have three sections: ${s}")
val Array(os, mvs, fa) = tokens
// parse offset
val offset = parseOffset(os, offsetRange)
if (mvs.length % 3 != 0) throw new RecordFormatException(1, s"each move must be three characters: ${mvs}")
val moves = mvs.grouped(3).map(MoveBuilderSfen.parseUsenString)
// make moves
val initBranch: Branch = Branch(initialStateFunc(offset), offset, initialHistoryHash = initialHistoryHashFunc(offset), isFreeMode = isFreeMode)
val initLastMoveTo = lastMoveFunc(offset)
val b = moves.zipWithIndex.foldLeft[Branch](initBranch) { case (br, (m, i)) =>
br.makeMove(m, (i == 0).fold(initLastMoveTo, None)).getOrElse(throw new RecordFormatException(1, s"Invalid move: ${m.toUsenString}"))
}
// set final action
val finalAction = fa.nonEmpty.option(SpecialMove.parseUsenString(fa, b.lastState, b.lastMoveTo))
b.updateFinalAction(finalAction)
}
}
trait SfenGameReader {
def parseSfenString(s: String, isFreeMode: Boolean = false)(implicit stateCache: StateCache): Game = Game(Branch.parseSfenString(s, isFreeMode))
def parseUsenString(s: String, isFreeMode: Boolean = false)(implicit stateCache: StateCache): Game = {
val tokens = s.split("~")
if (tokens.length < 2) throw new RecordFormatException(1, s"game description must have at least two sections: ${s}")
val initialState = tokens(0).isEmpty.fold(State.HIRATE, State.parseUsenString(tokens(0))) // the first token can be empty
val trunk = Branch.parseUsenStringAsTrunk(tokens(1), initialState, isFreeMode)
val branches = tokens.drop(2).map(ss => Branch.parseUsenStringAsBranch(ss, trunk, isFreeMode)).toVector
Game(trunk, branches)
}
}
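// Illustrative USEN layout, mirroring SfenGameWriter.toUsenString below:
//   "{initialState, empty for HIRATE}~{trunk}~{branch}~{branch}..."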
trait SfenBranchWriter {
def initialState: State
def offset: Int
def moves: Vector[Move]
def finalAction: Option[SpecialMove]
/**
* Make Sfen string
*
* @return "{initialState} {offset} [{move}...]"
*/
def toSfenString: String =
(Seq(initialState.toSfenString, offset.toString) ++ moves.map(_.toSfenString)).mkString(" ")
/**
* Make Usen string
*
* @return "{offset}.[{move}...].[{final}]"
*/
def toUsenString: String = {
Seq(offset.toString, moves.map(_.toUsenString).mkString, finalAction.map(_.toUsenString).getOrElse("")).mkString(".")
}
}
trait SfenGameWriter extends SfenLike with UsenLike {
def trunk: Branch
def branches: Vector[Branch]
/**
* Make trunk Sfen string
*
* @return "{board} {turn} {hand} {offset} [{move}...]"
*/
override def toSfenString: String = trunk.toSfenString
/**
* @note Trunk initial can be omitted if it is the same as the HIRATE state.
*/
override def toUsenString: String = {
val trunkInitial = (trunk.initialState == State.HIRATE).fold("", trunk.initialState.toUsenString)
(Seq(trunkInitial, trunk.toUsenString) ++ branches.map(_.toUsenString)).mkString("~")
}
} | mogproject/mog-core-scala | shared/src/main/scala/com/mogproject/mogami/core/io/sfen/SfenGameIO.scala | Scala | apache-2.0 | 6,751 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.table
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.api.{Over, TableSchema, Tumble, Types}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{TableTestBase, TestNestedProjectableTableSource, TestProjectableTableSource, TestTableSourceWithTime}
import org.apache.flink.types.Row
import org.junit.Test
class TableSourceTest extends TableTestBase {
@Test
def testTableSourceWithLongRowTimeField(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rowtime", "val", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rowtime", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"rowTimeT",
new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), rowtime = "rowtime"))
val t = util.tableEnv.scan("rowTimeT").select($"rowtime", $"id", $"name", $"val")
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, rowTimeT]], " +
"fields=[rowtime, id, name, val], " +
"source=[TestTableSourceWithTime(id, rowtime, val, name)])"
util.verifyTable(t, expected)
}
@Test
def testTableSourceWithTimestampRowTimeField(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rowtime", "val", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rowtime", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"rowTimeT",
new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), rowtime = "rowtime"))
val t = util.tableEnv.scan("rowTimeT").select($"rowtime", $"id", $"name", $"val")
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, rowTimeT]], " +
"fields=[rowtime, id, name, val], " +
"source=[TestTableSourceWithTime(id, rowtime, val, name)])"
util.verifyTable(t, expected)
}
@Test
def testRowTimeTableSourceGroupWindow(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rowtime", "val", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rowtime", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"rowTimeT",
new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), rowtime = "rowtime"))
val t = util.tableEnv.scan("rowTimeT")
.filter($"val" > 100)
.window(Tumble over 10.minutes on 'rowtime as 'w)
.groupBy('name, 'w)
.select('name, 'w.end, 'val.avg)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
"StreamTableSourceScan(table=[[default_catalog, default_database, rowTimeT]], " +
"fields=[rowtime, val, name], " +
"source=[TestTableSourceWithTime(id, rowtime, val, name)])",
term("select", "rowtime", "val", "name"),
term("where", ">(val, 100)")
),
term("groupBy", "name"),
term("window", "TumblingGroupWindow('w, 'rowtime, 600000.millis)"),
term("select", "name", "AVG(val) AS EXPR$1", "end('w) AS EXPR$0")
),
term("select", "name", "EXPR$0", "EXPR$1")
)
util.verifyTable(t, expected)
}
@Test
def testProcTimeTableSourceSimple(): Unit = {
val tableSchema = new TableSchema(
Array("id", "proctime", "val", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
Array("id", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"procTimeT",
new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), proctime = "proctime"))
val t = util.tableEnv.scan("procTimeT").select($"proctime", $"id", $"name", $"val")
val expected =
unaryNode(
"DataStreamCalc",
"StreamTableSourceScan(table=[[default_catalog, default_database, procTimeT]], " +
"fields=[id, proctime, val, name], " +
"source=[TestTableSourceWithTime(id, proctime, val, name)])",
term("select", "PROCTIME(proctime) AS proctime", "id", "name", "val")
)
util.verifyTable(t, expected)
}
@Test
def testProcTimeTableSourceOverWindow(): Unit = {
val tableSchema = new TableSchema(
Array("id", "proctime", "val", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
Array("id", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"procTimeT",
new TestTableSourceWithTime[Row](tableSchema, returnType, Seq(), proctime = "proctime"))
val t = util.tableEnv.scan("procTimeT")
.window(Over partitionBy 'id orderBy 'proctime preceding 2.hours as 'w)
.select('id, 'name, 'val.sum over 'w as 'valSum)
.filter('valSum > 100)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
"StreamTableSourceScan(table=[[default_catalog, default_database, procTimeT]], " +
"fields=[id, proctime, val, name], " +
"source=[TestTableSourceWithTime(id, proctime, val, name)])",
term("partitionBy", "id"),
term("orderBy", "proctime"),
term("range", "BETWEEN 7200000 PRECEDING AND CURRENT ROW"),
term("select", "id", "proctime", "val", "name", "SUM(val) AS w0$o0")
),
term("select", "id", "name", "w0$o0 AS valSum"),
term("where", ">(w0$o0, 100)")
)
util.verifyTable(t, expected)
}
@Test
def testProjectWithRowtimeProctime(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "name", "val", "rtime"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime"))
val t = util.tableEnv.scan("T").select('name, 'val, 'id)
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[name, val, id], " +
"source=[TestSource(physical fields: name, val, id)])"
util.verifyTable(t, expected)
}
@Test
def testProjectWithoutRowtime(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "name", "val", "rtime"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime"))
val t = util.tableEnv.scan("T").select('ptime, 'name, 'val, 'id)
val expected = unaryNode(
"DataStreamCalc",
"StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[ptime, name, val, id], " +
"source=[TestSource(physical fields: name, val, id)])",
term("select", "PROCTIME(ptime) AS ptime", "name", "val", "id")
)
util.verifyTable(t, expected)
}
  @Test
  def testProjectWithoutProctime(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime"))
val t = util.tableEnv.scan("T").select('name, 'val, 'rtime, 'id)
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[name, val, rtime, id], " +
"source=[TestSource(physical fields: name, val, rtime, id)])"
util.verifyTable(t, expected)
}
  @Test
  def testProjectOnlyProctime(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime"))
val t = util.tableEnv.scan("T").select('ptime)
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[ptime], " +
"source=[TestSource(physical fields: )])"
util.verifyTable(t, expected)
}
  @Test
  def testProjectOnlyRowtime(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime"))
val t = util.tableEnv.scan("T").select('rtime)
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[rtime], " +
"source=[TestSource(physical fields: rtime)])"
util.verifyTable(t, expected)
}
@Test
def testProjectWithMapping(): Unit = {
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.LONG, Types.INT, Types.STRING, Types.LONG)
.asInstanceOf[Array[TypeInformation[_]]],
Array("p-rtime", "p-id", "p-name", "p-val"))
val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name")
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestProjectableTableSource(tableSchema, returnType, Seq(), "rtime", "ptime", mapping))
val t = util.tableEnv.scan("T").select('name, 'rtime, 'val)
val expected = "StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[name, rtime, val], " +
"source=[TestSource(physical fields: remapped-p-name, remapped-p-rtime, remapped-p-val)])"
util.verifyTable(t, expected)
}
@Test
def testNestedProject(): Unit = {
val nested1 = new RowTypeInfo(
Array(Types.STRING, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
Array("name", "value")
)
val nested2 = new RowTypeInfo(
Array(Types.INT, Types.BOOLEAN).asInstanceOf[Array[TypeInformation[_]]],
Array("num", "flag")
)
val deepNested = new RowTypeInfo(
Array(nested1, nested2).asInstanceOf[Array[TypeInformation[_]]],
Array("nested1", "nested2")
)
val tableSchema = new TableSchema(
Array("id", "deepNested", "nested", "name"),
Array(Types.INT, deepNested, nested1, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
Array("id", "deepNested", "nested", "name"))
val util = streamTestUtil()
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"T",
new TestNestedProjectableTableSource(tableSchema, returnType, Seq()))
val t = util.tableEnv
.scan("T")
.select('id,
'deepNested.get("nested1").get("name") as 'nestedName,
'nested.get("value") as 'nestedValue,
'deepNested.get("nested2").get("flag") as 'nestedFlag,
'deepNested.get("nested2").get("num") as 'nestedNum)
val expected = unaryNode(
"DataStreamCalc",
"StreamTableSourceScan(table=[[default_catalog, default_database, T]], " +
"fields=[id, deepNested, nested], " +
"source=[TestSource(read nested fields: " +
"id.*, deepNested.nested2.num, deepNested.nested2.flag, " +
"deepNested.nested1.name, nested.value)])",
term("select", "id", "deepNested.nested1.name AS nestedName", "nested.value AS nestedValue",
"deepNested.nested2.flag AS nestedFlag", "deepNested.nested2.num AS nestedNum")
)
util.verifyTable(t, expected)
}
}
| hequn8128/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/TableSourceTest.scala | Scala | apache-2.0 | 15,474 |
package com.tribbloids.spookystuff
import java.util.UUID
import java.util.concurrent.TimeUnit
import com.esotericsoftware.kryo.Kryo
import com.tribbloids.spookystuff.conf.{DirConf, SpookyConf}
import com.tribbloids.spookystuff.dsl._
import com.tribbloids.spookystuff.doc._
import com.tribbloids.spookystuff.metrics.SpookyMetrics
import com.tribbloids.spookystuff.row.FetchedRow
import org.apache.spark.SerializableWritable
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.sql.catalyst.ScalaReflection.universe.TypeTag
import scala.collection.immutable.ListMap
import scala.concurrent.duration.FiniteDuration
//TODO: not all classes are registered which renders this class useless
class SpookyKryoRegistrator extends KryoRegistrator {
override def registerClasses(kryo: Kryo): Unit = {
val array: Array[Class[_]] = Array(
//used by PageRow
classOf[TypeTag[_]],
classOf[FetchedRow],
classOf[ListMap[_, _]],
classOf[UUID],
classOf[Elements[_]],
classOf[Siblings[_]],
classOf[HtmlElement],
classOf[JsonElement],
classOf[Doc],
// classOf[UnknownElement],
// classOf[ExploreStage],
//used by broadcast & accumulator
classOf[SpookyConf],
classOf[DirConf],
classOf[SerializableWritable[_]],
classOf[SpookyContext],
classOf[SpookyMetrics],
//used by Expressions
// classOf[NamedFunction1]
//parameters
classOf[FiniteDuration],
classOf[TimeUnit],
FilePaths.getClass,
PartitionerFactories.getClass,
WebProxyFactories.getClass
)
array.foreach(kryo.register)
}
}
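// Illustrative Spark configuration sketch (standard Spark keys; values are assumptions):
//   spark.serializer       org.apache.spark.serializer.KryoSerializer
//   spark.kryo.registrator com.tribbloids.spookystuff.SpookyKryoRegistrator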
| tribbloid/spookystuff | core/src/main/scala/com/tribbloids/spookystuff/SpookyKryoRegistrator.scala | Scala | apache-2.0 | 1,658 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.util.Random
import org.apache.spark.AccumulatorSuite
import org.apache.spark.sql.{RandomDataGenerator, Row, SQLConf}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
/**
* A test suite that generates randomized data to test the [[TungstenSort]] operator.
*/
class TungstenSortSuite extends SparkPlanTest with SharedSQLContext {
override def beforeAll(): Unit = {
super.beforeAll()
ctx.conf.setConf(SQLConf.CODEGEN_ENABLED, true)
}
override def afterAll(): Unit = {
try {
ctx.conf.setConf(SQLConf.CODEGEN_ENABLED, SQLConf.CODEGEN_ENABLED.defaultValue.get)
} finally {
super.afterAll()
}
}
test("sort followed by limit") {
checkThatPlansAgree(
(1 to 100).map(v => Tuple1(v)).toDF("a"),
(child: SparkPlan) => Limit(10, TungstenSort('a.asc :: Nil, true, child)),
(child: SparkPlan) => Limit(10, Sort('a.asc :: Nil, global = true, child)),
sortAnswers = false
)
}
test("sorting does not crash for large inputs") {
val sortOrder = 'a.asc :: Nil
val stringLength = 1024 * 1024 * 2
checkThatPlansAgree(
Seq(Tuple1("a" * stringLength), Tuple1("b" * stringLength)).toDF("a").repartition(1),
TungstenSort(sortOrder, global = true, _: SparkPlan, testSpillFrequency = 1),
Sort(sortOrder, global = true, _: SparkPlan),
sortAnswers = false
)
}
test("sorting updates peak execution memory") {
val sc = ctx.sparkContext
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "unsafe external sort") {
checkThatPlansAgree(
(1 to 100).map(v => Tuple1(v)).toDF("a"),
(child: SparkPlan) => TungstenSort('a.asc :: Nil, true, child),
(child: SparkPlan) => Sort('a.asc :: Nil, global = true, child),
sortAnswers = false)
}
}
// Test sorting on different data types
for (
dataType <- DataTypeTestUtils.atomicTypes ++ Set(NullType);
nullable <- Seq(true, false);
sortOrder <- Seq('a.asc :: Nil, 'a.desc :: Nil);
randomDataGenerator <- RandomDataGenerator.forType(dataType, nullable)
) {
test(s"sorting on $dataType with nullable=$nullable, sortOrder=$sortOrder") {
val inputData = Seq.fill(1000)(randomDataGenerator())
val inputDf = ctx.createDataFrame(
ctx.sparkContext.parallelize(Random.shuffle(inputData).map(v => Row(v))),
StructType(StructField("a", dataType, nullable = true) :: Nil)
)
assert(TungstenSort.supportsSchema(inputDf.schema))
checkThatPlansAgree(
inputDf,
plan => ConvertToSafe(
TungstenSort(sortOrder, global = true, plan: SparkPlan, testSpillFrequency = 23)),
Sort(sortOrder, global = true, _: SparkPlan),
sortAnswers = false
)
}
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | sql/core/src/test/scala/org/apache/spark/sql/execution/TungstenSortSuite.scala | Scala | apache-2.0 | 3,681 |
package onion.compiler.tools
import onion.tools.Shell
class DecrementSpec extends AbstractShellSpec {
describe("Decrement class") {
it("demonstrate decrement(--) feature") {
val result = shell.run(
"""
| class Decrement {
| public:
| static def main(args: String[]): Int {
| i = 10;
| for ; i >= 0; i-- { }
| return i;
| }
| }
""".stripMargin,
"None",
Array()
)
assert(Shell.Success(-1) == result)
}
}
} | kmizu/onion | src/test/scala/onion/compiler/tools/DecrementSpec.scala | Scala | bsd-3-clause | 563 |
package io.scalac.slack.websockets
import akka.actor.{Actor, Props}
import akka.io.IO
import io.scalac.slack._
import spray.can.Http
import spray.can.server.UHttp
import spray.can.websocket.WebSocketClientWorker
import spray.can.websocket.frame.{CloseFrame, StatusCode, TextFrame}
import spray.http.{HttpHeaders, HttpMethods, HttpRequest}
/**
* Created on 28.01.15 19:45
*/
class WSActor(eventBus: MessageEventBus) extends Actor with WebSocketClientWorker {
override def receive = connect orElse handshaking orElse closeLogic
val out = context.actorOf(Props(classOf[OutgoingMessageProcessor], self, eventBus))
val in = context.actorOf(Props(classOf[IncomingMessageProcessor], eventBus))
private def connect(): Receive = {
case WebSocket.Connect(host, port, resource, ssl) =>
val headers = List(
HttpHeaders.Host(host, port),
HttpHeaders.Connection("Upgrade"),
HttpHeaders.RawHeader("Upgrade", "websocket"),
HttpHeaders.RawHeader("Sec-WebSocket-Version", "13"),
HttpHeaders.RawHeader("Sec-WebSocket-Key", Config.websocketKey))
request = HttpRequest(HttpMethods.GET, resource, headers)
IO(UHttp)(context.system) ! Http.Connect(host, port, ssl)
}
override def businessLogic = {
case WebSocket.Release => close()
case TextFrame(msg) => //message received
      // Each message is forwarded unparsed to the event processor,
      // because messages arriving on the websocket should be consumed quickly.
      // If the EventProcessor becomes a bottleneck while parsing,
      // a dedicated dispatcher can be used.
println(s"RECEIVED MESSAGE: ${msg.utf8String} ")
in ! msg.utf8String
case WebSocket.Send(message) => //message to send
println(s"SENT MESSAGE: $message ")
send(message)
case ignoreThis => // ignore
}
def send(message: String) = connection ! TextFrame(message)
def close() = connection ! CloseFrame(StatusCode.NormalClose)
private var request: HttpRequest = null
override def upgradeRequest = request
}
object WebSocket {
sealed trait WebSocketMessage
case class Connect(
host: String,
port: Int,
resource: String,
withSsl: Boolean = false) extends WebSocketMessage
case class Send(msg: String) extends WebSocketMessage
case object Release extends WebSocketMessage
}
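// Illustrative protocol sketch (host and resource values are hypothetical):
//   wsActor ! WebSocket.Connect("host.example.com", 443, "/websocket", withSsl = true)
//   wsActor ! WebSocket.Send("""{"id": 1, "type": "ping"}""")
//   wsActor ! WebSocket.Release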
| adhoclabs/scala-slack-bot-core | src/main/scala/io/scalac/slack/websockets/WSActor.scala | Scala | mit | 2,372 |
import Macro._
object Test extends App {
new StringContext().f3() // error
}
| som-snytt/dotty | tests/neg-macros/tasty-string-interpolator-position-b/Test_2.scala | Scala | apache-2.0 | 83 |
package ch.wsl.fireindices.app
import ch.wsl.fireindices.app.ui.App
object LauncherApp {
def main(args: Array[String]){
    if (args.length == 0)
      App.main(args)        // no arguments: launch the GUI application
    else
      ConsoleApp.main(args) // otherwise delegate to the command-line application
}
}
| Insubric/fire-calculator | Launcher.scala | Scala | gpl-2.0 | 213 |
package edu.jingw.raytracer
import org.scalatest.FlatSpec
class RaySpec extends FlatSpec {
"toString" should "give direction and start" in {
assert(Ray(Vector.I, Vector.J).toString == "x = <1.0, 0.0> + <0.0, 1.0>t")
}
}
| jingw/raytracer | src/test/scala/edu/jingw/raytracer/RaySpec.scala | Scala | mit | 230 |
/*
* Copyright (c) 2014 Dufresne Management Consulting LLC.
*/
package com.nickelsoftware.bettercare4me.hedis.hedis2014
import scala.util.Random
import org.joda.time.DateTime
import org.joda.time.Interval
import com.nickelsoftware.bettercare4me.hedis.HEDISRule
import com.nickelsoftware.bettercare4me.hedis.Scorecard
import com.nickelsoftware.bettercare4me.models.Claim
import com.nickelsoftware.bettercare4me.models.LabClaim
import com.nickelsoftware.bettercare4me.models.MedClaim
import com.nickelsoftware.bettercare4me.models.Patient
import com.nickelsoftware.bettercare4me.models.PatientHistory
import com.nickelsoftware.bettercare4me.models.PersistenceLayer
import com.nickelsoftware.bettercare4me.models.Provider
import com.nickelsoftware.bettercare4me.models.RuleConfig
import com.nickelsoftware.bettercare4me.utils.Utils
object CIS_HB {
val name = "CIS-HB-C-HEDIS-2014"
val hepBVaccine = "Hepatitis B Vaccine"
val hepBHistory = "Hepatitis B History"
/**
* CPT codes for Hep B vaccination
*/
val cptA = List("90723", "90740", "90744", "90747", "90748")
val cptAS = cptA.toSet
/**
* HCPCS for Hep B vaccination
*/
val hcpcsA = List("G0010")
val hcpcsAS = hcpcsA.toSet
/**
* ICD Diagnosis codes for Hep B
*/
val icdDA = List("070.2*", "070.3*", "V02.61")
val icdDAS = icdDA.toSet
}
/**
* Hepatitis B Vaccine
*
* Hepatitis B Vaccine indicates whether a child, who turned 2 years old during the measurement year, received three (3) hepatitis
* B vaccinations. This excludes children who had a previous adverse reaction to a vaccine, as well as those with a vaccine
* contraindication such as immunodeficiency syndrome, HIV, lymphoreticular or histiocytic tissue cancer, multiple myeloma, or
* leukemia.
*
* NUMERATOR:
* Identifies children, who turned 2 years old during the measurement year, and received three (3) hepatitis B vaccinations with
* different dates of service on or before the child's 2nd birthday. Evidence of the antigen or vaccine, a documented history of the
* illness, or a seropositive test result are counted in the numerator.
*
*/
class CIS_HB_Rule(config: RuleConfig, hedisDate: DateTime) extends CIS_RuleBase(config, hedisDate) {
val name = CIS_HB.name
val fullName = "Hepatitis B Vaccine"
val description = "Hepatitis B Vaccine indicates whether a child, who turned 2 years old during the measurement year, received three (3) hepatitis " +
"B vaccinations. This excludes children who had a previous adverse reaction to a vaccine, as well as those with a vaccine " +
"contraindication such as immunodeficiency syndrome, HIV, lymphoreticular or histiocytic tissue cancer, multiple myeloma, or " +
"leukemia."
import CIS_HB._
override def generateMeetMeasureClaims(pl: PersistenceLayer, patient: Patient, provider: Provider): List[Claim] = {
    // after 42 days after birth and before 2 years of age;
    // 20 months leaves room for the two 30-day follow-up doses before the 2nd birthday
val days = Utils.daysBetween(patient.dob.plusDays(42), patient.dob.plusMonths(20))
val dos1 = patient.dob.plusDays(42 + Random.nextInt(days))
val dos2 = dos1.plusDays(30)
val dos3 = dos2.plusDays(30)
/* 3 hepatitis B vaccinations received on different dates
* of service (anytime prior to the child's 2nd birthday),
* or a history of the disease */
pickOne(List(
// Possible set: CPT
() => List(
pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos1, dos1, cpt = pickOne(cptA)),
pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos2, dos2, cpt = pickOne(cptA)),
pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos3, dos3, cpt = pickOne(cptA))),
// Another possible set: HCPCS
() => List(
pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos1, dos1, hcpcs = pickOne(hcpcsA)),
pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos2, dos2, hcpcs = pickOne(hcpcsA)),
pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos3, dos3, hcpcs = pickOne(hcpcsA))),
// Another possible set: ICD D
() => List(pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos1, dos1, icdDPri = pickOne(icdDA)))))()
}
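  // Note on the pattern above: pickOne selects one of the thunks (() => List[Claim]) at
  // random, and the trailing () invokes it, so exactly one evidence set is generated.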
override def scorePatientMeetMeasure(scorecard: Scorecard, patient: Patient, ph: PatientHistory): Scorecard = {
// after 42 days after birth and before 2 years of age
val measurementInterval = new Interval(patient.dob.plusDays(42), patient.dob.plusMonths(24).plusDays(1))
def rules = List[(Scorecard) => Scorecard](
// Check for patient has CPT
(s: Scorecard) => {
val claims1 = filterClaims(ph.cpt, cptAS, { claim: MedClaim => measurementInterval.contains(claim.dos) })
val claims2 = filterClaims(ph.hcpcs, hcpcsAS, { claim: MedClaim => measurementInterval.contains(claim.dos) })
val claims = List.concat(claims1, claims2)
// need to have 3 claims with different dates
if (hasDifferentDates(3, claims)) s.addScore(name, fullName, HEDISRule.meetMeasure, hepBVaccine, claims)
else s
},
// Check for patient has ICD D (History of disease)
(s: Scorecard) => {
val claims = filterClaims(ph.icdD, icdDAS, { claim: MedClaim => !claim.dos.isAfter(hedisDate) })
s.addScore(name, fullName, HEDISRule.meetMeasure, hepBHistory, claims)
})
applyRules(scorecard, rules)
}
}
| reactivecore01/bettercare4.me | play/app/com/nickelsoftware/bettercare4me/hedis/hedis2014/CIS_HB_Rule.scala | Scala | apache-2.0 | 5,912 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.io.{ByteArrayOutputStream, DataOutputStream}
import scala.collection.JavaConverters._
import scala.language.existentials
import org.apache.spark.api.java.function.MapFunction
import org.apache.spark.api.r._
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.api.r.SQLUtils._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.objects.Invoke
import org.apache.spark.sql.catalyst.plans.logical.{EventTimeWatermark, FunctionUtils, LogicalGroupState}
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.execution.python.BatchIterator
import org.apache.spark.sql.execution.r.ArrowRRunner
import org.apache.spark.sql.execution.streaming.GroupStateImpl
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.GroupStateTimeout
import org.apache.spark.sql.types._
/**
* Physical version of `ObjectProducer`.
*/
trait ObjectProducerExec extends SparkPlan {
// The attribute that reference to the single object field this operator outputs.
protected def outputObjAttr: Attribute
override def output: Seq[Attribute] = outputObjAttr :: Nil
override def producedAttributes: AttributeSet = AttributeSet(outputObjAttr)
def outputObjectType: DataType = outputObjAttr.dataType
}
/**
* Physical version of `ObjectConsumer`.
*/
trait ObjectConsumerExec extends UnaryExecNode {
assert(child.output.length == 1)
// This operator always need all columns of its child, even it doesn't reference to.
override def references: AttributeSet = child.outputSet
def inputObjectType: DataType = child.output.head.dataType
}
/**
* Takes the input row from child and turns it into object using the given deserializer expression.
* The output of this operator is a single-field safe row containing the deserialized object.
*/
case class DeserializeToObjectExec(
deserializer: Expression,
outputObjAttr: Attribute,
child: SparkPlan) extends UnaryExecNode with ObjectProducerExec with CodegenSupport {
override def outputPartitioning: Partitioning = child.outputPartitioning
override def inputRDDs(): Seq[RDD[InternalRow]] = {
child.asInstanceOf[CodegenSupport].inputRDDs()
}
protected override def doProduce(ctx: CodegenContext): String = {
child.asInstanceOf[CodegenSupport].produce(ctx, this)
}
override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
val resultObj = BindReferences.bindReference(deserializer, child.output).genCode(ctx)
consume(ctx, resultObj :: Nil)
}
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsWithIndexInternal { (index, iter) =>
val projection = GenerateSafeProjection.generate(deserializer :: Nil, child.output)
projection.initialize(index)
iter.map(projection)
}
}
}
/**
* Takes the input object from child and turns in into unsafe row using the given serializer
* expression. The output of its child must be a single-field row containing the input object.
*/
case class SerializeFromObjectExec(
serializer: Seq[NamedExpression],
child: SparkPlan) extends ObjectConsumerExec with CodegenSupport {
override def output: Seq[Attribute] = serializer.map(_.toAttribute)
override def outputPartitioning: Partitioning = child.outputPartitioning
override def inputRDDs(): Seq[RDD[InternalRow]] = {
child.asInstanceOf[CodegenSupport].inputRDDs()
}
protected override def doProduce(ctx: CodegenContext): String = {
child.asInstanceOf[CodegenSupport].produce(ctx, this)
}
override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
val resultVars = serializer.map { expr =>
BindReferences.bindReference[Expression](expr, child.output).genCode(ctx)
}
consume(ctx, resultVars)
}
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsWithIndexInternal { (index, iter) =>
val projection = UnsafeProjection.create(serializer)
projection.initialize(index)
iter.map(projection)
}
}
}
/**
* Helper functions for physical operators that work with user defined objects.
*/
object ObjectOperator {
def deserializeRowToObject(
deserializer: Expression,
inputSchema: Seq[Attribute]): InternalRow => Any = {
val proj = GenerateSafeProjection.generate(deserializer :: Nil, inputSchema)
(i: InternalRow) => proj(i).get(0, deserializer.dataType)
}
def deserializeRowToObject(deserializer: Expression): InternalRow => Any = {
val proj = GenerateSafeProjection.generate(deserializer :: Nil)
(i: InternalRow) => proj(i).get(0, deserializer.dataType)
}
def serializeObjectToRow(serializer: Seq[Expression]): Any => UnsafeRow = {
val proj = GenerateUnsafeProjection.generate(serializer)
val objType = serializer.head.collect { case b: BoundReference => b.dataType }.head
val objRow = new SpecificInternalRow(objType :: Nil)
(o: Any) => {
objRow(0) = o
proj(objRow)
}
}
def wrapObjectToRow(objType: DataType): Any => InternalRow = {
val outputRow = new SpecificInternalRow(objType :: Nil)
(o: Any) => {
outputRow(0) = o
outputRow
}
}
def unwrapObjectFromRow(objType: DataType): InternalRow => Any = {
(i: InternalRow) => i.get(0, objType)
}
}
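// Illustrative round-trip sketch (assumes resolved serializer/deserializer expressions
// `ser`/`deser` for a type T, e.g. from an ExpressionEncoder; not part of the original file):
//   val toRow   = ObjectOperator.serializeObjectToRow(ser)
//   val fromRow = ObjectOperator.deserializeRowToObject(deser)
//   fromRow(toRow(obj)) == obj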
/**
* Applies the given function to input object iterator.
* The output of its child must be a single-field row containing the input object.
*/
case class MapPartitionsExec(
func: Iterator[Any] => Iterator[Any],
outputObjAttr: Attribute,
child: SparkPlan)
extends ObjectConsumerExec with ObjectProducerExec {
override def outputPartitioning: Partitioning = child.outputPartitioning
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsInternal { iter =>
val getObject = ObjectOperator.unwrapObjectFromRow(child.output.head.dataType)
val outputObject = ObjectOperator.wrapObjectToRow(outputObjAttr.dataType)
func(iter.map(getObject)).map(outputObject)
}
}
}
/**
* Similar with [[MapPartitionsExec]] and
* [[org.apache.spark.sql.execution.r.MapPartitionsRWrapper]] but serializes and deserializes
* input/output in Arrow format.
*
* This is somewhat similar with [[org.apache.spark.sql.execution.python.ArrowEvalPythonExec]]
*/
case class MapPartitionsInRWithArrowExec(
func: Array[Byte],
packageNames: Array[Byte],
broadcastVars: Array[Broadcast[Object]],
inputSchema: StructType,
output: Seq[Attribute],
child: SparkPlan) extends UnaryExecNode {
override def producedAttributes: AttributeSet = AttributeSet(output)
private val batchSize = conf.arrowMaxRecordsPerBatch
override def outputPartitioning: Partitioning = child.outputPartitioning
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsInternal { inputIter =>
val outputTypes = schema.map(_.dataType)
// DO NOT use iter.grouped(). See BatchIterator.
val batchIter =
if (batchSize > 0) new BatchIterator(inputIter, batchSize) else Iterator(inputIter)
val runner = new ArrowRRunner(func, packageNames, broadcastVars, inputSchema,
SQLConf.get.sessionLocalTimeZone, RRunnerModes.DATAFRAME_DAPPLY)
// The communication mechanism is as follows:
//
// JVM side R side
//
// 1. Internal rows --------> Arrow record batches
// 2. Converts each Arrow record batch to each R data frame
// 3. Combine R data frames into one R data frame
// 4. Computes R native function on the data frame
// 5. Converts the R data frame to Arrow record batches
// 6. Columnar batches <-------- Arrow record batches
// 7. Each row from each batch
//
      // Note that, unlike the Python vectorization implementation, the R side sends
      // Arrow-formatted binary in a single batch due to a limitation of the R API.
      // See also ARROW-4512.
val columnarBatchIter = runner.compute(batchIter, -1)
val outputProject = UnsafeProjection.create(output, output)
columnarBatchIter.flatMap { batch =>
val actualDataTypes = (0 until batch.numCols()).map(i => batch.column(i).dataType())
assert(outputTypes == actualDataTypes, "Invalid schema from dapply(): " +
s"expected ${outputTypes.mkString(", ")}, got ${actualDataTypes.mkString(", ")}")
batch.rowIterator.asScala
}.map(outputProject)
}
}
}
/**
* Applies the given function to each input object.
* The output of its child must be a single-field row containing the input object.
*
 * This operator is kind of a safe version of [[ProjectExec]]: as its output is a custom
 * object, we need to use a safe row to contain it.
*/
case class MapElementsExec(
func: AnyRef,
outputObjAttr: Attribute,
child: SparkPlan)
extends ObjectConsumerExec with ObjectProducerExec with CodegenSupport {
override def inputRDDs(): Seq[RDD[InternalRow]] = {
child.asInstanceOf[CodegenSupport].inputRDDs()
}
protected override def doProduce(ctx: CodegenContext): String = {
child.asInstanceOf[CodegenSupport].produce(ctx, this)
}
override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
val (funcClass, methodName) = func match {
case m: MapFunction[_, _] => classOf[MapFunction[_, _]] -> "call"
case _ => FunctionUtils.getFunctionOneName(outputObjAttr.dataType, child.output(0).dataType)
}
val funcObj = Literal.create(func, ObjectType(funcClass))
val callFunc = Invoke(funcObj, methodName, outputObjAttr.dataType, child.output)
val result = BindReferences.bindReference(callFunc, child.output).genCode(ctx)
consume(ctx, result :: Nil)
}
override protected def doExecute(): RDD[InternalRow] = {
val callFunc: Any => Any = func match {
case m: MapFunction[_, _] => i => m.asInstanceOf[MapFunction[Any, Any]].call(i)
case _ => func.asInstanceOf[Any => Any]
}
child.execute().mapPartitionsInternal { iter =>
val getObject = ObjectOperator.unwrapObjectFromRow(child.output.head.dataType)
val outputObject = ObjectOperator.wrapObjectToRow(outputObjAttr.dataType)
iter.map(row => outputObject(callFunc(getObject(row))))
}
}
override def outputOrdering: Seq[SortOrder] = child.outputOrdering
override def outputPartitioning: Partitioning = child.outputPartitioning
}
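// ---------------------------------------------------------------------------
// Hedged sketch (ours): the two callable shapes MapElementsExec accepts, and
// the normalization its doExecute applies. `MapFunction` is Spark's Java API
// interface, already referenced unqualified above; the object and method names
// below are illustrative only.
// ---------------------------------------------------------------------------
object MapElementsFuncShapesSketch {
  val javaStyle: MapFunction[Int, Int] = new MapFunction[Int, Int] {
    override def call(i: Int): Int = i + 1
  }
  val scalaStyle: Int => Int = _ + 1

  // Mirrors the dispatch in MapElementsExec.doExecute.
  def normalize(func: AnyRef): Any => Any = func match {
    case m: MapFunction[_, _] => i => m.asInstanceOf[MapFunction[Any, Any]].call(i)
    case _ => func.asInstanceOf[Any => Any]
  }
}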
/**
* Applies the given function to each input row, appending the encoded result at the end of the row.
*/
case class AppendColumnsExec(
func: Any => Any,
deserializer: Expression,
serializer: Seq[NamedExpression],
child: SparkPlan) extends UnaryExecNode {
override def output: Seq[Attribute] = child.output ++ serializer.map(_.toAttribute)
override def outputPartitioning: Partitioning = child.outputPartitioning
private def newColumnSchema = serializer.map(_.toAttribute).toStructType
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsInternal { iter =>
val getObject = ObjectOperator.deserializeRowToObject(deserializer, child.output)
val combiner = GenerateUnsafeRowJoiner.create(child.schema, newColumnSchema)
val outputObject = ObjectOperator.serializeObjectToRow(serializer)
iter.map { row =>
val newColumns = outputObject(func(getObject(row)))
combiner.join(row.asInstanceOf[UnsafeRow], newColumns): InternalRow
}
}
}
}
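// ---------------------------------------------------------------------------
// Hedged sketch (ours): the row-level contract AppendColumnsExec implements,
// expressed over plain Scala values — every input row keeps its columns and
// gains the encoded result of `func` applied to its deserialized object. All
// names here are illustrative, not Spark API.
// ---------------------------------------------------------------------------
object AppendColumnsContractSketch {
  def appendColumns[A, B](
      rows: Seq[Seq[Any]],
      decode: Seq[Any] => A,
      func: A => B,
      encode: B => Seq[Any]): Seq[Seq[Any]] =
    rows.map(row => row ++ encode(func(decode(row))))
}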
/**
 * An optimized version of [[AppendColumnsExec]] that can be executed
 * on deserialized objects directly.
*/
case class AppendColumnsWithObjectExec(
func: Any => Any,
inputSerializer: Seq[NamedExpression],
newColumnsSerializer: Seq[NamedExpression],
child: SparkPlan) extends ObjectConsumerExec {
override def output: Seq[Attribute] = (inputSerializer ++ newColumnsSerializer).map(_.toAttribute)
override def outputPartitioning: Partitioning = child.outputPartitioning
private def inputSchema = inputSerializer.map(_.toAttribute).toStructType
private def newColumnSchema = newColumnsSerializer.map(_.toAttribute).toStructType
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsInternal { iter =>
val getChildObject = ObjectOperator.unwrapObjectFromRow(child.output.head.dataType)
val outputChildObject = ObjectOperator.serializeObjectToRow(inputSerializer)
      val outputNewColumnObj = ObjectOperator.serializeObjectToRow(newColumnsSerializer)
val combiner = GenerateUnsafeRowJoiner.create(inputSchema, newColumnSchema)
iter.map { row =>
val childObj = getChildObject(row)
        val newColumns = outputNewColumnObj(func(childObj))
combiner.join(outputChildObject(childObj), newColumns): InternalRow
}
}
}
}
/**
* Groups the input rows together and calls the function with each group and an iterator containing
* all elements in the group. The result of this function is flattened before being output.
*/
case class MapGroupsExec(
func: (Any, Iterator[Any]) => TraversableOnce[Any],
keyDeserializer: Expression,
valueDeserializer: Expression,
groupingAttributes: Seq[Attribute],
dataAttributes: Seq[Attribute],
outputObjAttr: Attribute,
child: SparkPlan) extends UnaryExecNode with ObjectProducerExec {
override def outputPartitioning: Partitioning = child.outputPartitioning
override def requiredChildDistribution: Seq[Distribution] =
ClusteredDistribution(groupingAttributes) :: Nil
override def requiredChildOrdering: Seq[Seq[SortOrder]] =
Seq(groupingAttributes.map(SortOrder(_, Ascending)))
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsInternal { iter =>
val grouped = GroupedIterator(iter, groupingAttributes, child.output)
val getKey = ObjectOperator.deserializeRowToObject(keyDeserializer, groupingAttributes)
val getValue = ObjectOperator.deserializeRowToObject(valueDeserializer, dataAttributes)
val outputObject = ObjectOperator.wrapObjectToRow(outputObjAttr.dataType)
grouped.flatMap { case (key, rowIter) =>
val result = func(
getKey(key),
rowIter.map(getValue))
result.map(outputObject)
}
}
}
}
object MapGroupsExec {
def apply(
func: (Any, Iterator[Any], LogicalGroupState[Any]) => TraversableOnce[Any],
keyDeserializer: Expression,
valueDeserializer: Expression,
groupingAttributes: Seq[Attribute],
dataAttributes: Seq[Attribute],
outputObjAttr: Attribute,
timeoutConf: GroupStateTimeout,
child: SparkPlan): MapGroupsExec = {
val watermarkPresent = child.output.exists {
case a: Attribute if a.metadata.contains(EventTimeWatermark.delayKey) => true
case _ => false
}
val f = (key: Any, values: Iterator[Any]) => {
func(key, values, GroupStateImpl.createForBatch(timeoutConf, watermarkPresent))
}
new MapGroupsExec(f, keyDeserializer, valueDeserializer,
groupingAttributes, dataAttributes, outputObjAttr, child)
}
}
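// ---------------------------------------------------------------------------
// Hedged sketch (ours): the grouping contract MapGroupsExec drives, expressed
// with plain Scala collections — group by key, hand the function the key and
// an iterator over the group's values, then flatten the results. Illustrative
// names only.
// ---------------------------------------------------------------------------
object MapGroupsContractSketch {
  def mapGroups[K, V, O](rows: Seq[(K, V)])(
      f: (K, Iterator[V]) => TraversableOnce[O]): Seq[O] =
    rows.groupBy(_._1).toSeq.flatMap { case (k, kvs) => f(k, kvs.map(_._2).iterator) }
}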
/**
* Groups the input rows together and calls the R function with each group and an iterator
* containing all elements in the group.
* The result of this function is flattened before being output.
*/
case class FlatMapGroupsInRExec(
func: Array[Byte],
packageNames: Array[Byte],
broadcastVars: Array[Broadcast[Object]],
inputSchema: StructType,
outputSchema: StructType,
keyDeserializer: Expression,
valueDeserializer: Expression,
groupingAttributes: Seq[Attribute],
dataAttributes: Seq[Attribute],
outputObjAttr: Attribute,
child: SparkPlan) extends UnaryExecNode with ObjectProducerExec {
override def output: Seq[Attribute] = outputObjAttr :: Nil
override def outputPartitioning: Partitioning = child.outputPartitioning
override def producedAttributes: AttributeSet = AttributeSet(outputObjAttr)
override def requiredChildDistribution: Seq[Distribution] =
if (groupingAttributes.isEmpty) {
AllTuples :: Nil
} else {
ClusteredDistribution(groupingAttributes) :: Nil
}
override def requiredChildOrdering: Seq[Seq[SortOrder]] =
Seq(groupingAttributes.map(SortOrder(_, Ascending)))
override protected def doExecute(): RDD[InternalRow] = {
val isSerializedRData = outputSchema == SERIALIZED_R_DATA_SCHEMA
val serializerForR = if (!isSerializedRData) {
SerializationFormats.ROW
} else {
SerializationFormats.BYTE
}
child.execute().mapPartitionsInternal { iter =>
val grouped = GroupedIterator(iter, groupingAttributes, child.output)
val getKey = ObjectOperator.deserializeRowToObject(keyDeserializer, groupingAttributes)
val getValue = ObjectOperator.deserializeRowToObject(valueDeserializer, dataAttributes)
val outputObject = ObjectOperator.wrapObjectToRow(outputObjAttr.dataType)
val runner = new RRunner[(Array[Byte], Iterator[Array[Byte]]), Array[Byte]](
func, SerializationFormats.ROW, serializerForR, packageNames, broadcastVars,
isDataFrame = true, colNames = inputSchema.fieldNames,
mode = RRunnerModes.DATAFRAME_GAPPLY)
val groupedRBytes = grouped.map { case (key, rowIter) =>
val deserializedIter = rowIter.map(getValue)
val newIter =
deserializedIter.asInstanceOf[Iterator[Row]].map { row => rowToRBytes(row) }
val newKey = rowToRBytes(getKey(key).asInstanceOf[Row])
(newKey, newIter)
}
val outputIter = runner.compute(groupedRBytes, -1)
if (!isSerializedRData) {
val result = outputIter.map { bytes => bytesToRow(bytes, outputSchema) }
result.map(outputObject)
} else {
val result = outputIter.map { bytes => Row.fromSeq(Seq(bytes)) }
result.map(outputObject)
}
}
}
}
/**
 * Similar to [[FlatMapGroupsInRExec]] but serializes and deserializes input/output in
 * Arrow format.
 * This is also somewhat similar to
 * [[org.apache.spark.sql.execution.python.FlatMapGroupsInPandasExec]].
*/
case class FlatMapGroupsInRWithArrowExec(
func: Array[Byte],
packageNames: Array[Byte],
broadcastVars: Array[Broadcast[Object]],
inputSchema: StructType,
output: Seq[Attribute],
keyDeserializer: Expression,
groupingAttributes: Seq[Attribute],
child: SparkPlan) extends UnaryExecNode {
override def outputPartitioning: Partitioning = child.outputPartitioning
override def producedAttributes: AttributeSet = AttributeSet(output)
override def requiredChildDistribution: Seq[Distribution] =
if (groupingAttributes.isEmpty) {
AllTuples :: Nil
} else {
ClusteredDistribution(groupingAttributes) :: Nil
}
override def requiredChildOrdering: Seq[Seq[SortOrder]] =
Seq(groupingAttributes.map(SortOrder(_, Ascending)))
override protected def doExecute(): RDD[InternalRow] = {
child.execute().mapPartitionsInternal { iter =>
val grouped = GroupedIterator(iter, groupingAttributes, child.output)
val getKey = ObjectOperator.deserializeRowToObject(keyDeserializer, groupingAttributes)
val keys = collection.mutable.ArrayBuffer.empty[Array[Byte]]
val groupedByRKey: Iterator[Iterator[InternalRow]] =
grouped.map { case (key, rowIter) =>
keys.append(rowToRBytes(getKey(key).asInstanceOf[Row]))
rowIter
}
val runner = new ArrowRRunner(func, packageNames, broadcastVars, inputSchema,
SQLConf.get.sessionLocalTimeZone, RRunnerModes.DATAFRAME_GAPPLY) {
protected override def bufferedWrite(
dataOut: DataOutputStream)(writeFunc: ByteArrayOutputStream => Unit): Unit = {
super.bufferedWrite(dataOut)(writeFunc)
// Don't forget we're sending keys additionally.
keys.foreach(dataOut.write)
}
}
// The communication mechanism is as follows:
//
// JVM side R side
//
// 1. Group internal rows
// 2. Grouped internal rows --------> Arrow record batches
// 3. Grouped keys --------> Regular serialized keys
// 4. Converts each Arrow record batch to each R data frame
// 5. Deserializes keys
      // 6. Maps each key to each R data frame
// 7. Computes R native function on each key/R data frame
// 8. Converts all R data frames to Arrow record batches
// 9. Columnar batches <-------- Arrow record batches
// 10. Each row from each batch
//
      // Note that, unlike the Python vectorization implementation, the R side sends
      // Arrow-formatted binary in a single batch due to a limitation of the R API.
      // See also ARROW-4512.
val columnarBatchIter = runner.compute(groupedByRKey, -1)
val outputProject = UnsafeProjection.create(output, output)
columnarBatchIter.flatMap(_.rowIterator().asScala).map(outputProject)
}
}
}
/**
* Co-groups the data from left and right children, and calls the function with each group and 2
* iterators containing all elements in the group from left and right side.
* The result of this function is flattened before being output.
*/
case class CoGroupExec(
func: (Any, Iterator[Any], Iterator[Any]) => TraversableOnce[Any],
keyDeserializer: Expression,
leftDeserializer: Expression,
rightDeserializer: Expression,
leftGroup: Seq[Attribute],
rightGroup: Seq[Attribute],
leftAttr: Seq[Attribute],
rightAttr: Seq[Attribute],
outputObjAttr: Attribute,
left: SparkPlan,
right: SparkPlan) extends BinaryExecNode with ObjectProducerExec {
override def requiredChildDistribution: Seq[Distribution] =
HashClusteredDistribution(leftGroup) :: HashClusteredDistribution(rightGroup) :: Nil
override def requiredChildOrdering: Seq[Seq[SortOrder]] =
leftGroup.map(SortOrder(_, Ascending)) :: rightGroup.map(SortOrder(_, Ascending)) :: Nil
override protected def doExecute(): RDD[InternalRow] = {
left.execute().zipPartitions(right.execute()) { (leftData, rightData) =>
val leftGrouped = GroupedIterator(leftData, leftGroup, left.output)
val rightGrouped = GroupedIterator(rightData, rightGroup, right.output)
val getKey = ObjectOperator.deserializeRowToObject(keyDeserializer, leftGroup)
val getLeft = ObjectOperator.deserializeRowToObject(leftDeserializer, leftAttr)
val getRight = ObjectOperator.deserializeRowToObject(rightDeserializer, rightAttr)
val outputObject = ObjectOperator.wrapObjectToRow(outputObjAttr.dataType)
new CoGroupedIterator(leftGrouped, rightGrouped, leftGroup).flatMap {
case (key, leftResult, rightResult) =>
val result = func(
getKey(key),
leftResult.map(getLeft),
rightResult.map(getRight))
result.map(outputObject)
}
}
}
}
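// ---------------------------------------------------------------------------
// Hedged sketch (ours): the co-grouping contract CoGroupExec drives, in plain
// Scala collections — for every key present on either side, the function gets
// the key plus iterators over both groups, and results are flattened. Names
// are illustrative, not Spark API.
// ---------------------------------------------------------------------------
object CoGroupContractSketch {
  def coGroup[K, L, R, O](left: Map[K, Seq[L]], right: Map[K, Seq[R]])(
      f: (K, Iterator[L], Iterator[R]) => TraversableOnce[O]): Seq[O] =
    (left.keySet ++ right.keySet).toSeq.flatMap { k =>
      f(k, left.getOrElse(k, Nil).iterator, right.getOrElse(k, Nil).iterator)
    }
}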
| icexelloss/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/objects.scala | Scala | apache-2.0 | 24,374 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package base
import org.jetbrains.plugins.scala.lang.lexer.{ScalaTokenType, ScalaTokenTypes}
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
object End {
private val isAllowedEndToken = Set(
ScalaTokenTypes.kVAL,
ScalaTokenTypes.kIF,
ScalaTokenTypes.kWHILE,
ScalaTokenTypes.kFOR,
ScalaTokenTypes.kMATCH,
ScalaTokenTypes.kTRY,
ScalaTokenTypes.kTHIS,
ScalaTokenType.NewKeyword,
ScalaTokenType.GivenKeyword,
ScalaTokenType.ExtensionKeyword,
ScalaTokenTypes.tIDENTIFIER,
)
//override def parse(implicit builder: ScalaPsiBuilder): Boolean = apply(builder.currentIndentationWidth)
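  // Illustrative reference (ours, not part of the parser): shapes the token set
  // above admits in Scala 3 sources, e.g.
  //   end foo (tIDENTIFIER: named defs, classes, objects, ...)
  //   end if / end while / end for / end match / end try / end val
  //   end this / end new / end given / end extension
  // A marker only counts when its indentation equals `targetIndentationWidth`.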
def apply(targetIndentationWidth: IndentationWidth)(implicit builder: ScalaPsiBuilder): Boolean = {
if (!builder.isScala3)
return false
if (builder.getTokenType == ScalaTokenTypes.tIDENTIFIER &&
builder.getTokenText == "end" &&
isAllowedEndToken(builder.lookAhead(1)) &&
builder.findPreviousIndent.contains(targetIndentationWidth)) {
val marker = builder.mark()
builder.remapCurrentToken(ScalaTokenType.EndKeyword)
builder.advanceLexer() // ate end
if (builder.getTokenType == ScalaTokenTypes.tIDENTIFIER && builder.getTokenText == ScalaTokenType.ExtensionKeyword.keywordText) {
builder.remapCurrentToken(ScalaTokenType.ExtensionKeyword)
}
builder.advanceLexer() // ate end-token
marker.done(ScalaElementType.END_STMT)
true
} else false
}
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/base/End.scala | Scala | apache-2.0 | 1,592 |
package gapt.expr
import gapt.expr.ty.Ty
trait VarOrConst extends Expr {
def name: String
}
/**
* Matches constants and variables, but nothing else.
*/
object VarOrConst {
def unapply( e: VarOrConst ): Some[( String, Ty, List[Ty] )] =
e match {
      case Const( n, t, p ) => Some( ( n, t, p ) )
      case Var( n, t ) => Some( ( n, t, Nil ) )
}
}
| gapt/gapt | core/src/main/scala/gapt/expr/VarOrConst.scala | Scala | gpl-3.0 | 361 |
package org.ensime.util
import org.objectweb.asm.{ ClassReader, FieldVisitor, MethodVisitor, Opcodes }
import org.objectweb.asm.commons.EmptyVisitor

import java.io._
import java.util.jar.{ JarFile, Manifest => JarManifest }
import java.util.zip._
trait ClassHandler {
def onClass(name: String, location: String, flags:Int) {}
def onMethod(className: String, name: String, location: String, flags:Int) {}
def onField(className: String, name: String, location: String, flags:Int) {}
}
private class ClassVisitor(location: File, handler: ClassHandler) extends EmptyVisitor {
var currentClassName: Option[String] = None
val path: String = location.getPath()
override def visit(version: Int,
access: Int,
name: String,
signature: String,
superName: String,
interfaces: Array[String]) {
val nm = mapClassName(name)
currentClassName = Some(nm)
handler.onClass(nm, path, access)
}
override def visitMethod(access: Int,
name: String,
description: String,
signature: String,
exceptions: Array[String]): MethodVisitor =
{
handler.onMethod(currentClassName.getOrElse("."), name, path, access)
null
}
override def visitField(access: Int,
name: String,
description: String,
signature: String,
value: java.lang.Object): FieldVisitor =
{
handler.onField(currentClassName.getOrElse("."), name, path, access)
null
}
private def mapClassName(name: String): String = {
if (name == null) ""
else name.replaceAll("/", ".")
}
}
object ClassIterator {
val ASMAcceptCriteria = 0
def find(path: Iterable[File], handler: ClassHandler) {
for (f <- path) {
findClassesIn(f, handler)
}
}
private def findClassesIn(f: File, handler: ClassHandler) {
val name = f.getPath.toLowerCase
if (name.endsWith(".jar"))
processJar(f, handler)
else if (name.endsWith(".zip"))
processZip(f, handler)
else if (f.isDirectory)
processDirectory(f, handler)
}
private def processJar(file: File, handler: ClassHandler) {
val jar = new JarFile(file)
processOpenZip(file, jar, handler)
    val manifest = jar.getManifest
if (manifest != null) {
val path = loadManifestPath(jar, file, manifest)
find(path, handler)
}
}
private def loadManifestPath(jar: JarFile,
jarFile: File,
manifest: JarManifest): List[File] =
{
import scala.collection.JavaConversions._
val attrs = manifest.getMainAttributes
val value = attrs.get("Class-Path").asInstanceOf[String]
if (value == null)
Nil
else {
val parent = jarFile.getParent
      val tokens = value.split("""\s+""").toList
if (parent == null)
tokens.map(new File(_))
else
tokens.map(s => new File(parent + File.separator + s))
}
}
private def processZip(file: File, handler: ClassHandler) {
processOpenZip(file, new ZipFile(file), handler)
}
private def processOpenZip(file: File, zipFile: ZipFile, handler: ClassHandler) {
import scala.collection.JavaConversions._
val zipFileName = file.getPath
for (e <- zipFile.entries) {
if (isClass(e)) {
processClassData(new BufferedInputStream(
zipFile.getInputStream(e)), file, handler)
}
}
}
// Matches both ZipEntry and File
type FileEntry = {
def isDirectory(): Boolean
def getName(): String
}
private def isClass(e: FileEntry): Boolean =
(!e.isDirectory) && (e.getName.toLowerCase.endsWith(".class"))
private def processDirectory(dir: File, handler: ClassHandler) {
import FileUtils._
for (f <- dir.andTree) {
if (isClass(f)) {
processClassData(new BufferedInputStream(
new FileInputStream(f)), dir, handler)
}
}
}
private def processClassData(is: InputStream, location: File, handler: ClassHandler) {
val cr = new ClassReader(is)
val visitor = new ClassVisitor(location, handler)
cr.accept(visitor, ClassReader.SKIP_CODE)
}
}
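// Hedged usage sketch (ours, not part of ENSIME): counting classes on a
// classpath with the handler API above. Only the onClass callback is
// overridden; the other callbacks keep their empty defaults. The object and
// method names are illustrative only.
object ClassIteratorUsageSketch {
  def countClasses(path: Iterable[File]): Int = {
    var n = 0
    ClassIterator.find(path, new ClassHandler {
      override def onClass(name: String, location: String, flags: Int) { n += 1 }
    })
    n
  }
}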
| tbje/ensime | src/main/scala/org/ensime/util/ClassIterator.scala | Scala | gpl-3.0 | 4,285 |
package org.openeyes.api.controllers
import org.json4s.mongo.ObjectIdSerializer
import org.json4s.{DefaultFormats, FullTypeHints}
import org.openeyes.api.forms.EncounterForm
import org.openeyes.api.models.{Element, Encounter}
import org.openeyes.api.services.EncounterService
import org.openeyes.api.stacks.ApiStack
import org.scalatra.swagger.Swagger
/**
* Created by jamie on 15/09/2014.
*/
class EncounterController(implicit val swagger: Swagger) extends ApiStack {
protected val applicationDescription = "The Encounter API."
override protected implicit val jsonFormats = new DefaultFormats {
// NOTE: Comment this in if you want/need to see why elements aren't saving correctly.
    // It is currently commented out to avoid issues with the front end: for instance, if
    // "lefteye": {} is posted then this will throw an exception. Once the front end is
    // changed to send "lefteye": null we can comment this back in and do away with this
    // boring note.
// override val strict = true
override val typeHintFieldName = "type"
override val typeHints = FullTypeHints(List(classOf[Element]))
} + new ObjectIdSerializer
val list = (apiOperation[List[Encounter]]("listEncounters")
notes "Lists all known Encounters"
parameters(
queryParam[String]("patientId").description("An optional Patient ID to filter the Encounters by").optional
)
summary "List Encounters"
)
get("/", operation(list)) {
params.get("patientId") match {
case Some(patientId: String) => EncounterService.findAllForPatient(patientId)
case _ => EncounterService.findAll
}
}
val get = (apiOperation[Encounter]("getEncounter")
notes "Get an Encounter by ID"
parameters(
pathParam[String]("id").description("The ID of the Encounter to retrieve").required
)
summary "Get Encounter"
)
get("/:id", operation(get)) {
val id = params("id")
EncounterService.find(id)
}
val post = (apiOperation[Encounter]("createEncounter")
notes "Create an Encounter"
parameters(
bodyParam[EncounterForm].description("The Encounter content").required
)
summary "Create Encounter"
)
post("/", operation(post)) {
val resource = parsedBody.extract[EncounterForm]
EncounterService.create(resource)
}
}
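// Illustrative summary of the routes defined above (ours, for reference):
//   GET /?patientId=<id> -> list Encounters, optionally filtered by patient
//   GET /:id             -> fetch a single Encounter by ID
//   POST /               -> create an Encounter from an EncounterForm body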
| openeyes/poc-backend | src/main/scala/org/openeyes/api/controllers/EncounterController.scala | Scala | gpl-3.0 | 2,330 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.stream
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableFunctionScan
import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalWindowTableFunction
import org.apache.flink.table.planner.plan.utils.WindowUtil
import org.apache.flink.table.planner.plan.utils.WindowUtil.convertToWindowingStrategy
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rex.RexCall
/**
* Rule to convert a [[FlinkLogicalTableFunctionScan]] with window table function call
* into a [[StreamPhysicalWindowTableFunction]].
*/
class StreamPhysicalWindowTableFunctionRule extends ConverterRule(
classOf[FlinkLogicalTableFunctionScan],
FlinkConventions.LOGICAL,
FlinkConventions.STREAM_PHYSICAL,
"StreamPhysicalWindowTableFunctionRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val scan: FlinkLogicalTableFunctionScan = call.rel(0)
WindowUtil.isWindowTableFunctionCall(scan.getCall)
}
def convert(rel: RelNode): RelNode = {
val scan: FlinkLogicalTableFunctionScan = rel.asInstanceOf[FlinkLogicalTableFunctionScan]
val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.STREAM_PHYSICAL)
val newInput = RelOptRule.convert(scan.getInput(0), FlinkConventions.STREAM_PHYSICAL)
new StreamPhysicalWindowTableFunction(
scan.getCluster,
traitSet,
newInput,
scan.getRowType,
convertToWindowingStrategy(scan.getCall.asInstanceOf[RexCall], newInput.getRowType)
)
}
}
object StreamPhysicalWindowTableFunctionRule {
val INSTANCE = new StreamPhysicalWindowTableFunctionRule
}
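// Illustrative registration (ours, not from Flink): converter rules like this
// one are typically collected into a Calcite rule set, e.g.
//   org.apache.calcite.tools.RuleSets.ofList(StreamPhysicalWindowTableFunctionRule.INSTANCE)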
| rmetzger/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/stream/StreamPhysicalWindowTableFunctionRule.scala | Scala | apache-2.0 | 2,664 |
/*
* Copyright (c) 2015 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl.map.mutable
import org.eclipse.collections.impl.{InternalIterableTestTrait, UnmodifiableIterableTestTrait}
class UnmodifiableMutableMapScalaTest extends InternalIterableTestTrait with UnmodifiableIterableTestTrait
{
val map = UnifiedMap.newWithKeysValues(
Integer.valueOf(1), "1",
Integer.valueOf(2), "2",
Integer.valueOf(3), "3")
val classUnderTest = new UnmodifiableMutableMap[Integer, String](map)
}
| g-votte/eclipse-collections | scala-unit-tests/src/test/scala/org/eclipse/collections/impl/map/mutable/UnmodifiableMutableMapScalaTest.scala | Scala | bsd-3-clause | 935 |
package uk.gov.dvla.vehicles.presentation.common.controllers
import play.api.test.FakeRequest
import play.api.test.Helpers.{OK, contentAsString, defaultAwaitTimeout}
import uk.gov.dvla.vehicles.presentation.common.TestWithApplication
import uk.gov.dvla.vehicles.presentation.common.helpers.{CookieFactoryForUnitSpecs, UnitSpec}
import uk.gov.dvla.vehicles.presentation.common.models.ValtechSelectModel.Form.{FirstOption, SecondOption}
class ValtechSelectControllerUnitSpec extends UnitSpec {
"present" should {
"display the page" in new TestWithApplication {
whenReady(present) {
r =>
r.header.status should equal(OK)
}
}
"not display drop down pre-selected when nothing has been previously selected" in new TestWithApplication {
val request = FakeRequest()
val result = valtechSelectController.present(request)
val content = contentAsString(result)
content should not include "selected>"
}
"display drop down pre-selected when cookie contains first option" in new TestWithApplication {
val request = FakeRequest().
withCookies(CookieFactoryForUnitSpecs.valtechSelect(selectedOption = FirstOption))
val result = valtechSelectController.present(request)
val content = contentAsString(result)
content should include(expectedOptionSelected(FirstOption))
}
"display drop down pre-selected when cookie contains second option" in new TestWithApplication {
val request = FakeRequest().
withCookies(CookieFactoryForUnitSpecs.valtechSelect(selectedOption = SecondOption))
val result = valtechSelectController.present(request)
val content = contentAsString(result)
content should include(expectedOptionSelected(SecondOption))
}
}
private def expectedOptionSelected(option: String) = s"""<option value="$option" selected>"""
private def valtechSelectController = injector.getInstance(classOf[ValtechSelectController])
private lazy val present = {
val request = FakeRequest()
valtechSelectController.present(request)
}
}
| dvla/vehicles-presentation-common | common-test/test/uk/gov/dvla/vehicles/presentation/common/controllers/ValtechSelectControllerUnitSpec.scala | Scala | mit | 2,088 |
/*
* ControlPlatform.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.fscape.stream
import de.sciss.file.File
import java.net.URI
trait ControlImplPlatform {
final def createTempFile(): File = File.createTemp()
final def createTempURI(): URI = createTempFile().toURI
}
trait ControlPlatform {
/** Creates a temporary file. The caller is responsible for deleting the file
* after it is not needed any longer. (The file will still be marked `deleteOnExit`)
*/
@deprecated("Only supported on JVM. Use platform neutral createTempURI instead", since = "3.6.0")
def createTempFile(): File
/** Creates a temporary file. The caller is responsible for deleting the file
* after it is not needed any longer. (The file will still be marked `deleteOnExit`)
*/
def createTempURI(): URI
}
| Sciss/FScape-next | core/jvm/src/main/scala/de/sciss/fscape/stream/ControlPlatform.scala | Scala | agpl-3.0 | 1,050 |
class ScalaFile1 {
val value = "value"
def function: Unit = {
println("function called.")
}
}
| Sagacify/sonar-scala | src/test/resources/ScalaFile1.scala | Scala | lgpl-3.0 | 105 |
/*
* Copyright (c) 2014 Alvaro Agea.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import scoverage.ScoverageSbtPlugin._
object Build extends sbt.Build {
lazy val root = (Project(id = "rogerfs", base = file("."))
aggregate (core, common, test, cassandraStore, shell)
settings (instrumentSettings: _*)
)
lazy val core = (Project(id = "core", base = file("rogerfs-core"))
dependsOn (common, test % "test->compile")
settings (instrumentSettings: _*)
)
lazy val test = (Project(id = "test", base = file("rogerfs-test"))
dependsOn common
settings (instrumentSettings: _*)
)
lazy val common = (Project(id = "common", base = file("rogerfs-common"))
settings (instrumentSettings: _*)
)
  lazy val cassandraStore = (Project(id = "cassandra-store", base = file("rogerfs-cassandra-store"))
    dependsOn common
    settings (instrumentSettings: _*)
  )

  lazy val shell = (Project(id = "shell", base = file("rogerfs-shell"))
    dependsOn (common, core, cassandraStore, test % "test->compile")
    settings (instrumentSettings: _*)
  )
}
| aagea/rogerfs | project/Build.scala | Scala | apache-2.0 | 1,601 |
object Test {
trait A[+X] {
protected[this] def f(x: X): X = x
}
trait B extends A[B] {
def kaboom = f(new B {})
}
// protected[this] disables variance checking
// of the signature of `f`.
//
// C's parent list unifies A[B] with A[C]
//
// The protected[this] loophole is widely used
// in the collections, every newBuilder method
// would fail variance checking otherwise.
class C extends B with A[C] {
override protected[this] def f(c: C) = c
}
// java.lang.ClassCastException: B$$anon$1 cannot be cast to C
// at C.f(<console>:15)
new C().kaboom
}
| yusuke2255/dotty | tests/pos/t7093.scala | Scala | bsd-3-clause | 583 |
package dk.gp.cogp.svi
import breeze.linalg.DenseMatrix
import breeze.linalg.inv
import breeze.linalg.InjectNumericOps
import breeze.linalg.cholesky
import dk.gp.math.invchol
import dk.gp.cogp.lb.LowerBound
import dk.gp.cogp.lb.grad.calcLBGradVEta1
import dk.gp.cogp.lb.grad.calcLBGradVEta2
import dk.gp.cogp.model.CogpModel
import breeze.linalg.eig
import breeze.linalg.diag
import dk.gp.cov.CovNoise
import dk.gp.math.inveig
import dk.bayes.math.gaussian.MultivariateGaussian
/**
* Stochastic update for the parameters (mu,S) of p(v|y)
*
* Nguyen et al. Collaborative Multi-output Gaussian Processes, 2014
*/
object stochasticUpdateV {
private val learningRate = 1e-2
def apply(i: Int, lb: LowerBound): MultivariateGaussian = {
val model = lb.model
val v = model.h(i).u
//natural parameters theta
val vInv = invchol(cholesky(v.v).t)
val theta1 = vInv * v.m
val theta2 = -0.5 * vInv
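    // Added note (ours, not from the paper): for the Gaussian q(v) = N(m, V) the
    // natural parameters are theta1 = V^{-1} m and theta2 = -(1/2) V^{-1}. A step
    // of size `learningRate` along the natural gradient updates (theta1, theta2)
    // directly; (m, V) are then recovered as V = -(1/2) theta2^{-1} and
    // m = V theta1, which is what the code below does (with the mean kept fixed
    // for a CovNoise kernel).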
val naturalGradEta1 = calcLBGradVEta1(i, lb)
val naturalGradEta2 = calcLBGradVEta2(i, lb)
val newTheta1 = theta1 + learningRate * naturalGradEta1
val newTheta2 = theta2 + learningRate * naturalGradEta2
    // Invert via eigendecomposition (see dk.gp.math.inveig).
    val invNewTheta2 = inveig(newTheta2)
val newS = -0.5 * invNewTheta2
val newM = lb.model.h(i).covFunc match {
case covFunc: CovNoise => v.m
case _ => newS * newTheta1
}
MultivariateGaussian(newM, newS)
}
}
| danielkorzekwa/bayes-scala-gp | src/main/scala/dk/gp/cogp/svi/stochasticUpdateV.scala | Scala | bsd-2-clause | 1,538 |
package common.runner
import cucumber.api.CucumberOptions
import cucumber.api.junit.Cucumber
import org.junit.runner.RunWith
@RunWith(classOf[Cucumber])
@CucumberOptions(
features = Array("src/test/resources/features/"),
glue = Array("common.stepDefs"),
tags = Array("@suite"),
plugin = Array("pretty", "html:target/cucumber", "json:target/cucumber.json"))
class RunSuite
| avinash-anand/CucumberScala1 | src/test/scala/common/runner/RunSuite.scala | Scala | gpl-2.0 | 382 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.events._
class FunSpecSuite extends FunSuite with SharedHelpers {
test("three plain-old specifiers should be invoked in order") {
class MySpec extends FunSpec with ShouldMatchers {
var example1WasInvoked = false
var example2WasInvokedAfterExample1 = false
var example3WasInvokedAfterExample2 = false
it("should get invoked") {
example1WasInvoked = true
}
it("should also get invoked") {
if (example1WasInvoked)
example2WasInvokedAfterExample1 = true
}
it("should also also get invoked") {
if (example2WasInvokedAfterExample1)
example3WasInvokedAfterExample2 = true
}
}
val a = new MySpec
a.execute()
assert(a.example1WasInvoked)
assert(a.example2WasInvokedAfterExample1)
assert(a.example3WasInvokedAfterExample2)
}
test("three plain-old specifiers should be invoked in order when two are surrounded by a plain-old describe") {
class MySpec extends FunSpec with ShouldMatchers {
var example1WasInvoked = false
var example2WasInvokedAfterExample1 = false
var example3WasInvokedAfterExample2 = false
it("should get invoked") {
example1WasInvoked = true
}
describe("Stack") {
it("should also get invoked") {
if (example1WasInvoked)
example2WasInvokedAfterExample1 = true
}
it("should also also get invoked") {
if (example2WasInvokedAfterExample1)
example3WasInvokedAfterExample2 = true
}
}
}
val a = new MySpec
a.execute()
assert(a.example1WasInvoked)
assert(a.example2WasInvokedAfterExample1)
assert(a.example3WasInvokedAfterExample2)
}
test("two plain-old specifiers should show up in order of appearance in testNames") {
class MySpec extends FunSpec with ShouldMatchers {
var example1WasInvoked = false
var example2WasInvokedAfterExample1 = false
it("should get invoked") {
example1WasInvoked = true
}
it("should also get invoked") {
if (example1WasInvoked)
example2WasInvokedAfterExample1 = true
}
}
val a = new MySpec
a.execute()
assert(a.testNames.size === 2)
assert(a.testNames.iterator.toList(0) === "should get invoked")
assert(a.testNames.iterator.toList(1) === "should also get invoked")
}
test("plain-old specifier test names should include an enclosing describe string, separated by a space") {
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
it("must allow me to pop") {}
it("must allow me to push") {}
}
}
val a = new MySpec
assert(a.testNames.size === 2)
assert(a.testNames.iterator.toList(0) === "A Stack must allow me to pop")
assert(a.testNames.iterator.toList(1) === "A Stack must allow me to push")
}
test("plain-old test names should properly nest plain-old descriptions in test names") {
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
describe("(when not empty)") {
it("must allow me to pop") {}
}
describe("(when not full)") {
it("must allow me to push") {}
}
}
}
val a = new MySpec
assert(a.testNames.size === 2)
assert(a.testNames.iterator.toList(0) === "A Stack (when not empty) must allow me to pop")
assert(a.testNames.iterator.toList(1) === "A Stack (when not full) must allow me to push")
}
test("should be able to mix in BeforeAndAfterEach with BeforeAndAfterAll without any problems") {
class MySpec extends FunSpec with ShouldMatchers with BeforeAndAfterEach with BeforeAndAfterAll {
describe("A Stack") {
describe("(when not empty)") {
it("should allow me to pop") {}
}
describe("(when not full)") {
it("should allow me to push") {}
}
}
}
val a = new MySpec
a.execute()
}
// Test for good strings in report for top-level examples
test("Top-level plain-old specifiers should yield good strings in a TestSucceeded report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("must start with proper words") {}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
test("Top-level plain-old specifiers should yield good strings in a testSucceeded report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("must start with proper words") {}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
test("Top-level plain-old specifiers should yield good strings in a testFailed report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case event: TestFailed =>
if (event.testName.indexOf("must start with proper words") != -1)
reportHadCorrectTestName = true
event.formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("must start with proper words") { fail() }
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
// Tests for good strings in report for nested-one-level examples
test("Nested-one-level plain-old specifiers should yield good strings in a TestSucceeded report") {
var infoReportHadCorrectTestName = false
var infoReportHadCorrectSpecText = false
var infoReportHadCorrectFormattedSpecText = false
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
var scopeOpenedHasBeenInvoked = false
var theOtherMethodHasBeenInvoked = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
// scopeOpened should be invoked before the other method
assert(!theOtherMethodHasBeenInvoked)
scopeOpenedHasBeenInvoked = true
if (message.indexOf("My Spec") != -1)
infoReportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My Spec")
infoReportHadCorrectSpecText = true
if (formattedText == "My Spec")
infoReportHadCorrectFormattedSpecText = true
case _ =>
}
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
          // scopeOpened should be invoked before this method
assert(scopeOpenedHasBeenInvoked)
theOtherMethodHasBeenInvoked = true
if (testName.indexOf("My Spec must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("My Spec") {
it("must start with proper words") {}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
assert(infoReportHadCorrectTestName)
assert(infoReportHadCorrectSpecText)
assert(infoReportHadCorrectFormattedSpecText)
}
test("Nested-one-level plain-old specifiers should yield good strings in a testSucceeded report") {
var infoReportHadCorrectTestName = false
var infoReportHadCorrectSpecText = false
var infoReportHadCorrectFormattedSpecText = false
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
var scopeOpenedHasBeenInvoked = false
var theOtherMethodHasBeenInvoked = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
// scopeOpened should be invoked before the other method
assert(!theOtherMethodHasBeenInvoked)
scopeOpenedHasBeenInvoked = true
if (message.indexOf("My Spec") != -1)
infoReportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My Spec")
infoReportHadCorrectSpecText = true
if (formattedText == "My Spec")
infoReportHadCorrectFormattedSpecText = true
case _ =>
}
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
          // scopeOpened should be invoked before this method
assert(scopeOpenedHasBeenInvoked)
theOtherMethodHasBeenInvoked = true
if (testName.indexOf("My Spec must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("My Spec") {
it("must start with proper words") {}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
assert(infoReportHadCorrectTestName)
assert(infoReportHadCorrectSpecText)
assert(infoReportHadCorrectFormattedSpecText)
}
test("Nested-one-level plain-old specifiers should yield good strings in a TestFailed report") {
var infoReportHadCorrectTestName = false
var infoReportHadCorrectSpecText = false
var infoReportHadCorrectFormattedSpecText = false
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
var scopeOpenedHasBeenInvoked = false
var theOtherMethodHasBeenInvoked = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
// scopeOpened should be invoked before the other method
assert(!theOtherMethodHasBeenInvoked)
scopeOpenedHasBeenInvoked = true
if (message.indexOf("My Spec") != -1)
infoReportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My Spec")
infoReportHadCorrectSpecText = true
if (formattedText == "My Spec")
infoReportHadCorrectFormattedSpecText = true
case _ =>
}
case event: TestFailed =>
          // scopeOpened should be invoked before this method
assert(scopeOpenedHasBeenInvoked)
theOtherMethodHasBeenInvoked = true
if (event.testName.indexOf("My Spec must start with proper words") != -1)
reportHadCorrectTestName = true
event.formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("My Spec") {
it("must start with proper words") { fail() }
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
assert(infoReportHadCorrectTestName)
assert(infoReportHadCorrectSpecText)
assert(infoReportHadCorrectFormattedSpecText)
}
// Tests for good strings in report for nested-two-levels examples
test("Nested-two-levels plain-old specifiers should yield good strings in a TestSucceeded report") { //ZZZ
var infoReportHadCorrectTestName = false
var infoReportHadCorrectSpecText = false
var infoReportHadCorrectFormattedSpecText = false
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
var scopeOpenedHasBeenInvokedOnce = false
var scopeOpenedHasBeenInvokedTwice = false
var theOtherMethodHasBeenInvoked = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
// scopeOpened should be invoked before the other method
assert(!theOtherMethodHasBeenInvoked)
if (!scopeOpenedHasBeenInvokedOnce) {
scopeOpenedHasBeenInvokedOnce = true
if (message.indexOf("My") >= 0)
infoReportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My")
infoReportHadCorrectSpecText = true
if (formattedText == "My")
infoReportHadCorrectFormattedSpecText = true
case _ =>
}
}
else {
scopeOpenedHasBeenInvokedTwice = true
if (message.indexOf("Spec") < 0)
infoReportHadCorrectTestName = false
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText != "Spec")
infoReportHadCorrectSpecText = false
if (formattedText != " Spec")
infoReportHadCorrectFormattedSpecText = false
case _ =>
}
}
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
          // scopeOpened should be invoked before this method
assert(scopeOpenedHasBeenInvokedTwice)
theOtherMethodHasBeenInvoked = true
if (testName.indexOf("My Spec must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == " - must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("My") {
describe("Spec") {
it("must start with proper words") {}
}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
assert(infoReportHadCorrectTestName)
assert(infoReportHadCorrectSpecText)
assert(infoReportHadCorrectFormattedSpecText)
}
test("Nested-two-levels plain-old specifiers should yield good strings in a TestFailed report") { //YYY
var infoReportHadCorrectTestName = false
var infoReportHadCorrectSpecText = false
var infoReportHadCorrectFormattedSpecText = false
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
var scopeOpenedHasBeenInvokedOnce = false
var scopeOpenedHasBeenInvokedTwice = false
var theOtherMethodHasBeenInvoked = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
// scopeOpened should be invoked before the other method
assert(!theOtherMethodHasBeenInvoked)
if (!scopeOpenedHasBeenInvokedOnce) {
scopeOpenedHasBeenInvokedOnce = true
if (message.indexOf("My") >= 0)
infoReportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My")
infoReportHadCorrectSpecText = true
if (formattedText == "My")
infoReportHadCorrectFormattedSpecText = true
case _ =>
}
}
else {
scopeOpenedHasBeenInvokedTwice = true
if (message.indexOf("Spec") < 0)
infoReportHadCorrectTestName = false
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText != "Spec")
infoReportHadCorrectSpecText = false
if (formattedText != " Spec")
infoReportHadCorrectFormattedSpecText = false
case _ =>
}
}
case event: TestFailed =>
          // scopeOpened should be invoked before this method
assert(scopeOpenedHasBeenInvokedTwice)
theOtherMethodHasBeenInvoked = true
if (event.testName.indexOf("My Spec must start with proper words") != -1)
reportHadCorrectTestName = true
event.formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == " - must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("My") {
describe("Spec") {
it("must start with proper words") { fail() }
}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
assert(infoReportHadCorrectTestName)
assert(infoReportHadCorrectSpecText)
assert(infoReportHadCorrectFormattedSpecText)
}
// Test for good strings in report for top-level shared behavior examples
test("Top-level 'shared behavior - fancy specifiers' should yield good strings in a TestSucceeded report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("it should start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "it should start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- it should start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec {
def myBehavior(i: Int) {
it("it should start with proper words") {}
}
it should behave like myBehavior(1)
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
test("Top-level 'shared behavior - plain-old specifiers' should yield good strings in a TestSucceeded report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec {
def myBehavior(i: Int) {
it("must start with proper words") {}
}
it should behave like myBehavior(1)
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
test("Top-level 'shared behavior - plain-old specifiers' should yield good strings in a testSucceeded report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("must start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec {
def myBehavior(i: Int) {
it("must start with proper words") {}
}
it should behave like myBehavior(1)
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
test("Top-level 'shared behavior - plain-old specifiers' should yield good strings in a TestFailed report") {
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case event: TestFailed =>
if (event.testName.indexOf("must start with proper words") != -1)
reportHadCorrectTestName = true
event.formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "must start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- must start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec {
def myBehavior(i: Int) {
it("must start with proper words") { fail() }
}
it should behave like myBehavior(1)
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
}
// Tests for good strings in report for shared-behavior, nested-one-level specifiers
test("Nested-one-level 'shared behavior' should yield good strings in a TestSucceeded report") {
var infoReportHadCorrectTestName = false
var infoReportHadCorrectSpecText = false
var infoReportHadCorrectFormattedSpecText = false
var reportHadCorrectTestName = false
var reportHadCorrectSpecText = false
var reportHadCorrectFormattedSpecText = false
var scopeOpenedHasBeenInvoked = false
var theOtherMethodHasBeenInvoked = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case ScopeOpened(ordinal, message, nameInfo, formatter, location, payload, threadName, timeStamp) =>
// scopeOpened should be invoked before the other method
assert(!theOtherMethodHasBeenInvoked)
scopeOpenedHasBeenInvoked = true
if (message.indexOf("My Spec") != -1)
infoReportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My Spec")
infoReportHadCorrectSpecText = true
if (formattedText == "My Spec")
infoReportHadCorrectFormattedSpecText = true
case _ =>
}
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
            // scopeOpened should be invoked before this method
assert(scopeOpenedHasBeenInvoked)
theOtherMethodHasBeenInvoked = true
if (testName.indexOf("My Spec should start with proper words") != -1)
reportHadCorrectTestName = true
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "should start with proper words")
reportHadCorrectSpecText = true
if (formattedText == "- should start with proper words")
reportHadCorrectFormattedSpecText = true
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec {
def myBehavior(i: Int) {
it("should start with proper words") {}
}
describe("My Spec") {
it should behave like myBehavior(1)
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(reportHadCorrectTestName)
assert(reportHadCorrectSpecText)
assert(reportHadCorrectFormattedSpecText)
assert(infoReportHadCorrectTestName)
assert(infoReportHadCorrectSpecText)
assert(infoReportHadCorrectFormattedSpecText)
}
  // Original intent unclear; kept as a regression check that an empty describe is harmless.
test("An empty describe shouldn't throw an exception") {
class MySpec extends FunSpec with ShouldMatchers {
describe("this will be empty") {}
}
val a = new MySpec
a.execute()
}
test("Only a passed test name should be invoked.") {
var correctTestWasInvoked = false
var wrongTestWasInvoked = false
class MySpec extends FunSpec with ShouldMatchers {
it("it should be invoked") {
correctTestWasInvoked = true
}
it("it should not be invoked") {
wrongTestWasInvoked = true
}
}
val a = new MySpec
a.run(Some("it should be invoked"), Args(StubReporter))
assert(correctTestWasInvoked)
assert(!wrongTestWasInvoked)
}
test("Config map should make it through to runTest") {
var foundMyGoodie = false
class MySpec extends FunSpec with ShouldMatchers {
override def runTest(testName: String, args: Args): Status = {
foundMyGoodie = args.configMap.contains("my goodie")
super.runTest(testName, args)
}
it("it should find my goodie") {}
}
val a = new MySpec
a.run(None, Args(StubReporter, Stopper.default, Filter(), Map("my goodie" -> "hi"), None, new Tracker, Set.empty))
assert(foundMyGoodie)
}
  // Candidate for deletion: repeats an earlier test.
test("In a TestSucceeded report, the example name should start with '<description> should' if nested two levels inside describe clauses") {
var testSucceededReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("A Stack (when working right) should push and pop properly") != -1) {
testSucceededReportHadCorrectTestName = true
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
describe("(when working right)") {
it("should push and pop properly") {}
}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectTestName)
}
test("expectedTestCount is the number of plain-old specifiers if no shares") {
class MySpec extends FunSpec with ShouldMatchers {
it("must one") {}
it("must two") {}
describe("behavior") {
it("must three") {}
it("must four") {}
}
it("must five") {}
}
val a = new MySpec
assert(a.expectedTestCount(Filter()) === 5)
}
// Testing strings sent in reports
test("In a TestSucceeded report, the example name should be verbatim if top level if example registered with it") {
var testSucceededReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("this thing must start with proper words") != -1) {
testSucceededReportHadCorrectTestName = true
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("this thing must start with proper words") {}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectTestName)
}
test("In a testSucceeded report, the example name should be verbatim if top level if example registered with it") {
var testSucceededReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("this thing must start with proper words") != -1) {
testSucceededReportHadCorrectTestName = true
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("this thing must start with proper words") {}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectTestName)
}
test("In a TestFailed report, the example name should be verbatim if top level if example registered with it") {
var testFailedReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case event: TestFailed =>
if (event.testName.indexOf("this thing must start with proper words") != -1)
testFailedReportHadCorrectTestName = true
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("this thing must start with proper words") { fail() }
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testFailedReportHadCorrectTestName)
}
test("In a TestStarting report, the example name should start with '<description> ' if nested one level " +
"inside a describe clause and registered with it") {
var testSucceededReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestStarting(_, _, _, _, testName, _, _, _, _, _, _, _) =>
if (testName == "A Stack needs to push and pop properly") {
testSucceededReportHadCorrectTestName = true
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
it("needs to push and pop properly") {}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectTestName)
}
test("Specs should send defined formatters") {
class MyReporter extends Reporter {
var gotAnUndefinedFormatter = false
var lastEventWithUndefinedFormatter: Option[Event] = None
private def ensureFormatterIsDefined(event: Event) {
if (!event.formatter.isDefined) {
gotAnUndefinedFormatter = true
lastEventWithUndefinedFormatter = Some(event)
}
}
def apply(event: Event) {
event match {
case event: RunAborted => ensureFormatterIsDefined(event)
case event: SuiteAborted => ensureFormatterIsDefined(event)
case event: SuiteStarting => ensureFormatterIsDefined(event)
case event: SuiteCompleted => ensureFormatterIsDefined(event)
case event: TestStarting => ensureFormatterIsDefined(event)
case event: TestSucceeded => ensureFormatterIsDefined(event)
case event: TestIgnored => ensureFormatterIsDefined(event)
case event: TestFailed => ensureFormatterIsDefined(event)
case event: InfoProvided => ensureFormatterIsDefined(event)
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("it should send defined formatters") {
assert(true)
}
it("it should also send defined formatters") {
assert(false)
}
}
val a = new MySpec
val myRep = new MyReporter
a.run(None, Args(myRep))
assert(!myRep.gotAnUndefinedFormatter, myRep.lastEventWithUndefinedFormatter.toString)
}
test("SpecText should come through correctly in a SpecReport when registering with it") {
var testSucceededReportHadCorrectSpecText = false
var lastSpecText: Option[String] = None
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My spec text must have the proper words")
testSucceededReportHadCorrectSpecText = true
else
lastSpecText = Some(rawText)
case _ => throw new RuntimeException("Got a non-SpecReport")
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
it("My spec text must have the proper words") {}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectSpecText, lastSpecText match { case Some(s) => s; case None => "No report"})
}
test("Spec text should come through correctly in a SpecReport when registering with it when nested in one describe") {
var testSucceededReportHadCorrectSpecText = false
var lastSpecText: Option[String] = None
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My short name must have the proper words")
testSucceededReportHadCorrectSpecText = true
else
lastSpecText = Some(rawText)
case _ => throw new RuntimeException("Got a non-SpecReport")
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
it("My short name must have the proper words") {}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectSpecText, lastSpecText match { case Some(s) => s; case None => "No report"})
}
test("Spec text should come through correctly in a SpecReport when registering with it when nested in two describes") {
var testSucceededReportHadCorrectSpecText = false
var lastSpecText: Option[String] = None
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
if (rawText == "My short name must have the proper words")
testSucceededReportHadCorrectSpecText = true
else
lastSpecText = Some(rawText)
case _ => throw new RuntimeException("Got a non-SpecReport")
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
describe("(when empty)") {
it("My short name must have the proper words") {}
}
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectSpecText, lastSpecText match { case Some(s) => s; case None => "No report"})
}
test("Should get ScopeOpened with description if one and only one describe clause") {
val expectedSpecText = "A Stack"
class MyReporter extends Reporter {
var scopeOpenedCalled = false
var expectedMessageReceived = false
def apply(event: Event) {
event match {
case event: ScopeOpened =>
event.formatter match {
case Some(IndentedText(formattedText, rawText, indentationLevel)) =>
scopeOpenedCalled = true
if (!expectedMessageReceived) {
expectedMessageReceived = (rawText == expectedSpecText)
}
case _ =>
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
it("should allow me to push") {}
}
}
val a = new MySpec
val myRep = new MyReporter
a.run(None, Args(myRep))
assert(myRep.scopeOpenedCalled)
assert(myRep.expectedMessageReceived)
}
// Testing Shared behaviors
test("a shared specifier invoked with 'should behave like a' should get invoked") {
class MySpec extends FunSpec with BeforeAndAfterEach with BeforeAndAfterAll {
var sharedExampleInvoked = false
def invocationVerifier(i: Int) {
it("should be invoked") {
sharedExampleInvoked = true
}
}
describe("A Stack") {
describe("(when not empty)") {
it("should allow me to pop") {}
it should behave like invocationVerifier(1)
}
describe("(when not full)") {
it("should allow me to push") {}
}
}
}
val a = new MySpec
a.execute()
assert(a.sharedExampleInvoked)
}
test("two examples in a shared behavior should get invoked") {
class MySpec extends FunSpec with BeforeAndAfterEach with BeforeAndAfterAll {
var sharedExampleInvoked = false
var sharedExampleAlsoInvoked = false
def invocationVerifier(i: Int) {
it("should be invoked") {
sharedExampleInvoked = true
}
it("should also be invoked") {
sharedExampleAlsoInvoked = true
}
}
describe("A Stack") {
describe("(when not empty)") {
it("should allow me to pop") {}
it should behave like invocationVerifier(1)
}
describe("(when not full)") {
it("should allow me to push") {}
}
}
}
val a = new MySpec
a.execute()
assert(a.sharedExampleInvoked)
assert(a.sharedExampleAlsoInvoked)
}
test("three examples in a shared behavior should be invoked in order") {
class MySpec extends FunSpec {
var example1WasInvoked = false
var example2WasInvokedAfterExample1 = false
var example3WasInvokedAfterExample2 = false
def invocationVerifier(i: Int) {
it("should get invoked") {
example1WasInvoked = true
}
it("should also get invoked") {
if (example1WasInvoked)
example2WasInvokedAfterExample1 = true
}
it("should also also get invoked") {
if (example2WasInvokedAfterExample1)
example3WasInvokedAfterExample2 = true
}
}
it should behave like invocationVerifier(1)
}
val a = new MySpec
a.execute()
assert(a.example1WasInvoked)
assert(a.example2WasInvokedAfterExample1)
assert(a.example3WasInvokedAfterExample2)
}
test("three examples in a shared behavior should not get invoked at all if the behavior isn't used in a like clause") {
class MySpec extends FunSpec with ShouldMatchers {
var example1WasInvoked = false
var example2WasInvokedAfterExample1 = false
var example3WasInvokedAfterExample2 = false
def invocationVerifier(i: Int) {
it("should get invoked") {
example1WasInvoked = true
}
it("should also get invoked") {
if (example1WasInvoked)
example2WasInvokedAfterExample1 = true
}
it("should also also get invoked") {
if (example2WasInvokedAfterExample1)
example3WasInvokedAfterExample2 = true
}
}
// don't use it: behaves like (an InvocationVerifier())
}
val a = new MySpec
a.execute()
assert(!a.example1WasInvoked)
assert(!a.example2WasInvokedAfterExample1)
assert(!a.example3WasInvokedAfterExample2)
}
// Probably delete
test("The test name for a shared specifier invoked with 'should behave like a' should be verbatim if top level") {
var testSucceededReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("it should be invoked") != -1) {
testSucceededReportHadCorrectTestName = true
}
case _ =>
}
}
}
class MySpec extends FunSpec with BeforeAndAfterEach with BeforeAndAfterAll {
var sharedExampleInvoked = false
def invocationVerifier(i: Int) {
it("it should be invoked") {
sharedExampleInvoked = true
}
}
it should behave like invocationVerifier(1)
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectTestName)
}
ignore("The example name for a shared example invoked with 'it should behave like' should start with '<description> should' if nested one level in a describe clause") {
var testSucceededReportHadCorrectTestName = false
class MyReporter extends Reporter {
def apply(event: Event) {
event match {
case TestSucceeded(ordinal, suiteName, suiteID, suiteClassName, testName, testText, testEvents, duration, formatter, location, rerunnable, payload, threadName, timeStamp) =>
if (testName.indexOf("A Stack should pop properly") != -1) {
testSucceededReportHadCorrectTestName = true
}
case _ =>
}
}
}
class MySpec extends FunSpec {
var sharedExampleInvoked = false
def invocationVerifier(i: Int) {
it("should pop properly") {
sharedExampleInvoked = true
}
}
describe("A Stack") {
it should behave like invocationVerifier(1)
}
}
val a = new MySpec
a.run(None, Args(new MyReporter))
assert(testSucceededReportHadCorrectTestName)
}
test("expectedTestCount should not include tests in shares if never called") {
class MySpec extends FunSpec with ShouldMatchers {
class Misbehavior extends FunSpec with ShouldMatchers {
it("should six") {}
it("should seven") {}
}
it("should one") {}
it("should two") {}
describe("behavior") {
it("should three") {}
it("should four") {}
}
it("should five") {}
}
val a = new MySpec
assert(a.expectedTestCount(Filter()) === 5)
}
test("expectedTestCount should include tests in a share that is called") {
class MySpec extends FunSpec {
def misbehavior(i: Int) {
it("should six") {}
it("should seven") {}
}
it("should one") {}
it("should two") {}
describe("behavior") {
it("should three") {}
it should behave like misbehavior(1)
it("should four") {}
}
it("should five") {}
}
val a = new MySpec
assert(a.expectedTestCount(Filter()) === 7)
}
test("expectedTestCount should include tests in a share that is called twice") {
class MySpec extends FunSpec {
def misbehavior(i: Int) {
it("should six") {}
it("should seven") {}
}
it("should one") {}
it("should two") {}
describe("behavior") {
it("should three") {}
it should behave like misbehavior(1)
it("should four") {}
}
it("should five") {}
it should behave like misbehavior(1)
}
val a = new MySpec
assert(a.expectedTestCount(Filter()) === 9)
}
test("Spec's expectedTestCount includes tests in nested suites") {
class TwoTestSpec extends FunSpec {
it("should count this test") {}
it("should count this test also") {}
}
class MySpec extends FunSpec {
override def nestedSuites = Vector(new TwoTestSpec, new TwoTestSpec, new TwoTestSpec)
it("should count this here test") {}
}
val mySpec = new MySpec
assert(mySpec.expectedTestCount(Filter()) === 7)
}
// End of Share stuff
ignore("should be able to send info to the reporter") { // Can't do this yet, no info in Spec yet
// UPDATE 27 August Chee Seng: Probably need to use recordedEvents now.
val expectedMessage = "this is the expected message"
class MyReporter extends Reporter {
var infoProvidedCalled = false
var expectedMessageReceived = false
def apply(event: Event) {
event match {
case event: InfoProvided =>
infoProvidedCalled = true
if (!expectedMessageReceived) {
expectedMessageReceived = event.message.indexOf(expectedMessage) != -1
}
case _ =>
}
}
}
class MySpec extends FunSpec with ShouldMatchers {
describe("A Stack") {
describe("(when not empty)") {
it("should allow me to pop") {
info(expectedMessage)
()
}
}
describe("(when not full)") {
it("should allow me to push") {}
}
}
}
val a = new MySpec
val myRep = new MyReporter
a.run(None, Args(myRep))
assert(myRep.infoProvidedCalled)
assert(myRep.expectedMessageReceived)
}
test("that a null specText results in a thrown NPE at construction time") {
intercept[NullPointerException] {
new FunSpec {
it(null) {}
}
}
intercept[NullPointerException] {
new FunSpec {
ignore(null) {}
}
}
}
test("test durations are included in TestFailed and TestSucceeded events fired from Spec") {
class MySpec extends FunSpec {
it("should succeed") {}
it("should fail") { fail() }
}
val mySpec = new MySpec
val myReporter = new TestDurationReporter
mySpec.run(None, Args(myReporter, Stopper.default, Filter(), Map(), None, new Tracker(new Ordinal(99)), Set.empty))
assert(myReporter.testSucceededWasFiredAndHadADuration)
assert(myReporter.testFailedWasFiredAndHadADuration)
}
test("suite durations are included in SuiteCompleted events fired from Spec") {
class MySpec extends FunSpec {
override def nestedSuites = Vector(new Suite {})
}
val mySuite = new MySpec
val myReporter = new SuiteDurationReporter
mySuite.run(None, Args(myReporter, Stopper.default, Filter(), Map(), None, new Tracker(new Ordinal(99)), Set.empty))
assert(myReporter.suiteCompletedWasFiredAndHadADuration)
}
test("suite durations are included in SuiteAborted events fired from Spec") {
class SuiteThatAborts extends Suite {
override def run(testName: Option[String], args: Args): Status = {
throw new RuntimeException("Aborting for testing purposes")
}
}
class MySpec extends FunSpec {
override def nestedSuites = Vector(new SuiteThatAborts {})
}
val mySuite = new MySpec
val myReporter = new SuiteDurationReporter
mySuite.run(None, Args(myReporter, Stopper.default, Filter(), Map(), None, new Tracker(new Ordinal(99)), Set.empty))
assert(myReporter.suiteAbortedWasFiredAndHadADuration)
}
test("pending in a Spec should cause TestPending to be fired") {
class MySpec extends FunSpec {
it("should be pending") (pending)
}
val mySuite = new MySpec
val myReporter = new PendingReporter
mySuite.run(None, Args(myReporter, Stopper.default, Filter(), Map(), None, new Tracker(new Ordinal(99)), Set.empty))
assert(myReporter.testPendingWasFired)
}
}
| vivosys/scalatest | src/test/scala/org/scalatest/FunSpecSuite.scala | Scala | apache-2.0 | 55,571 |
package com.ometer.mongo
import com.ometer.ClassAnalysis
import com.ometer.bson.BsonAST.BObject
import com.mongodb.DBObject
import org.bson.types.ObjectId
/**
* Trait typically added to the companion object for an entity case class.
* This trait's interface supports operations on the collection itself.
* Here, the trait doesn't have knowledge of a specific MongoDB implementation
* (Hammersmith, Casbah, etc.)
*/
abstract trait CollectionOperations[EntityType <: Product, CaseClassIdType, BObjectIdType] {
/**
* This method performs any one-time-on-startup setup for the collection, such as ensuring an index.
     * The app must arrange to call this once for each collection in order to use this feature.
*/
def migrate() : Unit = {}
/** Synchronous DAO returning BObject values from the collection */
val bobjectSyncDAO : BObjectSyncDAO[BObjectIdType]
/** Synchronous DAO returning case class entity values from the collection */
val caseClassSyncDAO : CaseClassSyncDAO[BObject, EntityType, CaseClassIdType]
/**
* You have to override this from a class, because traits can't
* put the ": Manifest" annotation on their type parameters.
*/
protected val manifestOfEntityType : Manifest[EntityType]
/**
* This lets you write a function that generically works for either the case class or
* BObject results. So for example you can implement query logic that supports
* both kinds of result.
*/
def syncDAO[E : Manifest] : SyncDAO[BObject, E, ObjectId] = {
manifest[E] match {
case m if m == manifest[BObject] =>
bobjectSyncDAO.asInstanceOf[SyncDAO[BObject, E, ObjectId]]
case m if m == manifestOfEntityType =>
caseClassSyncDAO.asInstanceOf[SyncDAO[BObject, E, ObjectId]]
case _ =>
throw new IllegalArgumentException("Missing type param on syncDAO[T]? add the [T]? No DAO returns type: " + manifest[E])
}
}
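    // Illustrative use of syncDAO (a sketch, not part of the original API surface):
    // `Person` is a hypothetical entity type; the point is that the same code
    // can target either representation by varying the type parameter.
    //
    //   def dump(ops: CollectionOperations[Person, ObjectId, ObjectId]) = {
    //     val entities = ops.syncDAO[Person]   // case class results
    //     val bobjects = ops.syncDAO[BObject]  // raw BObject results
    //   }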
}
| havocp/mongo-scala-thingy | src/main/scala/com/ometer/mongo/CollectionOperations.scala | Scala | apache-2.0 | 2,010 |
package com.overviewdocs.models.tables
import com.overviewdocs.database.Slick.api._
import com.overviewdocs.models.UploadedFile
import java.sql.Timestamp
class UploadedFilesImpl(tag: Tag) extends Table[UploadedFile](tag, "uploaded_file") {
def id = column[Long]("id", O.PrimaryKey)
def contentDisposition = column[String]("content_disposition")
def contentType = column[String]("content_type")
def size = column[Long]("size")
def uploadedAt = column[Timestamp]("uploaded_at")
def * = (id, contentDisposition, contentType, size, uploadedAt) <>
((UploadedFile.apply _).tupled, UploadedFile.unapply)
def createAttributes = (contentDisposition, contentType, size, uploadedAt) <>
(UploadedFile.CreateAttributes.tupled, UploadedFile.CreateAttributes.unapply)
def updateAttributes = (size, uploadedAt) <>
(UploadedFile.UpdateAttributes.tupled, UploadedFile.UpdateAttributes.unapply)
}
object UploadedFiles extends TableQuery(new UploadedFilesImpl(_))
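// Illustrative query sketch (assumes the usual Slick setup; executing the
// resulting DBIO action against a database is outside this mapping file):
//
//   val tenNewest = UploadedFiles.sortBy(_.uploadedAt.desc).take(10).result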
| overview/overview-server | common/src/main/scala/com/overviewdocs/models/tables/UploadedFiles.scala | Scala | agpl-3.0 | 979 |
package geotrellis.config.json
import geotrellis.config._
import geotrellis.spark.etl.config.BackendProfile
import geotrellis.spark.etl.config.json._
import spray.json.RootJsonFormat
import spray.json._
import spray.json.DefaultJsonProtocol._
import java.time.{LocalDate, ZoneOffset, ZonedDateTime}
trait ConfigFormats {
implicit object ZonedDateTimeFormat extends RootJsonFormat[ZonedDateTime] {
    // LocalDate#toString renders ISO-8601 "yyyy-MM-dd", matching the read side below.
    def write(zdt: ZonedDateTime): JsValue = zdt.toLocalDate.toString.toJson
def read(value: JsValue): ZonedDateTime =
value match {
case JsString(time) => LocalDate.parse(time).atStartOfDay(ZoneOffset.UTC)
case _ =>
throw new DeserializationException("LocalDate must be a valid string.")
}
}
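  // Round-trip sketch for the format above (sub-day precision is dropped by design):
  // writing 2016-01-01T00:00Z yields JsString("2016-01-01"), and reading that
  // back gives LocalDate.parse("2016-01-01").atStartOfDay(ZoneOffset.UTC).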
  implicit val validationFormat = jsonFormat5(Validation)
implicit object TileTypeFormat extends RootJsonFormat[TileType] {
def write(tt: TileType): JsValue = tt.name.toJson
def read(value: JsValue): TileType =
value match {
case JsString(tt) => TileType.fromName(tt)
case _ =>
throw new DeserializationException("TileType must be a valid string.")
}
}
implicit object IngestTypeFormat extends RootJsonFormat[IngestType] {
def write(it: IngestType): JsValue = it.name.toJson
def read(value: JsValue): IngestType =
value match {
case JsString(it) => IngestType.fromName(it)
case _ =>
throw new DeserializationException("IngestType must be a valid string.")
}
}
implicit val attributesFormat = jsonFormat2(Attributes)
case class DatasetFormat(bp: Map[String, BackendProfile]) extends RootJsonFormat[Dataset] {
val iformat = InputFormat(bp)
val oformat = OutputFormat(bp)
def write(o: Dataset): JsValue = JsObject(
"input" -> iformat.write(o.input),
"output" -> oformat.write(o.output),
"validation" -> o.validation.toJson,
"attributes" -> o.attributes.toJson
)
def read(value: JsValue): Dataset =
value match {
case JsObject(fields) =>
Dataset(
input = iformat.read(fields("input")),
output = oformat.read(fields("output")),
validation = fields("validation").convertTo[Validation],
attributes = fields("attributes").convertTo[Attributes]
)
case _ =>
throw new DeserializationException("Dataset must be a valid json object.")
}
}
case class DatasetsFormat(bp: Map[String, BackendProfile]) extends RootJsonFormat[List[Dataset]] {
val dformat = DatasetFormat(bp)
def write(l: List[Dataset]): JsValue = l.map(dformat.write).toJson
def read(value: JsValue): List[Dataset] =
value match {
case JsArray(fields) => fields.toList.map(dformat.read)
case _ =>
throw new DeserializationException("Dataset must be a valid json object.")
}
}
}
| geotrellis/geotrellis-integration-tests-tool | src/main/scala/geotrellis/config/json/ConfigFormats.scala | Scala | apache-2.0 | 2,911 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\
* @ @ *
* # # # # (c) 2017 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.tools
import mathact.core.bricks.blocks.{Block, BlockContext}
/** Empty block
* Created by CAB on 24.10.2016.
*/
class EmptyBlock(implicit blockContext: BlockContext) extends Block(blockContext){
//Variables
private var _name: Option[String] = None
private var _imagePath: Option[String] = None
//DSL
def name_=(v: String) { _name = v match{case "" ⇒ None; case s ⇒ Some(s)} }
def name = _name
def imagePath_=(v: String) { _imagePath = v match{case "" ⇒ None; case s ⇒ Some(s)} }
def imagePath = _imagePath
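  // Illustrative DSL usage (a sketch; assumes an implicit BlockContext in
  // scope, as the constructor requires):
  //
  //   val block = new EmptyBlock
  //   block.name = "demo"               // block.name = "" resets it to None
  //   block.imagePath = "/img/demo.png"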
  //Abstract callbacks (will be called by the system after the sketch is constructed)
private[mathact] def blockName: Option[String] = _name
private[mathact] def blockImagePath: Option[String] = _imagePath
//TODO Add more
}
| AlexCAB/MathAct | mathact_tools/src/main/scala/mathact/tools/EmptyBlock.scala | Scala | mit | 1,887 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.visualization
import org.apache.mxnet.Symbol
/**
* @author Depeng Liang
*/
object LeNet {
def getSymbol(numClasses: Int = 10): Symbol = {
val data = Symbol.Variable("data")
// first conv
val conv1 = Symbol.Convolution()()(
Map("data" -> data, "kernel" -> "(5, 5)", "num_filter" -> 20))
val tanh1 = Symbol.Activation()()(Map("data" -> conv1, "act_type" -> "tanh"))
val pool1 = Symbol.Pooling()()(Map("data" -> tanh1, "pool_type" -> "max",
"kernel" -> "(2, 2)", "stride" -> "(2, 2)"))
// second conv
val conv2 = Symbol.Convolution()()(
Map("data" -> pool1, "kernel" -> "(5, 5)", "num_filter" -> 50))
val tanh2 = Symbol.Activation()()(Map("data" -> conv2, "act_type" -> "tanh"))
val pool2 = Symbol.Pooling()()(Map("data" -> tanh2, "pool_type" -> "max",
"kernel" -> "(2, 2)", "stride" -> "(2, 2)"))
// first fullc
val flatten = Symbol.Flatten()()(Map("data" -> pool2))
val fc1 = Symbol.FullyConnected()()(Map("data" -> flatten, "num_hidden" -> 500))
val tanh3 = Symbol.Activation()()(Map("data" -> fc1, "act_type" -> "tanh"))
// second fullc
val fc2 = Symbol.FullyConnected()()(
Map("data" -> tanh3, "num_hidden" -> numClasses))
// loss
val lenet = Symbol.SoftmaxOutput(name = "softmax")()(Map("data" -> fc2))
lenet
}
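  // Illustrative use (a sketch only; module binding and training are omitted,
  // and the classic LeNet setup expects 28x28 single-channel "data" input):
  //
  //   val net: Symbol = LeNet.getSymbol(numClasses = 10)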
}
| indhub/mxnet | scala-package/examples/src/main/scala/org/apache/mxnetexamples/visualization/LeNet.scala | Scala | apache-2.0 | 2,230 |
package utils.credentials
trait SaltGenerator {
def generateSalt(): String
}
object SaltGeneratorUUID extends SaltGenerator {
def generateSalt = java.util.UUID.randomUUID.toString
}
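// Illustrative usage: generate one fresh salt per stored credential.
//
//   val salt: String = SaltGeneratorUUID.generateSalt()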
| ybr/PlayBootstrap | app/utils/credentials/SaltGenerator.scala | Scala | mit | 188 |
package org.jetbrains.plugins.scala.annotator.gutter
/**
* Pavel.Fatin, 21.01.2010
*/
class PrefaceImportContainerTest extends LineMarkerTestBase {
protected override def getBasePath = super.getBasePath + "/preface/import/container/"
def testBlock(): Unit = doTest()
def testClass(): Unit = doTest()
def testFunctionDefinition(): Unit = doTest()
def testObject(): Unit = doTest()
def testPackageContainer(): Unit = doTest()
def testTrait(): Unit = doTest()
} | JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/annotator/gutter/PrefaceImportContainerTest.scala | Scala | apache-2.0 | 477 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.eval.Task
import monix.execution.Ack.Continue
import monix.execution.FutureUtils.extensions._
import monix.execution.Scheduler
import monix.reactive.Observable.{empty, now}
import monix.execution.exceptions.DummyException
import monix.reactive.subjects.PublishSubject
import monix.reactive.{Observable, Observer}
import scala.concurrent.Future
import scala.concurrent.duration.Duration.Zero
import scala.concurrent.duration._
object MergeOneSuite extends BaseOperatorSuite {
def createObservable(sourceCount: Int) = Some {
val o = Observable.range(0L, sourceCount.toLong).mergeMap(i => Observable.now(i))
Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
}
def count(sourceCount: Int) =
sourceCount
def waitForFirst = Duration.Zero
def waitForNext = Duration.Zero
def observableInError(sourceCount: Int, ex: Throwable) =
if (sourceCount <= 1) {
val o = Observable.now(1L).mergeMap(_ => Observable.raiseError(ex))
Some(Sample(o, 0, 0, Zero, Zero))
} else
Some {
val o = Observable
.range(0L, sourceCount.toLong)
.endWithError(ex)
.mergeMap(i => Observable.now(i))
Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
}
def sum(sourceCount: Int) = {
sourceCount * (sourceCount - 1) / 2
}
def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
val o = Observable.range(0L, sourceCount.toLong).flatMap { i =>
if (i == sourceCount - 1)
throw ex
else
Observable.now(i)
}
Sample(o, count(sourceCount - 1), sum(sourceCount - 1), Zero, Zero)
}
def toList[A](o: Observable[A])(implicit s: Scheduler) = {
o.foldLeft(Vector.empty[A])(_ :+ _)
.runAsyncGetLast
.map(_.getOrElse(Vector.empty))
}
override def cancelableObservables(): Seq[Sample] = {
val sample1 = Observable
.range(1, 100)
.mergeMap(x => Observable.now(x).delayExecution(2.second))
val sample2 = Observable
.range(0, 100)
.delayOnNext(1.second)
.mergeMap(x => Observable.now(x).delayExecution(2.second))
Seq(
Sample(sample1, 0, 0, 0.seconds, 0.seconds),
Sample(sample1, 0, 0, 1.seconds, 0.seconds),
Sample(sample2, 0, 0, 0.seconds, 0.seconds),
Sample(sample2, 0, 0, 1.seconds, 0.seconds)
)
}
test("filter can be expressed in terms of mergeMap, without ordering") { implicit s =>
val obs1 = Observable.range(0, 100).filter(_ % 2 == 0)
val obs2 = Observable.range(0, 100).mergeMap(x => if (x % 2 == 0) now(x) else empty)
val lst1 = toList(obs1)
val lst2 = toList(obs2)
s.tick()
assert(lst1.isCompleted && lst2.isCompleted)
assertEquals(lst1.value.get, lst2.value.get)
}
test("map can be expressed in terms of mergeMap, without ordering") { implicit s =>
val obs1 = Observable.range(0, 100).map(_ + 10)
val obs2 = Observable.range(0, 100).mergeMap(x => now(x + 10))
val lst1 = toList(obs1)
val lst2 = toList(obs2)
s.tick()
assert(lst1.isCompleted && lst2.isCompleted)
assertEquals(lst1.value.get, lst2.value.get)
}
test("should wait the completion of the current, before subscribing to the next") { implicit s =>
var obs2WasStarted = false
var received = 0L
var wasCompleted = false
val obs1 = PublishSubject[Long]()
val obs2 = Observable.range(1, 100).map { x =>
obs2WasStarted = true; x
}
Observable
.fromIterable(Seq(obs1, obs2))
.flatten
.unsafeSubscribeFn(new Observer[Long] {
def onNext(elem: Long) = {
received += elem
if (elem == 1000)
Future.delayedResult(1.second)(Continue)
else
Continue
}
def onError(ex: Throwable) = ()
def onComplete() = wasCompleted = true
})
s.tickOne()
assertEquals(received, 0)
obs1.onNext(10)
assertEquals(received, 10)
val f = obs1.onNext(1000)
assertEquals(received, 1010)
f.onComplete(_ => obs1.onComplete())
s.tick()
assert(!obs2WasStarted)
s.tick(1.second)
assert(obs2WasStarted)
assertEquals(received, 1010 + 99 * 50)
assert(wasCompleted)
}
test("should interrupt the streaming on error") { implicit s =>
var obs1WasStarted = false
var obs2WasStarted = false
var wasThrown: Throwable = null
val sub = PublishSubject[Long]()
val obs1 = sub.doOnStart(_ => Task { obs1WasStarted = true })
val obs2 = Observable.range(1, 100).map { x =>
obs2WasStarted = true; x
}
Observable
.fromIterable(Seq(obs1, obs2))
.flatten
.unsafeSubscribeFn(new Observer[Long] {
def onNext(elem: Long) = Continue
def onError(ex: Throwable) = wasThrown = ex
def onComplete() = ()
})
s.tick()
sub.onNext(1)
assert(obs1WasStarted)
sub.onError(DummyException("dummy"))
s.tick()
assertEquals(wasThrown, DummyException("dummy"))
assert(!obs2WasStarted)
}
}
| alexandru/monifu | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/MergeOneSuite.scala | Scala | apache-2.0 | 5,739 |
package sbtappengine
import java.util.Properties
import sbt._
import spray.revolver.RevolverPlugin
import sbtappengine.Compat.{Process, _}
@deprecated("will be removed. use enablePlugins(AppenginePlugin)", "0.7.0")
object Plugin {
val AppengineKeys = AppenginePlugin.autoImport.AppengineKeys
val appengineSettings = AppenginePlugin.projectSettings
}
object AppenginePlugin extends AutoPlugin {
import Keys._
import Def.Initialize
import com.earldouglas.xwp.ContainerPlugin
import com.earldouglas.xwp.WarPlugin
import com.earldouglas.xwp.WebappPlugin.autoImport.webappPrepare
import spray.revolver
import revolver.Actions._
import revolver.Utilities._
override def requires = sbt.plugins.JvmPlugin && WarPlugin && ContainerPlugin
object autoImport {
@deprecated("", "")
object AppengineKeys extends revolver.RevolverKeys
lazy val appengineRequestLogs = InputKey[Unit]("appengine-request-logs", "Write request logs in Apache common log format.")
lazy val appengineRollback = InputKey[Unit]("appengine-rollback", "Rollback an in-progress update.")
lazy val appengineDeploy = InputKey[Unit]("appengine-deploy", "Create or update an app version.")
lazy val appengineDeployBackends = InputKey[Unit]("appengine-deploy-backends", "Update the specified backend or all backends.")
lazy val appengineRollbackBackend = InputKey[Unit]("appengine-rollback-backends", "Roll back a previously in-progress update.")
lazy val appengineConfigBackends = InputKey[Unit]("appengine-config-backends", "Configure the specified backend.")
lazy val appengineStartBackend = InputKey[Unit]("appengine-start-backend", "Start the specified backend.")
lazy val appengineStopBackend = InputKey[Unit]("appengine-stop-backend", "Stop the specified backend.")
lazy val appengineDeleteBackend = InputKey[Unit]("appengine-delete-backend", "Delete the specified backend.")
lazy val appengineDeployIndexes = InputKey[Unit]("appengine-deploy-indexes", "Update application indexes.")
lazy val appengineDeployCron = InputKey[Unit]("appengine-deploy-cron", "Update application cron jobs.")
lazy val appengineDeployQueues = InputKey[Unit]("appengine-deploy-queues", "Update application task queue definitions.")
lazy val appengineDeployDos = InputKey[Unit]("appengine-deploy-dos", "Update application DoS protection configuration.")
lazy val appengineCronInfo = InputKey[Unit]("appengine-cron-info", "Displays times for the next several runs of each cron job.")
lazy val appengineDevServer = InputKey[revolver.AppProcess]("appengine-dev-server", "Run application through development server.")
lazy val appengineStopDevServer = TaskKey[Unit]("appengine-stop-dev-server", "Stop development server.")
lazy val appengineEnhance = TaskKey[Unit]("appengine-enhance", "Execute ORM enhancement.")
lazy val appengineEnhanceCheck = TaskKey[Unit]("appengine-enhance-check", "Just check the classes for enhancement status.")
lazy val appengineOnStartHooks = SettingKey[Seq[() => Unit]]("appengine-on-start-hooks")
lazy val appengineOnStopHooks = SettingKey[Seq[() => Unit]]("appengine-on-stop-hooks")
lazy val appengineApiToolsJar = SettingKey[String]("appengine-api-tools-jar", "Name of the development startup executable jar.")
lazy val appengineApiToolsPath = SettingKey[File]("appengine-api-tools-path", "Path of the development startup executable jar.")
lazy val appengineSdkVersion = SettingKey[String]("appengine-sdk-version")
lazy val appengineSdkPath = SettingKey[File]("appengine-sdk-path")
lazy val appengineClasspath = SettingKey[Classpath]("appengine-classpath")
lazy val appengineApiJarName = SettingKey[String]("appengine-api-jar-name")
lazy val appengineApiLabsJarName = SettingKey[String]("appengine-api-labs-jar-name")
lazy val appengineJsr107CacheJarName = SettingKey[String]("appengine-jsr107-cache-jar-name")
lazy val appengineBinPath = SettingKey[File]("appengine-bin-path")
lazy val appengineLibPath = SettingKey[File]("appengine-lib-path")
lazy val appengineLibUserPath = SettingKey[File]("appengine-lib-user-path")
lazy val appengineLibImplPath = SettingKey[File]("appengine-lib-impl-path")
lazy val appengineApiJarPath = SettingKey[File]("appengine-api-jar-path")
lazy val appengineAppcfgName = SettingKey[String]("appengine-appcfg-name")
lazy val appengineAppcfgPath = SettingKey[File]("appengine-appcfg-path")
lazy val appengineOverridePath = SettingKey[File]("appengine-override-path")
lazy val appengineOverridesJarPath = SettingKey[File]("appengine-overrides-jar-path")
lazy val appengineAgentJarPath = SettingKey[File]("appengine-agent-jar-path")
lazy val appengineEmptyFile = TaskKey[File]("appengine-empty-file")
lazy val appengineTemporaryWarPath = SettingKey[File]("appengine-temporary-war-path")
lazy val appengineLocalDbPath = SettingKey[File]("appengine-local-db-path")
lazy val appengineDebug = SettingKey[Boolean]("appengine-debug")
lazy val appengineDebugPort = SettingKey[Int]("appengine-debug-port")
lazy val appengineIncludeLibUser = SettingKey[Boolean]("appengine-include-lib-user")
lazy val appenginePersistenceApi = SettingKey[String]("appengine-persistence-api", "Name of the API we are enhancing for: JDO, JPA.")
@deprecated("will be removed. use enablePlugins(AppenginePlugin)", "0.7.0")
lazy val appengineSettings = AppenginePlugin.projectSettings
lazy val appengineDataNucleusSettings: Seq[Def.Setting[_]] =
inConfig(Compile)(baseAppengineDataNucleusSettings)
}
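  // Illustrative build.sbt wiring (a sketch; how the SDK is located is an
  // assumption here, see AppEngine.buildAppengineSdkPath below for the actual lookup):
  //
  //   enablePlugins(AppenginePlugin)
  //   // then export APPENGINE_SDK_HOME or set sdkHome in appengine.properties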
import autoImport._
object AppEngine {
// see https://github.com/jberkel/android-plugin/blob/master/src/main/scala/AndroidHelpers.scala
def appcfgTask(action: String,
depends: TaskKey[File] = appengineEmptyFile, outputFile: Option[String] = None): Initialize[InputTask[Unit]] =
Def.inputTask {
import complete.DefaultParsers._
val input: Seq[String] = spaceDelimited("<arg>").parsed
val x = depends.value
appcfgTaskCmd(appengineAppcfgPath.value, input, Seq(action, appengineTemporaryWarPath.value.getAbsolutePath) ++ outputFile.toSeq, streams.value)
}
def appcfgBackendTask(action: String,
depends: TaskKey[File] = appengineEmptyFile, reqParam: Boolean = false): Initialize[InputTask[Unit]] =
Def.inputTask {
import complete.DefaultParsers._
val input: Seq[String] = spaceDelimited("<arg>").parsed
val (opts, args) = input.partition(_.startsWith("--"))
if (reqParam && args.isEmpty) {
sys.error("error executing appcfg: required parameter missing")
}
val x = depends.value
appcfgTaskCmd(appengineAppcfgPath.value, opts, Seq("backends", appengineTemporaryWarPath.value.getAbsolutePath, action) ++ args, streams.value)
}
def appcfgTaskCmd(appcfgPath: sbt.File, args: Seq[String],
params: Seq[String], s: TaskStreams) = {
val appcfg: Seq[String] = Seq(appcfgPath.absolutePath.toString) ++ args ++ params
s.log.debug(appcfg.mkString(" "))
      // appcfg inherits this process' stdio, so its output goes straight to the console.
      val exit = Process(appcfg).!<
      if (exit != 0) {
        s.log.error("appcfg exited with code " + exit)
        sys.error("error executing appcfg")
      }
      ()
}
def buildAppengineSdkPath(baseDir: File): File = {
var sdk = System.getenv("APPENGINE_SDK_HOME")
if (sdk == null) {
val appengineSettings = baseDir / "appengine.properties"
val prop = new Properties()
IO.load(prop, appengineSettings)
sdk = prop.getProperty("sdkHome")
}
if (sdk == null) sys.error("You need to set the 'APPENGINE_SDK_HOME' environment variable " +
"or the 'sdkHome' property in 'appengine.properties'")
new File(sdk)
}
def buildSdkVersion(libUserPath: File): String = {
val pat = """appengine-api-1.0-sdk-(\\d\\.\\d+\\.\\d+(?:\\.\\d+)*)\\.jar""".r
(libUserPath * "appengine-api-1.0-sdk-*.jar").get.toList match {
case jar::_ => jar.name match {
case pat(version) => version
case _ => sys.error("invalid jar file. " + jar)
}
case _ => sys.error("not found appengine api jar.")
}
}
def isWindows = System.getProperty("os.name").startsWith("Windows")
def osBatchSuffix = if (isWindows) ".cmd" else ".sh"
// see https://github.com/spray/sbt-revolver/blob/master/src/main/scala/spray/revolver/Actions.scala#L26
def restartDevServer(streams: TaskStreams, logTag: String, project: ProjectRef, options: ForkOptions, mainClass: Option[String],
cp: Classpath, args: Seq[String], startConfig: ExtraCmdLineOptions, war: File,
onStart: Seq[() => Unit], onStop: Seq[() => Unit]): revolver.AppProcess = {
if (revolverState.getProcess(project).exists(_.isRunning)) {
colorLogger(streams.log).info("[YELLOW]Stopping dev server ...")
stopAppWithStreams(streams, project)
onStop foreach { _.apply() }
}
startDevServer(streams, logTag, project, options, mainClass, cp, args, startConfig, onStart)
}
// see https://github.com/spray/sbt-revolver/blob/master/src/main/scala/spray/revolver/Actions.scala#L32
def startDevServer(streams: TaskStreams, logTag: String, project: ProjectRef, options: ForkOptions, mainClass: Option[String],
cp: Classpath, args: Seq[String], startConfig: ExtraCmdLineOptions, onStart: Seq[() => Unit]): revolver.AppProcess = {
assert(!revolverState.getProcess(project).exists(_.isRunning))
val color = updateStateAndGet(_.takeColor)
val logger = new revolver.SysoutLogger(logTag, color, streams.log.ansiCodesSupported)
colorLogger(streams.log).info("[YELLOW]Starting dev server in the background ...")
onStart foreach { _.apply() }
val appProcess = revolver.AppProcess(project, color, logger) {
val f = new Fork("java", mainClass)
val config = options
.withRunJVMOptions(
Vector("-cp", cp.map(_.data.absolutePath).mkString(System.getProperty("file.separator"))) ++
options.runJVMOptions ++ startConfig.jvmArgs
)
.withOutputStrategy(
StdoutOutput
)
f.fork(config, startConfig.startArgs ++ args)
}
registerAppProcess(project, appProcess)
appProcess
}
}
lazy val baseAppengineSettings: Seq[Def.Setting[_]] = Seq(
// this is classpath during compile
unmanagedClasspath ++= appengineClasspath.value,
// this is classpath included into WEB-INF/lib
// https://developers.google.com/appengine/docs/java/tools/ant
// "All of these JARs are in the SDK's lib/user/ directory."
unmanagedClasspath in Runtime ++= unmanagedClasspath.value,
appengineRequestLogs := AppEngine.appcfgTask("request_logs", outputFile = Some("request.log")).evaluated,
appengineRollback := AppEngine.appcfgTask("rollback").evaluated,
appengineDeploy := AppEngine.appcfgTask("update", `package`).evaluated,
appengineDeployIndexes := AppEngine.appcfgTask("update_indexes", `package`).evaluated,
appengineDeployCron := AppEngine.appcfgTask("update_cron", `package`).evaluated,
appengineDeployQueues := AppEngine.appcfgTask("update_queues", `package`).evaluated,
appengineDeployDos := AppEngine.appcfgTask("update_dos", `package`).evaluated,
appengineCronInfo := AppEngine.appcfgTask("cron_info").evaluated,
appengineDeployBackends := AppEngine.appcfgBackendTask("update", `package`).evaluated,
appengineConfigBackends := AppEngine.appcfgBackendTask("configure", `package`).evaluated,
appengineRollbackBackend := AppEngine.appcfgBackendTask("rollback", `package`, true).evaluated,
appengineStartBackend := AppEngine.appcfgBackendTask("start", `package`, true).evaluated,
appengineStopBackend := AppEngine.appcfgBackendTask("stop", `package`, true).evaluated,
appengineDeleteBackend := AppEngine.appcfgBackendTask("delete", `package`, true).evaluated,
appengineDevServer := {
val args = startArgsParser.parsed
val x = (products in Compile).value
AppEngine.restartDevServer(streams.value, (RevolverPlugin.autoImport.reLogTag in appengineDevServer).value,
thisProjectRef.value, (RevolverPlugin.autoImport.reForkOptions in appengineDevServer).value,
(mainClass in appengineDevServer).value, (fullClasspath in appengineDevServer).value,
(RevolverPlugin.autoImport.reStartArgs in appengineDevServer).value, args,
`package`.value,
(appengineOnStartHooks in appengineDevServer).value, (appengineOnStopHooks in appengineDevServer).value)
},
RevolverPlugin.autoImport.reForkOptions in appengineDevServer := {
ForkOptions(
javaHome = javaHome.value,
outputStrategy = outputStrategy.value,
bootJars = scalaInstance.value.libraryJar +: scalaInstance.value.allJars.toVector,
workingDirectory = Some(appengineTemporaryWarPath.value),
runJVMOptions = (javaOptions in appengineDevServer).value.toVector,
connectInput = false,
envVars = Map.empty[String, String]
)
},
RevolverPlugin.autoImport.reLogTag in appengineDevServer := "appengineDevServer",
mainClass in appengineDevServer := Some("com.google.appengine.tools.development.DevAppServerMain"),
fullClasspath in appengineDevServer := Seq(appengineApiToolsPath.value).classpath,
appengineLocalDbPath in appengineDevServer := target.value / "local_db.bin",
RevolverPlugin.autoImport.reStartArgs in appengineDevServer := Seq(appengineTemporaryWarPath.value.absolutePath),
// http://thoughts.inphina.com/2010/06/24/remote-debugging-google-app-engine-application-on-eclipse/
appengineDebug in appengineDevServer := true,
appengineDebugPort in appengineDevServer := 1044,
appengineOnStartHooks in appengineDevServer := Nil,
appengineOnStopHooks in appengineDevServer := Nil,
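    // Parameter roles below, inferred from usage: o = JDK-overrides jar,
    // a = agent jar, jr = JRebel settings, ldb = local datastore file,
    // d = debug flag, dp = debug port.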
SbtCompat.impl.changeJavaOptions { (o, a, jr, ldb, d, dp) =>
Seq("-ea" , "-javaagent:" + a.getAbsolutePath, "-Xbootclasspath/p:" + o.getAbsolutePath,
"-Ddatastore.backing_store=" + ldb.getAbsolutePath) ++
Seq("-Djava.awt.headless=true") ++
(if (d) Seq("-Xdebug", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=" + dp.toString) else Nil) ++
createJRebelAgentOption(revolver.SysoutLogger, jr).toSeq },
appengineStopDevServer := RevolverPlugin.autoImport.reStop.value,
appengineApiToolsJar := "appengine-tools-api.jar",
appengineSdkVersion := AppEngine.buildSdkVersion(appengineLibUserPath.value),
appengineSdkPath := AppEngine.buildAppengineSdkPath(baseDirectory.value),
appengineIncludeLibUser := true,
// this controls appengine classpath, which is used in unmanagedClasspath
appengineClasspath := {
if (appengineIncludeLibUser.value) (appengineLibUserPath.value ** "*.jar").classpath
else Nil
},
appengineApiJarName := { "appengine-api-1.0-sdk-" + appengineSdkVersion.value + ".jar" },
appengineApiLabsJarName := { "appengine-api-labs-" + appengineSdkVersion.value + ".jar" },
appengineJsr107CacheJarName := { "appengine-jsr107cache-" + appengineSdkVersion.value + ".jar" },
appengineBinPath := appengineSdkPath.value / "bin",
appengineLibPath := appengineSdkPath.value / "lib",
appengineLibUserPath := appengineLibPath.value / "user",
appengineLibImplPath := appengineLibPath.value / "impl",
appengineApiJarPath := appengineLibUserPath.value / appengineApiJarName.value,
appengineApiToolsPath := appengineLibPath.value / appengineApiToolsJar.value,
appengineAppcfgName := "appcfg" + AppEngine.osBatchSuffix,
appengineAppcfgPath := appengineBinPath.value / appengineAppcfgName.value,
appengineOverridePath := appengineLibPath.value / "override",
appengineOverridesJarPath := appengineOverridePath.value / "appengine-dev-jdk-overrides.jar",
appengineAgentJarPath := appengineLibPath.value / "agent" / "appengine-agent.jar",
appengineEmptyFile := file(""),
appengineTemporaryWarPath := target.value / "webapp"
)
lazy val baseAppengineDataNucleusSettings: Seq[Def.Setting[_]] = Seq(
`package` := `package`.dependsOn(appengineEnhance).value,
appengineClasspath := {
val appengineORMJars = (appengineLibPath.value / "orm" * "*.jar").get
appengineClasspath.value ++ appengineORMJars.classpath
},
appengineEnhance := {
val r: ScalaRun = (runner in Runtime).value
val main: String = (mainClass in appengineEnhance).value.get
val files: Seq[File] = (exportedProducts in Runtime).value flatMap { dir =>
(dir.data ** "*.class").get ++ (dir.data ** "*.jdo").get
}
val args: Seq[String] = (scalacOptions in appengineEnhance).value ++ (files map {_.toString})
r.run(main, (fullClasspath in appengineEnhance).value map {_.data}, args, streams.value.log)
},
appengineEnhanceCheck := {
val r: ScalaRun = (runner in Runtime).value
val main: String = (mainClass in appengineEnhance).value.get
val files: Seq[File] = (exportedProducts in Runtime).value flatMap { dir =>
(dir.data ** "*.class").get ++ (dir.data ** "*.jdo").get
}
val args: Seq[String] = (scalacOptions in appengineEnhance).value ++ Seq("-checkonly") ++ (files map {_.toString})
r.run(main, (fullClasspath in appengineEnhance).value map {_.data}, args, streams.value.log)
},
mainClass in appengineEnhance := Some("org.datanucleus.enhancer.DataNucleusEnhancer"),
fullClasspath in appengineEnhance := {
val appengineORMEnhancerJars = (appengineLibPath.value / "tools" / "orm" * "datanucleus-enhancer-*.jar").get ++
(appengineLibPath.value / "tools" / "orm" * "asm-*.jar").get
(Seq(appengineApiToolsPath.value) ++ appengineORMEnhancerJars).classpath ++ (fullClasspath in Compile).value
},
// http://www.datanucleus.org/products/accessplatform_2_2/enhancer.html
    scalacOptions in appengineEnhance := ((logLevel in appengineEnhance).value match {
case Level.Debug => Seq("-v")
case _ => Seq()
} ) ++ Seq("-api", (appenginePersistenceApi in appengineEnhance).value),
logLevel in appengineEnhance := Level.Debug,
appenginePersistenceApi in appengineEnhance := "JDO"
)
lazy val webSettings = projectSettings
override lazy val projectSettings: Seq[Def.Setting[_]] =
inConfig(Compile)(WarPlugin.projectSettings ++ revolver.RevolverPlugin.settings ++ baseAppengineSettings) ++
inConfig(Test)(Seq(
unmanagedClasspath ++= appengineClasspath.value,
appengineClasspath := {
val impljars = ((appengineLibImplPath in Compile).value * "*.jar").get
val testingjars = ((appengineLibPath in Compile).value / "testing" * "*.jar").get
(appengineClasspath in Compile).value ++ Attributed.blankSeq(impljars ++ testingjars)
}
)) ++
Seq(
watchSources ++= ((sourceDirectory in webappPrepare in Compile).value ** "*").get)
}
| sbt/sbt-appengine | src/main/scala/AppenginePlugin.scala | Scala | mit | 19,521 |
package io.github.suitougreentea.VariousMinos.rule
import io.github.suitougreentea.VariousMinos.Field
class RotationSystemClassic extends RotationSystem {
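  // Per-rotation-state (x, y) offsets, applied only to the mino ids listed in
  // the matches below (apparently those with a 4x4 bounding box) so the piece
  // stays visually anchored while its box rotates.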
  val offsetCW44 = Array((-1, 0), (0, 1), (1, 0), (0, -1))
  val offsetCCW44 = Array((0, 1), (1, 0), (0, -1), (-1, 0))
override def rotateCW(field: Field): Boolean = {
    val state = field.currentMino.rotationState
    val (ox, oy) = field.currentMino.minoId match {
case 1 | 3 | 4 | 7 | 14 | 15 | 16 | 17 | 25 | 26 => offsetCW44(state)
case _ => (0, 0)
}
field.currentMino.rotateCW()
if(field.checkHit(minoPos = (field.currentMinoX + ox, field.currentMinoY + oy))){
field.currentMino.rotateCCW()
false
} else {
field.currentMinoX += ox
field.currentMinoY += oy
true
}
}
override def rotateCCW(field: Field): Boolean = {
    val state = field.currentMino.rotationState
    val (ox, oy) = field.currentMino.minoId match {
case 1 | 3 | 4 | 7 | 14 | 15 | 16 | 17 | 25 | 26 => offsetCCW44(state)
case _ => (0, 0)
}
field.currentMino.rotateCCW()
if(field.checkHit(minoPos = (field.currentMinoX + ox, field.currentMinoY + oy))){
field.currentMino.rotateCW()
false
} else {
field.currentMinoX += ox
field.currentMinoY += oy
true
}
}
} | suitougreentea/VariousMinos2 | src/main/scala/io/github/suitougreentea/VariousMinos/rule/RotationSystemClassic.scala | Scala | mit | 1,322 |
package com.twitter.zipkin.collector.processor
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import com.twitter.zipkin.common.Span
import com.twitter.finagle.TooManyWaitersException
import com.twitter.logging.Logger
import com.twitter.ostrich.stats.Stats
import com.twitter.util.Future
/**
* A processor that takes a span and processes it some way.
* Could be: storing it or aggregating the data in some way for example.
*/
trait Processor {
private val log = Logger.get
/**
* Process the span.
*/
def processSpan(span: Span): Future[Unit]
/**
* Shut down this processor
*/
def shutdown()
protected def failureHandler(method: String): (Throwable) => Unit = {
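    // TooManyWaitersException is swallowed silently, presumably because it only
    // signals back-pressure from a saturated downstream and would flood the log.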
case t: TooManyWaitersException =>
case e => {
Stats.getCounter("exception_" + method + "_" + e.getClass).incr()
log.error(e, method)
}
}
} | lanrion/zipkin | zipkin-server/src/main/scala/com/twitter/zipkin/collector/processor/Processor.scala | Scala | apache-2.0 | 1,418 |
/*
* ScalaRay - Ray tracer based on pbrt (see http://pbrt.org) written in Scala
* Copyright (C) 2009, 2010, 2011 Jesper de Jong
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jesperdj.scalaray.material
import scala.collection.immutable.IndexedSeq
import org.jesperdj.scalaray.reflection.{ BSDF, Lambertian }
import org.jesperdj.scalaray.shape.DifferentialGeometry
import org.jesperdj.scalaray.spectrum.Spectrum
import org.jesperdj.scalaray.texture.Texture
// TODO: Implement this fully (OrenNayar reflection model, sigma, bump mapping)
// Matte material (pbrt 9.2.1)
final class MatteMaterial (texture: Texture[Spectrum]) extends Material {
// Select a BSDF for the given differential geometries
def bsdf(dgGeom: DifferentialGeometry, dgShading: DifferentialGeometry): BSDF =
new BSDF(IndexedSeq(new Lambertian(texture(dgShading))), dgShading, dgGeom.normal)
}
| jesperdj/scalaray | src/main/scala/org/jesperdj/scalaray/material/MatteMaterial.scala | Scala | gpl-3.0 | 1,494 |
/*
* Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.forclift.bugs
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.Matchers
import org.scalatest.Spec
import edu.ucla.cs.starai.forclift._
import edu.ucla.cs.starai.forclift.examples.models._
@RunWith(classOf[JUnitRunner])
class TestBug12 extends ModelBehaviours {
describe("Bug12Model") {
val correctLogWMC = 146.52023270334251 +- 0.00001
def model = new WeightedCNFModel {
def theoryString = """
domain Page 4 {llqanon1}
predicate pageclassfaculty(Page) 1 2
predicate pageclassstudent(Page) 3 4
predicate linked(Page,Page) 3 4
predicate f1(Page,Page) 3 4
predicate f2(Page,Page) 2 4
predicate f3(Page,Page) 1 4
predicate f4(Page,Page) 5 4
predicate f5(Page,Page) 3 2
linked(X,Y) ∨ ¬f5(X,Y) ∨ ¬linked(Y,X)
pageclassfaculty(X) ∨ ¬f1(X,Y) ∨ ¬linked(Y,X) ∨ ¬pageclassstudent(Y)
pageclassstudent(X) ∨ ¬f2(X,Y) ∨ ¬linked(Y,X) ∨ ¬pageclassfaculty(Y)
pageclassstudent(llqanon1)
f1(X,Y) ∨ linked(Y,X)
f1(X,Y) ∨ pageclassstudent(Y)
f1(X,Y) ∨ ¬pageclassfaculty(X)
f2(X,Y) ∨ linked(Y,X)
f2(X,Y) ∨ pageclassfaculty(Y)
f2(X,Y) ∨ ¬pageclassstudent(X)
f4(X,Y) ∨ ¬linked(X,Y)
f5(X,Y) ∨ linked(Y,X)
f5(X,Y) ∨ ¬linked(X,Y)
"""
}
it should behave like smallModel(model, correctLogWMC)
}
}
| UCLA-StarAI/Forclift | src/test/scala/edu/ucla/cs/starai/forclift/bugs/TestBug12.scala | Scala | apache-2.0 | 1,992 |
/*
* Copyright 2016 sadikovi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sadikovi.spark.netflow
import com.github.sadikovi.netflowlib.Buffers.RecordBuffer
import com.github.sadikovi.testutil.UnitTestSuite
class NetFlowOptionsSuite extends UnitTestSuite {
test("NetFlowOptions - predicate pushdown is enabled when no option is provided") {
val opts = new NetFlowOptions(Map.empty)
opts.usePredicatePushdown should be (true)
}
test("NetFlowOptions - predicate pushdown is disabled when 'false' is provided") {
val opts = new NetFlowOptions(Map("predicate-pushdown" -> "false"))
opts.usePredicatePushdown should be (false)
}
test("NetFlowOptions - predicate pushdown is enabled when 'true' is provided") {
val opts = new NetFlowOptions(Map("predicate-pushdown" -> "true"))
opts.usePredicatePushdown should be (true)
}
test("NetFlowOptions - test buffer size") {
// check that buffer size is default
var opts = new NetFlowOptions(Map.empty)
opts.bufferSize should be (RecordBuffer.BUFFER_LENGTH_2)
// set buffer size to be 64Kb
opts = new NetFlowOptions(Map("buffer" -> "64Kb"))
opts.bufferSize should be (64 * 1024)
}
test("NetFlowOptions - invalid buffer size") {
// buffer size >> Integer.MAX_VALUE
var err = intercept[RuntimeException] {
new NetFlowOptions(Map("buffer" -> "10Gb"))
}
assert(err.getMessage.contains("> maximum buffer size"))
// negative buffer size
intercept[NumberFormatException] {
new NetFlowOptions(Map("buffer" -> "-1"))
}
// buffer size < min buffer size
err = intercept[RuntimeException] {
new NetFlowOptions(Map("buffer" -> "10"))
}
assert(err.getMessage.contains("< minimum buffer size"))
// just for completeness, test on wrong buffer value
intercept[NumberFormatException] {
new NetFlowOptions(Map("buffer" -> "wrong"))
}
}
test("NetFlowOptions - stringify is enabled by default") {
val opts = new NetFlowOptions(Map.empty)
opts.applyConversion should be (true)
}
test("NetFlowOptions - stringify is disabled, if false") {
val opts = new NetFlowOptions(Map("stringify" -> "false"))
opts.applyConversion should be (false)
}
test("NetFlowOptions - stringify is enabled, if true") {
val opts = new NetFlowOptions(Map("stringify" -> "true"))
opts.applyConversion should be (true)
}
test("NetFlowOptions - toString 1") {
val opts = new NetFlowOptions(Map.empty)
opts.toString should be ("NetFlowOptions(applyConversion=true, " +
s"bufferSize=${RecordBuffer.BUFFER_LENGTH_2}, usePredicatePushdown=true)")
}
test("NetFlowOptions - toString 2") {
val opts = new NetFlowOptions(Map("stringify" -> "false", "buffer" -> "32768",
"predicate-pushdown" -> "false"))
opts.toString should be ("NetFlowOptions(applyConversion=false, " +
s"bufferSize=32768, usePredicatePushdown=false)")
}
}
| sadikovi/spark-netflow | src/test/scala/com/github/sadikovi/spark/netflow/NetFlowOptionsSuite.scala | Scala | apache-2.0 | 3,481 |
package co.s4n.template
case class RestMessage(val id: String)
case class ResponseMessage(val id: String) | juanrubiano/actorPerRequest | ms-service/src/main/scala/co/s4n/template/Domain.scala | Scala | mit | 107 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.tf
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.T
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import scala.util.Random
class ReluGradSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val reluGrad = ReluGrad[Float]
val input = T(Tensor[Float](2, 2, 2).apply1(_ => Random.nextFloat()),
Tensor[Float](2, 2, 2).apply1(_ => Random.nextFloat()))
runSerializationTest(reluGrad, input)
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/tf/ReluGradSpec.scala | Scala | apache-2.0 | 1,166 |
package dotty.tools.dotc
package transform
import TreeTransforms._
import core.DenotTransformers._
import core.Denotations._
import core.SymDenotations._
import core.Contexts._
import core.Symbols._
import core.Types._
import core.Flags._
import core.Constants._
import core.StdNames._
import core.Decorators._
import core.TypeErasure.isErasedType
import core.Phases.Phase
import typer._
import typer.ErrorReporting._
import reporting.ThrowingReporter
import ast.Trees._
import ast.{tpd, untpd}
import util.SourcePosition
import collection.mutable
import ProtoTypes._
import java.lang.AssertionError
import scala.util.control.NonFatal
/** Run by -Ycheck option after a given phase, this class retypes all syntax trees
* and verifies that the type of each tree node so obtained conforms to the type found in the tree node.
* It also performs the following checks:
*
* - The owner of each definition is the same as the owner of the current typing context.
* - Ident nodes do not refer to a denotation that would need a select to be accessible
* (see tpd.needsSelect).
* - After typer, identifiers and select nodes refer to terms only (all types should be
* represented as TypeTrees then).
*/
class TreeChecker {
import ast.tpd._
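  // Typical usage is indirect, through the compiler itself: compiling with, e.g.,
  // -Ycheck:all (or -Ycheck:<phaseName>) re-types the trees after the named
  // phase(s) and runs the assertions below.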
private def previousPhases(phases: List[Phase])(implicit ctx: Context): List[Phase] = phases match {
case (phase: TreeTransformer) :: phases1 =>
val subPhases = phase.transformations.map(_.phase)
val previousSubPhases = previousPhases(subPhases.toList)
if (previousSubPhases.length == subPhases.length) previousSubPhases ::: previousPhases(phases1)
else previousSubPhases
case phase :: phases1 if phase ne ctx.phase =>
phase :: previousPhases(phases1)
case _ =>
Nil
}
def check(phasesToRun: Seq[Phase], ctx: Context) = {
val prevPhase = ctx.phase.prev // can be a mini-phase
    val squashedPhase = ctx.squashed(prevPhase)
    println(s"checking ${ctx.compilationUnit} after phase ${squashedPhase}")
val checkingCtx = ctx.fresh
.setTyperState(ctx.typerState.withReporter(new ThrowingReporter(ctx.typerState.reporter)))
val checker = new Checker(previousPhases(phasesToRun.toList)(ctx))
try checker.typedExpr(ctx.compilationUnit.tpdTree)(checkingCtx)
catch {
case NonFatal(ex) =>
implicit val ctx: Context = checkingCtx
println(i"*** error while checking after phase ${checkingCtx.phase.prev} ***")
throw ex
}
}
class Checker(phasesToCheck: Seq[Phase]) extends ReTyper {
val nowDefinedSyms = new mutable.HashSet[Symbol]
val everDefinedSyms = new mutable.HashMap[Symbol, Tree]
def withDefinedSym[T](tree: untpd.Tree)(op: => T)(implicit ctx: Context): T = tree match {
case tree: DefTree =>
val sym = tree.symbol
everDefinedSyms.get(sym) match {
case Some(t) =>
            if (t ne tree)
ctx.warning(i"symbol ${sym.fullName} is defined at least twice in different parts of AST")
// should become an error
case None =>
everDefinedSyms(sym) = tree
}
assert(!nowDefinedSyms.contains(sym), i"doubly defined symbol: ${sym.fullName} in $tree")
        if (ctx.settings.YcheckMods.value) {
tree match {
case t: MemberDef =>
if (t.name ne sym.name) ctx.warning(s"symbol ${sym.fullName} name doesn't correspond to AST: ${t}")
if (sym.flags != t.mods.flags) ctx.warning(s"symbol ${sym.fullName} flags ${sym.flags} doesn't match AST definition flags ${t.mods.flags}")
// todo: compare trees inside annotations
case _ =>
}
}
nowDefinedSyms += tree.symbol
//println(i"defined: ${tree.symbol}")
val res = op
nowDefinedSyms -= tree.symbol
//println(i"undefined: ${tree.symbol}")
res
case _ => op
}
def withDefinedSyms[T](trees: List[untpd.Tree])(op: => T)(implicit ctx: Context) =
trees.foldRightBN(op)(withDefinedSym(_)(_))
def withDefinedSymss[T](vparamss: List[List[untpd.ValDef]])(op: => T)(implicit ctx: Context): T =
vparamss.foldRightBN(op)(withDefinedSyms(_)(_))
def assertDefined(tree: untpd.Tree)(implicit ctx: Context) =
if (tree.symbol.maybeOwner.isTerm)
assert(nowDefinedSyms contains tree.symbol, i"undefined symbol ${tree.symbol}")
override def typedUnadapted(tree: untpd.Tree, pt: Type)(implicit ctx: Context): tpd.Tree = {
val res = tree match {
case _: untpd.UnApply =>
// can't recheck patterns
tree.asInstanceOf[tpd.Tree]
case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[_] =>
super.typedUnadapted(tree)
case _ if tree.isType =>
promote(tree)
case _ =>
val tree1 = super.typedUnadapted(tree, pt)
def isSubType(tp1: Type, tp2: Type) =
(tp1 eq tp2) || // accept NoType / NoType
(tp1 <:< tp2)
def divergenceMsg(tp1: Type, tp2: Type) =
s"""Types differ
|Original type : ${tree.typeOpt.show}
|After checking: ${tree1.tpe.show}
|Original tree : ${tree.show}
|After checking: ${tree1.show}
|Why different :
""".stripMargin + core.TypeComparer.explained((tp1 <:< tp2)(_))
if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted
assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt))
tree1
}
checkNoOrphans(res.tpe)
phasesToCheck.foreach(_.checkPostCondition(res))
res
}
/** Check that PolyParams and MethodParams refer to an enclosing type */
def checkNoOrphans(tp: Type)(implicit ctx: Context) = new TypeMap() {
val definedBinders = mutable.Set[Type]()
def apply(tp: Type): Type = {
tp match {
case tp: BindingType =>
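            // remember this binder while its scope is traversed, so that any
            // ParamType encountered below can be validated against it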
definedBinders += tp
mapOver(tp)
definedBinders -= tp
case tp: ParamType =>
assert(definedBinders.contains(tp.binder), s"orphan param: $tp")
case _ =>
mapOver(tp)
}
tp
}
}.apply(tp)
override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree = {
assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase)
assert(tree.isType || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}")
assertDefined(tree)
super.typedIdent(tree, pt)
}
override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase)
super.typedSelect(tree, pt)
}
override def typedThis(tree: untpd.This)(implicit ctx: Context) = {
val res = super.typedThis(tree)
val cls = res.symbol
assert(cls.isStaticOwner || ctx.owner.isContainedIn(cls), i"error while typing $tree, ${ctx.owner} is not contained in $cls")
res
}
private def checkOwner(tree: untpd.Tree)(implicit ctx: Context): Unit = {
def ownerMatches(symOwner: Symbol, ctxOwner: Symbol): Boolean =
symOwner == ctxOwner ||
ctxOwner.isWeakOwner && ownerMatches(symOwner, ctxOwner.owner)
assert(ownerMatches(tree.symbol.owner, ctx.owner),
i"bad owner; ${tree.symbol} has owner ${tree.symbol.owner}, expected was ${ctx.owner}\n" +
i"owner chain = ${tree.symbol.ownersIterator.toList}%, %, ctxOwners = ${ctx.outersIterator.map(_.owner).toList}%, %")
}
override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(implicit ctx: Context) = {
val TypeDef(_, impl @ Template(constr, _, _, _)) = cdef
assert(cdef.symbol == cls)
assert(impl.symbol.owner == cls)
assert(constr.symbol.owner == cls)
assert(cls.primaryConstructor == constr.symbol, i"mismatch, primary constructor ${cls.primaryConstructor}, in tree = ${constr.symbol}")
checkOwner(impl)
checkOwner(impl.constr)
super.typedClassDef(cdef, cls)
}
override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) =
withDefinedSyms(ddef.tparams) {
withDefinedSymss(ddef.vparamss) {
super.typedDefDef(ddef, sym)
}
}
override def typedCase(tree: untpd.CaseDef, pt: Type, selType: Type, gadtSyms: Set[Symbol])(implicit ctx: Context): CaseDef = {
withDefinedSyms(tree.pat.asInstanceOf[tpd.Tree].filterSubTrees(_.isInstanceOf[ast.Trees.Bind[_]])) {
super.typedCase(tree, pt, selType, gadtSyms)
}
}
override def typedBlock(tree: untpd.Block, pt: Type)(implicit ctx: Context) =
withDefinedSyms(tree.stats) { super.typedBlock(tree, pt) }
/** Check that all defined symbols have legal owners.
* An owner is legal if it is either the same as the context's owner
* or there's an owner chain of valdefs starting at the context's owner and
* reaching up to the symbol's owner. The reason for this relaxed matching
* is that we should be able to pull out an expression as an initializer
* of a helper value without having to do a change owner traversal of the expression.
*/
override def typedStats(trees: List[untpd.Tree], exprOwner: Symbol)(implicit ctx: Context): List[Tree] = {
for (tree <- trees) tree match {
case tree: untpd.DefTree => checkOwner(tree)
case _: untpd.Thicket => assert(false, i"unexpanded thicket $tree in statement sequence $trees%\n%")
case _ =>
}
super.typedStats(trees, exprOwner)
}
override def ensureNoLocalRefs(block: Block, pt: Type, forcedDefined: Boolean = false)(implicit ctx: Context): Tree =
block
override def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = {
def isPrimaryConstructorReturn =
ctx.owner.isPrimaryConstructor && pt.isRef(ctx.owner.owner) && tree.tpe.isRef(defn.UnitClass)
if (ctx.mode.isExpr &&
!tree.isEmpty &&
!isPrimaryConstructorReturn &&
!pt.isInstanceOf[FunProto])
assert(tree.tpe <:< pt,
s"error at ${sourcePos(tree.pos)}\n" +
err.typeMismatchStr(tree.tpe, pt) + "\ntree = " + tree)
tree
}
}
}
object TreeChecker extends TreeChecker | AlexSikia/dotty | src/dotty/tools/dotc/transform/TreeChecker.scala | Scala | bsd-3-clause | 10,503 |
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scray.hdfs.io.index.format.sequence.mapping
import org.apache.hadoop.io.Writable
/**
* Combine key-value pairs
*/
trait SequenceKeyValuePair[IDXKEY <: Writable, IDXVALUE <: Writable, DATAKEY <: Writable, DATAVALUE <: Writable] extends SequenceKey[IDXKEY, IDXVALUE] with SequneceValue[DATAKEY, DATAVALUE]
{} | scray/scray | scray-hdfs/modules/scray-hdfs-writer/src/main/scala/scray/hdfs/io/index/format/sequence/mapping/SequenceKeyValuePair.scala | Scala | apache-2.0 | 990 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.table.stringexpr
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
class SetOperatorsStringExpressionTest extends TableTestBase {
@Test
def testUnionAll(): Unit = {
val util = streamTestUtil()
val t1 = util.addTable[(Int, Long, String)]('int, 'long, 'string)
val t2 = util.addTable[(Int, Long, String)]('int, 'long, 'string)
val resScala = t1.unionAll(t2).select('int)
val resJava = t1.unionAll(t2).select("int")
verifyTableEquals(resJava, resScala)
}
@Test
def testUnionAllWithFilter(): Unit = {
val util = streamTestUtil()
val t1 = util.addTable[(Int, Long, String)]('int, 'long, 'string)
val t2 = util.addTable[(Int, Long, Double, String)]('int, 'long, 'double, 'string)
val resScala = t1.unionAll(t2.select('int, 'long, 'string)).filter('int < 2).select('int)
val resJava = t1.unionAll(t2.select("int, long, string")).filter("int < 2").select("int")
verifyTableEquals(resJava, resScala)
}
}
| tzulitai/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/stringexpr/SetOperatorsStringExpressionTest.scala | Scala | apache-2.0 | 1,905 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.example.advanced
import com.outworkers.phantom.connectors
import com.outworkers.phantom.connectors.CassandraConnection
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.example.basics._
import scala.concurrent.{Future => ScalaFuture}
// In this section, we will show how you can create a real-world Cassandra service with com.outworkers.phantom.
// First you have to think of what queries you need to perform. The usual.
// Say you come up with id and author.
// You will end up with several mapping tables enabling you to do all the queries you want.
// Now you are left with maintaining consistency at application level.
// We usually overlay a service on top of the mapping tables.
// To keep all the complexity away from other parts of the application.
class RecipesDatabase(override val connector: CassandraConnection) extends Database[RecipesDatabase](connector) {
object Recipes extends Recipes with Connector
object AdvancedRecipes extends AdvancedRecipes with Connector
object AdvancedRecipesByTitle extends AdvancedRecipesByTitle with Connector
object CompositeKeyRecipes extends CompositeKeyRecipes with Connector
object ThriftTable extends ThriftTable with connector.Connector
object SecondaryKeyRecipes extends SecondaryKeyRecipes with Connector
/**
* Right now you can go for a really neat trick of the trade.
* You can automatically initialise all your tables using phantom's schema auto-generation capabilities.
* We are using the same connector as the tables do, which will link to the exact same database session.
*
   * The below example uses the Future.join method, which is Twitter-specific and not available in the plain Scala Future API.
* Nonetheless, if you are using Scala you can almost replicate the below with a Future.sequence or Future.traverse over a List.
*
* This is a very neat and simple trick which will initialise all your tables in parallel at any time you want. The initialisation will automatically
* trigger the mechanism that connects to Cassandra and gives you back a session.
*/
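  // A minimal sketch of that idea (an addition, not part of the original service):
  // initialise the mapping tables in parallel with Future.sequence. It assumes
  // phantom's usual `create.ifNotExists().future()` table API and the implicit
  // execution context brought in by the dsl import.
  def createTables(): ScalaFuture[Unit] = {
    ScalaFuture.sequence(
      Seq(
        AdvancedRecipes.create.ifNotExists().future(),
        AdvancedRecipesByTitle.create.ifNotExists().future()
      )
    ).map(_ => ())
  }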
// For instance, right now when you want to insert a new recipe.
// Say from a JavaScript client with a fancy interface.
// You need to insert one record into the actual table.
  // And another into the title -> id mapping table.
// This is a trivial example showing how you can map and flatMap your path to glory.
// Non blocking, 3 lines of code, 15 seconds of typing effort. Done.
def store(recipe: Recipe): ScalaFuture[ResultSet] = {
for {
first <- AdvancedRecipes.store(recipe).future()
byTitle <- AdvancedRecipesByTitle.store(recipe.title -> recipe.id).future()
} yield first
}
}
object RecipesDatabase extends RecipesDatabase(connectors.ContactPoint.local.keySpace("recipes"))
| outworkers/phantom | phantom-example/src/main/scala/com/outworkers/phantom/example/advanced/RecipesDatabase.scala | Scala | apache-2.0 | 3,445 |
package io.iohk.ethereum.vm
import akka.util.ByteString
import akka.util.ByteString.{empty => bEmpty}
import io.iohk.ethereum.crypto.kec256
import io.iohk.ethereum.domain.{Account, Address, UInt256}
import io.iohk.ethereum.Fixtures.{Blocks => BlockFixtures}
import io.iohk.ethereum.vm.Fixtures.blockchainConfig
import io.iohk.ethereum.vm.MockWorldState.{PC, TestVM}
import org.bouncycastle.util.encoders.Hex
import org.scalatest.prop.TableFor5
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
// scalastyle:off magic.number
class ShiftingOpCodeSpec extends AnyWordSpec with Matchers with ScalaCheckPropertyChecks {
val array_0x01 = Array(1.toByte)
val array_0x00 = Array(0.toByte)
val byteCode_0x80: Array[Byte] = array_0x01 ++ Array.fill(255)(0.toByte)
val byteCode_0xff: Array[Byte] = Array.fill(256)(1.toByte)
val byteCode_0xfe: Array[Byte] = Array.fill(255)(1.toByte) ++ array_0x00
val byteCode_0x7f: Array[Byte] = Array.fill(255)(1.toByte)
val byteString_0x40 = ByteString(Hex.decode("4000000000000000000000000000000000000000000000000000000000000000"))
val byteString_0x07f = ByteString(Hex.decode("000000000000000000000000000000000000000000000000000000000000007f"))
val byteString_0xfe = ByteString(Hex.decode("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"))
val byteString_0x7f = ByteString(Hex.decode("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"))
val byteString_0x80 = ByteString(Hex.decode("8000000000000000000000000000000000000000000000000000000000000000"))
val byteString_0xff = ByteString(Hex.decode("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"))
val byteString_0xc0 = ByteString(Hex.decode("c000000000000000000000000000000000000000000000000000000000000000"))
val byteString_0x01 = ByteString(Hex.decode("0000000000000000000000000000000000000000000000000000000000000001"))
val byteString_0x00 = ByteString(Hex.decode("0000000000000000000000000000000000000000000000000000000000000000"))
val array_0x80: Array[Byte] = byteString_0x80.toArray
val array_0xff: Array[Byte] = byteString_0xff.toArray
val array_0x07f: Array[Byte] = byteString_0x07f.toArray
val array_0xfe: Array[Byte] = byteString_0xfe.toArray
// shift left
val SHLTable: TableFor5[Int, ByteString, ByteString, Int, Array[Byte]] = Table(
("number", "code", "stackArg1", "stackArg2", "result"),
(1, Assembly(PUSH1, byteString_0x01, PUSH1, 0x00).code, byteString_0x01, 0x00, array_0x01),
(2, Assembly(PUSH1, byteString_0x01, PUSH1, 0x01).code, byteString_0x01, 0x01, Array(2.toByte)),
(3, Assembly(PUSH1, byteString_0x01, PUSH1, 0xff).code, byteString_0x01, 0xff, array_0x80),
(4, Assembly(PUSH1, byteString_0x01, PUSH1, 0x0100).code, byteString_0x01, 0x0100, array_0x00),
(5, Assembly(PUSH1, byteString_0x01, PUSH1, 0x0101).code, byteString_0x01, 0x0101, array_0x00),
(6, Assembly(PUSH1, byteString_0xff, PUSH1, 0x00).code, byteString_0xff, 0x00, array_0xff),
(7, Assembly(PUSH1, byteString_0xff, PUSH1, 0x01).code, byteString_0xff, 0x01, array_0xfe),
(8, Assembly(PUSH1, byteString_0xff, PUSH1, 0xff).code, byteString_0xff, 0xff, array_0x80),
(9, Assembly(PUSH1, byteString_0xff, PUSH1, 0x0100).code, byteString_0xff, 0x0100, array_0x00),
(10, Assembly(PUSH1, byteString_0x00, PUSH1, 0x01).code, byteString_0x00, 0x01, array_0x00),
(11, Assembly(PUSH1, byteString_0x7f, PUSH1, 0x01).code, byteString_0x7f, 0x01, array_0xfe)
)
// shift right (logical)
val SHRTable: TableFor5[Int, ByteString, ByteString, Int, Array[Byte]] = Table(
("number", "code", "stackArg1", "stackArg2", "result"),
(1, Assembly(PUSH1, byteString_0x01, PUSH1, 0x00).code, byteString_0x01, 0x00, array_0x01),
(2, Assembly(PUSH1, byteString_0x01, PUSH1, 0x01).code, byteString_0x01, 0x01, array_0x00),
(3, Assembly(PUSH1, byteString_0x80, PUSH1, 0x01).code, byteString_0x80, 0x01, byteString_0x40.toArray),
(4, Assembly(PUSH1, byteString_0x80, PUSH1, 0xff).code, byteString_0x80, 0xff, array_0x01),
(5, Assembly(PUSH1, byteString_0x80, PUSH1, 0x0100).code, byteString_0x80, 0x0100, array_0x00),
(6, Assembly(PUSH1, byteString_0x80, PUSH1, 0x0101).code, byteString_0x80, 0x0101, array_0x00),
(7, Assembly(PUSH1, byteString_0xff, PUSH1, 0x00).code, byteString_0xff, 0x00, array_0xff),
(8, Assembly(PUSH1, byteString_0xff, PUSH1, 0x01).code, byteString_0xff, 0x01, byteString_0x7f.toArray),
(9, Assembly(PUSH1, byteString_0xff, PUSH1, 0xff).code, byteString_0xff, 0xff, array_0x01),
(10, Assembly(PUSH1, byteString_0xff, PUSH1, 0x0100).code, byteString_0xff, 0x0100, array_0x00),
(11, Assembly(PUSH1, byteString_0x00, PUSH1, 0x01).code, byteString_0x00, 0x01, array_0x00)
)
// shift right (arithmetic)
val SARTable: TableFor5[Int, ByteString, ByteString, Int, Array[Byte]] = Table(
("number", "code", "stackArg1", "stackArg2", "result"),
(1, Assembly(PUSH1, byteString_0x01, PUSH1, 0x00).code, byteString_0x01, 0x00, array_0x01),
(2, Assembly(PUSH1, byteString_0x01, PUSH1, 0x01).code, byteString_0x01, 0x01, array_0x00),
(3, Assembly(PUSH1, byteString_0x80, PUSH1, 0x01).code, byteString_0x80, 0x01, byteString_0xc0.toArray),
(4, Assembly(PUSH1, byteString_0x80, PUSH1, 0xff).code, byteString_0x80, 0xff, array_0xff),
(5, Assembly(PUSH1, byteString_0x80, PUSH1, 0x0100).code, byteString_0x80, 0x0100, array_0xff),
(6, Assembly(PUSH1, byteString_0x80, PUSH1, 0x0101).code, byteString_0x80, 0x0101, array_0xff),
(7, Assembly(PUSH1, byteString_0xff, PUSH1, 0x00).code, byteString_0xff, 0x00, array_0xff),
(8, Assembly(PUSH1, byteString_0xff, PUSH1, 0x01).code, byteString_0xff, 0x01, array_0xff),
(9, Assembly(PUSH1, byteString_0xff, PUSH1, 0xff).code, byteString_0xff, 0xff, array_0xff),
(10, Assembly(PUSH1, byteString_0xff, PUSH1, 0x0100).code, byteString_0xff, 0x0100, array_0xff),
(11, Assembly(PUSH1, byteString_0x00, PUSH1, 0x01).code, byteString_0x00, 0x01, array_0x00),
(12, Assembly(PUSH1, byteString_0x40, PUSH1, 0xfe).code, byteString_0x40, 0xfe, array_0x01),
(13, Assembly(PUSH1, byteString_0x7f, PUSH1, 0xf8).code, byteString_0x7f, 0xf8, array_0x07f),
(14, Assembly(PUSH1, byteString_0x7f, PUSH1, 0xfe).code, byteString_0x7f, 0xfe, array_0x01),
(15, Assembly(PUSH1, byteString_0x7f, PUSH1, 0xff).code, byteString_0x7f, 0xff, array_0x00),
(16, Assembly(PUSH1, byteString_0x7f, PUSH1, 0x100).code, byteString_0x7f, 0x100, array_0x00),
(17, Assembly(PUSH1, byteString_0x00, PUSH1, 0x0101).code, byteString_0x00, 0x0101, array_0x00)
)
"Shift OpCodes" when {
"calling a program that executes a shifting opcodes" should {
SHLTable.foreach { case (index, assemblyCode, arg1, arg2, expectedResult) =>
s"execute $index test case for SHL opcode: arg=${Hex.toHexString(arg1.toArray)}, " +
s"shift=${arg2.toHexString} with expected result ${Hex.toHexString(expectedResult)}" in new TestSetup {
val state: ProgramState[MockWorldState, MockStorage] = prepareProgramState(assemblyCode, arg1, arg2)
val result: ProgramState[MockWorldState, MockStorage] = SHL.execute(state)
result.stack.pop._1 shouldBe UInt256(expectedResult)
}
}
SHRTable.foreach { case (index, assemblyCode, arg1, arg2, expectedResult) =>
s"execute $index test case for SHR opcode: arg=${Hex.toHexString(arg1.toArray)}, " +
s"shift=${arg2.toHexString} with expected result ${Hex.toHexString(expectedResult)}" in new TestSetup {
val state: ProgramState[MockWorldState, MockStorage] = prepareProgramState(assemblyCode, arg1, arg2)
val result: ProgramState[MockWorldState, MockStorage] = SHR.execute(state)
result.stack.pop._1 shouldBe UInt256(expectedResult)
}
}
SARTable.foreach { case (index, assemblyCode, arg1, arg2, expectedResult) =>
s"execute $index test case fo SAR opcode: arg=${Hex.toHexString(arg1.toArray)}, " +
s"shift=${arg2.toHexString} with expected result ${Hex.toHexString(expectedResult)}" in new TestSetup {
val state: ProgramState[MockWorldState, MockStorage] = prepareProgramState(assemblyCode, arg1, arg2)
val result: ProgramState[MockWorldState, MockStorage] = SAR.execute(state)
result.stack.pop._1 shouldBe UInt256(expectedResult)
}
}
}
}
trait TestSetup {
val config = EvmConfig.ConstantinopleConfigBuilder(blockchainConfig)
val vm = new TestVM
val senderAddr = Address(0xcafebabeL)
val senderAcc = Account(nonce = 1, balance = 1000000)
val accountWithCode: ByteString => Account = code => Account.empty().withCode(kec256(code))
def defaultWorld: MockWorldState = MockWorldState().saveAccount(senderAddr, senderAcc)
val blockHeader = BlockFixtures.ValidBlock.header.copy(
difficulty = 1000000,
number = 1,
gasLimit = 10000000,
gasUsed = 0,
unixTimestamp = 0
)
def getContext(world: MockWorldState = defaultWorld, inputData: ByteString = bEmpty): PC =
ProgramContext(
callerAddr = senderAddr,
originAddr = senderAddr,
recipientAddr = None,
gasPrice = 1,
startGas = 1000000,
inputData = inputData,
value = 100,
endowment = 100,
doTransfer = true,
blockHeader = blockHeader,
callDepth = 0,
world = world,
initialAddressesToDelete = Set(),
evmConfig = config,
originalWorld = world
)
def prepareProgramState(
assemblyCode: ByteString,
arg1: ByteString,
arg2: Int
): ProgramState[MockWorldState, MockStorage] = {
val newWorld = defaultWorld
.saveAccount(senderAddr, accountWithCode(assemblyCode))
.saveCode(senderAddr, assemblyCode)
val context: PC = getContext(newWorld)
val env = ExecEnv(context, ByteString.empty, context.originAddr)
val initMemory = Memory.empty.store(UInt256.Zero, assemblyCode)
val initStack: Seq[UInt256] = Seq(UInt256(arg1), UInt256(arg2))
ProgramState(vm, context, env)
.withStack(Stack.empty().push(initStack))
.withMemory(initMemory)
}
}
}
| input-output-hk/etc-client | src/test/scala/io/iohk/ethereum/vm/ShiftingOpCodeSpec.scala | Scala | mit | 10,354 |
package co.verdigris.spark.connector.cql
import com.datastax.driver.core.Cluster
class AwsS3USWest1ConnectionFactoryTest extends ConnectionFactorySpec {
override def beforeAll {
super.beforeAll
factory = AwsS3USWest1ConnectionFactory
}
describe(".clusterBuilder") {
it("should return a new Cluster.Builder instance") {
factory.clusterBuilder(cassandraConf) shouldBe a [Cluster.Builder]
}
}
describe(".createCluster") {
it("should return a new Cluster instance") {
factory.createCluster(cassandraConf) shouldBe a [Cluster]
}
}
}
| VerdigrisTech/spark-cassandra-connection-factory | src/test/scala/co/verdigris/spark/connector/cql/AwsS3USWest1ConnectionFactoryTest.scala | Scala | apache-2.0 | 581 |
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.choam
package kcas
abstract class KCASSpec extends BaseSpec {
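  // Folds the individual CAS descriptions into one k-CAS descriptor and then
  // attempts to perform all of them atomically.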
private final def tryPerformBatch(ops: List[CASD[_]]): Boolean = {
val desc = ops.foldLeft(kcasImpl.start()) { (d, op) =>
op match {
case op: CASD[a] =>
d.withCAS[a](op.ref, op.ov, op.nv)
}
}
desc.tryPerform()
}
"k-CAS" should "succeed if old values match, and there is no contention" in {
val r1 = Ref.mk("r1")
val r2 = Ref.mk("r2")
val r3 = Ref.mk("r3")
val succ = tryPerformBatch(List(
CASD(r1, "r1", "x"),
CASD(r2, "r2", "y"),
CASD(r3, "r3", "z")
))
assert(succ)
kcasImpl.read(r1) shouldBe theSameInstanceAs ("x")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("y")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("z")
}
it should "fail if any of the old values doesn't match" in {
val r1 = Ref.mk("r1")
val r2 = Ref.mk("r2")
val r3 = Ref.mk("r3")
def go(): Boolean = {
tryPerformBatch(List(
CASD(r1, "r1", "x"),
CASD(r2, "r2", "y"),
CASD(r3, "r3", "z")
))
}
r1.unsafeSet("x")
assert(!go())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("x")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("r3")
r1.unsafeSet("r1")
r2.unsafeSet("x")
assert(!go())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("x")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("r3")
r2.unsafeSet("r2")
r3.unsafeSet("x")
assert(!go())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("x")
r3.unsafeSet("r3")
assert(go())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("x")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("y")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("z")
}
it should "not accept more than one CAS for the same ref" in {
val r1 = Ref.mk("r1")
val r2 = Ref.mk("r2")
val exc = intercept[Exception] {
tryPerformBatch(List(
CASD(r1, "r1", "x"),
CASD(r2, "r2", "y"),
CASD(r1, "r1", "x") // this is a duplicate
))
}
exc.getMessage should include ("Impossible k-CAS")
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
}
it should "be able to succeed after one successful operation" in {
val r1 = Ref.mk("r1")
val r2 = Ref.mk("r2")
val r3 = Ref.mk("r3")
assert(tryPerformBatch(List(
CASD(r1, "r1", "x"),
CASD(r2, "r2", "y"),
CASD(r3, "r3", "z")
)))
assert(tryPerformBatch(List(
CASD(r1, "x", "x2"),
CASD(r2, "y", "y2"),
CASD(r3, "z", "z2")
)))
assert(!tryPerformBatch(List(
CASD(r1, "x2", "x3"),
CASD(r2, "yyy", "y3"), // this will fail
CASD(r3, "z2", "z3")
)))
kcasImpl.read(r1) shouldBe theSameInstanceAs ("x2")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("y2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("z2")
}
"Snapshotting" should "work" in {
val r1 = Ref.mk("r1")
val r2 = Ref.mk("r2")
val r3 = Ref.mk("r3")
val d0 = kcasImpl.start()
val d1 = d0.withCAS(r1, "r1", "r1x")
val snap = d1.snapshot()
val d21 = d1.withCAS(r2, "foo", "bar")
assert(!d21.tryPerform())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("r3")
val d22 = snap.load().withCAS(r3, "r3", "r3x")
assert(d22.tryPerform())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1x")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("r3x")
}
it should "work when cancelling" in {
val r1 = Ref.mk("r1")
val r2 = Ref.mk("r2")
val r3 = Ref.mk("r3")
val d0 = kcasImpl.start()
val d1 = d0.withCAS(r1, "r1", "r1x")
val snap = d1.snapshot()
val d21 = d1.withCAS(r2, "foo", "bar")
d21.cancel()
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("r3")
val d22 = snap.load().withCAS(r3, "r3", "r3x")
assert(d22.tryPerform())
kcasImpl.read(r1) shouldBe theSameInstanceAs ("r1x")
kcasImpl.read(r2) shouldBe theSameInstanceAs ("r2")
kcasImpl.read(r3) shouldBe theSameInstanceAs ("r3x")
}
}
final class KCASSpecNaiveKCAS
extends KCASSpec
with SpecNaiveKCAS
final class KCASSpecCASN
extends KCASSpec
with SpecCASN
final class KCASSpecMCAS
extends KCASSpec
with SpecMCAS
final class KCASSpecEMCAS
extends KCASSpec
with SpecEMCAS
| durban/exp-reagents | core/src/test/scala/dev/tauri/choam/kcas/kcasSpec.scala | Scala | apache-2.0 | 5,523 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Mon Sep 9 13:30:41 EDT 2013
* @see LICENSE (MIT style license file).
* @see http://en.wikipedia.org/wiki/Perceptron
* @see hebb.mit.edu/courses/9.641/2002/lectures/lecture03.pdf
*/
package scalation.analytics
import scala.math.exp
import scala.util.control.Breaks.{break, breakable}
import scalation.linalgebra.{MatrixD, VectoD, VectorD}
import scalation.random.Random
import scalation.util.Error
import LogisticFunction.sigmoid
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Perceptron` class supports single-valued 2-layer (input and output)
* Neural-Networks. Given several input vectors and output values (training data),
* fit the weights 'w' connecting the layers, so that for a new
* input vector 'zi', the net can predict the output value 'zo', i.e.,
* 'zi --> zo = f (w dot zi)'.
* Note, w0 is treated as the bias, so x0 must be 1.0.
* @param x the input matrix (training data consisting of m input vectors)
* @param y the output vector (training data consisting of m output values)
* @param eta the learning/convergence rate
*/
class Perceptron (x: MatrixD, y: VectorD, eta: Double = 1.0)
extends Predictor with Error
{
private val MAX_ITER = 200 // maximum number of iterations
private val EPSILON = 1E-9 // number close to zero
private val DEBUG = true // debug flag
private val m = x.dim1 // number of data points (input vectors)
private val n = x.dim2 // dimensionality of the input
private val _1 = new VectorD (m); _1.set (1.0)
if (y.dim != m) flaw ("constructor", "dimensions of x and y are incompatible")
println ("Create a Perceptron with " + n + " input, " + 1 + " output nodes")
private var w: VectorD = null // weight vector between input and output layers
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set the initial weight vector 'w' manually before training.
* @param w0 the initial weights for w
*/
def setWeights (w0: VectorD) { w = w0 }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the initial weight vector 'w' with values in (0, 1) before training.
* @param i the random number stream to use
*/
def setWeights (i: Int = 0)
{
val rn = new Random (i) // change i to get different random numbers
w = new VectorD (n)
for (i <- 0 until n) w(i) = rn.gen
} // setWeights
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given training data x and y, fit the weight vector w.
*/
def train () { if (w == null) setWeights (); minimizeError () }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Minimize the error in the prediction by adjusting the weight vector 'w'.
* The error 'e' is simply the difference between the target value 'y' and the
* predicted value 'z'. Minimize 1/2 of the dot product of error with itself
* using gradient-descent. The gradient is '-x.t * (e * z * (_1 - z))', so
* move in the opposite direction of the gradient.
*/
def minimizeError ()
{
breakable { for (k <- 0 until MAX_ITER) { // kth learning phase
val z = sigmoid (x * w) // vector of predicted outputs
val e = y - z // vector of outputs from training data
w += x.t * (e * z * (_1 - z)) * eta // adjust the weights
println ("weights for " + k + "th phase: w = " + w + ", error e = " + e)
if ((e dot e) < 2.0 * EPSILON) break // break when error is small enough
}} // for
} // minimizeError
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the fit, the weight vector 'w'.
*/
def fit: VectorD = w
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given a new input vector 'zi', predict the output/response value 'zo'.
* @param zi the new input vector
*/
def predict (zi: VectoD): Double = sigmoid (w dot zi)
} // Perceptron class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PerceptronTest` object is used to test the `Perceptron` class. For this
 *  test, the initial weights are used for prediction.
*/
object PerceptronTest extends App
{
val x = new MatrixD (1, 3) // training data - input vectors (not used)
val y = new VectorD (1) // training data - output vectors (not used)
val ann = new Perceptron (x, y) // create a Perceptron
val w = VectorD (0.0, 0.5, 0.5) // weight vector w (input to output layer)
ann.setWeights (w) // set initial weights
val z_i = VectorD (1.0, 1.0, 1.0) // predict output z_o from input z_i
println ("input vector: z_i = " + z_i)
println ("output vector: z_o = " + ann.predict (z_i))
} // PerceptronTest object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PerceptronTest2` object is used to test the `Perceptron` class. For this
* test, training data is used to fit the weights before using them for prediction.
* @see http://www4.rgu.ac.uk/files/chapter3%20-%20bp.pdf
*/
object PerceptronTest2 extends App
{
val x = new MatrixD ((1, 3), 1.0, 0.35, 0.9) // training data - input vectors
val y = VectorD (0.5) // training data - output vectors
val ann = new Perceptron (x, y) // create a Perceptron
val w = VectorD (0.0, 0.5, 0.5) // weight vector w (input to output layer)
ann.setWeights (w) // set initial weights
println ("input vector: x(0) = " + x(0))
println ("=== target output value: y(0) = " + y(0))
println ("--- initial output value: z = " + ann.predict (x(0)))
ann.train () // fit the weights using training data
println ("+++ trained output value: z = " + ann.predict (x(0)))
} // PerceptronTest2 object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/analytics/Perceptron.scala | Scala | mit | 6,507 |
package com.twitter.bijection.macros.impl
import scala.collection.mutable.{Map => MMap}
import scala.reflect.macros.Context
class TupleUtils[C <: Context](val c: C) {
import c.universe._
private[this] val tupleCaseClassCache = MMap.empty[Type, Tree]
// Takes a case class and generates the equiv tuple to it
def tupleCaseClassEquivalent(tpe: Type): Seq[Tree] =
tpe.declarations.collect {
case m: MethodSymbol if m.isCaseAccessor =>
m.returnType match {
case tpe if IsCaseClassImpl.isCaseClassType(c)(tpe) =>
tupleCaseClassCache.getOrElseUpdate(
tpe, {
val equiv = tupleCaseClassEquivalent(tpe)
AppliedTypeTree(Ident(newTypeName("Tuple" + equiv.size)), equiv.toList)
}
)
case tpe => Ident(tpe.typeSymbol.name.toTypeName)
}
}.toSeq
}
| twitter/bijection | bijection-macros/src/main/scala/com/twitter/bijection/macros/impl/TupleUtils.scala | Scala | apache-2.0 | 877 |
/*
* Copyright 2013-2015 Websudos, Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Explicit consent must be obtained from the copyright owner, Websudos Limited before any redistribution is made.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.websudos.phantom.udt
import org.scalatest.{Matchers, FlatSpec}
class UDTSerialisationTest extends FlatSpec with Matchers {
it should "serialise an UDT value to the correct CQL query" in {
}
}
| analytically/phantom | phantom-udt/src/test/scala/com/websudos/phantom/udt/UDTSerialisationTest.scala | Scala | bsd-2-clause | 1,705 |
package uk.me.chrs.inflect
import org.scalatest.FunSuite
import Inflect_EN.Builder._
class BuilderTest extends FunSuite {
test("builder can do count and some") {
assert("I see " + using(0)(count("person"),q(" with "),some("opinion")) === "I see 0 people with no opinions")
assert("I see " + using(1)(count("person"),q(" with "),some("opinion")) === "I see 1 person with an opinion")
assert("I see " + using(8)(count("person"),q(" with "),some("opinion")) === "I see 8 people with opinions")
}
test("builder can do plural too") {
assert("There " + using(0)(plural("is"),q(" "),count("door"),q(" on your "),plural("house")) ===
"There are 0 doors on your houses")
assert("There " + using(1)(plural("is"),q(" "),count("door"),q(" on your "),plural("house")) ===
"There is 1 door on your house")
assert("There " + using(8)(plural("is"),q(" "),count("door"),q(" on your "),plural("house")) ===
"There are 8 doors on your houses")
}
}
| nespera/inflect | src/test/scala/uk/me/chrs/inflect/BuilderTest.scala | Scala | apache-2.0 | 983 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet.spark
import org.apache.mxnet.NDArray
/**
* A wrapper for serialize & deserialize <pre>[[org.apache.mxnet.NDArray]]</pre> in spark job
* @author Yizhi Liu
*/
class MXNDArray(@transient private var ndArray: NDArray) extends Serializable {
require(ndArray != null, "Undefined ndArray")
private val arrayBytes: Array[Byte] = ndArray.serialize()
def get: NDArray = {
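    // `ndArray` is @transient, so it is null after this wrapper is deserialized
    // on an executor; rebuild it from the serialized bytes on first access.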
if (ndArray == null) {
ndArray = NDArray.deserialize(arrayBytes)
}
ndArray
}
}
object MXNDArray {
def apply(ndArray: NDArray): MXNDArray = new MXNDArray(ndArray)
}
| tlby/mxnet | scala-package/spark/src/main/scala/org/apache/mxnet/spark/MXNDArray.scala | Scala | apache-2.0 | 1,389 |
package week5
object ListHighOrderFun {
val nums = List(2, -4, 5, 7, 1)
val fruits = List("apples", "pineapple", "orange", "banana")
nums filter (x => x > 0)
nums filterNot (x => x > 0)
nums partition (x => x > 0)
nums takeWhile (x => x > 0)
nums dropWhile (x => x > 0)
nums span (x => x > 0)
val duplicates = List("a", "a", "a", "b", "c", "c", "a")
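  // pack groups consecutive duplicates into sublists,
  // e.g. pack(List("a", "a", "b")) == List(List("a", "a"), List("b"))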
def pack[T](xs: List[T]): List[List[T]] = xs match {
case Nil => Nil
case x :: xs1 => {
val (first, rest) = xs span (y => y == x)
first :: pack(rest)
}
}
pack(duplicates)
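  // encode performs run-length encoding on top of pack,
  // e.g. encode(List("a", "a", "b")) == List(("a", 2), ("b", 1))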
def encode[T](xs: List[T]): List[(T, Int)] =
pack(xs) map (ys => (ys.head, ys.length))
encode(duplicates)
} | M4573R/playground-notes | functional-programming-principles-in-scala/week5/ListHighOrderFun.scala | Scala | mit | 697 |
package com.arcusys.learn.liferay.update.version250.slide
import com.arcusys.valamis.persistence.common.DbNameUtils._
import com.arcusys.valamis.persistence.common.SlickProfile
import scala.slick.driver.JdbcProfile
trait SlideTableComponent {
protected val driver: JdbcProfile
import driver.simple._
case class SlideSet(id: Option[Long] = None,
title: String,
description: String,
courseId: Long,
logo: Option[String] = None,
isTemplate: Boolean = false,
isSelectedContinuity: Boolean = false,
themeId: Option[Long] = None)
class SlideSetTable(tag: Tag) extends Table[SlideSet](tag, tblName("SLIDE_SET")) {
def id = column[Long]("ID", O.PrimaryKey, O.AutoInc)
def title = column[String]("TITLE")
def description = column[String]("DESCRIPTION", O.DBType(varCharMax))
def courseId = column[Long]("COURSE_ID")
def logo = column[Option[String]]("LOGO")
def isTemplate = column[Boolean]("IS_TEMPLATE")
def isSelectedContinuity = column[Boolean]("IS_SELECTED_CONTINUITY")
def themeId = column[Option[Long]]("THEME_ID")
def * = (id.?, title, description, courseId, logo, isTemplate, isSelectedContinuity, themeId) <>(SlideSet.tupled, SlideSet.unapply)
def slideThemeFK = foreignKey(fkName("SLIDESET_TO_THEME"), themeId, slideThemes)(x => x.id)
}
case class Slide(id: Option[Long] = None,
title: String,
bgColor: Option[String] = None,
bgImage: Option[String] = None,
font: Option[String] = None,
questionFont: Option[String] = None,
answerFont: Option[String] = None,
answerBg: Option[String] = None,
duration: Option[String] = None,
leftSlideId: Option[Long] = None,
topSlideId: Option[Long] = None,
slideSetId: Long,
statementVerb: Option[String] = None,
statementObject: Option[String] = None,
statementCategoryId: Option[String] = None,
isTemplate: Boolean = false,
isLessonSummary: Boolean = false)
class SlideTable(tag: Tag) extends Table[Slide](tag, tblName("SLIDE")) {
def id = column[Long]("ID", O.PrimaryKey, O.AutoInc)
def title = column[String]("TITLE")
def bgColor = column[Option[String]]("BG_COLOR")
def bgImage = column[Option[String]]("BG_IMAGE")
def font = column[Option[String]]("FONT")
def questionFont = column[Option[String]]("QUESTION_FONT")
def answerFont = column[Option[String]]("ANSWER_FONT")
def answerBg = column[Option[String]]("ANSWER_BG")
def duration = column[Option[String]]("DURATION")
def leftSlideId = column[Option[Long]]("LEFT_SLIDE_ID")
def topSlideId = column[Option[Long]]("TOP_SLIDE_ID")
def slideSetId = column[Long]("SLIDE_SET_ID")
def statementVerb = column[Option[String]]("STATEMENT_VERB")
def statementObject = column[Option[String]]("STATEMENT_OBJECT")
def statementCategoryId = column[Option[String]]("STATEMENT_CATEGORY_ID")
def isTemplate = column[Boolean]("IS_TEMPLATE")
def isLessonSummary = column[Boolean]("IS_LESSON_SUMMARY")
def * = (
id.?,
title,
bgColor,
bgImage,
font,
questionFont,
answerFont,
answerBg,
duration,
leftSlideId,
topSlideId,
slideSetId,
statementVerb,
statementObject,
statementCategoryId,
isTemplate,
isLessonSummary) <>(Slide.tupled, Slide.unapply)
def slideSetFK = foreignKey(fkName("SLIDE_TO_SLIDESET"), slideSetId, slideSets)(x => x.id, onDelete = ForeignKeyAction.Cascade)
}
case class SlideElement(id: Option[Long] = None,
top: String,
left: String,
width: String,
height: String,
zIndex: String,
content: String,
slideEntityType: String,
slideId: Long,
correctLinkedSlideId: Option[Long] = None,
incorrectLinkedSlideId: Option[Long] = None,
notifyCorrectAnswer: Option[Boolean] = None)
class SlideElementTable(tag: Tag) extends Table[SlideElement](tag, tblName("SLIDE_ELEMENT")) {
def id = column[Long]("ID", O.PrimaryKey, O.AutoInc)
def top = column[String]("TOP")
def left = column[String]("LEFT")
def width = column[String]("WIDTH")
def height = column[String]("HEIGHT")
def zIndex = column[String]("Z_INDEX")
def content = column[String]("CONTENT", O.DBType(varCharMax))
def slideEntityType = column[String]("SLIDE_ENTITY_TYPE")
def slideId = column[Long]("SLIDE_ID")
def correctLinkedSlideId = column[Option[Long]]("CORRECT_LINKED_SLIDE_ID")
def incorrectLinkedSlideId = column[Option[Long]]("INCORRECT_LINKED_SLIDE_ID")
def notifyCorrectAnswer = column[Option[Boolean]]("NOTIFY_CORRECT_ANSWER")
def * = (
id.?,
top,
left,
width,
height,
zIndex,
content,
slideEntityType,
slideId,
correctLinkedSlideId,
incorrectLinkedSlideId,
notifyCorrectAnswer) <>(SlideElement.tupled, SlideElement.unapply)
def slideFK = foreignKey(fkName("SLIDE_ELEMENT_TO_SLIDE"), slideId, slides)(x => x.id, onDelete = ForeignKeyAction.Cascade)
}
case class SlideTheme(id: Option[Long] = None,
title: String = "Theme",
bgColor: Option[String] = None,
bgImage: Option[String] = None,
font: Option[String] = None,
questionFont: Option[String] = None,
answerFont: Option[String] = None,
answerBg: Option[String] = None,
userId: Option[Long] = None,
isDefault: Boolean = false)
class SlideThemeTable(tag : Tag) extends Table[SlideTheme](tag, tblName("SLIDE_THEME")) {
def id = column[Long]("ID", O.PrimaryKey, O.AutoInc)
def title = column[String]("TITLE", O.NotNull)
def bgColor = column[Option[String]]("BGCOLOR")
def bgImage = column[Option[String]]("BGIMAGE")
def font = column[Option[String]]("FONT")
def questionFont = column[Option[String]]("QUESTIONFONT")
def answerFont = column[Option[String]]("ANSWERFONT")
def answerBg = column[Option[String]]("ANSWERBG")
def userId = column[Option[Long]]("USER_ID")
def isDefault = column[Boolean]("IS_DEFAULT", O.Default(false))
def * = (
id.?,
title,
bgColor,
bgImage,
font,
questionFont,
answerFont,
answerBg,
userId,
isDefault)<>(SlideTheme.tupled, SlideTheme.unapply)
}
val slideSets = TableQuery[SlideSetTable]
val slides = TableQuery[SlideTable]
val slideElements = TableQuery[SlideElementTable]
val slideThemes = TableQuery[SlideThemeTable]
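  // Illustrative sketch (not part of the original component): the foreign-key
  // columns above support joins like the one below. Assumes the Slick driver's
  // query DSL (e.g. driver.simple._) is in scope, as the table definitions require.
  def slidesWithSlideSetTitles = for {
    slide    <- slides
    slideSet <- slideSets if slide.slideSetId === slideSet.id
  } yield (slide.title, slideSet.title)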
}
| igor-borisov/valamis | learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version250/slide/SlideTableComponent.scala | Scala | gpl-3.0 | 7,218 |
/*
* Copyright 2017-2018 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package freestyle.free
import cats.{~>}
package cache {
class KeyValueProvider[Key, Val] {
/* CacheM trait is a type-class of functors for which key-value store operations
* can be provided.
*
* We assume that the actual store is too big or remote to allow for general
* operations over all values, or to search or to iterate over all keys
*/
@free sealed trait CacheM {
      // Gets the value associated with a key, if there is one
def get(key: Key): FS[Option[Val]]
// Sets the value of a key to a newValue.
def put(key: Key, newVal: Val): FS[Unit]
// Copy all of the mappings from the specified map to this cache
def putAll(keyValues: Map[Key, Val]): FS[Unit]
      // If the specified key is not already associated with a value, associate it with the given value.
def putIfAbsent(key: Key, newVal: Val): FS[Unit]
// Removes the entry for the key if one exists
def del(key: Key): FS[Unit]
// Returns whether there is an entry for key or not.
def has(key: Key): FS[Boolean]
// Returns the set of keys in the store
def keys: FS[List[Key]]
// Removes all entries
def clear: FS[Unit]
      // Replaces the entry for a key only if currently mapped to some value
def replace(key: Key, newVal: Val): FS[Unit]
      // Returns true if this cache contains no key-value mappings.
def isEmpty: FS[Boolean]
}
/*
* Ideal Equations for a CacheM. We use m,n for keys, v,w for values.
* Using different variables in an equation means that their values are different.
* We use `>>=` for `flatMap`, and `>>` for the sequence operator.
*
* For `put`:
* - On a same key, only the right-most (latest) `put` counts:
* put(m,v) >> put(m,w) === put(m,w)
* - `put` operations on different keys commute:
* put(m,v) >> put(n,w) === put(n,w) >> put(m,v)
*
* For `del`:
* - Deletes on a same key are idempotent:
* del(m) >> del(m) === del(m)
* - Deletes on different keys commute:
* del(m) >> del(n) === del(n) >> del(m)
* - A put followed by a delete on a same key is the same as doing nothing
* put(m,v) >> del(m) === return Unit
* - A del followed by a put on a same key is the same as the put
* del(m) >> put(m,v) === put(m,v)
* - Del and put on different keys commute
* put(m,v) >> del(n) === del(n) >> put(m, v)
*
* For `get`:
* - The result of a `get` should be that of the latest `put`
* put(m,v) >> get(m) === put(m,v) >> return( Some(v) )
* - The result of a `get` after a `del` is Nothing
* del(m) >> get(m) === del(m) >> return(None)
* - `get` commutes with `del` and `put` on different keys:
     *     put(m,v) >> get(n) === get(n) >>= (w => put(m,v) >> return(w))
     *     del(m) >> get(n)   === get(n) >>= (w => del(m) >> return(w))
*
*/
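    /* Illustrative sketch (not part of the library): a program exercising the
     * "put then get" law above through the generated algebra. The name
     * `putGetLawProgram` is hypothetical, and freestyle's syntax implicits
     * (e.g. freestyle.free.implicits._) are assumed to be in scope. */
    def putGetLawProgram[F[_]](key: Key, newVal: Val)(
        implicit cache: CacheM[F]
    ): FreeS[F, Boolean] =
      for {
        _   <- cache.put(key, newVal)
        got <- cache.get(key)
      } yield got.contains(newVal)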
trait Implicits {
implicit def cacheHandler[F[_], G[_]](
implicit rawMap: KeyValueMap[F, Key, Val],
interpret: F ~> G
): CacheM.Handler[G] = new CacheHandler[F, G]
private[this] class CacheHandler[F[_], G[_]](
implicit rawMap: KeyValueMap[F, Key, Val],
interpret: F ~> G
) extends CacheM.Handler[G] {
override def get(key: Key): G[Option[Val]] =
interpret(rawMap.get(key))
override def put(key: Key, newVal: Val): G[Unit] =
interpret(rawMap.put(key, newVal))
override def putAll(keyValues: Map[Key, Val]): G[Unit] =
interpret(rawMap.putAll(keyValues))
override def putIfAbsent(key: Key, newVal: Val): G[Unit] =
interpret(rawMap.putIfAbsent(key, newVal))
override def del(key: Key): G[Unit] =
interpret(rawMap.delete(key))
override def has(key: Key): G[Boolean] =
interpret(rawMap.hasKey(key))
override def keys: G[List[Key]] =
interpret(rawMap.keys)
override def clear: G[Unit] =
interpret(rawMap.clear)
override def replace(key: Key, newVal: Val): G[Unit] =
interpret(rawMap.replace(key, newVal))
override def isEmpty: G[Boolean] =
interpret(rawMap.isEmpty)
}
}
object implicits extends Implicits
}
trait KeyValueMap[F[_], Key, Val] {
def get(key: Key): F[Option[Val]]
def put(key: Key, newVal: Val): F[Unit]
def putAll(keyValues: Map[Key, Val]): F[Unit]
def putIfAbsent(key: Key, newVal: Val): F[Unit]
def delete(key: Key): F[Unit]
def hasKey(key: Key): F[Boolean]
def keys: F[List[Key]]
def clear: F[Unit]
def replace(key: Key, newVal: Val): F[Unit]
def isEmpty: F[Boolean]
}
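  /* Illustrative only (not part of the original module): a minimal in-memory
   * KeyValueMap over cats.Id — the kind of raw store the CacheM handler above
   * can interpret into. Thread safety is out of scope for this sketch. */
  private[cache] final class InMemoryKeyValueMap[Key, Val] extends KeyValueMap[cats.Id, Key, Val] {
    private[this] val store = scala.collection.mutable.Map.empty[Key, Val]
    def get(key: Key): Option[Val] = store.get(key)
    def put(key: Key, newVal: Val): Unit = store.update(key, newVal)
    def putAll(keyValues: Map[Key, Val]): Unit = { store ++= keyValues; () }
    def putIfAbsent(key: Key, newVal: Val): Unit = { store.getOrElseUpdate(key, newVal); () }
    def delete(key: Key): Unit = { store -= key; () }
    def hasKey(key: Key): Boolean = store.contains(key)
    def keys: List[Key] = store.keys.toList
    def clear: Unit = store.clear()
    def replace(key: Key, newVal: Val): Unit = if (store.contains(key)) store.update(key, newVal)
    def isEmpty: Boolean = store.isEmpty
  }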
}
package object cache {
def apply[Key, Val] = new KeyValueProvider[Key, Val]
}
| frees-io/freestyle | modules/cache/shared/src/main/scala/free/cache.scala | Scala | apache-2.0 | 5,515 |
package net.liftweb.test
package snippet
import lib._
import scala.xml.{NodeSeq, Text}
import net.liftweb.util._
import net.liftweb.common._
import java.util.Date
import Helpers._
class HelloWorld {
lazy val date: Box[Date] = DependencyFactory.inject[Date] // inject the date
// replace the contents of the element with id "time" with the date
def howdy = "#time *" #> date.map(_.toString)
/*
lazy val date: Date = DependencyFactory.time.vend // create the date via factory
def howdy = "#time *" #> date.toString
*/
}
| joescii/lift-test | src/main/scala/net/liftweb/test/snippet/HelloWorld.scala | Scala | apache-2.0 | 542 |
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.wegtam.tensei.agent.helpers
import java.io._
import java.net.URI
import java.nio.file.Files
import java.sql.Time
import java.time.{ LocalDate, LocalTime }
import java.util
import java.util.{ Date, Locale }
import akka.actor.{ Actor, ActorLogging, Props }
import akka.event.{ DiagnosticLoggingAdapter, Logging }
import com.wegtam.tensei.adt.ConnectionInformation
import com.wegtam.tensei.agent.exceptions.AccessValidationException
import com.wegtam.tensei.agent.helpers.ExcelToCSVConverter.ExcelConverterMessages.{
Convert,
ConvertResult,
Stop
}
import org.apache.poi.ss.usermodel._
import scala.util.{ Failure, Success, Try }
/**
* This actor converts a given Excel file into a corresponding CSV file.
*
* Code based on:
* http://svn.apache.org/repos/asf/poi/trunk/src/examples/src/org/apache/poi/ss/examples/ToCSV.java
*
* @param source The source connection information of the file.
* @param agentRunIdentifier An optional agent run identifier which is usually an uuid.
*/
class ExcelToCSVConverter(source: ConnectionInformation, agentRunIdentifier: Option[String])
extends Actor
with ActorLogging {
override val log
: DiagnosticLoggingAdapter = Logging(this) // Override the standard logger to be able to add stuff via MDC.
log.mdc(LoggingHelpers.generateMdcEntryForRunIdentifier(agentRunIdentifier))
// The separator for the single rows that is used when converting the
// content from Excel to CSV
val separator: String =
context.system.settings.config.getString("tensei.agents.parser.excel-row-separator")
/*
* EXCEL_STYLE_ESCAPING (0)
* ========================
* Identifies that the CSV file should obey Excel's formatting conventions
* with regard to escaping certain embedded characters - the field separator,
* speech mark and end of line (EOL) character
*
* UNIX_STYLE_ESCAPING (1)
* ========================
* Identifies that the CSV file should obey UNIX formatting conventions
* with regard to escaping certain embedded characters - the field separator
* and end of line (EOL) character
*
**/
val usedStyleEscaping = 0
@throws[Exception](classOf[Exception])
override def postStop(): Unit = {
log.clearMDC()
super.postStop()
}
override def receive: Receive = {
case Convert =>
log.info(s"Received `Convert` message for file {}", source.uri.getSchemeSpecificPart)
// Determine the locale for the source that is delivered with the connection information
val locale = source.languageTag.fold(Locale.ROOT)(l => Locale.forLanguageTag(l))
val updatedSource =
ExcelToCSVConverter.processConversion(source, locale, usedStyleEscaping, separator)
updatedSource match {
case Failure(e) =>
// FIXME -> Return correct error
sender() ! e
case Success(newSource) =>
sender() ! ConvertResult(newSource)
}
context stop self
case Stop =>
log.info("Received `Stop` message")
context stop self
}
}
object ExcelToCSVConverter {
val EXCEL_STYLE_ESCAPING = 0
val UNIX_STYLE_ESCAPING = 1
/**
* Helper method to create an ExcelToCSVConverter.
*
* @param source The source connection information for the file.
* @param agentRunIdentifier An optional agent run identifier which is usually an uuid.
*
* @return The props to create the actor.
*/
def props(source: ConnectionInformation, agentRunIdentifier: Option[String]): Props =
Props(classOf[ExcelToCSVConverter], source, agentRunIdentifier)
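  /* Illustrative usage sketch (not part of the original code): create the
   * converter and ask it to convert; it replies with ConvertResult(updatedSource)
   * or a Throwable. The method name and parameters here are hypothetical; an
   * ActorSystem and an implicit ask Timeout must be supplied by the caller. */
  def convertSketch(system: akka.actor.ActorSystem, source: ConnectionInformation)(
      implicit timeout: akka.util.Timeout
  ): scala.concurrent.Future[Any] = {
    import akka.pattern.ask
    val converter = system.actorOf(props(source, agentRunIdentifier = None))
    converter ? ExcelConverterMessages.Convert
  }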
sealed trait ExcelConverterMessages
object ExcelConverterMessages {
/**
* Start the conversion of the Excel file into the temporary CSV file.
*/
final case object Convert extends ExcelConverterMessages
/**
* Return the modified connection information with the updated file path for
* the created CSV file.
*
* @param source The updated connection information.
*/
final case class ConvertResult(source: ConnectionInformation) extends ExcelConverterMessages
/**
* Stop the actor
*/
final case object Stop extends ExcelConverterMessages
}
final case class ConvertedData(csvData: util.ArrayList[util.ArrayList[String]], maxRowWidth: Int)
def processConversion(source: ConnectionInformation,
locale: Locale,
usedStyleEscaping: Int,
separator: String): Try[ConnectionInformation] =
for {
file <- ExcelToCSVConverter.openFile(source.uri)
data <- ExcelToCSVConverter.createCSVData(file, locale)
newUri <- ExcelToCSVConverter.saveExcelFile(file,
data.csvData,
data.maxRowWidth,
usedStyleEscaping,
separator)
} yield {
source.copy(uri = newUri)
}
/**
* Try to open the file defined in the source connection information and validate
* whether the file exists and is readable.
*
* @param uri The source uri of the file.
* @return
*/
def openFile(uri: URI): Try[File] =
Try {
      val extension = uri.getSchemeSpecificPart.split("\\.(?=[^\\.]+$)")
if (extension.length != 2)
Failure(
new AccessValidationException(
s"File ${uri.getSchemeSpecificPart} does not end on valid extension."
)
)
else if (!extension(1).equalsIgnoreCase("xls") && !extension(1).equalsIgnoreCase("xlsx"))
Failure(
new AccessValidationException(
s"File ${uri.getSchemeSpecificPart} does not end on `xls` or `xlsx`."
)
)
else {
val f: File = new File(uri.getSchemeSpecificPart)
if (!f.exists())
Failure(
            new FileNotFoundException(s"File at path ${uri.getSchemeSpecificPart} does not exist.")
)
else if (!f.canRead)
Failure(
            new IllegalAccessError(s"File at path ${uri.getSchemeSpecificPart} is not accessible.")
)
else Success(f)
}
    } match {
      case Success(result) => result
      case Failure(error)  => Failure(error)
    }
/**
* Convert the source data from the Excel file into a CSV file.
*
* @param file The source file that should be converted
*
* @return The converted data type
*/
  def createCSVData(file: File, locale: Locale): Try[ConvertedData] = {
// workbook relevant variables
val fis = Files.newInputStream(file.toPath)
val workbook = WorkbookFactory.create(fis)
val evaluator: FormulaEvaluator = workbook.getCreationHelper.createFormulaEvaluator()
    val formatter: DataFormatter   = new DataFormatter(locale, true)
// buffer variables
val csvData = new util.ArrayList[util.ArrayList[String]]()
var maxRowWidth = 0
// FIXME: Currently, we only process the first sheet of an Excel file
//val numSheets = workbook.getNumberOfSheets
for (i <- 0 until 1) {
val sheet = workbook.getSheetAt(i)
if (sheet.getPhysicalNumberOfRows > 0) {
for (r <- 0 to sheet.getLastRowNum) {
val result = rowToCSV(sheet.getRow(r), formatter, evaluator)
csvData.add(result._1)
if (maxRowWidth < result._2)
maxRowWidth = result._2
}
}
}
Success(ConvertedData(csvData, maxRowWidth))
}
/**
* Store the content of the CSV target file.
*
* @param file The source file.
* @param csvData The data that was extracted from the source file.
   * @param maxRowWidth The maximum row width (number of cells) across all rows.
   * @param formattingConvention The escaping convention used when writing the CSV output.
* @param separator The separator of the columns in the target file.
* @return The updated URI for the newly created target file.
*/
def saveExcelFile(file: File,
csvData: util.ArrayList[util.ArrayList[String]],
maxRowWidth: Int,
formattingConvention: Int,
separator: String): Try[URI] = {
val destinationFileURI = File.createTempFile(s"${file.getName}-", ".csv").toURI
val fw: FileWriter = new FileWriter(destinationFileURI.getSchemeSpecificPart)
val bw: BufferedWriter = new BufferedWriter(fw)
var line: util.ArrayList[String] = new util.ArrayList[String]()
var buffer: StringBuffer = null
var csvLineElement: String = null
val s = csvData.size
for (i <- 0 until s) {
buffer = new StringBuffer()
line = csvData.get(i)
for (j <- 0 until maxRowWidth) {
if (line.size() > j) {
csvLineElement = line.get(j)
if (csvLineElement != null)
buffer.append(
escapeEmbeddedCharacters(csvLineElement, formattingConvention, separator)
)
}
if (j < (maxRowWidth - 1))
buffer.append(separator)
}
// Once the line is built, write it away to the CSV file.
bw.write(buffer.toString.trim())
// Condition the inclusion of new line characters so as to
// avoid an additional, superfluous, new line at the end of
// the file.
if (i < (csvData.size() - 1)) {
bw.newLine()
}
}
bw.flush()
bw.close()
Success(destinationFileURI)
}
def rowToCSV(row: Row,
formatter: DataFormatter,
evaluator: FormulaEvaluator): (util.ArrayList[String], Int) = {
var lastCellNum = 0
var maxRowWidth = 0
val csvLine = new util.ArrayList[String]()
// Check to ensure that a row was recovered from the sheet as it is
// possible that one or more rows between other populated rows could be
// missing - blank. If the row does contain cells then...
if (row != null) {
// Get the index for the right most cell on the row and then
// step along the row from left to right recovering the contents
// of each cell, converting that into a formatted String and
// then storing the String into the csvLine ArrayList.
lastCellNum = row.getLastCellNum.toInt
for (i <- 0 to lastCellNum) {
val cell = row.getCell(i)
if (cell == null) {
csvLine.add("")
} else {
if (Try(cell.getCellFormula.nonEmpty).toOption.getOrElse(false))
csvLine.add(formatter.formatCellValue(cell, evaluator))
else {
val isDate: Option[Date] = Try {
if (DateUtil.isCellDateFormatted(cell))
cell.getDateCellValue match {
case null =>
throw new IllegalArgumentException("Null returned by getDateCellValue!")
case d => d
} else
throw new IllegalArgumentException("Cell not date formatted!")
}.toOption
val isSqlDate
: Option[java.sql.Date] = Try(java.sql.Date.valueOf(cell.toString)).toOption
val isLocalDate: Option[LocalDate] = Try(LocalDate.parse(cell.toString)).toOption
val isSqlTime
: Option[Time] = Try(Time.valueOf(formatter.formatCellValue(cell))).toOption
val isLocalTime: Option[LocalTime] = Try(
LocalTime.parse(formatter.formatCellValue(cell))
).toOption
                // If we have a `Date`, we must write the raw string to the target file;
                // otherwise the Excel date format cannot be evaluated with a DFASDL.
                // We must also check the cell content for LocalTime or SqlTime: time
                // values come in just as raw as a Date, so we must distinguish them,
                // and they MUST be written to the target file with the `formatter`.
// Date
if ((isDate.isDefined || isSqlDate.isDefined || isLocalDate.isDefined) && isSqlTime.isEmpty && isLocalTime.isEmpty) {
val date: java.sql.Date =
isDate.fold {
isSqlDate.fold {
java.sql.Date.valueOf(LocalDate.parse(cell.toString))
} { _ =>
java.sql.Date.valueOf(cell.toString)
}
} { _ =>
new java.sql.Date(cell.getDateCellValue.getTime)
}
csvLine.add(date.toString)
}
// Time
else if (isSqlTime.isDefined || isLocalTime.isDefined) {
val time: Time =
isSqlTime.getOrElse(Time.valueOf(LocalTime.parse(formatter.formatCellValue(cell))))
csvLine.add(time.toString)
} else
csvLine.add(formatter.formatCellValue(cell))
}
}
}
// Make a note of the index number of the right most cell. This value
// will later be used to ensure that the matrix of data in the CSV file
// is square.
if (lastCellNum > maxRowWidth) {
maxRowWidth = lastCellNum
}
}
(csvLine, maxRowWidth)
}
/**
* Helper method that escaped characters depending on the given formatting convention.
*
* @param theField The field that should be escaped
* @param formattingConvention The formatting convention for the field
* @param separator The separator for the elements
* @return The escaped content of the field
*/
def escapeEmbeddedCharacters(theField: String,
formattingConvention: Int,
separator: String): String = {
var buffer: StringBuffer = null
var field: String = theField
// If the fields contents should be formatted to conform with Excel's
// convention....
if (formattingConvention == EXCEL_STYLE_ESCAPING) {
// Firstly, check if there are any speech marks (") in the field;
      // each occurrence must be escaped with another set of speech marks
// and then the entire field should be enclosed within another
// set of speech marks. Thus, "Yes" he said would become
// """Yes"" he said"
      if (field.contains("\"")) {
        buffer = new StringBuffer(field.replaceAll("\"", "\\\"\\\""))
        buffer.insert(0, "\"")
        buffer.append("\"")
} else {
// If the field contains either embedded separator or EOL
// characters, then escape the whole field by surrounding it
// with speech marks.
buffer = new StringBuffer(field)
        if (buffer.indexOf(separator) > -1 ||
            buffer.indexOf("\n") > -1) {
          buffer.insert(0, "\"")
          buffer.append("\"")
}
}
buffer.toString.trim()
}
// The only other formatting convention this class obeys is the UNIX one
// where any occurrence of the field separator or EOL character will
// be escaped by preceding it with a backslash.
else {
if (field.contains(separator))
        field = field.replaceAll(separator, "\\\\" + separator)
      if (field.contains("\n"))
        field = field.replaceAll("\n", "\\\\\n")
field
}
}
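  /* Worked example of the two conventions (illustrative):
   *   escapeEmbeddedCharacters("say \"hi\", ok", EXCEL_STYLE_ESCAPING, ",")
   *     returns: "say ""hi"", ok"   (embedded quotes doubled, field wrapped in quotes, then trimmed)
   *   escapeEmbeddedCharacters("say hi, ok", UNIX_STYLE_ESCAPING, ",")
   *     returns: say hi\, ok        (separator escaped with a preceding backslash)
   */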
}
| Tensei-Data/tensei-agent | src/main/scala/com/wegtam/tensei/agent/helpers/ExcelToCSVConverter.scala | Scala | agpl-3.0 | 16,286 |
package com.twitter.finagle.service
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.finagle.{FailedFastException, Failure, FailureFlags, Service, WriteException}
import com.twitter.util._
import org.mockito.Matchers.anyObject
import org.mockito.Mockito.{times, verify, when}
import org.scalatest.{BeforeAndAfter, FunSpec}
import org.scalatestplus.mockito.MockitoSugar
import scala.language.reflectiveCalls
class RetryFilterTest extends FunSpec with MockitoSugar with BeforeAndAfter {
var timer: JavaTimer = _
val backoffs = Stream(1.second, 2.seconds, 3.seconds)
val shouldRetryException: PartialFunction[Try[Nothing], Boolean] = {
case Throw(WriteException(_)) => true
case _ => false
}
before {
timer = new JavaTimer(true)
}
after {
timer.stop()
}
val goodResponse = 321
val badResponse = 111
val idempotentRequest = 123
val nonIdempotentRequest = 999
val shouldRetryResponse: PartialFunction[(Int, Try[Int]), Boolean] = {
case (`idempotentRequest`, Throw(WriteException(_))) => true
case (`idempotentRequest`, Return(`badResponse`)) => true
case _ => false
}
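  // Only the idempotent request is retried, either on a WriteException or on
  // the known-bad response; any other request/outcome pair is not retried.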
val exceptionOnlyRetryPolicy = RetryPolicy.tries(3, shouldRetryException)
val retryPolicy = RetryPolicy.tries(3, shouldRetryResponse)
class TriesFixture(retryExceptionsOnly: Boolean) {
val stats = new InMemoryStatsReceiver()
def retriesStat: Seq[Int] = stats.stat("retries")().map(_.toInt)
val service = mock[Service[Int, Int]]
when(service.close(anyObject[Time])) thenReturn Future.Done
val filter =
if (retryExceptionsOnly)
new RetryExceptionsFilter[Int, Int](
RetryPolicy.tries(3, shouldRetryException),
timer,
stats
)
else
new RetryFilter[Int, Int](RetryPolicy.tries(3, shouldRetryResponse), timer, stats)
val retryingService = filter andThen service
}
class PolicyFixture(policy: RetryPolicy[_], retryExceptionsOnly: Boolean, theTimer: Timer) {
val stats = new InMemoryStatsReceiver()
def retriesStat: Seq[Int] = stats.stat("retries")().map(_.toInt)
val filter =
if (retryExceptionsOnly)
new RetryExceptionsFilter[Int, Int](
policy.asInstanceOf[RetryPolicy[Try[Nothing]]],
theTimer,
stats
)
else
new RetryFilter[Int, Int](
policy.asInstanceOf[RetryPolicy[(Int, Try[Int])]],
theTimer,
stats
)
val service = mock[Service[Int, Int]]
when(service.close(anyObject[Time])) thenReturn Future.Done
val retryingService = filter andThen service
}
describe("RetryFilter") {
it("respects RetryBudget") {
val stats = new InMemoryStatsReceiver()
// use a budget that just gets 2 retries
val budgetRetries = 2
val budget = RetryBudget(1.second, minRetriesPerSec = budgetRetries, percentCanRetry = 0.0)
// have a policy that allows for way more retries than the budgets allows for
val policy = RetryPolicy.tries(10, RetryPolicy.WriteExceptionsOnly)
val filter = new RetryExceptionsFilter[Throwable, Int](policy, Timer.Nil, stats, budget)
val service: Service[Throwable, Int] = Service.mk(Future.exception)
val svc = filter.andThen(service)
Time.withCurrentTimeFrozen { _ =>
intercept[FailedFastException] {
Await.result(svc(new FailedFastException("yep")), 5.seconds)
}
assert(1 == stats.counter("retries", "budget_exhausted")())
assert(budgetRetries == stats.stat("retries")().head)
}
}
describe("with RetryPolicy.tries") {
def runWithTries(retryExceptionsOnly: Boolean): Unit = {
it("always try once") {
new TriesFixture(retryExceptionsOnly) {
when(service(123)) thenReturn Future(321)
assert(Await.result(retryingService(123), 5.seconds) == 321)
verify(service)(123)
}
}
it("when failing with WriteExceptions, retry n-1 times") {
new TriesFixture(retryExceptionsOnly) {
when(service(123)) thenReturn Future.exception(WriteException(new Exception))
val f = retryingService(123)
intercept[WriteException] {
Await.result(f, 5.seconds)
}
verify(service, times(3))(123)
}
}
it("when failed with a non-WriteException, fail immediately") {
new TriesFixture(retryExceptionsOnly) {
when(service(123)) thenReturn Future.exception(new Exception("WTF!"))
val e = intercept[Exception] {
Await.result(retryingService(123), 5.seconds)
}
assert(e.getMessage == "WTF!")
verify(service)(123)
assert(retriesStat == Seq(0))
}
}
it("when no retry occurs, no stat update") {
new TriesFixture(retryExceptionsOnly) {
when(service(123)) thenReturn Future(goodResponse)
assert(Await.result(retryingService(123), 5.seconds) == goodResponse)
assert(retriesStat == Seq(0))
}
}
it("propagate interrupts") {
new TriesFixture(retryExceptionsOnly) {
val replyPromise = new Promise[Int] {
@volatile var interrupted: Option[Throwable] = None
setInterruptHandler { case exc => interrupted = Some(exc) }
}
when(service(123)) thenReturn replyPromise
val res = retryingService(123)
assert(!res.isDefined)
assert(replyPromise.interrupted == None)
val exc = new Exception
res.raise(exc)
assert(!res.isDefined)
assert(replyPromise.interrupted == Some(exc))
}
}
}
describe("using RetryPolicy[(Req, Try[Rep])]") {
runWithTries(retryExceptionsOnly = false)
it("when failing with WriteExceptions and non-idempotent request, don't retry") {
new TriesFixture(retryExceptionsOnly = false) {
when(service(nonIdempotentRequest)) thenReturn Future.exception(
WriteException(new Exception)
)
val f = retryingService(nonIdempotentRequest)
intercept[WriteException] {
Await.result(f, 5.seconds)
}
verify(service, times(1))(nonIdempotentRequest)
}
}
it("when succeeds with bad response and idempotent request, retry n-1 times") {
new TriesFixture(retryExceptionsOnly = false) {
when(service(idempotentRequest)) thenReturn Future(badResponse)
val f = retryingService(idempotentRequest)
Await.result(f, 5.seconds)
verify(service, times(3))(idempotentRequest)
}
}
it("when succeeds with bad response and non-idempotent request, don't retry") {
new TriesFixture(retryExceptionsOnly = false) {
when(service(nonIdempotentRequest)) thenReturn Future(badResponse)
val f = retryingService(nonIdempotentRequest)
Await.result(f, 5.seconds)
verify(service, times(1))(nonIdempotentRequest)
}
}
}
describe("using RetryPolicy[Try[Nothing]]") {
runWithTries(retryExceptionsOnly = true)
}
}
describe("with RetryPolicy.backoff: Exception cases") {
describe("using RetryPolicy[(Req, Try[Rep])]") {
testExceptionPolicy(
RetryPolicy.backoff(backoffs)(shouldRetryResponse),
retryExceptionsOnly = false
)
}
describe("using RetryPolicy[Try[Nothing]]") {
testExceptionPolicy(
RetryPolicy.backoff(backoffs)(shouldRetryException),
retryExceptionsOnly = true
)
}
}
describe("with RetryPolicy.backoffJava: Exception cases") {
describe("using RetryPolicy[(Req, Try[Rep])]") {
testExceptionPolicy(
RetryPolicy.backoffJava(Backoff.toJava(backoffs), shouldRetryResponse),
retryExceptionsOnly = false
)
}
describe("using RetryPolicy[Try[Nothing]]") {
testExceptionPolicy(
RetryPolicy.backoffJava(Backoff.toJava(backoffs), shouldRetryException),
retryExceptionsOnly = true
)
}
}
describe("with Success RetryPolicy.backoff") {
testSuccessPolicy(RetryPolicy.backoff(backoffs)(shouldRetryResponse))
}
describe("with Success RetryPolicy.backoffJava") {
testSuccessPolicy(RetryPolicy.backoffJava(Backoff.toJava(backoffs), shouldRetryResponse))
}
describe("non retryable failures") {
it("when failed with a Non-Retryable failure, fail immediately") {
val timer = new MockTimer()
val stats = new InMemoryStatsReceiver()
val svc = mock[Service[Int, Int]]
when(svc.apply(1)).thenReturn(
Future.exception(Failure.rejected("nack")),
Future.exception(Failure("not retryable", FailureFlags.NonRetryable)),
Future.exception(new RuntimeException("never gonna be seen"))
)
val policy = RetryPolicy.tries[Try[Nothing]](3, {
case Throw(_) => true
})
val retryFilter =
new RetryExceptionsFilter[Int, Int](policy, timer, stats, RetryBudget.Infinite)
val retryingSvc = retryFilter.andThen(svc)
Time.withCurrentTimeFrozen { tc =>
// this req will fail, and enqueue a 2nd req waiting for the timer to advance
val f = retryingSvc(1)
verify(svc, times(1))(1)
assert(!f.isDefined)
assert(timer.tasks.size == 1)
// roll the timer and kick off the 2nd request, which should not get
// retried, despite our policy
tc.advance(1.second); timer.tick()
verify(svc, times(2))(1)
val ex = intercept[Failure] {
Await.result(f, 5.seconds)
}
assert(ex.getMessage == "not retryable")
assert(stats.stat("retries")() == Seq(1))
}
}
}
def testExceptionPolicy(policy: RetryPolicy[_], retryExceptionsOnly: Boolean): Unit = {
it("always try once") {
new PolicyFixture(policy, retryExceptionsOnly, timer) {
when(service(123)) thenReturn Future(321)
assert(Await.result(retryingService(123), 5.seconds) == 321)
verify(service)(123)
assert(retriesStat == Seq(0))
}
}
it("when failed with a WriteException, consult the retry strategy") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly, timer) {
Time.withCurrentTimeFrozen { tc =>
when(service(123)) thenReturn Future.exception(WriteException(new Exception))
val f = retryingService(123)
verify(service)(123)
assert(!f.isDefined)
assert(timer.tasks.size == 1)
when(service(123)) thenReturn Future(321) // we succeed next time; tick!
tc.advance(1.second); timer.tick()
verify(service, times(2))(123)
assert(retriesStat == Seq(1))
assert(Await.result(f, 5.seconds) == 321)
}
}
}
it("give up when the retry strategy is exhausted") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly, timer) {
Time.withCurrentTimeFrozen { tc =>
when(service(123)) thenReturn Future.exception(
WriteException(new Exception("i'm exhausted"))
)
val f = retryingService(123)
1 to 3 foreach { i =>
assert(!f.isDefined)
verify(service, times(i))(123)
assert(retriesStat == Seq.empty)
tc.advance(i.seconds); timer.tick()
}
assert(retriesStat == Seq(3))
assert(f.isDefined)
assert(Await.ready(f, 5.seconds).poll.get.isThrow)
val e = intercept[WriteException] {
Await.result(f, 5.seconds)
}
assert(e.getMessage.contains("i'm exhausted"))
}
}
}
it("when failed with a non-WriteException, fail immediately") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly, timer) {
when(service(123)) thenReturn Future.exception(new Exception("WTF!"))
val e = intercept[Exception] {
Await.result(retryingService(123), 5.seconds)
}
assert(e.getMessage == "WTF!")
verify(service)(123)
assert(timer.tasks.isEmpty)
assert(retriesStat == Seq(0))
}
}
it("when failed with a Non-Retryable failure, fail immediately") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly, timer) {
when(service(123)) thenReturn Future.exception(Failure("WTF!", FailureFlags.NonRetryable))
val e = intercept[Exception] {
Await.result(retryingService(123), 5.seconds)
}
assert(e.getMessage == "WTF!")
verify(service)(123)
assert(timer.tasks.isEmpty)
assert(retriesStat == Seq(0))
}
}
it("when no retry occurs, no stat update") {
new PolicyFixture(policy, retryExceptionsOnly, timer) {
when(service(123)) thenReturn Future(321)
assert(Await.result(retryingService(123), 5.seconds) == 321)
assert(retriesStat == Seq(0))
}
}
it("propagate cancellation") {
new PolicyFixture(policy, retryExceptionsOnly, timer) {
val replyPromise = new Promise[Int] {
@volatile var interrupted: Option[Throwable] = None
setInterruptHandler { case exc => interrupted = Some(exc) }
}
when(service(123)) thenReturn replyPromise
val res = retryingService(123)
assert(!res.isDefined)
assert(replyPromise.interrupted == None)
val exc = new Exception
res.raise(exc)
assert(!res.isDefined)
assert(replyPromise.interrupted == Some(exc))
}
}
}
def testSuccessPolicy(policy: RetryPolicy[(Int, Try[Int])]): Unit = {
it("when it succeeds with a bad response, consult the retry strategy") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly = false, timer) {
Time.withCurrentTimeFrozen { tc =>
when(service(123)) thenReturn Future(badResponse)
val f = retryingService(123)
verify(service)(123)
assert(!f.isDefined)
assert(timer.tasks.size == 1)
when(service(123)) thenReturn Future(goodResponse) // we succeed next time; tick!
tc.advance(1.second); timer.tick()
verify(service, times(2))(123)
assert(retriesStat == Seq(1))
assert(Await.result(f, 5.seconds) == goodResponse)
}
}
}
it("return result when the retry strategy is exhausted") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly = false, timer) {
Time.withCurrentTimeFrozen { tc =>
when(service(123)) thenReturn Future(badResponse)
val f = retryingService(123)
1 to 3 foreach { i =>
assert(!f.isDefined)
verify(service, times(i))(123)
assert(retriesStat == Seq.empty)
tc.advance(i.seconds); timer.tick()
}
assert(retriesStat == Seq(3))
assert(f.isDefined)
assert(Await.result(f, 5.seconds) == badResponse)
}
}
}
it("when it succeeds, return the result immediately") {
val timer = new MockTimer()
new PolicyFixture(policy, retryExceptionsOnly = false, timer) {
when(service(123)) thenReturn Future(goodResponse)
val f = retryingService(123)
verify(service)(123)
assert(timer.tasks.isEmpty)
assert(retriesStat == Seq(0))
}
}
it("when no retry occurs, no stat update") {
new PolicyFixture(policy, retryExceptionsOnly = false, timer) {
when(service(123)) thenReturn Future(321)
assert(Await.result(retryingService(123), 5.seconds) == 321)
assert(retriesStat == Seq(0))
}
}
}
}
}
| luciferous/finagle | finagle-core/src/test/scala/com/twitter/finagle/service/RetryFilterTest.scala | Scala | apache-2.0 | 16,533 |
package edu.berkeley.nlp.entity.coref
import scala.collection.JavaConverters.asScalaBufferConverter
import edu.berkeley.nlp.entity.Chunk
import edu.berkeley.nlp.entity.Driver;
import scala.collection.mutable.HashSet
import scala.collection.mutable.ArrayBuffer
import edu.berkeley.nlp.entity.lang.CorefLanguagePack
import edu.berkeley.nlp.entity.lang.Language
import edu.berkeley.nlp.futile.util.Logger
import edu.berkeley.nlp.entity.lang.EnglishCorefLanguagePack
import edu.berkeley.nlp.entity.lang.ChineseCorefLanguagePack
import edu.berkeley.nlp.entity.lang.ArabicCorefLanguagePack
import edu.berkeley.nlp.futile.util.Counter
import edu.berkeley.nlp.futile.syntax.Trees.PennTreeRenderer
import edu.berkeley.nlp.entity.ConllDoc
case class ProtoMention(val sentIdx: Int, val startIdx: Int, val endIdx: Int, val headIdx: Int);
case class ProtoMentionFancy(val sentIdx: Int, val startIdx: Int, val endIdx: Int, val headIndices: Seq[Int]);
case class ProtoCorefDoc(val doc: ConllDoc, val goldMentions: Seq[Mention], val predProtoMentions: Seq[ProtoMention]);
class CorefDocAssembler(val langPack: CorefLanguagePack,
val useGoldMentions: Boolean) {
def createCorefDoc(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer): CorefDoc = {
val (goldMentions, goldClustering) = extractGoldMentions(rawDoc, propertyComputer);
if (goldMentions.size == 0) {
Logger.logss("WARNING: no gold mentions on document " + rawDoc.printableDocName);
}
val predMentions = if (useGoldMentions) goldMentions else extractPredMentions(rawDoc, propertyComputer, goldMentions);
new CorefDoc(rawDoc, goldMentions, goldClustering, predMentions)
}
def createCorefDocFancy(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer, possibleChunks: Seq[Seq[Chunk[Boolean]]]): CorefDoc = {
val (goldMentions, goldClustering) = extractGoldMentions(rawDoc, propertyComputer);
if (goldMentions.size == 0) {
Logger.logss("WARNING: no gold mentions on document " + rawDoc.printableDocName);
}
val predMentions = if (useGoldMentions) goldMentions else extractPredMentionsFancy(rawDoc, propertyComputer, goldMentions, possibleChunks);
// MentionLatticeHandler.findConflicts(predMentions);
new CorefDoc(rawDoc, goldMentions, goldClustering, predMentions)
}
def extractGoldMentions(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer): (Seq[Mention], OrderedClustering) = {
CorefDocAssembler.extractGoldMentions(rawDoc, propertyComputer, langPack);
}
def extractPredMentions(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer, gms: Seq[Mention]): Seq[Mention] = {
val protoMentionsSorted = getProtoMentionsSorted(rawDoc, gms);
val finalMentions = new ArrayBuffer[Mention]();
for (sentProtoMents <- protoMentionsSorted; protoMent <- sentProtoMents) {
finalMentions += Mention.createMentionComputeProperties(rawDoc, finalMentions.size, protoMent.sentIdx, protoMent.startIdx, protoMent.endIdx, protoMent.headIdx, Seq(protoMent.headIdx), false, propertyComputer, langPack)
}
finalMentions;
}
def extractPredMentionsFancy(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer, gms: Seq[Mention], possibleChunks: Seq[Seq[Chunk[Boolean]]]): Seq[Mention] = {
val protoMentionsSorted = getProtoMentionsSortedFancy(rawDoc, gms, possibleChunks);
val finalMentions = new ArrayBuffer[Mention]();
for (sentProtoMents <- protoMentionsSorted; protoMent <- sentProtoMents) {
finalMentions += Mention.createMentionComputeProperties(rawDoc, finalMentions.size, protoMent.sentIdx, protoMent.startIdx, protoMent.endIdx, protoMent.headIdx, Seq(protoMent.headIdx), false, propertyComputer, langPack)
}
finalMentions;
}
private def getProtoMentionsSorted(rawDoc: ConllDoc, gms: Seq[Mention]): Seq[Seq[ProtoMention]] = {
val mentionExtents = (0 until rawDoc.numSents).map(i => new HashSet[ProtoMention]);
for (sentIdx <- 0 until rawDoc.numSents) {
      // Extract NE spans: filter out O, QUANTITY, CARDINAL, PERCENT
// Throw out NE types which aren't mentions
val filterNEsByType: Chunk[String] => Boolean = chunk => !(chunk.label == "O" || chunk.label == "QUANTITY" || chunk.label == "CARDINAL" || chunk.label == "PERCENT");
// Extract NPs and PRPs *except* for those contained in NE chunks (the NE tagger seems more reliable than the parser)
val posAndConstituentsOfInterest = langPack.getMentionConstituentTypes ++ langPack.getPronominalTags;
for (label <- posAndConstituentsOfInterest) {
mentionExtents(sentIdx) ++= rawDoc.trees(sentIdx).getSpansAndHeadsOfType(label).map(span => new ProtoMention(sentIdx, span._1, span._2, span._3));
}
// Add NEs if we want
val neMentType = Driver.neMentType
if (neMentType == "all") {
val neProtoMents = rawDoc.nerChunks(sentIdx).filter(filterNEsByType).
map(chunk => new ProtoMention(sentIdx, chunk.start, chunk.end, rawDoc.trees(sentIdx).getSpanHead(chunk.start, chunk.end)));
mentionExtents(sentIdx) ++= neProtoMents
} else if (neMentType == "nnp") {
val spans = getMaximalNNPSpans(rawDoc.pos(sentIdx));
val neProtoMents = spans.map(span => new ProtoMention(sentIdx, span._1, span._2, rawDoc.trees(sentIdx).getSpanHead(span._1, span._2)));
mentionExtents(sentIdx) ++= neProtoMents
} else {
// Do nothing
}
}
// Now take maximal mentions with the same heads
if (Driver.filterNonMaximalNPs) {
filterNonMaximalNPs(rawDoc, mentionExtents).map(CorefDocAssembler.sortProtoMentionsLinear(_));
} else {
mentionExtents.map(protoMents => CorefDocAssembler.sortProtoMentionsLinear(new ArrayBuffer[ProtoMention] ++ protoMents));
}
}
private def getProtoMentionsSortedFancy(rawDoc: ConllDoc, gms: Seq[Mention], possibleChunks: Seq[Seq[Chunk[Boolean]]]): Seq[Seq[ProtoMention]] = {
val mentionExtents = (0 until rawDoc.numSents).map(i => new HashSet[ProtoMention]);
for (sentIdx <- 0 until rawDoc.numSents) {
// Extract NPs and PRPs *except* for those contained in NE chunks (the NE tagger seems more reliable than the parser)
val posAndConstituentsOfInterest = langPack.getMentionConstituentTypes ++ langPack.getPronominalTags;
for (label <- posAndConstituentsOfInterest) {
mentionExtents(sentIdx) ++= rawDoc.trees(sentIdx).getSpansAndHeadsOfType(label).map(span => new ProtoMention(sentIdx, span._1, span._2, span._3));
}
// Add NEs, appropriately filtered by type
val neProtoMents = possibleChunks(sentIdx).filter(_.label).
map(chunk => new ProtoMention(sentIdx, chunk.start, chunk.end, rawDoc.trees(sentIdx).getSpanHead(chunk.start, chunk.end)));
val existingHeadIndices = mentionExtents(sentIdx).map(_.headIdx);
// Detect conflicts
mentionExtents(sentIdx) ++= neProtoMents.filter(protoMent => !existingHeadIndices.contains(protoMent.headIdx));
}
// Now take maximal mentions with the same heads
if (Driver.filterNonMaximalNPs) {
filterNonMaximalNPs(rawDoc, mentionExtents).map(CorefDocAssembler.sortProtoMentionsLinear(_));
} else {
mentionExtents.map(protoMents => CorefDocAssembler.sortProtoMentionsLinear(new ArrayBuffer[ProtoMention] ++ protoMents));
}
}
private def filterNonMaximalNPs(rawDoc: ConllDoc, mentionExtents: Seq[HashSet[ProtoMention]]) = {
val filteredProtoMentionsSorted = (0 until rawDoc.numSents).map(i => new ArrayBuffer[ProtoMention]);
for (sentIdx <- 0 until mentionExtents.size) {
val protoMentionsByHead = mentionExtents(sentIdx).groupBy(_.headIdx);
// Look from smallest head first
for (head <- protoMentionsByHead.keys.toSeq.sorted) {
// Find the biggest span containing this head
var currentBiggest: ProtoMention = null;
for (ment <- protoMentionsByHead(head)) {
// Overlapping but neither is contained in the other
if (currentBiggest != null && ((ment.startIdx < currentBiggest.startIdx && ment.endIdx < currentBiggest.endIdx) || (ment.startIdx > currentBiggest.startIdx && ment.endIdx > currentBiggest.endIdx))) {
Logger.logss("WARNING: mentions with the same head but neither contains the other");
Logger.logss(" " + rawDoc.words(sentIdx).slice(ment.startIdx, ment.endIdx) + ", head = " + rawDoc.words(sentIdx)(head));
Logger.logss(" " + rawDoc.words(sentIdx).slice(currentBiggest.startIdx, currentBiggest.endIdx) + ", head = " + rawDoc.words(sentIdx)(head));
}
// This one is bigger
if (currentBiggest == null || (ment.startIdx <= currentBiggest.startIdx && ment.endIdx >= currentBiggest.endIdx)) {
currentBiggest = ment;
}
}
filteredProtoMentionsSorted(sentIdx) += currentBiggest;
// ENGLISH ONLY: don't remove appositives
for (ment <- protoMentionsByHead(head)) {
val isNotBiggest = ment.startIdx != currentBiggest.startIdx || ment.endIdx != currentBiggest.endIdx;
val isAppositiveLike = ment.endIdx < rawDoc.pos(sentIdx).size && (rawDoc.pos(sentIdx)(ment.endIdx) == "," || rawDoc.pos(sentIdx)(ment.endIdx) == "CC");
if (isNotBiggest && isAppositiveLike && Driver.includeAppositives) {
filteredProtoMentionsSorted(sentIdx) += ment;
}
}
}
}
filteredProtoMentionsSorted;
}
private def getMaximalNNPSpans(tags: Seq[String]) = {
var start = -1;
var inside = false;
val spans = new ArrayBuffer[(Int,Int)]
for (i <- 0 until tags.size) {
if (tags(i).startsWith("NNP") && (i == 0 || !tags(i-1).startsWith("NNP"))) {
start = i
inside = true;
}
if (inside && !tags(i).startsWith("NNP")) {
spans += start -> i;
start = -1;
inside = false;
}
    }
    // Close a run that reaches the end of the sentence; otherwise a sentence
    // ending in NNP* tags would drop its final span.
    if (inside) {
      spans += start -> tags.size;
    }
    spans;
}
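  // Worked example (illustrative): for tags = Seq("DT", "NNP", "NNPS", "VBD")
  // the maximal NNP* run covers tokens 1-2, so the result is [(1, 3)]
  // (start inclusive, end exclusive).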
//////////////////
// COORDINATION //
//////////////////
def createCorefDocWithCoordination(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer): CorefDoc = {
val (goldMentions, goldClustering) = extractGoldMentionsWithCoordination(rawDoc, propertyComputer);
if (goldMentions.size == 0) {
Logger.logss("WARNING: no gold mentions on document " + rawDoc.printableDocName);
}
val predMentions = if (useGoldMentions) goldMentions else extractPredMentionsWithCoordination(rawDoc, propertyComputer, goldMentions);
new CorefDoc(rawDoc, goldMentions, goldClustering, predMentions)
}
def extractGoldMentionsWithCoordination(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer): (Seq[Mention], OrderedClustering) = {
val goldProtoMentionsSorted = getGoldProtoMentionsSortedWithCoordination(rawDoc);
val finalMentions = new ArrayBuffer[Mention]();
val goldClusterLabels = new ArrayBuffer[Int]();
for (sentProtoMents <- goldProtoMentionsSorted; protoMent <- sentProtoMents) {
finalMentions += Mention.createMentionComputeProperties(rawDoc, finalMentions.size, protoMent.sentIdx, protoMent.startIdx, protoMent.endIdx, protoMent.headIndices.head, protoMent.headIndices, protoMent.headIndices.size > 1, propertyComputer, langPack)
val correspondingChunks = rawDoc.corefChunks(protoMent.sentIdx).filter(chunk => chunk.start == protoMent.startIdx && chunk.end == protoMent.endIdx);
if (correspondingChunks.size != 1) {
Logger.logss("WARNING: multiple gold coref chunks matching span");
Logger.logss("Location: " + rawDoc.printableDocName + ", sentence " + protoMent.sentIdx + ": (" + protoMent.startIdx + ", " + protoMent.endIdx + ") " +
rawDoc.words(protoMent.sentIdx).slice(protoMent.startIdx, protoMent.endIdx));
}
require(correspondingChunks.size >= 1);
goldClusterLabels += correspondingChunks.map(_.label).reduce(Math.min(_, _));
}
(finalMentions, OrderedClustering.createFromClusterIds(goldClusterLabels));
}
def extractPredMentionsWithCoordination(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer, gms: Seq[Mention]): Seq[Mention] = {
val protoMentionsSorted = getProtoMentionsSortedWithCoordination(rawDoc, gms);
val finalMentions = new ArrayBuffer[Mention]();
for (sentProtoMents <- protoMentionsSorted; protoMent <- sentProtoMents) {
finalMentions += Mention.createMentionComputeProperties(rawDoc, finalMentions.size, protoMent.sentIdx, protoMent.startIdx, protoMent.endIdx, protoMent.headIndices.head, protoMent.headIndices, protoMent.headIndices.size > 1, propertyComputer, langPack)
}
finalMentions;
}
private def getGoldProtoMentionsSortedWithCoordination(rawDoc: ConllDoc): Seq[Seq[ProtoMentionFancy]] = {
val goldProtoMentions = for (sentIdx <- 0 until rawDoc.corefChunks.size) yield {
for (chunk <- rawDoc.corefChunks(sentIdx)) yield {
val headIndices = rawDoc.trees(sentIdx).getSpanHeadOrNPCoordinatedHeads(chunk.start, chunk.end);
new ProtoMentionFancy(sentIdx, chunk.start, chunk.end, headIndices.toSeq.sorted);
}
}
goldProtoMentions.map(_.sortBy(ment => (ment.sentIdx, ment.headIndices.head, ment.endIdx, ment.startIdx)));
}
private def getProtoMentionsSortedWithCoordination(rawDoc: ConllDoc, gms: Seq[Mention]): Seq[Seq[ProtoMentionFancy]] = {
val mentionExtents = (0 until rawDoc.numSents).map(i => new HashSet[ProtoMentionFancy]);
for (sentIdx <- 0 until rawDoc.numSents) {
      // Extract NE spans: filter out O, QUANTITY, CARDINAL, PERCENT
// Throw out NE types which aren't mentions
val filterNEsByType: Chunk[String] => Boolean = chunk => !(chunk.label == "O" || chunk.label == "QUANTITY" || chunk.label == "CARDINAL" || chunk.label == "PERCENT");
val neProtoMentions = rawDoc.nerChunks(sentIdx).filter(filterNEsByType).
map(chunk => new ProtoMentionFancy(sentIdx, chunk.start, chunk.end, rawDoc.trees(sentIdx).getSpanHeadOrNPCoordinatedHeads(chunk.start, chunk.end).toSeq.sorted));
mentionExtents(sentIdx) ++= neProtoMentions
// Extract NPs and PRPs *except* for those contained in NE chunks (the NE tagger seems more reliable than the parser)
val posAndConstituentsOfInterest = langPack.getMentionConstituentTypes ++ langPack.getPronominalTags;
for (label <- posAndConstituentsOfInterest) {
mentionExtents(sentIdx) ++= rawDoc.trees(sentIdx).getSpansAndCoordinatedHeadsOfType(label).map(span => new ProtoMentionFancy(sentIdx, span._1, span._2, span._3.toSeq.sorted));
}
}
// Now take maximal mentions with the same heads
val filteredProtoMentionsSorted = (0 until rawDoc.numSents).map(i => new ArrayBuffer[ProtoMentionFancy]);
for (sentIdx <- 0 until mentionExtents.size) {
val protoMentionsByHead = mentionExtents(sentIdx).groupBy(_.headIndices);
// Look from smallest head first
for (headSet <- protoMentionsByHead.keys.toSeq) {
// Find the biggest span containing this head
var currentBiggest: ProtoMentionFancy = null;
for (ment <- protoMentionsByHead(headSet)) {
// Overlapping but neither is contained in the other
if (currentBiggest != null && ((ment.startIdx < currentBiggest.startIdx && ment.endIdx < currentBiggest.endIdx) || (ment.startIdx > currentBiggest.startIdx && ment.endIdx > currentBiggest.endIdx))) {
Logger.logss("WARNING: mentions with the same head but neither contains the other");
Logger.logss(" " + rawDoc.words(sentIdx).slice(ment.startIdx, ment.endIdx) + ", head(s) = " + headSet.map(rawDoc.words(sentIdx)(_)));
Logger.logss(" " + rawDoc.words(sentIdx).slice(currentBiggest.startIdx, currentBiggest.endIdx) + ", head = " + headSet.map(rawDoc.words(sentIdx)(_)));
}
// This one is bigger
if (currentBiggest == null || (ment.startIdx <= currentBiggest.startIdx && ment.endIdx >= currentBiggest.endIdx)) {
currentBiggest = ment;
}
}
filteredProtoMentionsSorted(sentIdx) += currentBiggest;
// ENGLISH ONLY: don't remove appositives
for (ment <- protoMentionsByHead(headSet)) {
val isNotBiggest = ment.startIdx != currentBiggest.startIdx || ment.endIdx != currentBiggest.endIdx;
val isAppositiveLike = ment.endIdx < rawDoc.pos(sentIdx).size && (rawDoc.pos(sentIdx)(ment.endIdx) == "," || rawDoc.pos(sentIdx)(ment.endIdx) == "CC");
if (isNotBiggest && isAppositiveLike && Driver.includeAppositives) {
filteredProtoMentionsSorted(sentIdx) += ment;
}
}
}
}
filteredProtoMentionsSorted.map(_.sortBy(ment => (ment.sentIdx, ment.headIndices.head, ment.endIdx, ment.startIdx)));
}
/////////////////////////////
/////////////////////////////
/////////////////////////////
/////////////////////////////
/////////////////////////////
/////////////////////////////
// def getProtoMentionCandidatesSorted(rawDoc: ConllDoc, gms: Seq[Mention]): Seq[Seq[ProtoMention]] = {
// val mentionExtents = (0 until rawDoc.numSents).map(i => new HashSet[ProtoMention]);
// for (sentIdx <- 0 until rawDoc.numSents) {
// // Throw out NE types which aren't mentions
// val filterNEsByType: Chunk[String] => Boolean = chunk => !(chunk.label == "O" || chunk.label == "QUANTITY" || chunk.label == "CARDINAL" || chunk.label == "PERCENT");
// val neProtoMentions = rawDoc.nerChunks(sentIdx).filter(filterNEsByType).
// map(chunk => new ProtoMention(sentIdx, chunk.start, chunk.end, rawDoc.trees(sentIdx).getSpanHead(chunk.start, chunk.end)));
// mentionExtents(sentIdx) ++= neProtoMentions
// // Extract NPs and PRPs *except* for those contained in NE chunks (the NE tagger seems more reliable than the parser)
// val filterSpanIfInNE: ((Int, Int, Int)) => Boolean = startEndHead => neProtoMentions.filter(ment => ment.startIdx <= startEndHead._1 && startEndHead._2 <= ment.endIdx).size == 0;
// val posAndConstituentsOfInterest = langPack.getMentionConstituentTypes ++ langPack.getPronominalTags;
// for (label <- posAndConstituentsOfInterest) {
// if (Driver.includeSpansContainedInNE) {
// mentionExtents(sentIdx) ++= rawDoc.trees(sentIdx).getSpansAndHeadsOfType(label).map(span => new ProtoMention(sentIdx, span._1, span._2, span._3));
//// mentionExtents(sentIdx) ++= rawDoc.trees(sentIdx).getSpansAndHeadsOfType(label).map(span => new ProtoMention(sentIdx, span._1, span._2, span._3)).map(contractPMToRemovePossessive);
// } else {
// mentionExtents(sentIdx) ++= rawDoc.trees(sentIdx).getSpansAndHeadsOfType(label).filter(filterSpanIfInNE).map(span => new ProtoMention(sentIdx, span._1, span._2, span._3));
// }
// }
// }
// // Now take maximal mentions with the same heads
// val filteredProtoMentionsSorted = (0 until rawDoc.numSents).map(i => new ArrayBuffer[ProtoMention]);
// for (sentIdx <- 0 until mentionExtents.size) {
// val protoMentionsByHead = mentionExtents(sentIdx).groupBy(_.headIdx);
// // Look from smallest head first
// for (head <- protoMentionsByHead.keys.toSeq.sorted) {
// // Find the biggest span containing this head
// var currentBiggest: ProtoMention = null;
// for (ment <- protoMentionsByHead(head)) {
// // Overlapping but neither is contained in the other
// if (currentBiggest != null && ((ment.startIdx < currentBiggest.startIdx && ment.endIdx < currentBiggest.endIdx) || (ment.startIdx > currentBiggest.startIdx && ment.endIdx > currentBiggest.endIdx))) {
// Logger.logss("WARNING: mentions with the same head but neither contains the other");
// Logger.logss(" " + rawDoc.words(sentIdx).slice(ment.startIdx, ment.endIdx) + ", head = " + rawDoc.words(sentIdx)(head));
// Logger.logss(" " + rawDoc.words(sentIdx).slice(currentBiggest.startIdx, currentBiggest.endIdx) + ", head = " + rawDoc.words(sentIdx)(head));
// }
// // This one is bigger
// if (currentBiggest == null || (ment.startIdx <= currentBiggest.startIdx && ment.endIdx >= currentBiggest.endIdx)) {
// currentBiggest = ment;
// }
// }
// filteredProtoMentionsSorted(sentIdx) += currentBiggest;
// // ENGLISH ONLY: don't remove appositives
// for (ment <- protoMentionsByHead(head)) {
// val isNotBiggest = ment.startIdx != currentBiggest.startIdx || ment.endIdx != currentBiggest.endIdx;
// val isAppositiveLike = ment.endIdx < rawDoc.pos(sentIdx).size && (rawDoc.pos(sentIdx)(ment.endIdx) == "," || rawDoc.pos(sentIdx)(ment.endIdx) == "CC");
// if (isNotBiggest && isAppositiveLike && Driver.includeAppositives) {
// filteredProtoMentionsSorted(sentIdx) += ment;
// }
// }
// // ENGLISH ONLY: don't remove conjunctions
//
// }
// }
// filteredProtoMentionsSorted.map(sortProtoMentionsLinear(_));
//
//
//
// // Try including / removing 's
// val mungePossessive: ProtoMention => Seq[ProtoMention] = pm => {
// val pms = new ArrayBuffer[ProtoMention];
// pms += pm;
// val sent = rawDoc.words(pm.sentIdx);
// if (pm.endIdx <= sent.size - 1 && sent(pm.endIdx) == "'s") {
// pms += new ProtoMention(pm.sentIdx, pm.startIdx, pm.endIdx + 1, pm.headIdx)
// }
// if (pm.endIdx - pm.startIdx >= 2 && sent(pm.endIdx - 1) == "'s") {
// pms += new ProtoMention(pm.sentIdx, pm.startIdx, pm.endIdx - 1, (if (pm.headIdx == pm.endIdx - 1) pm.endIdx - 2 else pm.headIdx))
// }
// pms;
// }
//
// // Try removing an initial "the"
// val mungeThe: ProtoMention => Seq[ProtoMention] = pm => {
// val pms = new ArrayBuffer[ProtoMention];
// pms += pm;
// val sent = rawDoc.words(pm.sentIdx);
// if (pm.endIdx > pm.startIdx + 1 && rawDoc.words(pm.sentIdx)(pm.startIdx).toLowerCase == "the") {
// pms += new ProtoMention(pm.sentIdx, pm.startIdx + 1, pm.endIdx, pm.headIdx)
// }
// pms;
// }
// val mungeFollowingContent: ProtoMention => Seq[ProtoMention] = pm => {
// val pms = new ArrayBuffer[ProtoMention];
// pms += pm;
// if (pm.headIdx < pm.endIdx - 1) {
// pms += new ProtoMention(pm.sentIdx, pm.startIdx, pm.headIdx + 1, pm.headIdx)
// }
// pms;
// }
//
// filteredProtoMentionsSorted
//// filteredProtoMentionsSorted.map(_.flatMap(mungePossessive));
//// filteredProtoMentionsSorted.map(_.flatMap(mungeThe));
//// filteredProtoMentionsSorted.map(_.flatMap(mungeFollowingContent));
//
//// mentionExtents.map(protoMents => new ArrayBuffer[ProtoMention] ++ protoMents)
//// mentionExtents.map(protoMents => new ArrayBuffer[ProtoMention] ++ protoMents).map(_.flatMap(mungeFollowingContent));
//
// // Try shaving off "the X 's"
//// filteredProtoMentionsSorted.map(sentPms => sentPms.flatMap(pm => {
//// val finalPms = new ArrayBuffer[ProtoMention] ++ Seq(pm);
//// if (pm.endIdx - pm.startIdx == 2 &&
//// rawDoc.words(pm.sentIdx)(pm.endIdx - 1) == "'s" &&
//// rawDoc.nerChunks(pm.sentIdx).filter(chunk => chunk.start == pm.startIdx && chunk.end == pm.startIdx + 1).size >= 1) {
//// finalPms += new ProtoMention(pm.sentIdx, pm.startIdx, pm.startIdx + 1, pm.headIdx);
//// }
//// if (pm.endIdx - pm.startIdx == 3 &&
//// (rawDoc.words(pm.sentIdx)(pm.startIdx) == "the" || rawDoc.words(pm.sentIdx)(pm.startIdx) == "a") &&
//// rawDoc.words(pm.sentIdx)(pm.endIdx - 1) == "'s" &&
//// rawDoc.nerChunks(pm.sentIdx).filter(chunk => chunk.start == pm.startIdx && chunk.end == pm.startIdx + 1).size >= 1) {
//// finalPms += new ProtoMention(pm.sentIdx, pm.startIdx + 1, pm.startIdx + 2, pm.headIdx);
//// }
//// finalPms;
//// })).map(sortProtoMentionsLinear(_));
// }
}
object CorefDocAssembler {
def apply(language: Language, useGoldMentions: Boolean) = {
val langPack = language match {
case Language.ENGLISH => new EnglishCorefLanguagePack();
case Language.CHINESE => new ChineseCorefLanguagePack();
case Language.ARABIC => new ArabicCorefLanguagePack();
case _ => throw new RuntimeException("Unrecognized language");
}
new CorefDocAssembler(langPack, useGoldMentions);
}
def extractGoldMentions(rawDoc: ConllDoc, propertyComputer: MentionPropertyComputer, langPack: CorefLanguagePack): (Seq[Mention], OrderedClustering) = {
val goldProtoMentionsSorted = getGoldProtoMentionsSorted(rawDoc);
val finalMentions = new ArrayBuffer[Mention]();
val goldClusterLabels = new ArrayBuffer[Int]();
for (sentProtoMents <- goldProtoMentionsSorted; protoMent <- sentProtoMents) {
finalMentions += Mention.createMentionComputeProperties(rawDoc, finalMentions.size, protoMent.sentIdx, protoMent.startIdx, protoMent.endIdx, protoMent.headIdx, Seq(protoMent.headIdx), false, propertyComputer, langPack)
val correspondingChunks = rawDoc.corefChunks(protoMent.sentIdx).filter(chunk => chunk.start == protoMent.startIdx && chunk.end == protoMent.endIdx);
if (correspondingChunks.size != 1) {
Logger.logss("WARNING: multiple gold coref chunks matching span");
Logger.logss("Location: " + rawDoc.printableDocName + ", sentence " + protoMent.sentIdx + ": (" + protoMent.startIdx + ", " + protoMent.endIdx + ") " +
rawDoc.words(protoMent.sentIdx).slice(protoMent.startIdx, protoMent.endIdx));
}
require(correspondingChunks.size >= 1);
goldClusterLabels += correspondingChunks.map(_.label).reduce(Math.min(_, _));
}
(finalMentions, OrderedClustering.createFromClusterIds(goldClusterLabels));
}
def getGoldProtoMentionsSorted(rawDoc: ConllDoc): Seq[Seq[ProtoMention]] = {
val goldProtoMentions = for (sentIdx <- 0 until rawDoc.corefChunks.size) yield {
for (chunk <- rawDoc.corefChunks(sentIdx)) yield {
val headIdx = rawDoc.trees(sentIdx).getSpanHead(chunk.start, chunk.end);
new ProtoMention(sentIdx, chunk.start, chunk.end, headIdx);
}
}
goldProtoMentions.map(sortProtoMentionsLinear(_));
}
def sortProtoMentionsLinear(protoMentions: Seq[ProtoMention]): Seq[ProtoMention] = {
protoMentions.sortBy(ment => (ment.sentIdx, ment.headIdx, ment.endIdx, ment.startIdx));
}
def checkGoldMentionRecallQuick(protoDocs: Seq[ProtoCorefDoc]) {
val numGMs = protoDocs.foldLeft(0)((size, doc) => size + doc.goldMentions.size);
val numPMs = protoDocs.foldLeft(0)((size, doc) => size + doc.predProtoMentions.size);
var numGMsRecalled = 0;
var numGMsUnrecalledNonConstituents = 0;
var numGMsUnrecalledCrossingBrackets = 0;
var numGMsVerbal = 0;
for (doc <- protoDocs; gm <- doc.goldMentions) {
if (doc.predProtoMentions.filter(pm => pm.sentIdx == gm.sentIdx && pm.startIdx == gm.startIdx && pm.endIdx == gm.endIdx).size >= 1) {
numGMsRecalled += 1;
} else {
if (doc.doc.trees(gm.sentIdx).doesCrossBrackets(gm.startIdx, gm.endIdx)) {
numGMsUnrecalledCrossingBrackets += 1;
}
if (!doc.doc.trees(gm.sentIdx).isConstituent(gm.startIdx, gm.endIdx)) {
numGMsUnrecalledNonConstituents += 1;
} else {
if (doc.doc.trees(gm.sentIdx).getConstituentType(gm.startIdx, gm.endIdx).startsWith("V")) {
numGMsVerbal += 1;
}
}
}
}
Logger.logss("Pred proto mentions: " + numPMs);
Logger.logss("Recall: " + numGMsRecalled + "/" + numGMs + " = " + (numGMsRecalled.toDouble / numGMs));
Logger.logss("Num GMs non-constituents: " + numGMsUnrecalledNonConstituents + ", num verbal: " + numGMsVerbal);
Logger.logss("Num GMs crossing brackets (NC includes these): " + numGMsUnrecalledCrossingBrackets);
}
def checkGoldMentionRecall(docs: Seq[CorefDoc]) {
val numGMs = docs.map(_.goldMentions.size).reduce(_ + _);
val numPMs = docs.map(_.predMentions.size).reduce(_ + _);
val numNomPMs = docs.map(doc => doc.predMentions.filter(_.mentionType == MentionType.NOMINAL).size).reduce(_ + _);
val numPropPMs = docs.map(doc => doc.predMentions.filter(_.mentionType == MentionType.PROPER).size).reduce(_ + _);
val numPronPMs = docs.map(doc => doc.predMentions.filter(_.mentionType == MentionType.PRONOMINAL).size).reduce(_ + _);
val numDemonstrativePMs = docs.map(doc => doc.predMentions.filter(_.mentionType == MentionType.DEMONSTRATIVE).size).reduce(_ + _);
var numGMsRecalled = 0;
var numGMsUnrecalledNonConstituents = 0;
// These partition the errors
var numGMsUnrecalledCrossingBrackets = 0;
var numGMsUnrecalledInternal = 0;
var numGMsUnrecalledPPAttach = 0;
var numGMsUnrecalledCoord = 0;
var numGMsUnrecalledOther = 0;
val missedConstituentTypes = new Counter[String];
for (doc <- docs; gm <- doc.goldMentions) {
if (doc.predMentions.filter(pm => pm.sentIdx == gm.sentIdx && pm.startIdx == gm.startIdx && pm.endIdx == gm.endIdx).size >= 1) {
numGMsRecalled += 1;
} else {
if (!doc.rawDoc.trees(gm.sentIdx).isConstituent(gm.startIdx, gm.endIdx)) {
numGMsUnrecalledNonConstituents += 1;
}
if (doc.rawDoc.trees(gm.sentIdx).doesCrossBrackets(gm.startIdx, gm.endIdx)) {
numGMsUnrecalledCrossingBrackets += 1;
} else if (doc.rawDoc.pos(gm.sentIdx).slice(gm.startIdx, gm.endIdx).map(_.startsWith("N")).reduce(_ && _)) {
numGMsUnrecalledInternal += 1;
} else if (gm.endIdx < doc.rawDoc.pos(gm.sentIdx).size && (doc.rawDoc.pos(gm.sentIdx)(gm.endIdx) == "IN" ||
doc.rawDoc.pos(gm.sentIdx)(gm.endIdx) == "TO")) {
numGMsUnrecalledPPAttach += 1;
} else if ((gm.endIdx < doc.rawDoc.words(gm.sentIdx).size && doc.rawDoc.words(gm.sentIdx)(gm.endIdx) == "and") ||
(gm.startIdx > 0 && doc.rawDoc.words(gm.sentIdx)(gm.startIdx - 1) == "and")) {
// Logger.logss("Didn't get coordination-like mention: " + doc.rawDoc.words(gm.sentIdx).slice(gm.startIdx, gm.endIdx) + "\\n" + PennTreeRenderer.render(doc.rawDoc.trees(gm.sentIdx).constTree));
numGMsUnrecalledCoord += 1;
} else {
numGMsUnrecalledOther += 1;
}
val constituentType = doc.rawDoc.trees(gm.sentIdx).getConstituentType(gm.startIdx, gm.endIdx);
missedConstituentTypes.incrementCount(constituentType, 1.0);
if (constituentType.startsWith("N")) {
// Logger.logss("Missed mention: " + PronounAnalyzer.renderMentionWithHeadAndContext(gm));
// Logger.logss(" Mentions we had that sentence: " + doc.predMentions.filter(pm => pm.sentIdx == gm.sentIdx).map(pm => pm.spanToString));
}
}
}
Logger.logss("Detected " + numPMs + " predicted mentions (" + numNomPMs + " nominal, " + numPropPMs + " proper, " + numPronPMs + " pronominal, " + numDemonstrativePMs + " demonstrative), " +
numGMsRecalled + " / " + numGMs + " = " + (numGMsRecalled.toDouble/numGMs) + " gold mentions recalled (" + numGMsUnrecalledNonConstituents + " missed ones are not constituents)")
Logger.logss("Partition of errors: " + numGMsUnrecalledCrossingBrackets + " cross brackets, " + numGMsUnrecalledInternal + " look like internal NPs, " +
numGMsUnrecalledPPAttach + " look like PP attachment problems, " + numGMsUnrecalledCoord + " look like coordination problems, " + numGMsUnrecalledOther + " other");
Logger.logss(" Missed constituent types: " + missedConstituentTypes);
}
}
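// Usage sketch (hypothetical driver code; assumes a parsed `rawDoc: ConllDoc`
// and a `propertyComputer: MentionPropertyComputer` are already in scope):
//
//   val assembler = CorefDocAssembler(Language.ENGLISH, useGoldMentions = true)
//   val (goldMents, clustering) =
//     CorefDocAssembler.extractGoldMentions(rawDoc, propertyComputer, new EnglishCorefLanguagePack())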
| matthewfl/berkeley-entity | src/main/java/edu/berkeley/nlp/entity/coref/CorefDocAssembler.scala | Scala | gpl-3.0 | 34,005 |
package com.meetup.iap.receipt
import java.text.SimpleDateFormat
import com.meetup.iap.AppleApi
import AppleApi.{ReceiptResponse, ReceiptInfo}
import java.util.{Date, TimeZone}
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
import org.json4s.JsonAST.JValue
import org.slf4j.LoggerFactory
object ReceiptRenderer {
val log = LoggerFactory.getLogger(ReceiptRenderer.getClass)
private def appleDateFormat(date: Date): String = {
val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss 'Etc/GMT'")
sdf.setTimeZone(TimeZone.getTimeZone("UTC"))
sdf.format(date)
}
def apply(response: ReceiptResponse): String = {
pretty(render(
("status" -> response.statusCode) ~
("latest_receipt_info" -> response.latestReceiptInfo.reverse.map(renderReceipt)) ~
("latest_receipt" -> response.latestReceipt)))
}
private def renderReceipt(receiptInfo: ReceiptInfo): JValue = {
val origPurchaseDate = receiptInfo.originalPurchaseDate
val origPurchaseDateStr = appleDateFormat(origPurchaseDate)
val origPurchaseDateMs = origPurchaseDate.getTime
val purchaseDate = receiptInfo.purchaseDate
val purchaseDateStr = appleDateFormat(purchaseDate)
val purchaseDateMs = purchaseDate.getTime
val expiresDate = receiptInfo.expiresDate
val expiresDateStr = appleDateFormat(expiresDate)
val expiresDateMs = expiresDate.getTime
val cancellationDate = receiptInfo.cancellationDate.map { date =>
appleDateFormat(date)
}
("quantity" -> "1") ~
("product_id" -> receiptInfo.productId) ~
("transaction_id" -> receiptInfo.transactionId) ~
("original_transaction_id" -> receiptInfo.originalTransactionId) ~
("purchase_date" -> purchaseDateStr) ~
("purchase_date_ms" -> purchaseDateMs.toString) ~
("original_purchase_date" -> origPurchaseDateStr) ~
("original_purchase_date_ms" -> origPurchaseDateMs.toString) ~
("expires_date" -> expiresDateStr) ~
("expires_date_ms" -> expiresDateMs.toString) ~
("is_trial_period" -> receiptInfo.isTrialPeriod.toString) ~ //We mimic Apple's weird json here by converting the boolean type to a string
("is_in_intro_offer_period" -> receiptInfo.isInIntroOfferPeriod.map(_.toString)) ~
("cancellation_date" -> cancellationDate)
}
}
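// Usage sketch: rendering a previously assembled response (how the
// ReceiptResponse itself is built is out of scope here):
//
//   val json: String = ReceiptRenderer(response)
//   // e.g. {"status":0,"latest_receipt_info":[...],"latest_receipt":"..."}
//   // with dates rendered Apple-style, e.g. "2016-03-01 18:32:45 Etc/GMT"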
| meetup/apple-of-my-iap | iap-service/src/main/scala/com/meetup/iap/receipt/ReceiptRenderer.scala | Scala | mit | 2,321 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.continuous
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
import org.apache.commons.lang3.StringUtils
import org.apache.spark.annotation.Evolving
import org.apache.spark.sql.streaming.Trigger
import org.apache.spark.unsafe.types.CalendarInterval
/**
* A [[Trigger]] that continuously processes streaming data, asynchronously checkpointing at
* the specified interval.
*/
@Evolving
case class ContinuousTrigger(intervalMs: Long) extends Trigger {
  require(intervalMs >= 0, "the trigger interval must not be negative")
}
private[sql] object ContinuousTrigger {
def apply(interval: String): ContinuousTrigger = {
if (StringUtils.isBlank(interval)) {
throw new IllegalArgumentException(
"interval cannot be null or blank.")
}
val cal = if (interval.startsWith("interval")) {
CalendarInterval.fromString(interval)
} else {
CalendarInterval.fromString("interval " + interval)
}
if (cal == null) {
throw new IllegalArgumentException(s"Invalid interval: $interval")
}
if (cal.months > 0) {
throw new IllegalArgumentException(s"Doesn't support month or year interval: $interval")
}
new ContinuousTrigger(TimeUnit.MICROSECONDS.toMillis(cal.microseconds))
}
def apply(interval: Duration): ContinuousTrigger = {
ContinuousTrigger(interval.toMillis)
}
def create(interval: String): ContinuousTrigger = {
apply(interval)
}
def create(interval: Long, unit: TimeUnit): ContinuousTrigger = {
ContinuousTrigger(unit.toMillis(interval))
}
}
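// Usage sketch: the three factories below should yield equivalent triggers
// (assuming the scala.concurrent.duration DSL is in scope for the second form):
//
//   import scala.concurrent.duration._
//   ContinuousTrigger("1 second")                 // parsed through CalendarInterval
//   ContinuousTrigger(1.second)                   // Duration overload
//   ContinuousTrigger.create(1, TimeUnit.SECONDS) // Java-friendly factory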
| yanboliang/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousTrigger.scala | Scala | apache-2.0 | 2,429 |
package me.gregd.cineworld.integration.cineworld
import java.time.LocalDate
import com.typesafe.scalalogging.LazyLogging
import me.gregd.cineworld.util.Clock
import me.gregd.cineworld.config.CineworldConfig
import play.api.libs.json.Json
import play.api.libs.ws._
import scalacache.ScalaCache
import scalacache.memoization._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration._
class CineworldIntegrationService(ws: WSClient, implicit val cache: ScalaCache[Array[Byte]], config: CineworldConfig, clock: Clock) extends LazyLogging {
implicit val d = Json.format[RawFilm]
implicit val c = Json.format[RawEvent]
implicit val b = Json.format[ListingsBody]
implicit val a = Json.format[CinemaResp]
private def curlCinemas(): Future[String] = memoize(1.day) {
val tomorrow = clock.today().plusDays(1).toString
val url = s"${config.baseUrl}/uk/data-api-service/v1/quickbook/10108/cinemas/with-event/until/$tomorrow?attr=&lang=en_GB"
for {
resp <- ws.url(url).get()
body = resp.body
_ = if (body.length < 300) logger.warn(s"Response for $url is suspiciously short!")
} yield body
}
private def curl7DayListings(cinema: String, date: LocalDate): Future[String] = memoize(6.hours) {
val url = s"${config.baseUrl}/uk/data-api-service/v1/quickbook/10108/film-events/in-cinema/$cinema/at-date/$date?attr=&lang=en_GB"
ws.url(url)
.get()
.map(_.body)
}
private def parse(string: String) = {
try {
Json.parse(string)
} catch {
case e: Throwable =>
logger.error(s"Failed to parse response from Cineworld. Response was: $string", e)
throw e
}
}
def retrieveCinemas(): Future[Seq[CinemaResp]] = {
curlCinemas().map { r =>
val json = parse(r)
logger.debug(s"Retrieved cinemas response:\n$r")
val cinemas = json \ "body" \ "cinemas"
if (cinemas.isEmpty) logger.error(s"No cinemas found. Response was:\n$r")
cinemas.as[Seq[CinemaResp]]
}
}
def retrieveListings(cinema: String, date: LocalDate): Future[ListingsBody] = {
curl7DayListings(cinema, date).map { r =>
val films = parse(r) \ "body"
films.as[ListingsBody]
}
}
}
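// Usage sketch (hypothetical wiring; the constructor dependencies are assumed
// to come from the application's DI setup, and "8014" is an illustrative
// cinema id, not a verified one):
//
//   val service = new CineworldIntegrationService(ws, cache, config, clock)
//   service.retrieveCinemas().foreach(cs => println(s"${cs.size} cinemas"))
//   service.retrieveListings("8014", clock.today()).foreach(println)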
| Grogs/cinema-service | domain/src/main/scala/me/gregd/cineworld/integration/cineworld/CineworldIntegrationService.scala | Scala | gpl-3.0 | 2,269 |
package mvgk.db
import java.util.Properties
import scala.slick.jdbc.StaticQuery
import scala.util.Try
import mvgk.config.Config
import mvgk.db.MyPostgresDriver.simple._
import Database.dynamicSession
import mvgk.db.model.Tables._
import mvgk.util._
/**
* @author Got Hug
*/
object DB {
val driver = "org.postgresql.Driver"
val name = Config.db.name
val user = Config.db.user
val password = Config.db.password
val url = "jdbc:postgresql"
val host = getEnvVar("PGSQL_PORT_5432_TCP_ADDR", "localhost")
val port = getEnvVar("PGSQL_PORT_5432_TCP_PORT", "5432")
val tables = List(film, resource, search)
val db = Database.forURL(s"$url://$host:$port/$name", user, password, new Properties(), driver)
val purePostgres = Database.forURL(s"$url:?port=$port&user=$user&password=$password", driver = driver)
def create(): Unit = {
purePostgres.withDynSession {
StaticQuery.updateNA(s"create database $name").execute
}
// TODO: not needed currently
// createEnums()
}
def safeDrop(): Unit = {
purePostgres.withDynSession {
StaticQuery.updateNA(s"drop database if exists $name").execute
}
}
  // TODO: make it safe?
def createTables(): Unit = {
db.withDynSession {
tables.reverse.map { table => Try(table.ddl.create)}
}
}
// TODO: not needed currently
// def createEnums(): Unit = {
// mov.db.withDynSession {
// buildCreateSql("Status", Statuses).execute
// buildCreateSql("Regime", Regimes).execute
// buildCreateSql("Product", Products).execute
// buildCreateSql("Platform", Platforms).execute
// }
// }
def dropTables(): Unit = {
db.withDynSession {
tables.map { table => Try(table.ddl.drop)}
// dropEnums()
}
}
// TODO: not needed currently
// def dropEnums(): Unit = {
// buildDropSql("platform").execute
// buildDropSql("product").execute
// buildDropSql("regime").execute
// buildDropSql("status").execute
// }
}
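// Typical (test) lifecycle sketch; order matters, since the database must
// exist before its tables can be created:
//
//   DB.safeDrop()
//   DB.create()
//   DB.createTables()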
| gothug/movie-geek | src/main/scala/mvgk/db/DB.scala | Scala | apache-2.0 | 1,961 |
package controllers
import play.api.http.HeaderNames
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
object Application extends Controller {
def index = Action {
Ok
}
def options(path: String) = CorsAction {
Action { request =>
Ok.withHeaders(ACCESS_CONTROL_ALLOW_HEADERS -> Seq(AUTHORIZATION, CONTENT_TYPE, "Target-URL").mkString(","))
}
}
}
// Adds the CORS header
case class CorsAction[A](action: Action[A]) extends Action[A] {
def apply(request: Request[A]): Future[Result] = {
action(request).map(result => result.withHeaders(HeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN -> "*",
HeaderNames.ALLOW -> "*",
HeaderNames.ACCESS_CONTROL_ALLOW_METHODS -> "POST, GET, PUT, DELETE, OPTIONS",
HeaderNames.ACCESS_CONTROL_ALLOW_HEADERS -> "Origin, X-Requested-With, Content-Type, Accept, Referer, User-Agent"
))
}
lazy val parser = action.parser
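  // Usage sketch (hypothetical route handler wrapped in the CORS decorator):
  //
  //   def proxy(path: String) = CorsAction {
  //     Action { request => Ok("proxied") }
  //   }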
}
| douglasbolis/hackdetran | CrawlerDetran/app/controllers/Application.scala | Scala | gpl-3.0 | 966 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import org.apache.flink.api.scala.completeness.ScalaAPICompletenessTestBase
import org.apache.flink.streaming.api.datastream.{DataStream => JavaStream}
import org.junit.Test
import java.lang.reflect.Method
import scala.language.existentials
/**
* This checks whether the streaming Scala API is up to feature parity with the Java API.
*/
class StreamingScalaAPICompletenessTest extends ScalaAPICompletenessTestBase {
override def isExcludedByName(method: Method): Boolean = {
val name = method.getDeclaringClass.getName + "." + method.getName
val excludedNames = Seq(
// These are only used internally. Should be internal API but Java doesn't have
// private[flink].
"org.apache.flink.streaming.api.datastream.DataStream.getType",
"org.apache.flink.streaming.api.datastream.DataStream.copy",
"org.apache.flink.streaming.api.datastream.DataStream.getTransformation",
"org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator.forceNonParallel",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.getExecutionEnvironment",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.getExecutionEnvironment",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.getFirstInput",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.getSecondInput",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.getType1",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.getType2",
"org.apache.flink.streaming.api.datastream.ConnectedStreams.addGeneralWindowCombine",
"org.apache.flink.streaming.api.datastream.WindowedStream.getType",
"org.apache.flink.streaming.api.datastream.WindowedStream.getExecutionConfig",
"org.apache.flink.streaming.api.datastream.WindowedStream.getExecutionEnvironment",
"org.apache.flink.streaming.api.datastream.WindowedStream.getInputType",
"org.apache.flink.streaming.api.datastream.AllWindowedStream.getExecutionEnvironment",
"org.apache.flink.streaming.api.datastream.AllWindowedStream.getInputType",
"org.apache.flink.streaming.api.datastream.KeyedStream.getKeySelector",
"org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.isChainingEnabled",
"org.apache.flink.streaming.api.environment.StreamExecutionEnvironment." +
"getStateHandleProvider",
"org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getCheckpointInterval",
"org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.addOperator",
"org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getCheckpointingMode",
"org.apache.flink.streaming.api.environment.StreamExecutionEnvironment." +
"isForceCheckpointing",
// TypeHints are only needed for Java API, Scala API doesn't need them
"org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator.returns",
// Deactivated until Scala API has new windowing API
"org.apache.flink.streaming.api.datastream.DataStream.timeWindowAll",
"org.apache.flink.streaming.api.datastream.DataStream.windowAll"
)
val excludedPatterns = Seq(
// We don't have project on tuples in the Scala API
"""^org\\.apache\\.flink\\.streaming.api.*project""",
// Cleaning is easier in the Scala API
"""^org\\.apache\\.flink\\.streaming.api.*clean""",
// Object methods
"""^.*notify""",
"""^.*wait""",
"""^.*notifyAll""",
"""^.*equals""",
"""^.*toString""",
"""^.*getClass""",
"""^.*hashCode"""
).map(_.r)
lazy val excludedByPattern =
excludedPatterns.map(_.findFirstIn(name)).exists(_.isDefined)
name.contains("$") || excludedNames.contains(name) || excludedByPattern
}
@Test
override def testCompleteness(): Unit = {
checkMethods("DataStream", "DataStream", classOf[JavaStream[_]], classOf[DataStream[_]])
checkMethods(
"StreamExecutionEnvironment", "StreamExecutionEnvironment",
classOf[org.apache.flink.streaming.api.environment.StreamExecutionEnvironment],
classOf[StreamExecutionEnvironment])
checkMethods(
"SingleOutputStreamOperator", "DataStream",
classOf[org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator[_]],
classOf[DataStream[_]])
checkMethods(
"ConnectedStreams", "ConnectedStreams",
classOf[org.apache.flink.streaming.api.datastream.ConnectedStreams[_,_]],
classOf[ConnectedStreams[_,_]])
checkMethods(
"WindowedStream", "WindowedStream",
classOf[org.apache.flink.streaming.api.datastream.WindowedStream[_, _, _]],
classOf[WindowedStream[_, _, _]])
checkMethods(
"AllWindowedStream", "AllWindowedStream",
classOf[org.apache.flink.streaming.api.datastream.AllWindowedStream[_, _]],
classOf[AllWindowedStream[_, _]])
checkMethods(
"KeyedStream", "KeyedStream",
classOf[org.apache.flink.streaming.api.datastream.KeyedStream[_, _]],
classOf[KeyedStream[_, _]])
checkMethods(
"JoinedStreams.WithWindow", "JoinedStreams.WithWindow",
classOf[org.apache.flink.streaming.api.datastream.JoinedStreams.WithWindow[_,_,_,_]],
classOf[JoinedStreams[_,_]#Where[_]#EqualTo#WithWindow[_]])
checkMethods(
"CoGroupedStreams.WithWindow", "CoGroupedStreams.WithWindow",
classOf[org.apache.flink.streaming.api.datastream.CoGroupedStreams.WithWindow[_,_,_,_]],
classOf[CoGroupedStreams[_, _]#Where[_]#EqualTo#WithWindow[_]])
}
}
| darionyaphet/flink | flink-streaming-scala/src/test/scala/org/apache/flink/streaming/api/scala/StreamingScalaAPICompletenessTest.scala | Scala | apache-2.0 | 6,434 |
/* NEST (New Scala Test)
* Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools
package cmd
import scala.collection.mutable.ListBuffer
trait CommandLineConfig {
def enforceArity: Boolean = true
def onlyKnownOptions: Boolean = true
}
/** An instance of a command line, parsed according to a Spec.
*/
class CommandLine(val spec: Reference, val originalArgs: List[String]) extends CommandLineConfig {
def this(spec: Reference, line: String) = this(spec, CommandLineParser tokenize line)
def this(spec: Reference, args: Array[String]) = this(spec, args.toList)
import spec.{ isUnaryOption, isBinaryOption, isExpandOption }
val Terminator = "--"
val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption)
def errorFn(msg: String) = println(msg)
/** argMap is option -> argument (or "" if it is a unary argument)
* residualArgs are what is left after removing the options and their args.
*/
lazy val (argMap, residualArgs): (Map[String, String], List[String]) = {
val residualBuffer = new ListBuffer[String]
def loop(args: List[String]): Map[String, String] = {
def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
/* Returns Some(List(args)) if this option expands to an
* argument list and it's not returning only the same arg.
*/
def expand(s1: String) = {
if (isExpandOption(s1)) {
val s2 = spec expandArg s1
if (s2 == List(s1)) None
else Some(s2)
}
else None
}
/* Assumes known options have all been ruled out already. */
def isUnknown(opt: String) =
onlyKnownOptions && (opt startsWith "-") && {
errorFn(s"Option '$opt' not recognized.")
true
}
args match {
case Nil => Map()
case Terminator :: xs => residual(xs)
case x :: Nil =>
expand(x) foreach (exp => return loop(exp))
if (isBinaryOption(x) && enforceArity)
errorFn(s"Option '$x' requires argument, found EOF instead.")
if (isUnaryOption(x)) mapForUnary(x)
else if (isUnknown(x)) Map()
else residual(args)
case x1 :: x2 :: xs =>
expand(x1) foreach (exp => return loop(exp ++ args.tail))
if (x2 == Terminator) mapForUnary(x1) ++ residual(xs)
else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail)
else if (isBinaryOption(x1)) Map(fromOpt(x1) -> x2) ++ loop(xs)
else if (isUnknown(x1)) loop(args.tail)
else residual(List(x1)) ++ loop(args.tail)
}
}
(loop(originalArgs), residualBuffer map stripQuotes toList)
}
def apply(arg: String) = argMap(arg)
def get(arg: String) = argMap get arg
def isSet(arg: String) = argMap contains arg
def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
override def toString() = argMap.toString + " " + residualArgs.toString
}
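// Usage sketch (hypothetical spec and arguments; option keys in argMap are
// normalized through fromOpt, so the exact lookup key depends on the Spec):
//
//   val line = new CommandLine(spec, "--verbose --out target src1 -- src2")
//   // "--verbose" maps to "true" (unary), "--out" maps to "target" (binary),
//   // and everything after the "--" terminator lands in line.residualArgs.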
| felixmulder/scala | src/compiler/scala/tools/cmd/CommandLine.scala | Scala | bsd-3-clause | 3,154 |
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {
val appName = "WeFarm"
val appVersion = "1.0-SNAPSHOT"
val appDependencies = Seq(
// Add your project dependencies here,
javaCore,
javaJdbc,
javaEbean,
"mysql" % "mysql-connector-java" % "5.1.18",
"org.mindrot" % "jbcrypt" % "0.3m"
)
val main = play.Project(appName, appVersion, appDependencies).settings(
// Add your own project settings here
)
}
| wepay/WeFarm-Java | project/Build.scala | Scala | mit | 501 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.validation
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
import org.apache.flink.table.api.{TableException, TableSchema}
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
class TableSchemaValidationTest extends TableTestBase {
@Test
def testColumnNameAndColumnTypeNotEqual() {
thrown.expect(classOf[TableException])
thrown.expectMessage(
"Number of field names and field types must be equal.\\n" +
"Number of names is 3, number of types is 2.\\n" +
"List of field names: [a, b, c].\\n" +
"List of field types: [Integer, String].")
val fieldNames = Array("a", "b", "c")
val typeInfos: Array[TypeInformation[_]] = Array(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO)
new TableSchema(fieldNames, typeInfos)
}
@Test
def testColumnNamesDuplicate() {
thrown.expect(classOf[TableException])
thrown.expectMessage(
"Field names must be unique.\\n" +
"List of duplicate fields: [a].\\n" +
"List of all fields: [a, a, c].")
val fieldNames = Array("a", "a", "c")
val typeInfos: Array[TypeInformation[_]] = Array(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO)
new TableSchema(fieldNames, typeInfos)
}
}
| zimmermatt/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/validation/TableSchemaValidationTest.scala | Scala | apache-2.0 | 2,178 |
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.ext
import org.eknet.publet.engine.scala.ScalaScript
import org.eknet.publet.web.shiro.Security
import org.eknet.publet.auth.store.{User, UserProperty}
import org.eknet.publet.vfs.Content
import org.eknet.publet.web.util.{PubletWeb, PubletWebContext}
import org.eknet.publet.auth.Algorithm
/**
* @author Eike Kettner [email protected]
* @since 19.07.12 16:21
*/
class MyDataScript extends ScalaScript {
import org.eknet.publet.web.util.RenderUtils.makeJson
def serve(): Option[Content] = {
if (!Security.isAuthenticated) {
error("Not authenticated.")
} else {
PubletWebContext.param("what") match {
case Some("setPassword") => {
PubletWebContext.param("newPassword1").map (newPass1 => {
PubletWebContext.param("newPassword2").flatMap (newPass2 => {
val algorithm = PubletWebContext.param("algorithm").getOrElse("SHA-512")
if (newPass1 == newPass2) changePassword(newPass1, algorithm)
else error("Passwords do not match!")
})
}).getOrElse(error("Cannot change password!"))
}
case Some("setUserData") => {
PubletWebContext.param("fullName").map(fullName => {
PubletWebContext.param("email").flatMap(email => {
changeUserData(fullName, email)
})
}).getOrElse(error("Cannot update user data!"))
}
case Some("getUserData") => getUserData
case _ => error("No action specified.")
}
}
}
def success(msg: String) = makeJson(Map("success"->true, "message"->msg))
def error(msg: String) = makeJson(Map("success"->false, "message"->msg))
private def changePassword(newpassPlain: String, algorithm: String) = {
PubletWeb.authManager.setPassword(Security.username, newpassPlain, Some(Algorithm.withName(algorithm)))
success("Password updated.")
}
private def changeUserData(fullName: String, email: String) = {
val user = PubletWeb.authManager.findUser(Security.username).get
val props = user.properties +
(UserProperty.fullName -> fullName) +
(UserProperty.email -> email)
val newUser = new User(user.login, props)
PubletWeb.authManager.updateUser(newUser)
success("User data updated.")
}
private def getUserData = {
PubletWeb.authManager.findUser(Security.username).flatMap(user => {
makeJson(Map(
"success" -> true,
"fullName" -> (user.get(UserProperty.fullName).getOrElse("")),
"email" -> (user.get(UserProperty.email).getOrElse(""))
))
}) orElse {
error("User '"+Security.username+"' not found.")
}
}
}
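// Request/response sketch (illustrative query strings, not a routing spec):
//
//   ?what=getUserData -> {"success":true,"fullName":"...","email":"..."}
//   ?what=setPassword&newPassword1=x&newPassword2=x&algorithm=SHA-512
//     -> {"success":true,"message":"Password updated."}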
| eikek/publet | ext/src/main/scala/org/eknet/publet/ext/MyDataScript.scala | Scala | apache-2.0 | 3,272 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution
import monix.execution.internal.Platform
import java.io.{ByteArrayOutputStream, PrintStream}
import scala.util.control.NonFatal
/**
* INTERNAL API — test utilities.
*/
trait TestUtils {
lazy val isCI = {
Platform.getEnv("CI").map(_.toLowerCase).contains("true")
}
/**
* Silences `System.err`, only printing the output in case exceptions are
* thrown by the executed `thunk`.
*/
def silenceSystemErr[A](thunk: => A): A = synchronized {
// Silencing System.err
val oldErr = System.err
val outStream = new ByteArrayOutputStream()
val fakeErr = new PrintStream(outStream)
System.setErr(fakeErr)
try {
val result = thunk
System.setErr(oldErr)
result
} catch {
case NonFatal(e) =>
System.setErr(oldErr)
// In case of errors, print whatever was caught
fakeErr.close()
val out = outStream.toString("utf-8")
if (out.nonEmpty) oldErr.println(out)
throw e
}
}
/**
* Catches `System.err` output, for testing purposes.
*/
def catchSystemErr(thunk: => Unit): String = synchronized {
val oldErr = System.err
val outStream = new ByteArrayOutputStream()
val fakeErr = new PrintStream(outStream)
System.setErr(fakeErr)
try {
thunk
} finally {
System.setErr(oldErr)
fakeErr.close()
}
outStream.toString("utf-8")
}
}
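// Usage sketch:
//
//   val err = catchSystemErr {
//     System.err.println("boom")
//   }
//   // err == "boom" plus the platform line separator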
| alexandru/monifu | monix-execution/shared/src/test/scala/monix/execution/TestUtils.scala | Scala | apache-2.0 | 2,100 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.photo
import squants._
/**
* @author garyKeorkunian
* @since 0.1
*
* @param value Double
*/
final class Luminance private (val value: Double, val unit: LuminanceUnit)
extends Quantity[Luminance] {
def dimension = Luminance
def *(that: Area): LuminousIntensity = Candelas(value * that.toSquareMeters)
def toCandelasPerSquareMeters = to(CandelasPerSquareMeter)
}
object Luminance extends Dimension[Luminance] {
private[photo] def apply[A](n: A, unit: LuminanceUnit)(implicit num: Numeric[A]) = new Luminance(num.toDouble(n), unit)
def apply = parse _
def name = "Luminance"
def primaryUnit = CandelasPerSquareMeter
def siUnit = CandelasPerSquareMeter
def units = Set(CandelasPerSquareMeter)
}
trait LuminanceUnit extends UnitOfMeasure[Luminance] {
def apply[A](n: A)(implicit num: Numeric[A]) = Luminance(num.toDouble(n), this)
}
object CandelasPerSquareMeter extends LuminanceUnit with PrimaryUnit with SiUnit {
val symbol = "cd/m²"
}
object LuminanceConversions {
lazy val candelaPerSquareMeter = CandelasPerSquareMeter(1)
implicit class LuminanceConversions[A](n: A)(implicit num: Numeric[A]) {
def candelasPerSquareMeter = CandelasPerSquareMeter(n)
}
implicit object LuminanceNumeric extends AbstractQuantityNumeric[Luminance](Luminance.primaryUnit)
}
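// Usage sketch (assumes squants.space.SquareMeters for the area operand):
//
//   import squants.photo.LuminanceConversions._
//   import squants.space.SquareMeters
//
//   val l = 5.candelasPerSquareMeter
//   val i = l * SquareMeters(2) // Candelas(10), via the * operator above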
| rmihael/squants | shared/src/main/scala/squants/photo/Luminance.scala | Scala | apache-2.0 | 1,855 |
package com.twitter.inject.server
import com.twitter.concurrent.exp.AsyncStream
import com.twitter.io.{Buf, Reader}
object AsyncStreamUtils {
def readerToAsyncStream(reader: Reader): AsyncStream[Buf] = {
for {
optBuf <- AsyncStream.fromFuture(reader.read(Int.MaxValue))
result <- optBuf match {
case None => AsyncStream.empty[Buf]
case Some(buf) =>
buf +:: readerToAsyncStream(reader)
}
} yield result
}
}
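// Usage sketch (assumes a Reader built from an in-memory Buf; the exact
// Reader factory available depends on the util-core version in use):
//
//   val reader: Reader = Reader.fromBuf(Buf.Utf8("hello"))
//   val chunks: AsyncStream[Buf] = AsyncStreamUtils.readerToAsyncStream(reader)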
| nkhuyu/finatra | inject/inject-server/src/main/scala/com/twitter/inject/server/AsyncStreamUtils.scala | Scala | apache-2.0 | 466 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.collector
import com.twitter.common.zookeeper.ZooKeeperClient
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.logging.Logger
import com.twitter.util.Timer
import org.apache.zookeeper.{CreateMode, KeeperException, ZooDefs, Watcher, WatchedEvent}
import scala.collection.JavaConversions._
// TODO this was stolen straight up from ostrich-aggregator. we should switch to use the scribe library
final class ResilientZKNode(
path: String,
nodeName: String,
zkClient: ZooKeeperClient,
timer: Timer,
statsReceiver: StatsReceiver = NullStatsReceiver
) { self =>
require(path.startsWith("/"), "zookeeper path must start with '/'")
private[this] val scopedStatsReceiver = statsReceiver.scope("zookeeper")
private[this] val registerCounter = scopedStatsReceiver.counter("registers")
private[this] val expirationCounter = scopedStatsReceiver.counter("expirations")
private[this] val deletionCounter = scopedStatsReceiver.counter("deletions")
private[this] val disconnectCounter = scopedStatsReceiver.counter("disconnects")
private[this] val failureCounter = scopedStatsReceiver.counter("failures")
private[this] val fullPath = path + "/" + nodeName
private[this] val logger = Logger.get("ZookeeperLog")
private[this] val OPEN_ACL = ZooDefs.Ids.OPEN_ACL_UNSAFE
@volatile private[this] var register = true
private[this] val watcher = new Watcher {
override def process(e: WatchedEvent) {
logger.info("processing event: " + e.toString)
if (e.getType == Watcher.Event.EventType.NodeDeleted) {
deletionCounter.incr()
self.register()
} else {
e.getState match {
case Watcher.Event.KeeperState.Disconnected =>
disconnectCounter.incr()
self.register()
case Watcher.Event.KeeperState.Expired =>
expirationCounter.incr()
self.register()
case _ => ()
}
}
}
}
def register() {
synchronized {
if (!register) {
return
}
registerCounter.incr()
logger.info("registering " + fullPath)
ensureParentExists()
if (createNode(fullPath, false)) {
watchForDeletions()
} else {
failureCounter.incr()
scheduleRegister()
}
}
}
/**
* Delete and stop recreating the node.
*/
@throws(classOf[KeeperException])
def unregister() {
synchronized {
register = false
try {
zkClient.get().delete(fullPath, -1)
} catch {
case e: KeeperException.NoNodeException => () // node doesn't exist, no need to delete it
}
}
}
private[this] def watchForDeletions() {
zkClient.get().exists(fullPath, watcher)
}
private[this] def ensureParentExists() {
val parts = path.slice(1, path.size).split("/")
var currentPath = ""
parts foreach { part =>
currentPath = currentPath + "/" + part
createNode(currentPath, true)
}
}
private[this] def createNode(path: String, isPersistent: Boolean): Boolean = {
try {
if (zkClient.get().exists(path, false) == null) {
logger.info("creating node: " + path)
val mode = if (isPersistent)
CreateMode.PERSISTENT
else
CreateMode.EPHEMERAL
zkClient.get().create(path, null, OPEN_ACL, mode)
}
true
} catch {
case e: KeeperException if e.code == KeeperException.Code.NODEEXISTS =>
logger.info("node already exists")
true
case e: Throwable =>
logger.error("error creating zookeeper node: %s".format(e.toString))
false
}
}
private[this] def scheduleRegister() {
timer.schedule(30.seconds.fromNow) {
register()
}
}
}
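// Usage sketch (assumes an already-configured ZooKeeperClient and Timer;
// the path and node name below are illustrative):
//
//   val node = new ResilientZKNode("/zipkin/collectors", "host1:9410",
//     zkClient, timer, statsReceiver)
//   node.register()   // creates parents, then the ephemeral node; re-registers on expiry
//   // ... on shutdown:
//   node.unregister()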
| cogitate/twitter-zipkin-uuid | zipkin-collector-scribe/src/main/scala/com/twitter/zipkin/collector/ResilientZKNode.scala | Scala | apache-2.0 | 4,418 |
package org.workcraft.gui.modeleditor.sim
import java.util.TimerTask
import javax.swing.SwingUtilities
import org.workcraft.scala.effects.IO._
import org.workcraft.scala.Expressions._
import java.awt.geom.Point2D
import java.awt.Color
import org.workcraft.graphics.Colorisation
import org.workcraft.graphics.Touchable
import org.workcraft.graphics.GraphicalContent
import org.workcraft.gui.modeleditor.tools.ConnectionManager
import org.workcraft.gui.modeleditor.tools.ModelEditorTool
import org.workcraft.gui.modeleditor.tools.Button
import java.awt.event.KeyEvent
import org.workcraft.gui.GUI
import org.workcraft.gui.modeleditor.tools.GenericConnectionToolImpl
import org.workcraft.gui.modeleditor.Viewport
import org.workcraft.gui.modeleditor.tools.ToolEnvironment
import org.workcraft.scala.grapheditor.tools.HitTester
import org.workcraft.dependencymanager.advanced.user.Variable
import scalaz.Scalaz._
import org.workcraft.gui.modeleditor.tools.ModelEditorToolInstance
import org.workcraft.gui.modeleditor.tools.ToolEnvironment
import org.workcraft.scala.effects.IO
import javax.swing.JPanel
import javax.swing.JLabel
import java.util.Timer
import java.awt.event.ActionEvent
import java.awt.event.ActionListener
case class Trace[Event](events: Seq[Event])
case class StateAnnotatedTrace[Event, State](initialState: State, events: Seq[(Event, State)])
case class MarkedTrace[Event, State](trace: StateAnnotatedTrace[Event, State], position: Int) {
def goto(position: Int, applyState: State => IO[Unit]) = {
applyState(if (position > 0) trace.events(position - 1)._2 else trace.initialState) >>=| ioPure.pure { MarkedTrace(trace, position) }
}
def !(e: Event, s: State) = MarkedTrace(StateAnnotatedTrace(trace.initialState, trace.events.take(position) :+ (e, s)), position + 1)
}
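// Note: `marked ! (event, state)` truncates any events recorded after the
// current position and appends the new pair, so firing from a rewound cursor
// rewrites the suffix of the trace, like an undo/redo history.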
object Trace {
def annotateWithState[Event, State](t: Trace[Event], state: IO[State], fire: Event => IO[Unit]) =
state >>= (initialState => t.events.traverse(e => (fire(e) >>=| state).map((e, _))).map(StateAnnotatedTrace(initialState, _)))
}
class GenericSimulationToolInstance[Event, State](
viewport: Viewport,
hasFocus: Expression[Boolean],
eventSources: Expression[Iterable[Event]],
touchable: Event => Expression[Touchable],
sim: SimulationModel[Event, State],
val trace: ModifiableExpression[MarkedTrace[Event, State]],
paint: ((Event => Colorisation), State) => Expression[GraphicalContent],
hintMessage: Option[String],
deadlockMessage: Option[String]) extends ModelEditorToolInstance {
def fire(event: Event): IO[Unit] = sim.fire(event) >>=| sim.state.eval >>= (state => trace.update(_ ! (event, state)))
def gotoState(position: Int) = trace.eval >>= (_.goto(position, sim.setState(_))) >>= (t => trace.set(t))
val hitTester = HitTester.create(eventSources, touchable)
val mouseListener = Some(new GenericSimulationToolMouseListener(node => ioPure.pure { hitTester.hitTest(node) }, sim.enabled.eval, (e: Event) => fire(e)))
def keyBindings = List()
def userSpaceContent = (sim.state <|**|> (sim.enabled, GenericSimulationTool.col)) >>= { case (state, enabled, col) => (paint(ev => if (enabled(ev)) col else Colorisation.Empty, state)) }
def screenSpaceContent = (sim.enabled <|*|> eventSources) >>= {
case (enabled, events) => if (events.forall(enabled(_) == false) && deadlockMessage.isDefined)
GUI.editorMessage(viewport, Color.RED, deadlockMessage.get)
else if (hintMessage.isDefined)
GUI.editorMessage(viewport, Color.BLACK, hintMessage.get)
else
constant(GraphicalContent.Empty)
}
val interfacePanel = Some(new SimControlPanel[Event, State](trace, (e: Event) => sim.name(e), gotoState(_)))
}
case class GenericSimulationTool[Event, State](
eventSources: Expression[Iterable[Event]],
touchable: Event => Expression[Touchable],
sim: IO[SimulationModel[Event, State]],
paint: ((Event => Colorisation), State) => Expression[GraphicalContent],
hintMessage: Option[String] = None,
deadlockMessage: Option[String] = None) extends ModelEditorTool {
def button = GenericSimulationTool.button
def createInstance(env: ToolEnvironment) = for {
sim <- sim;
initialState <- sim.state.eval;
trace <- newVar(MarkedTrace(StateAnnotatedTrace[Event, State](initialState, Seq()), 0))
} yield new GenericSimulationToolInstance(env.viewport, env.hasFocus, eventSources, touchable, sim, trace, paint, hintMessage, deadlockMessage)
def createInstanceWithGivenTrace(env: ToolEnvironment, trace: MarkedTrace[Event, State]) = for {
sim <- sim;
trace <- newVar(trace)
} yield new GenericSimulationToolInstance(env.viewport, env.hasFocus, eventSources, touchable, sim, trace, paint, hintMessage, deadlockMessage)
}
object GenericSimulationTool {
val button = new Button {
override def hotkey = Some(KeyEvent.VK_M)
override def icon = Some(GUI.createIconFromSvgUsingSettingsSize("images/icons/svg/start-green.svg").unsafePerformIO)
override def label = "Simulation tool"
}
val t = Variable.create(0.0)
// new Timer(30, new ActionListener {
// def actionPerformed(e: ActionEvent) = t.set(scala.math.sin(System.currentTimeMillis() / 200.0)).unsafePerformIO
// }).start()
// val tt = new Timer(true)
// tt.scheduleAtFixedRate(new TimerTask { def run =SwingUtilities.invokeLater ( new Runnable { def run = t.set(scala.math.sin(System.currentTimeMillis() / 200.0)).unsafePerformIO })}, 0, 30)
val col = t.map(t => Colorisation(Some(new Color(80 + (40 * t).toInt, 200 + (40 * t).toInt, 80 + (40 * t).toInt)), None))
val highlightedColorisation = Colorisation(Some(new Color(240, 180, 40)), None)
}
| tuura/workcraft-2.2 | ScalaGraphEditorUtil/src/main/scala/org/workcraft/gui/modeleditor/sim/GenericSimulationTool.scala | Scala | gpl-3.0 | 5,767 |
package ru.pavkin.todoist.api.core.decoder
import cats.{Apply, Functor}
import shapeless.{HNil, ::, HList}
import cats.syntax.apply._
import cats.syntax.functor._
trait MultipleResponseDecoder[F[_], Base, Out <: HList] extends ResponseDecoder[F, Base, Out] { self =>
def combine[Out2](other: ResponseDecoder[F, Base, Out2])
(implicit A: Apply[F]): MultipleResponseDecoder[F, Base, Out2 :: Out] =
new MultipleResponseDecoder[F, Base, Out2 :: Out] {
def parse(resource: Base): F[Out2 :: Out] = self.parse(resource).map2(other.parse(resource))((a, b) => b :: a)
}
}
object MultipleResponseDecoder {
def using[F[_], Base, Out0 <: HList](f: Base => F[Out0]): MultipleResponseDecoder[F, Base, Out0] =
new MultipleResponseDecoder[F, Base, Out0] {
def parse(resource: Base): F[Out0] = f(resource)
}
implicit def singleHListParser[F[_] : Functor, Base, T](implicit p: SingleResponseDecoder[F, Base, T])
: MultipleResponseDecoder[F, Base, T :: HNil] =
new MultipleResponseDecoder[F, Base, T :: HNil] {
def parse(resource: Base): F[T :: HNil] = p.parse(resource).map(_ :: HNil)
}
implicit def recurse[F[_] : Apply, Base, H, T <: HList](implicit
h: SingleResponseDecoder[F, Base, H],
t: MultipleResponseDecoder[F, Base, T])
: MultipleResponseDecoder[F, Base, H :: T] =
new MultipleResponseDecoder[F, Base, H :: T] {
def parse(resource: Base): F[H :: T] = t.combine(h).parse(resource)
}
}
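// Derivation sketch: given SingleResponseDecoder instances for A and B, the
// implicits above assemble a decoder for A :: B :: HNil; a hypothetical
// summoner makes the derivation explicit:
//
//   def decoder[F[_]: Apply, Base, A, B](implicit
//     d: MultipleResponseDecoder[F, Base, A :: B :: HNil]) = d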
| vpavkin/scalist | core/src/main/scala/ru/pavkin/todoist/api/core/decoder/MultipleResponseDecoder.scala | Scala | mit | 1,585 |
// Copyright 2010 Twitter, Inc.
package com.twitter.concurrent
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ConcurrentMultiMapTest extends WordSpec {
"behave like a multimap" in {
val map = new ConcurrentMultiMap[Int, Int]
map += 1 -> 2
map += 1 -> 3
map += 1 -> 4
assert(map.get(1) === List(2, 3, 4))
assert(map.get(0) === List())
assert(map.get(2) === List())
map += 0 -> 20
map += 3 -> 30
map += 10 -> 40
assert(map.get(1) === List(2, 3, 4))
assert(map.get(0) === List(20))
assert(map.get(2) === List())
assert(map.get(3) === List(30))
assert(map.get(10) === List(40))
assert(map.get(110) === List())
}
}
| travisbrown/util | util-core/src/test/scala/com/twitter/concurrent/ConcurrentMultiMapTest.scala | Scala | apache-2.0 | 775 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, LookupCatalog, SupportsNamespaces, TableCatalog, TableChange}
/**
* Resolves catalogs from the multi-part identifiers in SQL statements, and convert the statements
* to the corresponding v2 commands if the resolved catalog is not the session catalog.
*/
class ResolveCatalogs(val catalogManager: CatalogManager)
extends Rule[LogicalPlan] with LookupCatalog {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
import org.apache.spark.sql.connector.catalog.CatalogV2Util._
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case AlterTableAddColumnsStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), cols) =>
val changes = cols.map { col =>
TableChange.addColumn(
col.name.toArray,
col.dataType,
col.nullable,
col.comment.orNull,
col.position.orNull)
}
createAlterTable(nameParts, catalog, tbl, changes)
case a @ AlterTableAlterColumnStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _) =>
val colName = a.column.toArray
val typeChange = a.dataType.map { newDataType =>
TableChange.updateColumnType(colName, newDataType)
}
val nullabilityChange = a.nullable.map { nullable =>
TableChange.updateColumnNullability(colName, nullable)
}
val commentChange = a.comment.map { newComment =>
TableChange.updateColumnComment(colName, newComment)
}
val positionChange = a.position.map { newPosition =>
TableChange.updateColumnPosition(colName, newPosition)
}
createAlterTable(
nameParts,
catalog,
tbl,
typeChange.toSeq ++ nullabilityChange ++ commentChange ++ positionChange)
case AlterTableRenameColumnStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), col, newName) =>
val changes = Seq(TableChange.renameColumn(col.toArray, newName))
createAlterTable(nameParts, catalog, tbl, changes)
case AlterTableDropColumnsStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), cols) =>
val changes = cols.map(col => TableChange.deleteColumn(col.toArray))
createAlterTable(nameParts, catalog, tbl, changes)
case AlterTableSetPropertiesStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), props) =>
val changes = props.map { case (key, value) =>
TableChange.setProperty(key, value)
}.toSeq
createAlterTable(nameParts, catalog, tbl, changes)
// TODO: v2 `UNSET TBLPROPERTIES` should respect the ifExists flag.
case AlterTableUnsetPropertiesStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), keys, _) =>
val changes = keys.map(key => TableChange.removeProperty(key))
createAlterTable(nameParts, catalog, tbl, changes)
case AlterTableSetLocationStatement(
nameParts @ NonSessionCatalogAndTable(catalog, tbl), partitionSpec, newLoc) =>
if (partitionSpec.nonEmpty) {
throw new AnalysisException(
"ALTER TABLE SET LOCATION does not support partition for v2 tables.")
}
val changes = Seq(TableChange.setProperty(TableCatalog.PROP_LOCATION, newLoc))
createAlterTable(nameParts, catalog, tbl, changes)
case AlterViewSetPropertiesStatement(
NonSessionCatalogAndTable(catalog, tbl), props) =>
throw new AnalysisException(
s"Can not specify catalog `${catalog.name}` for view ${tbl.quoted} " +
s"because view support in catalog has not been implemented yet")
case AlterViewUnsetPropertiesStatement(
NonSessionCatalogAndTable(catalog, tbl), keys, ifExists) =>
throw new AnalysisException(
s"Can not specify catalog `${catalog.name}` for view ${tbl.quoted} " +
s"because view support in catalog has not been implemented yet")
case RenameTableStatement(NonSessionCatalogAndTable(catalog, oldName), newNameParts, isView) =>
if (isView) {
throw new AnalysisException("Renaming view is not supported in v2 catalogs.")
}
RenameTable(catalog.asTableCatalog, oldName.asIdentifier, newNameParts.asIdentifier)
case DescribeColumnStatement(
NonSessionCatalogAndTable(catalog, tbl), colNameParts, isExtended) =>
throw new AnalysisException("Describing columns is not supported for v2 tables.")
case c @ CreateTableStatement(
NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _, _, _, _, _) =>
CreateV2Table(
catalog.asTableCatalog,
tbl.asIdentifier,
c.tableSchema,
// convert the bucket spec and add it as a transform
c.partitioning ++ c.bucketSpec.map(_.asTransform),
convertTableProperties(c.properties, c.options, c.location, c.comment, c.provider),
ignoreIfExists = c.ifNotExists)
case c @ CreateTableAsSelectStatement(
NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _, _, _, _, _) =>
CreateTableAsSelect(
catalog.asTableCatalog,
tbl.asIdentifier,
// convert the bucket spec and add it as a transform
c.partitioning ++ c.bucketSpec.map(_.asTransform),
c.asSelect,
convertTableProperties(c.properties, c.options, c.location, c.comment, c.provider),
writeOptions = c.options,
ignoreIfExists = c.ifNotExists)
case RefreshTableStatement(NonSessionCatalogAndTable(catalog, tbl)) =>
RefreshTable(catalog.asTableCatalog, tbl.asIdentifier)
case c @ ReplaceTableStatement(
NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _, _, _, _, _) =>
ReplaceTable(
catalog.asTableCatalog,
tbl.asIdentifier,
c.tableSchema,
// convert the bucket spec and add it as a transform
c.partitioning ++ c.bucketSpec.map(_.asTransform),
convertTableProperties(c.properties, c.options, c.location, c.comment, c.provider),
orCreate = c.orCreate)
case c @ ReplaceTableAsSelectStatement(
NonSessionCatalogAndTable(catalog, tbl), _, _, _, _, _, _, _, _, _) =>
ReplaceTableAsSelect(
catalog.asTableCatalog,
tbl.asIdentifier,
// convert the bucket spec and add it as a transform
c.partitioning ++ c.bucketSpec.map(_.asTransform),
c.asSelect,
convertTableProperties(c.properties, c.options, c.location, c.comment, c.provider),
writeOptions = c.options,
orCreate = c.orCreate)
case DropTableStatement(NonSessionCatalogAndTable(catalog, tbl), ifExists, _) =>
DropTable(catalog.asTableCatalog, tbl.asIdentifier, ifExists)
case DropViewStatement(NonSessionCatalogAndTable(catalog, viewName), _) =>
throw new AnalysisException(
s"Can not specify catalog `${catalog.name}` for view ${viewName.quoted} " +
s"because view support in catalog has not been implemented yet")
case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
if !isSessionCatalog(catalog) =>
CreateNamespace(catalog.asNamespaceCatalog, ns, c.ifNotExists, c.properties)
case UseStatement(isNamespaceSet, nameParts) =>
if (isNamespaceSet) {
SetCatalogAndNamespace(catalogManager, None, Some(nameParts))
} else {
val CatalogAndNamespace(catalog, ns) = nameParts
val namespace = if (ns.nonEmpty) Some(ns) else None
SetCatalogAndNamespace(catalogManager, Some(catalog.name()), namespace)
}
case ShowCurrentNamespaceStatement() =>
ShowCurrentNamespace(catalogManager)
}
object NonSessionCatalogAndTable {
def unapply(nameParts: Seq[String]): Option[(CatalogPlugin, Seq[String])] = nameParts match {
case NonSessionCatalogAndIdentifier(catalog, ident) =>
Some(catalog -> ident.asMultipartIdentifier)
case _ => None
}
}
}
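// Resolution sketch: with a non-session catalog `testcat` registered, a
// statement like
//   ALTER TABLE testcat.ns.tbl ADD COLUMNS (c INT)
// matches AlterTableAddColumnsStatement above and is rewritten into a v2
// AlterTable command carrying (roughly) TableChange.addColumn(Array("c"), IntegerType, ...).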
| ptkool/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala | Scala | apache-2.0 | 8,962 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package xml
package parsing
import scala.io.Source
import scala.xml.dtd._
import Utility.Escapes.{ pairs => unescape }
/**
* An XML parser.
*
* Parses XML 1.0, invokes callback methods of a `MarkupHandler` and returns
* whatever the markup handler returns. Use `ConstructingParser` if you just
* want to parse XML to construct instances of `scala.xml.Node`.
*
* While XML elements are returned, DTD declarations - if handled - are
* collected using side-effects.
*
* @author Burak Emir
* @version 1.0
*/
trait MarkupParser extends MarkupParserCommon with TokenTests {
self: MarkupParser with MarkupHandler =>
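  // A minimal usage sketch (assuming the concrete ConstructingParser subclass
  // from this package; not part of this trait's contract):
  //   val src = scala.io.Source.fromString("<a><b/></a>")
  //   val doc = ConstructingParser.fromSource(src, preserveWS = false).document()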
type PositionType = Int
type InputType = Source
type ElementType = NodeSeq
type AttributesType = (MetaData, NamespaceBinding)
type NamespaceType = NamespaceBinding
def truncatedError(msg: String): Nothing = throw FatalError(msg)
def errorNoEnd(tag: String) = throw FatalError("expected closing tag of " + tag)
def xHandleError(that: Char, msg: String) = reportSyntaxError(msg)
val input: Source
/** if true, does not remove surplus whitespace */
val preserveWS: Boolean
def externalSource(systemLiteral: String): Source
//
// variables, values
//
protected var curInput: Source = input
// See ticket #3720 for motivations.
private class WithLookAhead(underlying: Source) extends Source {
private val queue = scala.collection.mutable.Queue[Char]()
def lookahead(): BufferedIterator[Char] = {
val iter = queue.iterator ++ new Iterator[Char] {
def hasNext = underlying.hasNext
def next() = { val x = underlying.next(); queue += x; x }
}
iter.buffered
}
val iter = new Iterator[Char] {
def hasNext = underlying.hasNext || !queue.isEmpty
def next() = if (!queue.isEmpty) queue.dequeue() else underlying.next()
}
}
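  /**
   * Returns a buffered iterator over the upcoming characters without consuming
   * them from the parser's point of view: characters taken from the returned
   * iterator are queued and replayed on subsequent reads. For example (a
   * sketch), `lookahead().take(2).mkString` peeks at the next two characters.
   */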
def lookahead(): BufferedIterator[Char] = curInput match {
case curInputWLA: WithLookAhead =>
curInputWLA.lookahead()
case _ =>
val newInput = new WithLookAhead(curInput)
curInput = newInput
newInput.lookahead()
}
/** the handler of the markup, returns this */
private val handle: MarkupHandler = this
/** stack of inputs */
var inpStack: List[Source] = Nil
/** holds the position in the source file */
var pos: Int = _
/* used when reading external subset */
var extIndex = -1
/** holds temporary values of pos */
var tmppos: Int = _
  /** true if the next character still needs to be read from the input */
  var nextChNeeded: Boolean = false
  var reachedEof: Boolean = false
  /** the most recently read character */
  var lastChRead: Char = _
def ch: Char = {
if (nextChNeeded) {
if (curInput.hasNext) {
lastChRead = curInput.next()
pos = curInput.pos
} else {
val ilen = inpStack.length
//Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
if ((ilen != extIndex) && (ilen > 0)) {
          /* for an external source, inpStack == Nil; we must signal eof */
pop()
} else {
reachedEof = true
          lastChRead = 0.toChar
}
}
nextChNeeded = false
}
lastChRead
}
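  // Reading protocol (a descriptive summary, not new behaviour): `nextch()`
  // only marks the current character as consumed; `ch` performs the actual
  // read lazily. Hence `ch; nextch(); ch` yields two consecutive characters,
  // while repeated calls to `ch` alone keep returning the same character.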
/** character buffer, for names */
protected val cbuf = new StringBuilder()
var dtd: DTD = null
protected var doc: Document = null
def eof: Boolean = { ch; reachedEof }
//
// methods
//
/**
* {{{
* <? prolog ::= xml S ... ?>
* }}}
*/
def xmlProcInstr(): MetaData = {
xToken("xml")
xSpace()
val (md, scp) = xAttributes(TopScope)
if (scp != TopScope)
reportSyntaxError("no xmlns definitions here, please.")
xToken('?')
xToken('>')
md
}
  /**
   * Common code for parsing both the XML prolog and text declarations.
   */
private def prologOrTextDecl(isProlog: Boolean): (Option[String], Option[String], Option[Boolean]) = {
var info_ver: Option[String] = None
var info_enc: Option[String] = None
var info_stdl: Option[Boolean] = None
val m = xmlProcInstr()
var n = 0
if (isProlog)
xSpaceOpt()
m("version") match {
case null =>
case Text("1.0") =>
info_ver = Some("1.0"); n += 1
case _ => reportSyntaxError("cannot deal with versions != 1.0")
}
m("encoding") match {
case null =>
case Text(enc) =>
if (!isValidIANAEncoding(enc))
reportSyntaxError("\\"" + enc + "\\" is not a valid encoding")
else {
info_enc = Some(enc)
n += 1
}
}
if (isProlog) {
m("standalone") match {
case null =>
case Text("yes") =>
info_stdl = Some(true); n += 1
case Text("no") =>
info_stdl = Some(false); n += 1
case _ => reportSyntaxError("either 'yes' or 'no' expected")
}
}
if (m.length - n != 0) {
val s = if (isProlog) "SDDecl? " else ""
reportSyntaxError("VersionInfo EncodingDecl? %sor '?>' expected!" format s)
}
(info_ver, info_enc, info_stdl)
}
/**
* {{{
* <? prolog ::= xml S?
* // this is a bit more lenient than necessary...
* }}}
*/
def prolog(): (Option[String], Option[String], Option[Boolean]) =
prologOrTextDecl(isProlog = true)
/** prolog, but without standalone */
def textDecl(): (Option[String], Option[String]) =
prologOrTextDecl(isProlog = false) match { case (x1, x2, _) => (x1, x2) }
/**
* {{{
* [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
* [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
* [24] VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
* [25] Eq ::= S? '=' S?
* [26] VersionNum ::= '1.0'
* [27] Misc ::= Comment | PI | S
* }}}
*/
def document(): Document = {
doc = new Document()
this.dtd = null
var info_prolog: (Option[String], Option[String], Option[Boolean]) = (None, None, None)
if ('<' != ch) {
reportSyntaxError("< expected")
return null
}
nextch() // is prolog ?
var children: NodeSeq = null
if ('?' == ch) {
nextch()
info_prolog = prolog()
doc.version = info_prolog._1
doc.encoding = info_prolog._2
doc.standAlone = info_prolog._3
children = content(TopScope) // DTD handled as side effect
} else {
val ts = new NodeBuffer()
content1(TopScope, ts) // DTD handled as side effect
ts &+ content(TopScope)
children = NodeSeq.fromSeq(ts)
}
//println("[MarkupParser::document] children now: "+children.toList)
var elemCount = 0
var theNode: Node = null
for (c <- children) c match {
case _: ProcInstr =>
case _: Comment =>
case _: EntityRef => // todo: fix entities, shouldn't be "special"
reportSyntaxError("no entity references allowed here")
      case s: SpecialNode =>
        if (s.toString.trim().length > 0) // non-empty text is not allowed at top level;
          elemCount += 2 // count it as two elements so the check below reports an error
case m: Node =>
elemCount += 1
theNode = m
}
if (1 != elemCount) {
reportSyntaxError("document must contain exactly one element")
//Console.println(children.toList)
}
doc.children = children
doc.docElem = theNode
doc
}
  /** append Unicode character to name buffer */
protected def putChar(c: Char) = cbuf append c
/**
* As the current code requires you to call nextch once manually
* after construction, this method formalizes that suboptimal reality.
*/
def initialize: this.type = {
nextch()
this
}
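  // Example (a sketch; `ConstructingParser` is the concrete subclass in this
  // package): `new ConstructingParser(src, preserveWS = false).initialize`
  // performs the one mandatory nextch() and returns the ready-to-use parser.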
protected def ch_returning_nextch: Char = { val res = ch; nextch(); res }
def mkAttributes(name: String, pscope: NamespaceBinding): AttributesType =
if (isNameStart (ch)) xAttributes(pscope)
else (Null, pscope)
def mkProcInstr(position: Int, name: String, text: String): ElementType =
handle.procInstr(position, name, text)
  /** marks the current character as consumed, so that `ch` fetches the next one when accessed */
def nextch() {
// Read current ch if needed
ch
// Mark next ch to be required
nextChNeeded = true
}
/**
* parse attribute and create namespace scope, metadata
* {{{
* [41] Attributes ::= { S Name Eq AttValue }
* }}}
*/
def xAttributes(pscope: NamespaceBinding): (MetaData, NamespaceBinding) = {
var scope: NamespaceBinding = pscope
var aMap: MetaData = Null
while (isNameStart(ch)) {
val qname = xName
xEQ() // side effect
val value = xAttributeValue()
Utility.prefix(qname) match {
case Some("xmlns") =>
val prefix = qname.substring(6 /*xmlns:*/ , qname.length)
scope = new NamespaceBinding(prefix, value, scope)
case Some(prefix) =>
val key = qname.substring(prefix.length + 1, qname.length)
aMap = new PrefixedAttribute(prefix, key, Text(value), aMap)
case _ =>
if (qname == "xmlns")
scope = new NamespaceBinding(null, value, scope)
else
aMap = new UnprefixedAttribute(qname, Text(value), aMap)
}
if ((ch != '/') && (ch != '>') && ('?' != ch))
xSpace()
}
if (!aMap.wellformed(scope))
reportSyntaxError("double attribute")
(aMap, scope)
}
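  // xAttributes example (illustrative): for the attributes
  //   a="1" xmlns:p="uri" p:b="2"
  // the returned aMap chains an UnprefixedAttribute("a", ...) with a
  // PrefixedAttribute("p", "b", ...), and the returned scope binds "p" to "uri".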
  /**
   * entity value, terminated by either ' or ".
   * {{{
   * EntityValue ::= `'` { _ } `'`
   *               | `"` { _ } `"`
   * }}}
   */
def xEntityValue(): String = {
val endch = ch
nextch()
while (ch != endch && !eof) {
putChar(ch)
nextch()
}
nextch()
val str = cbuf.toString()
cbuf.length = 0
str
}
/**
* {{{
* '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
*
* see [15]
* }}}
*/
def xCharData: NodeSeq = {
xToken("[CDATA[")
def mkResult(pos: Int, s: String): NodeSeq = {
handle.text(pos, s)
PCData(s)
}
xTakeUntil(mkResult, () => pos, "]]>")
}
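  // xCharData example (a sketch): the input `[CDATA[x < y]]>` following `<!`
  // yields a PCData node containing "x < y", after invoking the handler's
  // text callback for it.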
/**
* {{{
* Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
*
* see [15]
* }}}
*/
def xComment: NodeSeq = {
val sb: StringBuilder = new StringBuilder()
xToken("--")
while (!eof) {
if (ch == '-' && { sb.append(ch); nextch(); ch == '-' }) {
sb.length = sb.length - 1
nextch()
xToken('>')
return handle.comment(pos, sb.toString())
} else sb.append(ch)
nextch()
}
throw truncatedError("broken comment")
}
/* todo: move this into the NodeBuilder class */
def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = {
if (preserveWS)
ts &+ handle.text(pos, txt)
else
for (t <- TextBuffer.fromString(txt).toText) {
ts &+ handle.text(pos, t.text)
}
}
/**
* {{{
* '<' content1 ::= ...
* }}}
*/
def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
ch match {
case '!' =>
nextch()
if ('[' == ch) // CDATA
ts &+ xCharData
else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK
parseDTD()
else // comment
ts &+ xComment
case '?' => // PI
nextch()
ts &+ xProcInstr
case _ =>
ts &+ element1(pscope) // child
}
}
/**
* {{{
* content1 ::= '<' content1 | '&' charref ...
* }}}
*/
def content(pscope: NamespaceBinding): NodeSeq = {
val ts = new NodeBuffer
var exit = eof
// todo: optimize seq repr.
def done = NodeSeq.fromSeq(ts.toList)
while (!exit) {
tmppos = pos
exit = eof
if (eof)
return done
ch match {
case '<' => // another tag
nextch(); ch match {
case '/' => exit = true // end tag
case _ => content1(pscope, ts)
}
// postcond: xEmbeddedBlock == false!
case '&' => // EntityRef or CharRef
nextch(); ch match {
case '#' => // CharacterRef
nextch()
val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch()))
xToken(';')
ts &+ theChar
case _ => // EntityRef
val n = xName
xToken(';')
if (unescape contains n) {
handle.entityRef(tmppos, n)
ts &+ unescape(n)
} else push(n)
}
case _ => // text content
appendText(tmppos, ts, xText)
}
}
done
} // content(NamespaceBinding)
/**
* {{{
* externalID ::= SYSTEM S syslit
* PUBLIC S pubid S syslit
* }}}
*/
def externalID(): ExternalID = ch match {
case 'S' =>
nextch()
xToken("YSTEM")
xSpace()
val sysID = systemLiteral()
new SystemID(sysID)
case 'P' =>
nextch(); xToken("UBLIC")
xSpace()
val pubID = pubidLiteral()
xSpace()
val sysID = systemLiteral()
new PublicID(pubID, sysID)
}
/**
* parses document type declaration and assigns it to instance variable
* dtd.
* {{{
* <! parseDTD ::= DOCTYPE name ... >
* }}}
*/
def parseDTD() { // dirty but fast
var extID: ExternalID = null
if (this.dtd ne null)
reportSyntaxError("unexpected character (DOCTYPE already defined")
xToken("DOCTYPE")
xSpace()
val n = xName
xSpace()
//external ID
if ('S' == ch || 'P' == ch) {
extID = externalID()
xSpaceOpt()
}
    // parse the external subset of the DTD
if ((null != extID) && isValidating) {
pushExternal(extID.systemId)
extIndex = inpStack.length
extSubset()
pop()
extIndex = -1
}
if ('[' == ch) { // internal subset
nextch()
      intSubset()
      // TODO: complete the DTD parsing of the internal subset
xToken(']')
xSpaceOpt()
}
xToken('>')
this.dtd = new DTD {
/*override var*/ externalID = extID
/*override val */ decls = handle.decls.reverse
}
//this.dtd.initializeEntities();
if (doc ne null)
doc.dtd = this.dtd
handle.endDTD(n)
}
def element(pscope: NamespaceBinding): NodeSeq = {
xToken('<')
element1(pscope)
}
/**
* {{{
* '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
* | xmlTag1 '/' '>'
* }}}
*/
def element1(pscope: NamespaceBinding): NodeSeq = {
val pos = this.pos
val (qname, (aMap, scope)) = xTag(pscope)
val (pre, local) = Utility.prefix(qname) match {
case Some(p) => (p, qname drop p.length + 1)
case _ => (null, qname)
}
val ts = {
if (ch == '/') { // empty element
xToken("/>")
handle.elemStart(pos, pre, local, aMap, scope)
NodeSeq.Empty
} else { // element with content
xToken('>')
handle.elemStart(pos, pre, local, aMap, scope)
val tmp = content(scope)
xEndTag(qname)
tmp
}
}
val res = handle.elem(pos, pre, local, aMap, scope, ts == NodeSeq.Empty, ts)
handle.elemEnd(pos, pre, local)
res
}
/**
* Parse character data.
*
* precondition: `xEmbeddedBlock == false` (we are not in a scala block)
*/
private def xText: String = {
var exit = false
while (!exit) {
putChar(ch)
nextch()
exit = eof || (ch == '<') || (ch == '&')
}
val str = cbuf.toString
cbuf.length = 0
str
}
  /**
   * system literal, terminated by either ' or ".
   * {{{
   * SystemLiteral ::= `'` { _ } `'`
   *                 | `"` { _ } `"`
   * }}}
   */
def systemLiteral(): String = {
val endch = ch
if (ch != '\\'' && ch != '"')
reportSyntaxError("quote ' or \\" expected")
nextch()
while (ch != endch && !eof) {
putChar(ch)
nextch()
}
nextch()
val str = cbuf.toString()
cbuf.length = 0
str
}
/**
* {{{
* [12] PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'"
* }}}
*/
def pubidLiteral(): String = {
val endch = ch
if (ch != '\\'' && ch != '"')
reportSyntaxError("quote ' or \\" expected")
nextch()
while (ch != endch && !eof) {
putChar(ch)
//println("hello '"+ch+"'"+isPubIDChar(ch))
if (!isPubIDChar(ch))
reportSyntaxError("char '" + ch + "' is not allowed in public id")
nextch()
}
nextch()
val str = cbuf.toString
cbuf.length = 0
str
}
//
// dtd parsing
//
def extSubset(): Unit = {
var textdecl: (Option[String], Option[String]) = null
if (ch == '<') {
nextch()
if (ch == '?') {
nextch()
textdecl = textDecl()
} else
markupDecl1()
}
while (!eof)
markupDecl()
}
def markupDecl1() = {
def doInclude() = {
xToken('['); while (']' != ch && !eof) markupDecl(); nextch() // ']'
}
def doIgnore() = {
xToken('['); while (']' != ch && !eof) nextch(); nextch() // ']'
}
if ('?' == ch) {
nextch()
xProcInstr // simply ignore processing instructions!
} else {
xToken('!')
ch match {
case '-' =>
xComment // ignore comments
case 'E' =>
nextch()
if ('L' == ch) {
nextch()
elementDecl()
} else
entityDecl()
case 'A' =>
nextch()
attrDecl()
case 'N' =>
nextch()
notationDecl()
case '[' if inpStack.length >= extIndex =>
nextch()
xSpaceOpt()
ch match {
case '%' =>
nextch()
val ent = xName
xToken(';')
xSpaceOpt()
push(ent)
xSpaceOpt()
val stmt = xName
xSpaceOpt()
stmt match {
// parameter entity
case "INCLUDE" => doInclude()
case "IGNORE" => doIgnore()
}
case 'I' =>
nextch()
ch match {
case 'G' =>
nextch()
xToken("NORE")
xSpaceOpt()
doIgnore()
case 'N' =>
nextch()
xToken("NCLUDE")
doInclude()
}
}
xToken(']')
xToken('>')
case _ =>
curInput.reportError(pos, "unexpected character '" + ch + "', expected some markupdecl")
while (ch != '>' && !eof)
nextch()
}
}
}
def markupDecl(): Unit = ch match {
case '%' => // parameter entity reference
nextch()
val ent = xName
xToken(';')
if (!isValidating)
handle.peReference(ent) // n-v: just create PE-reference
else
push(ent) // v: parse replacementText
//peReference
case '<' =>
nextch()
markupDecl1()
case _ if isSpace(ch) =>
xSpace()
case _ =>
reportSyntaxError("markupdecl: unexpected character '" + ch + "' #" + ch.toInt)
nextch()
}
  /**
   * Parses the internal DTD subset. Per "rec-xml#ExtSubset", parameter-entity
   * references may not occur within markup declarations.
   */
def intSubset() {
//Console.println("(DEBUG) intSubset()")
xSpace()
while (']' != ch && !eof)
markupDecl()
}
  /**
   * {{{
   * <! element := ELEMENT
   * }}}
   */
def elementDecl() {
xToken("EMENT")
xSpace()
val n = xName
xSpace()
while ('>' != ch && !eof) {
//Console.println("["+ch+"]")
putChar(ch)
nextch()
}
//Console.println("END["+ch+"]")
nextch()
val cmstr = cbuf.toString()
cbuf.length = 0
handle.elemDecl(n, cmstr)
}
/**
* {{{
* <! attlist := ATTLIST
* }}}
*/
def attrDecl() = {
xToken("TTLIST")
xSpace()
val n = xName
xSpace()
var attList: List[AttrDecl] = Nil
// later: find the elemDecl for n
while ('>' != ch && !eof) {
val aname = xName
xSpace()
      // could be an enumeration (foo,bar); parse this later :-/
while ('"' != ch && '\\'' != ch && '#' != ch && '<' != ch) {
if (!isSpace(ch))
cbuf.append(ch)
nextch()
}
val atpe = cbuf.toString
cbuf.length = 0
val defdecl: DefaultDecl = ch match {
case '\\'' | '"' =>
DEFAULT(fixed = false, xAttributeValue())
case '#' =>
nextch()
xName match {
case "FIXED" =>
xSpace(); DEFAULT(fixed = true, xAttributeValue())
case "IMPLIED" => IMPLIED
case "REQUIRED" => REQUIRED
}
case _ =>
null
}
xSpaceOpt()
attList ::= AttrDecl(aname, atpe, defdecl)
cbuf.length = 0
}
nextch()
handle.attListDecl(n, attList.reverse)
}
/**
* {{{
   * <! entity := ENTITY
* }}}
*/
def entityDecl() = {
var isParameterEntity = false
xToken("NTITY")
xSpace()
if ('%' == ch) {
nextch()
isParameterEntity = true
xSpace()
}
val n = xName
xSpace()
ch match {
      case 'S' | 'P' => // external ID: SYSTEM or PUBLIC
val extID = externalID()
if (isParameterEntity) {
xSpaceOpt()
xToken('>')
handle.parameterEntityDecl(n, ExtDef(extID))
} else { // notation?
xSpace()
if ('>' != ch) {
xToken("NDATA")
xSpace()
val notat = xName
xSpaceOpt()
xToken('>')
handle.unparsedEntityDecl(n, extID, notat)
} else {
nextch()
handle.parsedEntityDecl(n, ExtDef(extID))
}
}
case '"' | '\\'' =>
val av = xEntityValue()
xSpaceOpt()
xToken('>')
if (isParameterEntity)
handle.parameterEntityDecl(n, IntDef(av))
else
handle.parsedEntityDecl(n, IntDef(av))
}
} // entityDecl
/**
* {{{
* 'N' notationDecl ::= "OTATION"
* }}}
*/
def notationDecl() {
xToken("OTATION")
xSpace()
val notat = xName
xSpace()
val extID = if (ch == 'S') {
externalID()
} else if (ch == 'P') {
/* PublicID (without system, only used in NOTATION) */
nextch()
xToken("UBLIC")
xSpace()
val pubID = pubidLiteral()
xSpaceOpt()
val sysID = if (ch != '>')
systemLiteral()
else
null
new PublicID(pubID, sysID)
} else {
reportSyntaxError("PUBLIC or SYSTEM expected")
scala.sys.error("died parsing notationdecl")
}
xSpaceOpt()
xToken('>')
handle.notationDecl(notat, extID)
}
def reportSyntaxError(pos: Int, str: String) { curInput.reportError(pos, str) }
def reportSyntaxError(str: String) { reportSyntaxError(pos, str) }
def reportValidationError(pos: Int, str: String) { reportSyntaxError(pos, str) }
def push(entityName: String) {
if (!eof)
inpStack = curInput :: inpStack
    // make sure the pending character has been read before switching inputs
ch
curInput = replacementText(entityName)
nextch()
}
def pushExternal(systemId: String) {
if (!eof)
inpStack = curInput :: inpStack
    // make sure the pending character has been read before switching inputs
ch
curInput = externalSource(systemId)
nextch()
}
def pop() {
curInput = inpStack.head
inpStack = inpStack.tail
lastChRead = curInput.ch
nextChNeeded = false
pos = curInput.pos
reachedEof = false // must be false, because of places where entity refs occur
}
}
| biswanaths/scala-xml | src/main/scala/scala/xml/parsing/MarkupParser.scala | Scala | bsd-3-clause | 24,186 |