| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| string, 5–1M chars | string, 5–109 chars | string, 6–208 chars | string, 1 distinct value | string, 15 distinct values | int64, 5–1M |
package synereo.client.facades
import scala.scalajs.js
import scala.scalajs.js.annotation.JSName
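/**
 * Scala.js facade for a JavaScript SRPClient (Secure Remote Password): exposes the
 * client ephemeral value A, the client proof M, the password verifier and a check of
 * the server proof M2, all hex-encoded.
 */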
@js.native
@JSName("SRPClient")
class SRPClient(username: String, password: String) extends js.Object {
def getAHex(): String = js.native
def getMHex(BHex: String, saltHex: String): String = js.native
def getVerifierHex(saltHex: String): String = js.native
def matches(M2Hex: String): Boolean = js.native
}
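// Handshake sketch (illustrative only; bHex/saltHex/m2Hex come from the server):
//   val srp = new SRPClient("alice", "secret")
//   val aHex = srp.getAHex()              // send A to the server
//   val mHex = srp.getMHex(bHex, saltHex) // prove knowledge of the password
//   val ok   = srp.matches(m2Hex)         // verify the server's proof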
| LivelyGig/ProductWebUI | sclient/src/main/scala/synereo/client/facades/SRPClient.scala | Scala | apache-2.0 | 414 |
/*
* Copyright 2014-16 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.com.intelix.rs.core.stream
import java.util
import au.com.intelix.rs.core.Subject
import au.com.intelix.rs.core.javaapi.JServiceActor
import au.com.intelix.rs.core.services.endpoint.StreamConsumer
import au.com.intelix.rs.core.services.{BaseServiceActor, SimpleStreamId, StreamId}
import au.com.intelix.rs.core.stream.SetStreamState._
import scala.language.implicitConversions
object SetStreamState {
private def calculateDiff(fromSet: Set[Any], toSet: Set[Any]): Seq[SetOp] = {
(fromSet diff toSet).map(Remove).toSeq ++ (toSet diff fromSet).map(Add).toSeq
}
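// e.g. calculateDiff(Set(1, 2), Set(2, 3)) == Seq(Remove(1), Add(3));
// removals always precede additions, ordering within each group follows Set iteration order.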
sealed trait SetOp
case class Add(el: Any) extends SetOp
case class Remove(el: Any) extends SetOp
case class SetSpecs(allowPartialUpdates: Boolean = true)
}
case class SetStreamState(seed: Int, seq: Int, set: Set[Any], specs: SetSpecs) extends StreamState with StreamStateTransition {
override def transitionFrom(olderState: Option[StreamState]): Option[StreamStateTransition] = olderState match {
case Some(SetStreamState(otherSeed, otherSeq, otherSet, _)) if specs.allowPartialUpdates && otherSeed == seed =>
val diffSet = SetStreamState.calculateDiff(otherSet, set)
if (diffSet.size < set.size) // ship a partial diff only when it is smaller than a full snapshot
Some(SetStreamTransitionPartial(seed, otherSeq, seq, diffSet))
else
Some(this)
case _ => Some(this)
}
override def toNewStateFrom(state: Option[StreamState]): Option[StreamState] = Some(this)
override def applicableTo(state: Option[StreamState]): Boolean = true
}
case class SetStreamTransitionPartial(seed: Int, seq: Int, seq2: Int, list: Seq[SetOp]) extends StreamStateTransition {
override def toNewStateFrom(state: Option[StreamState]): Option[StreamState] = state match {
case Some(st@SetStreamState(otherSeed, otherSeq, set, _)) if otherSeed == seed && otherSeq == seq =>
val newSet = list.foldLeft[Set[Any]](set.toSet) {
case (s, Add(e)) => s + e
case (s, Remove(e)) => s - e
}
Some(st.copy(seq = seq + 1, set = newSet))
case _ => None
}
override def applicableTo(state: Option[StreamState]): Boolean = state match {
case Some(SetStreamState(otherSeed, otherSeq, _, _)) => otherSeed == seed && otherSeq == seq
case _ => false
}
}
trait SetStreamConsumer extends StreamConsumer {
type SetStreamConsumer = PartialFunction[(Subject, Set[Any]), Unit]
onStreamUpdate {
case (s, x: SetStreamState) => composedFunction((s, x.set))
}
private var composedFunction: SetStreamConsumer = {
case _ =>
}
final def onSetRecord(f: SetStreamConsumer) =
composedFunction = f orElse composedFunction
}
trait JSetStreamPublisher extends SetStreamPublisher {
self: JServiceActor =>
def streamSetSnapshot(s: String, l: util.Set[Any], allowPartialUpdates: Boolean): Unit = streamSetSnapshot(SimpleStreamId(s), l, allowPartialUpdates)
def streamSetSnapshot(s: StreamId, l: util.Set[Any], allowPartialUpdates: Boolean): Unit = {
implicit val setSpecs = SetSpecs(allowPartialUpdates)
s !% l.toArray.toSet
}
def streamSetAdd(s: StreamId, v: Any): Unit = s !%+ v
def streamSetRemove(s: StreamId, v: Any): Unit = s !%- v
def streamSetAdd(s: String, v: Any): Unit = s !%+ v
def streamSetRemove(s: String, v: Any): Unit = s !%- v
}
trait SetStreamPublisher {
self: BaseServiceActor =>
implicit def toSetPublisher(v: String): SetPublisher = SetPublisher(v)
implicit def toSetPublisher(v: StreamId): SetPublisher = SetPublisher(v)
def ?%(s: StreamId): Option[SetStreamState] = currentStreamState(s) flatMap {
case s: SetStreamState => Some(s)
case _ => None
}
case class SetPublisher(s: StreamId) {
def streamSetSnapshot(l: => Set[Any])(implicit specs: SetSpecs): Unit = !%(l)
def !%[T](l: => Set[T])(implicit specs: SetSpecs): Unit = ?%(s) match {
case Some(x) =>
val set = l // evaluate the by-name argument only once
if (set != x.set) performStateTransition(s, SetStreamState((System.nanoTime() % Int.MaxValue).toInt, 0, set.asInstanceOf[Set[Any]], specs))
case None => performStateTransition(s, SetStreamState((System.nanoTime() % Int.MaxValue).toInt, 0, l.asInstanceOf[Set[Any]], specs))
}
def streamSetAdd(v: => Any): Unit = !%+(v)
def !%+(v: => Any): Unit = ?%(s) match {
case Some(x) => performStateTransition(s, SetStreamTransitionPartial(x.seed, x.seq, x.seq + 1, Seq(Add(v))))
case None =>
}
def streamSetRemove(v: => Any): Unit = !%-(v)
def !%-(v: => Any): Unit = ?%(s) match {
case Some(x) => performStateTransition(s, SetStreamTransitionPartial(x.seed, x.seq, x.seq + 1, Seq(Remove(v))))
case None =>
}
}
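// Usage sketch from a service actor mixing in this trait (names illustrative):
//   implicit val specs = SetSpecs(allowPartialUpdates = true)
//   "users" !% Set("alice", "bob") // publish a full snapshot
//   "users" !%+ "carol"            // partial update: add one element
//   "users" !%- "alice"            // partial update: remove one element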
}
| intelix/reactiveservices | platform/core/src/main/scala/au/com/intelix/rs/core/stream/SetStreamState.scala | Scala | apache-2.0 | 5,245 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the UserGroup entity.
*/
class UserGroupGatlingTest extends Simulation {
val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
// Log all HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
// Log failed HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))
val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""
val httpConf = http
.baseURL(baseURL)
.inferHtmlResources()
.acceptHeader("*/*")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
.connection("keep-alive")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
val headers_http = Map(
"Accept" -> """application/json"""
)
val headers_http_authentication = Map(
"Content-Type" -> """application/json""",
"Accept" -> """application/json"""
)
val headers_http_authenticated = Map(
"Accept" -> """application/json""",
"Authorization" -> "${access_token}"
)
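// "${access_token}" is a Gatling EL placeholder resolved per virtual user from the
// session attribute saved by the Authentication request below.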
val scn = scenario("Test the UserGroup entity")
.exec(http("First unauthenticated request")
.get("/api/account")
.headers(headers_http)
.check(status.is(401))).exitHereIfFailed
.pause(10)
.exec(http("Authentication")
.post("/api/authenticate")
.headers(headers_http_authentication)
.body(StringBody("""{"username":"admin", "password":"admin"}""")).asJSON
.check(header.get("Authorization").saveAs("access_token"))).exitHereIfFailed
.pause(1)
.exec(http("Authenticated request")
.get("/api/account")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10)
.repeat(2) {
exec(http("Get all userGroups")
.get("/api/user-groups")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10 seconds, 20 seconds)
.exec(http("Create new userGroup")
.post("/api/user-groups")
.headers(headers_http_authenticated)
.body(StringBody("""{"id":null, "name":"SAMPLE_TEXT", "email":"SAMPLE_TEXT"}""")).asJSON
.check(status.is(201))
.check(headerRegex("Location", "(.*)").saveAs("new_userGroup_url"))).exitHereIfFailed
.pause(10)
.repeat(5) {
exec(http("Get created userGroup")
.get("${new_userGroup_url}")
.headers(headers_http_authenticated))
.pause(10)
}
.exec(http("Delete created userGroup")
.delete("${new_userGroup_url}")
.headers(headers_http_authenticated))
.pause(10)
}
val users = scenario("Users").exec(scn)
setUp(
users.inject(rampUsers(100) over (1 minute))
).protocols(httpConf)
}
| gcorreageek/noctem | src/test/gatling/simulations/UserGroupGatlingTest.scala | Scala | mit | 3,327 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes
import org.apache.calcite.rex.{RexCall, RexLiteral, RexNode}
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.flink.api.java.ExecutionEnvironment
import org.apache.flink.configuration.{ConfigOption, Configuration}
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.table.api.{TableConfig, TableException}
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.flink.table.functions.python.{PythonFunction, PythonFunctionInfo}
import org.apache.flink.table.functions.utils.{ScalarSqlFunction, TableSqlFunction}
import org.apache.flink.table.util.DummyStreamExecutionEnvironment
import scala.collection.mutable
import scala.collection.JavaConversions._
trait CommonPythonBase {
protected def loadClass(className: String): Class[_] = {
try {
Class.forName(className, false, Thread.currentThread.getContextClassLoader)
} catch {
case ex: ClassNotFoundException => throw new TableException(
"The dependency of 'flink-python' is not present on the classpath.", ex)
}
}
private lazy val convertLiteralToPython = {
val clazz = loadClass("org.apache.flink.api.common.python.PythonBridgeUtils")
clazz.getMethod("convertLiteralToPython", classOf[RexLiteral], classOf[SqlTypeName])
}
private def createPythonFunctionInfo(
pythonRexCall: RexCall,
inputNodes: mutable.Map[RexNode, Integer],
func: UserDefinedFunction): PythonFunctionInfo = {
val inputs = new mutable.ArrayBuffer[AnyRef]()
pythonRexCall.getOperands.foreach {
case pythonRexCall: RexCall =>
// Continuous Python UDFs can be chained together
val argPythonInfo = createPythonFunctionInfo(pythonRexCall, inputNodes)
inputs.append(argPythonInfo)
case literal: RexLiteral =>
inputs.append(
convertLiteralToPython.invoke(null, literal, literal.getType.getSqlTypeName))
case argNode: RexNode =>
// Arguments of type RexInputRef are replaced with an offset into the input row
inputNodes.get(argNode) match {
case Some(existing) => inputs.append(existing)
case None =>
val inputOffset = Integer.valueOf(inputNodes.size)
inputs.append(inputOffset)
inputNodes.put(argNode, inputOffset)
}
}
new PythonFunctionInfo(func.asInstanceOf[PythonFunction], inputs.toArray)
}
protected def createPythonFunctionInfo(
pythonRexCall: RexCall,
inputNodes: mutable.Map[RexNode, Integer]): PythonFunctionInfo = {
pythonRexCall.getOperator match {
case sfc: ScalarSqlFunction =>
createPythonFunctionInfo(pythonRexCall, inputNodes, sfc.getScalarFunction)
case tfc: TableSqlFunction =>
createPythonFunctionInfo(pythonRexCall, inputNodes, tfc.getTableFunction)
}
}
protected def getConfig(
env: ExecutionEnvironment,
tableConfig: TableConfig): Configuration = {
val field = classOf[ExecutionEnvironment].getDeclaredField("cacheFile")
field.setAccessible(true)
val clazz = loadClass(CommonPythonBase.PYTHON_DEPENDENCY_UTILS_CLASS)
val method = clazz.getDeclaredMethod(
"configurePythonDependencies", classOf[java.util.List[_]], classOf[Configuration])
val config = method.invoke(
null, field.get(env), getMergedConfiguration(env, tableConfig))
.asInstanceOf[Configuration]
config.setString("table.exec.timezone", tableConfig.getLocalTimeZone.getId)
config
}
protected def getConfig(
env: StreamExecutionEnvironment,
tableConfig: TableConfig): Configuration = {
val clazz = loadClass(CommonPythonBase.PYTHON_DEPENDENCY_UTILS_CLASS)
val realEnv = getRealEnvironment(env)
val method = clazz.getDeclaredMethod(
"configurePythonDependencies", classOf[java.util.List[_]], classOf[Configuration])
val config = method.invoke(
null, realEnv.getCachedFiles, getMergedConfiguration(realEnv, tableConfig))
.asInstanceOf[Configuration]
config.setString("table.exec.timezone", tableConfig.getLocalTimeZone.getId)
config
}
private def getMergedConfiguration(
env: StreamExecutionEnvironment,
tableConfig: TableConfig): Configuration = {
// As the python dependency configurations may appear in both
// `StreamExecutionEnvironment#getConfiguration` (e.g. parsed from flink-conf.yaml and command
// line) and `TableConfig#getConfiguration` (e.g. user specified), we need to merge them and
// ensure the user specified configuration has priority over others.
val method = classOf[StreamExecutionEnvironment].getDeclaredMethod("getConfiguration")
method.setAccessible(true)
val config = new Configuration(method.invoke(env).asInstanceOf[Configuration])
config.addAll(tableConfig.getConfiguration)
config
}
private def getMergedConfiguration(
env: ExecutionEnvironment,
tableConfig: TableConfig): Configuration = {
// As the python dependency configurations may appear in both
// `ExecutionEnvironment#getConfiguration` (e.g. parsed from flink-conf.yaml and command
// line) and `TableConfig#getConfiguration` (e.g. user specified), we need to merge them and
// ensure the user specified configuration has priority over others.
val config = new Configuration(env.getConfiguration)
config.addAll(tableConfig.getConfiguration)
config
}
private def getRealEnvironment(env: StreamExecutionEnvironment): StreamExecutionEnvironment = {
val realExecEnvField = classOf[DummyStreamExecutionEnvironment].getDeclaredField("realExecEnv")
realExecEnvField.setAccessible(true)
var realEnv = env
while (realEnv.isInstanceOf[DummyStreamExecutionEnvironment]) {
realEnv = realExecEnvField.get(realEnv).asInstanceOf[StreamExecutionEnvironment]
}
realEnv
}
protected def isPythonWorkerUsingManagedMemory(config: Configuration): Boolean = {
val clazz = loadClass("org.apache.flink.python.PythonOptions")
config.getBoolean(clazz.getField("USE_MANAGED_MEMORY").get(null)
.asInstanceOf[ConfigOption[java.lang.Boolean]])
}
}
object CommonPythonBase {
val PYTHON_DEPENDENCY_UTILS_CLASS = "org.apache.flink.python.util.PythonDependencyUtils"
}
| aljoscha/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/nodes/CommonPythonBase.scala | Scala | apache-2.0 | 7,127 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.clustering
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.linalg.{DenseMatrix, Matrices, Vector, Vectors}
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.stat.distribution.MultivariateGaussian
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest, MLTestingUtils}
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions._
class GaussianMixtureSuite extends MLTest with DefaultReadWriteTest {
import GaussianMixtureSuite._
import testImplicits._
final val k = 5
private val seed = 538009335
@transient var dataset: DataFrame = _
@transient var denseDataset: DataFrame = _
@transient var sparseDataset: DataFrame = _
@transient var decompositionDataset: DataFrame = _
@transient var rDataset: DataFrame = _
override def beforeAll(): Unit = {
super.beforeAll()
dataset = KMeansSuite.generateKMeansData(spark, 50, 3, k)
denseDataset = denseData.map(FeatureData).toDF()
sparseDataset = denseData.map { point =>
FeatureData(point.toSparse)
}.toDF()
decompositionDataset = decompositionData.map(FeatureData).toDF()
rDataset = rData.map(FeatureData).toDF()
}
test("gmm fails on high dimensional data") {
val df = Seq(
Vectors.sparse(GaussianMixture.MAX_NUM_FEATURES + 1, Array(0, 4), Array(3.0, 8.0)),
Vectors.sparse(GaussianMixture.MAX_NUM_FEATURES + 1, Array(1, 5), Array(4.0, 9.0)))
.map(Tuple1.apply).toDF("features")
val gm = new GaussianMixture()
withClue(s"GMM should restrict the maximum number of features to be < " +
s"${GaussianMixture.MAX_NUM_FEATURES}") {
intercept[IllegalArgumentException] {
gm.fit(df)
}
}
}
test("default parameters") {
val gm = new GaussianMixture()
assert(gm.getK === 2)
assert(gm.getFeaturesCol === "features")
assert(gm.getPredictionCol === "prediction")
assert(gm.getProbabilityCol === "probability")
assert(gm.getMaxIter === 100)
assert(gm.getTol === 0.01)
val model = gm.setMaxIter(1).fit(dataset)
val transformed = model.transform(dataset)
checkNominalOnDF(transformed, "prediction", model.weights.length)
checkVectorSizeOnDF(transformed, "probability", model.weights.length)
MLTestingUtils.checkCopyAndUids(gm, model)
assert(model.hasSummary)
val copiedModel = model.copy(ParamMap.empty)
assert(copiedModel.hasSummary)
}
test("set parameters") {
val gm = new GaussianMixture()
.setK(9)
.setFeaturesCol("test_feature")
.setPredictionCol("test_prediction")
.setProbabilityCol("test_probability")
.setMaxIter(33)
.setSeed(123)
.setTol(1e-3)
assert(gm.getK === 9)
assert(gm.getFeaturesCol === "test_feature")
assert(gm.getPredictionCol === "test_prediction")
assert(gm.getProbabilityCol === "test_probability")
assert(gm.getMaxIter === 33)
assert(gm.getSeed === 123)
assert(gm.getTol === 1e-3)
}
test("parameters validation") {
intercept[IllegalArgumentException] {
new GaussianMixture().setK(1)
}
}
test("fit, transform and summary") {
val predictionColName = "gm_prediction"
val probabilityColName = "gm_probability"
val gm = new GaussianMixture().setK(k).setMaxIter(2).setPredictionCol(predictionColName)
.setProbabilityCol(probabilityColName).setSeed(1)
val model = gm.fit(dataset)
assert(model.hasParent)
assert(model.weights.length === k)
assert(model.gaussians.length === k)
// Check prediction matches the highest probability, and probabilities sum to one.
testTransformer[Tuple1[Vector]](dataset.toDF(), model,
"features", predictionColName, probabilityColName) {
case Row(_, pred: Int, prob: Vector) =>
val probArray = prob.toArray
val predFromProb = probArray.zipWithIndex.maxBy(_._1)._2
assert(pred === predFromProb)
assert(probArray.sum ~== 1.0 absTol 1E-5)
}
// Check validity of model summary
val numRows = dataset.count()
assert(model.hasSummary)
val summary: GaussianMixtureSummary = model.summary
assert(summary.predictionCol === predictionColName)
assert(summary.probabilityCol === probabilityColName)
assert(summary.featuresCol === "features")
assert(summary.predictions.count() === numRows)
for (c <- Array(predictionColName, probabilityColName, "features")) {
assert(summary.predictions.columns.contains(c))
}
assert(summary.cluster.columns === Array(predictionColName))
assert(summary.probability.columns === Array(probabilityColName))
val clusterSizes = summary.clusterSizes
assert(clusterSizes.length === k)
assert(clusterSizes.sum === numRows)
assert(clusterSizes.forall(_ >= 0))
assert(summary.numIter == 2)
model.setSummary(None)
assert(!model.hasSummary)
}
test("read/write") {
def checkModelData(model: GaussianMixtureModel, model2: GaussianMixtureModel): Unit = {
assert(model.weights === model2.weights)
assert(model.gaussians.map(_.mean) === model2.gaussians.map(_.mean))
assert(model.gaussians.map(_.cov) === model2.gaussians.map(_.cov))
}
val gm = new GaussianMixture()
testEstimatorAndModelReadWrite(gm, dataset, GaussianMixtureSuite.allParamSettings,
GaussianMixtureSuite.allParamSettings, checkModelData)
}
test("univariate dense/sparse data with two clusters") {
val weights = Array(2.0 / 3.0, 1.0 / 3.0)
val means = Array(Vectors.dense(5.1604), Vectors.dense(-4.3673))
val covs = Array(Matrices.dense(1, 1, Array(0.86644)), Matrices.dense(1, 1, Array(1.1098)))
val gaussians = means.zip(covs).map { case (mean, cov) =>
new MultivariateGaussian(mean, cov)
}
val expected = new GaussianMixtureModel("dummy", weights, gaussians)
Seq(denseDataset, sparseDataset).foreach { dataset =>
val actual = new GaussianMixture().setK(2).setSeed(seed).fit(dataset)
modelEquals(expected, actual)
}
}
test("check distributed decomposition") {
val k = 5
val d = decompositionData.head.size
assert(GaussianMixture.shouldDistributeGaussians(k, d))
val gmm = new GaussianMixture().setK(k).setSeed(seed).fit(decompositionDataset)
assert(gmm.getK === k)
}
test("multivariate data and check againt R mvnormalmixEM") {
/*
Using the following R code to generate data and train the model using mixtools package.
library(mvtnorm)
library(mixtools)
set.seed(1)
a <- rmvnorm(7, c(0, 0))
b <- rmvnorm(8, c(10, 10))
data <- rbind(a, b)
model <- mvnormalmixEM(data, k = 2)
model$lambda
[1] 0.4666667 0.5333333
model$mu
[1] 0.11731091 -0.06192351
[1] 10.363673 9.897081
model$sigma
[[1]]
[,1] [,2]
[1,] 0.62049934 0.06880802
[2,] 0.06880802 1.27431874
[[2]]
[,1] [,2]
[1,] 0.2961543 0.160783
[2,] 0.1607830 1.008878
model$loglik
[1] -46.89499
*/
val weights = Array(0.5333333, 0.4666667)
val means = Array(Vectors.dense(10.363673, 9.897081), Vectors.dense(0.11731091, -0.06192351))
val covs = Array(Matrices.dense(2, 2, Array(0.2961543, 0.1607830, 0.160783, 1.008878)),
Matrices.dense(2, 2, Array(0.62049934, 0.06880802, 0.06880802, 1.27431874)))
val gaussians = means.zip(covs).map { case (mean, cov) =>
new MultivariateGaussian(mean, cov)
}
val expected = new GaussianMixtureModel("dummy", weights, gaussians)
val actual = new GaussianMixture().setK(2).setSeed(seed).fit(rDataset)
modelEquals(expected, actual)
val llk = actual.summary.logLikelihood
assert(llk ~== -46.89499 absTol 1E-5)
}
test("upper triangular matrix unpacking") {
/*
The full symmetric matrix is as follows:
1.0 2.5 3.8 0.9
2.5 2.0 7.2 3.8
3.8 7.2 3.0 1.0
0.9 3.8 1.0 4.0
*/
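// triangularValues lists the upper triangle column by column:
// (1.0), (2.5, 2.0), (3.8, 7.2, 3.0), (0.9, 3.8, 1.0, 4.0)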
val triangularValues = Array(1.0, 2.5, 2.0, 3.8, 7.2, 3.0, 0.9, 3.8, 1.0, 4.0)
val symmetricValues = Array(1.0, 2.5, 3.8, 0.9, 2.5, 2.0, 7.2, 3.8,
3.8, 7.2, 3.0, 1.0, 0.9, 3.8, 1.0, 4.0)
val symmetricMatrix = new DenseMatrix(4, 4, symmetricValues)
val expectedMatrix = GaussianMixture.unpackUpperTriangularMatrix(4, triangularValues)
assert(symmetricMatrix === expectedMatrix)
}
test("GaussianMixture with Array input") {
def trainAndComputeLogLikelihood(dataset: Dataset[_]): Double = {
val model = new GaussianMixture().setK(k).setMaxIter(1).setSeed(1).fit(dataset)
model.summary.logLikelihood
}
val (newDataset, newDatasetD, newDatasetF) = MLTestingUtils.generateArrayFeatureDataset(dataset)
val trueLikelihood = trainAndComputeLogLikelihood(newDataset)
val doubleLikelihood = trainAndComputeLogLikelihood(newDatasetD)
val floatLikelihood = trainAndComputeLogLikelihood(newDatasetF)
// checking the cost is fine enough as a sanity check
assert(trueLikelihood ~== doubleLikelihood absTol 1e-6)
assert(trueLikelihood ~== floatLikelihood absTol 1e-6)
}
test("GMM support instance weighting") {
val gm1 = new GaussianMixture().setK(k).setMaxIter(20).setSeed(seed)
val gm2 = new GaussianMixture().setK(k).setMaxIter(20).setSeed(seed).setWeightCol("weight")
Seq(1.0, 10.0, 100.0).foreach { w =>
val gmm1 = gm1.fit(dataset)
val ds2 = dataset.select(col("features"), lit(w).as("weight"))
val gmm2 = gm2.fit(ds2)
modelEquals(gmm1, gmm2)
}
}
test("prediction on single instance") {
val gmm = new GaussianMixture().setSeed(123L)
val model = gmm.fit(dataset)
testClusteringModelSinglePrediction(model, model.predict, dataset,
model.getFeaturesCol, model.getPredictionCol)
testClusteringModelSingleProbabilisticPrediction(model, model.predictProbability, dataset,
model.getFeaturesCol, model.getProbabilityCol)
}
}
object GaussianMixtureSuite extends SparkFunSuite {
/**
* Mapping from all Params to valid settings which differ from the defaults.
* This is useful for tests which need to exercise all Params, such as save/load.
* This excludes input columns to simplify some tests.
*/
val allParamSettings: Map[String, Any] = Map(
"predictionCol" -> "myPrediction",
"probabilityCol" -> "myProbability",
"k" -> 3,
"maxIter" -> 2,
"tol" -> 0.01
)
val denseData = Seq(
Vectors.dense(-5.1971), Vectors.dense(-2.5359), Vectors.dense(-3.8220),
Vectors.dense(-5.2211), Vectors.dense(-5.0602), Vectors.dense( 4.7118),
Vectors.dense( 6.8989), Vectors.dense( 3.4592), Vectors.dense( 4.6322),
Vectors.dense( 5.7048), Vectors.dense( 4.6567), Vectors.dense( 5.5026),
Vectors.dense( 4.5605), Vectors.dense( 5.2043), Vectors.dense( 6.2734)
)
val decompositionData: Seq[Vector] = Seq.tabulate(25) { i: Int =>
Vectors.dense(Array.tabulate(50)(i + _.toDouble))
}
val rData = Seq(
Vectors.dense(-0.6264538, 0.1836433), Vectors.dense(-0.8356286, 1.5952808),
Vectors.dense(0.3295078, -0.8204684), Vectors.dense(0.4874291, 0.7383247),
Vectors.dense(0.5757814, -0.3053884), Vectors.dense(1.5117812, 0.3898432),
Vectors.dense(-0.6212406, -2.2146999), Vectors.dense(11.1249309, 9.9550664),
Vectors.dense(9.9838097, 10.9438362), Vectors.dense(10.8212212, 10.5939013),
Vectors.dense(10.9189774, 10.7821363), Vectors.dense(10.0745650, 8.0106483),
Vectors.dense(10.6198257, 9.9438713), Vectors.dense(9.8442045, 8.5292476),
Vectors.dense(9.5218499, 10.4179416)
)
case class FeatureData(features: Vector)
def modelEquals(m1: GaussianMixtureModel, m2: GaussianMixtureModel): Unit = {
assert(m1.weights.length === m2.weights.length)
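// Component order is not deterministic across fits, so align components by weight
// before comparing them pairwise.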
val s1 = m1.weights.zip(m1.gaussians).sortBy(_._1)
val s2 = m2.weights.zip(m2.gaussians).sortBy(_._1)
for (i <- m1.weights.indices) {
val (w1, g1) = s1(i)
val (w2, g2) = s2(i)
assert(w1 ~== w2 absTol 1E-3)
assert(g1.mean ~== g2.mean absTol 1E-3)
assert(g1.cov ~== g2.cov absTol 1E-3)
}
}
}
| jkbradley/spark | mllib/src/test/scala/org/apache/spark/ml/clustering/GaussianMixtureSuite.scala | Scala | apache-2.0 | 12,953 |
package com.twitter.gizzard.nameserver
import com.twitter.gizzard.shards.ShardId
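/** Routing entry that associates the (tableId, baseId) key with the shard owning it. */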
case class Forwarding(tableId: Int, baseId: Long, shardId: ShardId)
| kmiku7/gizzard | src/main/scala/com/twitter/gizzard/nameserver/Forwarding.scala | Scala | apache-2.0 | 152 |
package io.youi.server.validation
import io.youi.http.{HttpConnection, HttpStatus}
import io.youi.net.IP
import scala.concurrent.Future
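/**
 * Validates connections by source IP: a request passes when its IP is in `allow`
 * (or `defaultAllow` is true) and is not in `reject`; otherwise it is refused with 403 Forbidden.
 */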
class IPAddressValidator(allow: Set[IP], reject: Set[IP], defaultAllow: Boolean) extends Validator {
override def validate(connection: HttpConnection): Future[ValidationResult] = Future.successful {
val ip = connection.request.originalSource
if ((allow.contains(ip) || defaultAllow) && !reject.contains(ip)) {
ValidationResult.Continue(connection)
} else {
scribe.warn(s"Unauthorized attempt to access: ${connection.request.url} from IP: $ip. Allowed: ${allow.mkString(", ")}, Reject: ${reject.mkString(", ")}, Default Allow? $defaultAllow")
ValidationResult.Error(connection, HttpStatus.Forbidden.code, s"Unauthorized IP address: $ip")
}
}
}
| outr/youi | server/src/main/scala/io/youi/server/validation/IPAddressValidator.scala | Scala | mit | 819 |
package logging
import com.twitter.zipkin.gen.{zipkinCoreConstants, Endpoint, Annotation, Span}
import scala.util.Random
import java.net.InetAddress
import java.nio.ByteBuffer
import com.github.kristofa.brave.SpanCollector
trait TraceData {
def traceId: Long
def spanId: Long
def parentSpanId: Option[Long]
def shouldBeSampled: Boolean
def spanName: String
val span = {
val result = new Span()
result.setTrace_id(traceId)
result.setId(spanId)
parentSpanId.foreach(result.setParent_id)
result.setName(spanName)
result
}
def submitAnnotation(annotationName: String, endpoint: Endpoint) {
if (shouldBeSampled) {
val annotation = new Annotation
annotation.setTimestamp(currentTimeMicroseconds)
annotation.setHost(endpoint)
annotation.setValue(annotationName)
span.addToAnnotations(annotation)
}
}
def toCollector(collector: SpanCollector) {
if (shouldBeSampled)
collector.collect(span)
}
private def currentTimeMicroseconds: Long = System.currentTimeMillis() * 1000L
}
case class ServerTraceData(traceId: Long,
spanId: Long,
parentSpanId: Option[Long],
shouldBeSampled: Boolean,
spanName: String)
extends TraceData {
def serverReceived() {
submitAnnotation(zipkinCoreConstants.SERVER_RECV, TraceData.serverEndpoint)
}
def serverSend() {
submitAnnotation(zipkinCoreConstants.SERVER_SEND, TraceData.serverEndpoint)
}
def createClient(host: String, port: Short, serviceName: String, spanName: String) = {
val endpoint = new Endpoint(TraceData.serverEndpoint)
endpoint.setService_name(serviceName)
ClientTraceData(traceId, TraceData.generateId, Some(spanId), shouldBeSampled, spanName, endpoint)
}
}
case class ClientTraceData(traceId: Long,
spanId: Long,
parentSpanId: Option[Long],
shouldBeSampled: Boolean,
spanName: String,
endpoint: Endpoint)
extends TraceData {
def clientSend() {
submitAnnotation(zipkinCoreConstants.CLIENT_SEND, endpoint)
}
def clientReceived() {
submitAnnotation(zipkinCoreConstants.CLIENT_RECV, endpoint)
}
}
object TraceData {
val random = new Random()
def generateId: Long = random.nextLong() & Long.MaxValue // mask the sign bit so ids are always non-negative
lazy val serverEndpoint =
new Endpoint(ByteBuffer.wrap(InetAddress.getLocalHost.getAddress).getInt, 80, "Web") // getInt reads all four IPv4 octets; get would take only the first byte
}
| leanovate/microzon-web | app/logging/TraceData.scala | Scala | mit | 2,562 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.objectives
import com.intel.analytics.bigdl.dllib.nn.abstractnn.AbstractCriterion
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.autograd.{CustomLoss, Variable, AutoGrad => A}
import scala.reflect.ClassTag
/**
 * Hinge loss for pairwise ranking problems: with predictions arranged as a two-column
 * pair where column 0 is the positive score and column 1 the negative score, the loss
 * is max(0, margin + negScore - posScore).
 *
 * @param margin Double. Default is 1.0.
 */
class RankHinge[@specialized(Float, Double) T: ClassTag](
margin: Double = 1.0)(implicit ev: TensorNumeric[T]) extends TensorLossFunction[T] {
override val loss: AbstractCriterion[Tensor[T], Tensor[T], T] =
CustomLoss[T](RankHinge.marginLoss[T](margin), Shape(2, 1))
}
object RankHinge {
def apply[@specialized(Float, Double) T: ClassTag](margin: Double = 1.0)
(implicit ev: TensorNumeric[T]): RankHinge[T] = {
new RankHinge[T](margin)
}
def marginLoss[T: ClassTag](margin: Double = 1.0)(implicit ev: TensorNumeric[T]):
(Variable[T], Variable[T]) => Variable[T] = {
def rankHingeLoss(yTrue: Variable[T], yPred: Variable[T])
(implicit ev: TensorNumeric[T]): Variable[T] = {
val target = yTrue - yTrue + yPred // yTrue - yTrue contributes zero but keeps yTrue wired into the autograd graph
val pos = target.indexSelect(1, 0)
val neg = target.indexSelect(1, 1)
A.maximum(neg - pos + margin, 0)
}
rankHingeLoss
}
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/objectives/RankHinge.scala | Scala | apache-2.0 | 2,040 |
package vultura.util.graph
import vultura.util.{SIIndex, Index}
import scala.collection.mutable
/** An efficient directed graph implementation that uses one array per node to represent its children. */
class LabeledGraph[N] protected[LabeledGraph](val index: Index[N], protected val childs: Array[Array[Int]])
extends DiGraphOps[N] { outer =>
object intGraph {
val numNodes: Int = childs.length
val nodeRange: Range = 0 until numNodes
lazy val pars: Array[Array[Int]] = {
val resultHolder = IndexedSeq.fill(numNodes)(mutable.Set.newBuilder[Int])
for(pi <- nodeRange; ci <- childs(pi)) {
resultHolder(ci) += pi
}
resultHolder.map(_.result().toArray)(collection.breakOut)
}
def nodes: Set[Int] = nodeRange.toSet
def children(node: Int): Set[Int] = childs(node).toSet
def parents(node: Int): Set[Int] = pars(node).toSet
def edges: Set[(Int, Int)] = for(p <- nodes; c <- children(p)) yield p -> c
def descendants(node: Int): Set[Int] = searchAll(children(node),Set(),childs)
def ancestors(node: Int): Set[Int] = searchAll(parents(node),Set(),pars)
def searchAll(fringe: Set[Int], closed: Set[Int], succ: Array[Array[Int]]): Set[Int] = {
val newNodes = fringe.flatMap(succ(_)) -- closed
if(newNodes.isEmpty)
closed ++ fringe
else
searchAll(newNodes, closed ++ fringe, succ)
}
/** Find topologically ordered strongly connected components of the graph. */
lazy val tarjanSCC: List[Set[Int]] = {
val tj_index = Array.fill[Int](numNodes)(-1)
val lowLink = new Array[Int](numNodes)
val stack = new mutable.Stack[Int]()
val onStack = new mutable.HashSet[Int]()
var sccs: List[Set[Int]] = Nil
var nextIndex = 0
def strongConnect(v: Int): Unit = {
tj_index(v) = nextIndex
lowLink(v) = nextIndex
nextIndex += 1
stack.push(v)
onStack += v
//Consider successors of v
for (w <- childs(v)) {
if (tj_index(w) == -1) {
strongConnect(w)
lowLink(v) = math.min(lowLink(v), lowLink(w))
} else if (onStack(w)) {
lowLink(v) = math.min(lowLink(v), tj_index(w))
}
}
if (lowLink(v) == tj_index(v)) {
//start new scc
var newComponent: List[Int] = Nil
var w = 0
do {
w = stack.pop()
onStack -= w
newComponent = w :: newComponent
} while (w != v)
sccs = newComponent.toSet :: sccs
}
}
for (v <- tj_index.indices) {
if (tj_index(v) == -1) strongConnect(v)
}
sccs.reverse
}
}
override lazy val nodes: Set[N] = index.elements.toSet
override lazy val edges: Set[(N, N)] = intGraph.edges.map{case (i1,i2) => index.backward(i1) -> index.backward(i2)}
override def children(node: N): Set[N] = intGraph.children(index.forward(node)).map(index.backward)
override def descendants(node: N): Set[N] = intGraph.descendants(index.forward(node)).map(index.backward)
override def ancestors(node: N): Set[N] = intGraph.ancestors(index.forward(node)).map(index.backward)
override def parents(node: N): Set[N] = intGraph.parents(index.forward(node)).map(index.backward)
override def tarjanSCC: List[Set[N]] = intGraph.tarjanSCC.map(_.map(index.backward))
/** Reverse the edges of the graph. */
override def transpose: LabeledGraph[N] = LabeledGraph.fromChildList(nodes, parents)
override def filter(nodeP: (N) => Boolean, edgeP: ((N, N)) => Boolean): LabeledGraph[N] = {
val newNodes: Set[N] = nodes.filter(nodeP)
LabeledGraph.fromChildList(
newNodes,
newNodes.map(n => n -> children(n).filter(c => nodeP(n) && nodeP(c) && edgeP((n,c))))(collection.breakOut): Map[N,Iterable[N]])
}
val instGraph = new DiGraphInstOps[N] {
override type G = outer.type
override def typeClass: IsDirectedGraph[G, N] = new IsDirectedGraph[G,N] {
override def nodes(x: outer.type): Set[N] = outer.nodes
override def edges(x: outer.type): Set[(N, N)] = outer.edges
}
override def instance: G = outer
}
override def isAcyclic: Boolean = instGraph.isAcyclic
override def graphEqual[X](other: X)(implicit dg: IsDirectedGraph[X, N]): Boolean = instGraph.graphEqual(other)
override def isTree: Boolean = instGraph.isTree
/** Partition the graph into a set of (weakly) connected components; this means that arrow direction is ignored.
* @return A set of components (each a set of vertices). Each component is guaranteed to be non-empty. */
override def connectedComponents: Set[Set[N]] = instGraph.connectedComponents
}
object LabeledGraph {
def apply[X,N](x: X)(implicit dg: IsDirectedGraph[X,N]): LabeledGraph[N] = {
val index = new SIIndex[N](dg.nodes(x))
new LabeledGraph[N](
index,
index.elements.map(n => dg.children(x,n).map(index.forward)(collection.breakOut):Array[Int])(collection.breakOut))
}
def fromChildList[N](nodes: Iterable[N], children: N => Iterable[N]): LabeledGraph[N] = {
val index = new SIIndex(nodes)
new LabeledGraph[N](
index,
index.elements.map(n => children(n).map(index.forward)(collection.breakOut):Array[Int])(collection.breakOut))
}
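// Usage sketch (illustrative names only):
//   val g = LabeledGraph.fromChildList(Seq("a", "b", "c"),
//     Map("a" -> Seq("b"), "b" -> Seq("c"), "c" -> Seq.empty[String]))
//   g.children("a")           // Set("b")
//   g.descendants("a")        // Set("b", "c")
//   g.transpose.children("b") // Set("a")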
implicit def isDiGraph[N]: IsDirectedGraph[LabeledGraph[N],N] = new IsDirectedGraph[LabeledGraph[N],N] {
override def nodes(x: LabeledGraph[N]): Set[N] = x.nodes
override def edges(x: LabeledGraph[N]): Set[(N, N)] = x.edges
}
}
| ziggystar/vultura-factor | util/src/main/scala/vultura/util/graph/LabeledGraph.scala | Scala | mit | 5,576 |
package com.wavesplatform.common
import java.util.concurrent.TimeUnit
import com.wavesplatform.common.ArrayCompareBenchmark.BytesSt
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scala.util.Random
//noinspection ScalaStyle
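// Compares byte-array equality checks: Scala's sameElements goes through the generic
// collections layer element by element, while java.util.Arrays.equals is a specialised,
// JIT-intrinsified primitive-array comparison and is typically much faster.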
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.Throughput))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class ArrayCompareBenchmark {
@Benchmark
def sameElements_test(st: BytesSt, bh: Blackhole): Unit =
bh.consume(st.bytes.sameElements(st.bytes1))
@Benchmark
def arraysEquals_test(st: BytesSt, bh: Blackhole): Unit =
bh.consume(java.util.Arrays.equals(st.bytes, st.bytes1))
}
object ArrayCompareBenchmark {
@State(Scope.Benchmark)
class BytesSt {
val bytes = new Array[Byte](1024)
val bytes1 = new Array[Byte](1024)
Random.nextBytes(bytes)
bytes.copyToArray(bytes1)
}
}
| wavesplatform/Waves | benchmark/src/test/scala/com/wavesplatform/common/ArrayCompareBenchmark.scala | Scala | mit | 925 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool.v2.test
import akka.Done
import akka.actor.FSM.{CurrentState, SubscribeTransitionCallBack, Transition}
import akka.actor.{ActorRef, ActorSystem}
import akka.grpc.internal.ClientClosedException
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import common.StreamLogging
import io.grpc.StatusRuntimeException
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.connector.ActivationMessage
import org.apache.openwhisk.core.containerpool.ContainerId
import org.apache.openwhisk.core.containerpool.v2._
import org.apache.openwhisk.core.entity.ExecManifest.{ImageName, RuntimeManifest}
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.scheduler.SchedulerEndpoints
import org.apache.openwhisk.core.scheduler.grpc.{ActivationResponse => AResponse}
import org.apache.openwhisk.core.scheduler.queue.{ActionMismatch, NoActivationMessage, NoMemoryQueue}
import org.apache.openwhisk.grpc
import org.apache.openwhisk.grpc.{ActivationServiceClient, FetchRequest, RescheduleRequest, RescheduleResponse}
import org.junit.runner.RunWith
import org.scalamock.scalatest.MockFactory
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}
import org.scalatest.concurrent.ScalaFutures
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Future
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
class ActivationClientProxyTests
extends TestKit(ActorSystem("ActivationClientProxy"))
with ImplicitSender
with FlatSpecLike
with Matchers
with MockFactory
with BeforeAndAfterAll
with StreamLogging
with ScalaFutures {
override def afterAll: Unit = TestKit.shutdownActorSystem(system)
implicit val ec = system.dispatcher
val timeout = 20.seconds
val log = logging
val exec = CodeExecAsString(RuntimeManifest("actionKind", ImageName("testImage")), "testCode", None)
val action = ExecutableWhiskAction(EntityPath("actionSpace"), EntityName("actionName"), exec)
val fqn = action.fullyQualifiedName(true)
val rev = action.rev
val schedulerHost = "127.17.0.1"
val rpcPort = 13001
val containerId = ContainerId("fakeContainerId")
val messageTransId = TransactionId(TransactionId.testing.meta.id)
val invocationNamespace = EntityName("invocationSpace")
val uuid = UUID()
val message = ActivationMessage(
messageTransId,
action.fullyQualifiedName(true),
action.rev,
Identity(Subject(), Namespace(invocationNamespace, uuid), BasicAuthenticationAuthKey(uuid, Secret()), Set.empty),
ActivationId.generate(),
ControllerInstanceId("0"),
blocking = false,
content = None)
val entityStore = WhiskEntityStore.datastore()
behavior of "ActivationClientProxy"
it should "create a grpc client successfully" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Right(message)).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
machine ! StartClient
probe.expectMsg(ClientCreationCompleted())
probe.expectMsg(Transition(machine, ClientProxyUninitialized, ClientProxyReady))
}
it should "be closed when failed to create grpc client" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Right(message)).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future {
throw new RuntimeException("failed to create client")
MockActivationServiceClient(fetch)
}
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
machine ! StartClient
probe.expectMsg(Transition(machine, ClientProxyUninitialized, ClientProxyRemoving))
probe.expectMsg(ClientClosed)
probe expectTerminated machine
}
it should "fetch activation message successfully" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Right(message)).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
probe.expectMsg(message)
}
it should "be recreated when scheduler is changed" in within(timeout) {
var creationCount = 0
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Left(NoMemoryQueue())).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) => {
creationCount += 1
Future(MockActivationServiceClient(fetch))
}
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
// new scheduler is reached
machine ! RequestActivation(newScheduler = Some(SchedulerEndpoints("0.0.0.0", 10, 11)))
awaitAssert {
creationCount should be > 1
}
}
it should "be recreated when the queue does not exist" in within(timeout) {
var creationCount = 0
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Left(NoMemoryQueue())).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) => {
creationCount += 1
Future(MockActivationServiceClient(fetch))
}
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
awaitAssert {
creationCount should be > 1
}
}
it should "be closed when the action version does not match" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Left(ActionMismatch())).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
probe.expectMsg(Transition(machine, ClientProxyReady, ClientProxyRemoving))
probe.expectMsg(ClientClosed)
probe expectTerminated machine
}
it should "retry to request activation message when scheduler response no activation message" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Left(NoActivationMessage())).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
probe.expectMsg(RetryRequestActivation)
}
it should "create activation client on other scheduler when the queue does not exist" in within(timeout) {
val createClientOnOtherScheduler = new ArrayBuffer[Boolean]()
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Left(NoMemoryQueue())).serialize))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, tryOtherScheduler: Boolean) => {
createClientOnOtherScheduler += tryOtherScheduler
Future(MockActivationServiceClient(fetch))
}
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
awaitAssert {
// Create activation client using original scheduler endpoint firstly
createClientOnOtherScheduler(0) shouldBe false
// Create activation client using latest scheduler endpoint(try other scheduler) when no memoryQueue
createClientOnOtherScheduler(1) shouldBe true
}
}
it should "request activation message when the message can't deserialize" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse("aaaaaa"))
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
probe.expectMsg(RetryRequestActivation)
}
it should "be recreated when akka grpc server connection failed" in within(timeout) {
var creationCount = 0
val fetch = (_: FetchRequest) =>
Future {
throw new StatusRuntimeException(io.grpc.Status.UNAVAILABLE)
grpc.FetchResponse(AResponse(Right(message)).serialize)
}
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) => {
creationCount += 1
Future(MockActivationServiceClient(fetch))
}
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
awaitAssert {
creationCount should be > 1
}
}
it should "be closed when grpc client is already closed" in within(timeout) {
val fetch = (_: FetchRequest) =>
Future {
throw new ClientClosedException()
grpc.FetchResponse(AResponse(Right(message)).serialize)
}
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
probe.expectMsg(Transition(machine, ClientProxyReady, ClientProxyRemoving))
probe.expectMsg(ClientClosed)
probe expectTerminated machine
}
it should "be closed when it failed to getting activation from scheduler" in within(timeout) {
val fetch = (_: FetchRequest) =>
Future {
throw new Exception("Unknown exception")
grpc.FetchResponse(AResponse(Right(message)).serialize)
}
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) =>
Future(MockActivationServiceClient(fetch))
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! RequestActivation()
probe.expectMsg(Transition(machine, ClientProxyReady, ClientProxyRemoving))
probe.expectMsg(ClientClosed)
probe expectTerminated machine
}
it should "be closed when it receives a CloseClientProxy message for a normal timeout case" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Right(message)).serialize))
val activationClient = MockActivationServiceClient(fetch)
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) => Future(activationClient)
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! CloseClientProxy
awaitAssert(activationClient.isClosed shouldBe true)
probe.expectMsg(Transition(machine, ClientProxyReady, ClientProxyRemoving))
machine ! RequestActivation()
probe expectMsg ClientClosed
probe expectTerminated machine
}
it should "be closed when it receives a StopClientProxy message for the case of graceful shutdown" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Right(message)).serialize))
val activationClient = MockActivationServiceClient(fetch)
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) => Future(activationClient)
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
machine ! StopClientProxy
awaitAssert(activationClient.isClosed shouldBe true)
probe expectMsg ClientClosed
probe expectTerminated machine
}
it should "be safely closed when the client is already closed" in within(timeout) {
val fetch = (_: FetchRequest) => Future(grpc.FetchResponse(AResponse(Right(message)).serialize))
val activationClient = MockActivationServiceClient(fetch)
val client = (_: String, _: FullyQualifiedEntityName, _: String, _: Int, _: Boolean) => Future(activationClient)
val probe = TestProbe()
val machine =
probe.childActorOf(
ActivationClientProxy
.props(invocationNamespace.asString, fqn, rev, schedulerHost, rpcPort, containerId, client))
registerCallback(machine, probe)
ready(machine, probe)
// close client
activationClient.close().futureValue
awaitAssert(activationClient.isClosed shouldBe true)
// close client again
machine ! StopClientProxy
probe expectMsg ClientClosed
probe expectTerminated machine
}
/** Registers the transition callback and expects the first message */
def registerCallback(c: ActorRef, probe: TestProbe) = {
c ! SubscribeTransitionCallBack(probe.ref)
probe.expectMsg(CurrentState(c, ClientProxyUninitialized))
probe watch c
}
def ready(machine: ActorRef, probe: TestProbe) = {
machine ! StartClient
probe.expectMsg(ClientCreationCompleted())
probe.expectMsg(Transition(machine, ClientProxyUninitialized, ClientProxyReady))
}
case class MockActivationServiceClient(customFetchActivation: FetchRequest => Future[grpc.FetchResponse])
extends ActivationServiceClient {
var isClosed = false
override def close(): Future[Done] = {
isClosed = true
Future.successful(Done)
}
override def closed(): Future[Done] = close()
override def rescheduleActivation(in: RescheduleRequest): Future[RescheduleResponse] = {
Future.successful(RescheduleResponse())
}
override def fetchActivation(in: FetchRequest): Future[grpc.FetchResponse] = {
if (!isClosed) {
customFetchActivation(in)
} else {
throw new ClientClosedException()
}
}
}
}
| style95/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/containerpool/v2/test/ActivationClientProxyTests.scala | Scala | apache-2.0 | 17,116 |
package ua.kata
import org.scalatest.{FunSuite, Matchers}
class BinaryTreeTest extends FunSuite with Matchers {
private val tree: Tree[Int] = Tree[Int]()
test("create an empty tree") {
tree.size shouldBe 0
}
test("add single item to a tree") {
(tree + 10).size shouldBe 1
}
test("add many items to a tree") {
(tree + 10 + 20 + 30).size shouldBe 3
}
test("traverse tree in order") {
(tree + 5 + 2 + 1 + 4 + 7 + 6 + 8).inOrder should
contain theSameElementsInOrderAs List(1, 2, 4, 5, 6, 7, 8)
}
test("traverse tree pre order") {
(tree + 5 + 2 + 1 + 4 + 7 + 6 + 8).preOrder should
contain theSameElementsInOrderAs List(5, 2, 1, 4, 7, 6, 8)
}
test("traverse tree post order") {
(tree + 5 + 2 + 1 + 4 + 7 + 6 + 8).postOrder should
contain theSameElementsInOrderAs List(1, 4, 2, 6, 8, 7, 5)
}
test("reduce in order") {
(tree + 5 + 2 + 1 + 4 + 7 + 6 + 8).reduceInOrder(List[Int]())((acc, item) => item :: acc) should
contain theSameElementsInOrderAs List(1, 2, 4, 5, 6, 7, 8)
}
test("reduce pre order") {
(tree + 5 + 2 + 1 + 4 + 7 + 6 + 8).reducePreOrder(List[Int]())((acc, item) => item :: acc) should
contain theSameElementsInOrderAs List(5, 2, 1, 4, 7, 6, 8)
}
test("reduce post order") {
(tree + 5 + 2 + 1 + 4 + 7 + 6 + 8).reducePostOrder(List[Int]())((acc, item) => item :: acc) should
contain theSameElementsInOrderAs List(1, 4, 2, 6, 8, 7, 5)
}
}
| Alex-Diez/Scala-TDD-Katas | binary_tree_kata/iteration_02/binary_tree_day_03/src/test/scala/ua/kata/BinaryTreeTest.scala | Scala | mit | 1,468 |
/*******************************************************************************
* Copyright (c) 2016 Andreas Wagner.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Andreas Wagner - Concept and implementation
* Christian Prehofer - Concept
*******************************************************************************/
package mctt.preprocessing
import mctt.All
import mctt.Any
import mctt.Choice
import mctt.Disabling
import mctt.Reset
import mctt.Repeat
import mctt.Enabling
import mctt.Concurrent
import mctt.Expr
import mctt.Task
import mctt.TaskType
import mctt.AnnotatedTask
/**
* Collect all distinct labels in the task tree.
*/
object LabelCollector {
def collect(tree: Expr): Set[String] = tree match {
case Any(e1: Expr, labels: List[String]) => { labels.toSet.union(collect(e1)) }
case All(e1: Expr, labels: List[String]) => { labels.toSet.union(collect(e1)) }
case Enabling(e1: Expr, e2: Expr) => { collect(e1).union(collect(e2)) }
case Disabling(e1: Expr, e2: Expr) => { collect(e1).union(collect(e2)) }
case Reset(e1: Expr, e2: Expr) => { collect(e1).union(collect(e2)) }
case Repeat(e1: Expr) => { collect(e1) }
case Concurrent(e1: Expr, e2: Expr) => { collect(e1).union(collect(e2)) }
case Choice(e1: Expr, e2: Expr) => { collect(e1).union(collect(e2)) }
case Task(ttype: TaskType.Value, name: String) => { Set() }
}
}
/**
* Annotate each task with corresponding labels.
* Labels indicate, whether tasks may run concurrently or isolated.
* Note: This function should be executed per label!
*/
object LabelAnnotator {
def annotate(tree: Expr, currLabel: String, labels: Set[String], definedLabels: Set[String]): (Expr, Map[Task, AnnotatedTask]) = tree match {
case Any(e1: Task, lblset: List[String]) => {
val (any, map) = annotate(e1, currLabel, lblset.toSet, lblset.toSet)
(Any(any, lblset), map)
}
/*
//TODO: Clarify semantics! (see tests 10 and 16)
case Any(Disabling(e1: Expr, e2: Expr), lblset: List[String]) => {
val (left, leftMap) = annotate(Any(e1, Set(currLabel).intersect(lblset.toSet).toList), currLabel, Set(currLabel).intersect(lblset.toSet), lblset.toSet)
val (right, rightMap) = annotate(Any(e2, Set(currLabel).intersect(lblset.toSet).toList), currLabel, Set(currLabel).intersect(lblset.toSet), lblset.toSet)
(Any(Disabling(left,right), lblset), leftMap ++ rightMap)
}
*
*/
case Any(Any(e1: Expr, lblset: List[String]), lblset_outer: List[String]) => {
val (any, map) = annotate(e1, currLabel, Set(currLabel).intersect(lblset.toSet), lblset.toSet)
(Any(any, lblset), map)
}
case Any(All(e1: Expr, lblset: List[String]), lblset_outer: List[String]) => {
val (all, map) = annotate(e1, currLabel, lblset.toSet, lblset.toSet)
(All(all, lblset), map)
}
case Any(e1: Expr, lblset: List[String]) => {
val (any, map) = annotate(e1, currLabel, Set(currLabel).intersect(lblset.toSet), lblset.toSet)
(Any(any, lblset), map)
}
case All(All(e1: Expr, lblset: List[String]), lblset_outer: List[String]) => {
val (all, map) = annotate(e1, currLabel, lblset.toSet, lblset.toSet)
(All(all, lblset), map)
}
case All(Any(e1: Expr, lblset: List[String]), lblset_outer: List[String]) => {
val (any, map) = annotate(e1, currLabel, Set(currLabel).intersect(lblset.toSet), lblset.toSet)
(Any(any, lblset), map)
}
/*
//TODO: also clarify semantics!
case All(Disabling(e1: Expr, e2: Expr), lblset: List[String]) => {
val (left, leftMap) = annotate(All(e1, Set(currLabel).intersect(lblset.toSet).toList), currLabel, lblset.toSet, lblset.toSet)
val (right, rightMap) = annotate(All(e2, lblset), currLabel, lblset.toSet, lblset.toSet)
(Disabling(left, right), leftMap ++ rightMap)
}
*
*/
case All(e1: Expr, lblset: List[String]) => {
val (expr, map) = annotate(e1, currLabel, lblset.toSet, lblset.toSet)
(All(expr, lblset), map)
}
case Enabling(e1: Expr, e2: Expr) => {
val (left, leftMap) = annotate(e1, currLabel, labels, definedLabels)
val (right, rightMap) = annotate(e2, currLabel, labels, definedLabels)
(Enabling(left, right), leftMap ++ rightMap)
}
case Disabling(e1: Expr, e2: Expr) => {
val (left, leftMap) = annotate(e1, currLabel, labels, definedLabels)
val (right, rightMap) = annotate(e2, currLabel, labels, definedLabels)
(Disabling(left, right), leftMap ++ rightMap)
}
case Reset(e1: Expr, e2: Expr) => {
val (left, leftMap) = annotate(e1, currLabel, labels, definedLabels)
val (right, rightMap) = annotate(e2, currLabel, labels, definedLabels)
(Reset(left, right), leftMap ++ rightMap)
}
case Repeat(e1: Expr) => {
val (left, leftMap) = annotate(e1, currLabel, labels, definedLabels)
(Repeat(left), leftMap)
}
case Choice(e1: Expr, e2: Expr) => {
val (left, leftMap) = annotate(e1, currLabel, labels, definedLabels)
val (right, rightMap) = annotate(e2, currLabel, labels, definedLabels)
(Choice(left, right), leftMap ++ rightMap)
}
case Concurrent(e1: Expr, e2: Expr) => {
val (left, leftMap) = annotate(e1, currLabel, labels, definedLabels)
val (right, rightMap) = annotate(e2, currLabel, labels, definedLabels)
(Concurrent(left, right), leftMap ++ rightMap)
}
case Task(t: TaskType.Value, s: String) => {
if(definedLabels == labels){ // Any(task)
val activeLabelTuples = for(l <- definedLabels) yield {
(l, labels)
}
val task = AnnotatedTask(t, s, Map(activeLabelTuples.toSeq:_*), definedLabels)
(task, Map((tree.asInstanceOf[Task], task)))
}
else{ // Any(expr)
val activeLabelTuples = for(l <- definedLabels) yield {
(l, Set(l))
}
val task = AnnotatedTask(t, s, Map(activeLabelTuples.toSeq:_*), definedLabels)
(task, Map((tree.asInstanceOf[Task], task)))
}
}
}
}
/**
* Annotate each task with information by what other task it is activated.
* Useful when generating state machines for ANY.
* Note: This function should be executed on the original tree!
*/
object ActivatedByAnnotator {
def annotate(tree: Expr): (Set[Task], Set[Task], Set[Task]) = tree match {
case Any(e1: Expr, lblset: List[String]) => {
var (first, all, last) = annotate(e1)
(first, all, last)
}
case All(e1: Expr, lblset: List[String]) => {
var (first, all, last) = annotate(e1)
(first, all, last)
}
case Enabling(e1: Expr, e2: Expr) => {
var (first_l, all_l, last_l) = annotate(e1)
var (first_r, all_r, last_r) = annotate(e2)
first_r.foreach { _.activatedBy = last_l.toSeq }
(first_l, all_l union all_r, last_r)
}
case Disabling(e1: Expr, e2: Expr) => {
var (first_l, all_l, last_l) = annotate(e1)
var (first_r, all_r, last_r) = annotate(e2)
//first_r.foreach { _.activatedBy = all_l.toSeq }
(first_l, all_l union all_r, last_r union last_l)
}
case Reset(e1: Expr, e2: Expr) => {
var (first_l, all_l, last_l) = annotate(e1)
var (first_r, all_r, last_r) = annotate(e2)
//first_r.foreach { _.activatedBy = all_l.toSeq }
(first_l, all_l union all_r, last_l)
}
case Repeat(e1: Expr) => {
var (first, all, last) = annotate(e1)
(first, all, last)
}
case Choice(e1: Expr, e2: Expr) => {
var (first_l, all_l, last_l) = annotate(e1)
var (first_r, all_r, last_r) = annotate(e2)
(first_l union first_r, all_l union all_r, last_l union last_r)
}
case Concurrent(e1: Expr, e2: Expr) => {
var (first_l, all_l, last_l) = annotate(e1)
var (first_r, all_r, last_r) = annotate(e2)
(first_l union first_r, all_l union all_r, last_l union last_r)
}
case Task(ttype: TaskType.Value, name: String) => {
(Set(tree.asInstanceOf[Task]), Set(tree.asInstanceOf[Task]), Set(tree.asInstanceOf[Task]))
}
}
}
| MultiDeviceCTT/MCTT | MCTT-Translator/src/main/scala/mctt/preprocessing/TreePreparator.scala | Scala | epl-1.0 | 8,433 |
package scala.xml.quote
class NamespaceSuite extends XmlQuoteSuite {
test("reconstruct not prefixed namespaced elem") {
assert(xml"""<foo xmlns="uri"/>""" ≈ <foo xmlns="uri"/>)
}
test("reconstruct namespaced elem") {
assert(xml"""<foo xmlns:pre="uri"/>""" ≈ <foo xmlns:pre="uri"/>)
}
test("reconstruct multi-namespaced elem") {
assert(xml"""<foo xmlns:a="uri1" xmlns:b="uri2"/>""" ≈ <foo xmlns:a="uri1" xmlns:b="uri2"/>)
}
test("reconstruct nested namespaced elem") {
assert(xml"""<foo xmlns:pre1="uri1"><bar xmlns:pre2="uri2"/></foo>""" ≈ <foo xmlns:pre1="uri1"><bar xmlns:pre2="uri2"/></foo>)
}
test("reconstruct shadowed namespaced elem") {
assert(xml"""<foo xmlns:pre="a"><bar xmlns:pre="b"/></foo>""" ≈ <foo xmlns:pre="a"><bar xmlns:pre="b"/></foo>)
}
test("reconstruct nested unquoted elems") {
assert(xml"""<a xmlns:pre="scope0">${ xml"<b/>" }</a>""" ≈
<a xmlns:pre="scope0">{ <b/> }</a>)
assert(xml"""<a xmlns:s0="s0">${ xml"""<b xmlns:s1="s1"><c/></b>""" }</a>""" ≈
<a xmlns:s0="s0">{ <b xmlns:s1="s1"><c/></b> }</a>)
val b = <b/>
assert(xml"""<a xmlns:pre="scope0">${ xml"<b/>" }</a>""" !≈
xml"""<a xmlns:pre="scope0">$b</a>""")
val _ = xml"""<a xmlns="1">${ () => xml"<b/>" }</a>""" // should compile
}
test("invalid namespace") {
" xml\\"\\"\\"<a xmlns=\\"&a;&b;\\" />\\"\\"\\" " shouldNot typeCheck
}
}
| densh/scala-xml-quote | src/test/scala/scala/xml/quote/NamespaceSuite.scala | Scala | bsd-3-clause | 1,425 |
package model
import io.circe.{Decoder, Encoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
case class PublicAppConfig(googleClientId: String)
object PublicAppConfig {
implicit val appConfigDecoder: Decoder[PublicAppConfig] = deriveDecoder
implicit val appConfigEncoder: Encoder[PublicAppConfig] = deriveEncoder
}
case class ExternalUserInfo(email: String, sub: String)
case class AccessToken(value: String)
sealed trait ImageSize { def pixels: Int }
case object WebSize extends ImageSize { val pixels: Int = 1000000 }
object ImageSizeJson {
import cats.syntax.either._
val jsMappings: Map[String, ImageSize] = Map("WEB_SIZE" -> WebSize)
private def asString(imageSize: ImageSize): String = jsMappings.filter(pair => pair._2 == imageSize).head._1
implicit val encodeImageSize: Encoder[ImageSize] = Encoder.encodeString.contramap[ImageSize](asString)
implicit val decodeImageSize: Decoder[ImageSize] = Decoder.decodeString.emap { str =>
Either.catchNonFatal(jsMappings(str)).leftMap(_ => "ImageSize")
}
}
| Leonti/receipts-rest-service | app/src/main/scala/model/Models.scala | Scala | mit | 1,054 |
package com.sksamuel.scapegoat.inspections.collections
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ FreeSpec, Matchers, OneInstancePerTest }
/** @author Stephen Samuel */
class PredefSeqIsMutableTest extends FreeSpec with Matchers with PluginRunner with OneInstancePerTest {
override val inspections = Seq(new PredefSeqIsMutable)
"PredefSeqUse" - {
"should report warning" - {
"for predef seq apply" in {
val code = """object Test { val a = Seq("sammy") }""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
"for declaring Seq as return type" in {
val code = """object Test { def foo : Seq[String] = ??? }""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
"should not report warning" - {
"for scala.collection.mutable usage" in {
val code = """import scala.collection.mutable.Seq
|object Test { val a = Seq("sammy") }""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for scala.collection.immutable usage" in {
val code = """import scala.collection.immutable.Seq
|object Test { val a = Seq("sammy") }""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for scala.collection.mutable defs" in {
val code = """import scala.collection.mutable.Seq
|object Test { def foo : Seq[String] = ??? }""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for scala.collection.immutable defs" in {
val code = """import scala.collection.immutable.Seq
|object Test { def foo : Seq[String] = ??? }""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
}
}
| pwwpche/scalac-scapegoat-plugin | src/test/scala/com/sksamuel/scapegoat/inspections/collections/PredefSeqIsMutableTest.scala | Scala | apache-2.0 | 2,052 |
object SCL9429 {
case class UnneccessarillyGiantCaseClass23(
val a1: Int,
val b2: Int,
val c3: Int,
val d4: Int,
val e5: Int,
val f6: Int,
val f7: Int,
val f8: Int,
val f9: Int,
val f10: Int,
val f11: Int,
val f12: Int,
val f13: Int,
val f14: Int,
val f15: Int,
val f16: Int,
val f17: Int,
val f18: Int,
val f19: Int,
val f20: Int,
val f21: Int,
val f22: Int,
val f23: String)
def foo(i: Any) {
i match {
case UnneccessarillyGiantCaseClass23(protection, friends, autoEnableGps, lockOnLost, sirenOnLost, blockSettingsOnLost, blockUsbDebuggingOnLost,
forceMobileDataOnLost, lowBatteryNotification, lockText, passwordCheckFailure, pin, smsSendingAllowed,
sendLocationOnLost, recordAudioOnLost, takePictureOnLost, sendPersonalDataOnLost, lostOnSimChange, lostOnBluetoothDisconnect, locationOnLowBattery,
personalDataOnLowBattery, ccConfigOnLost, sendSimChangeBySms) =>
val s = /*start*/sendSimChangeBySms/*end*/
case _ =>
}
}
}
//String | ilinum/intellij-scala | testdata/typeInference/bugs211/SCL9429.scala | Scala | apache-2.0 | 2,045 |
package io.getquill.quotation
import scala.annotation.StaticAnnotation
import scala.reflect.ClassTag
import scala.reflect.macros.whitebox.Context
import io.getquill.ast._
import io.getquill.util.MacroContextExt._
import io.getquill.norm.BetaReduction
import io.getquill.util.Messages.TraceType
import io.getquill.util.{ EnableReflectiveCalls, Interpolator, Messages }
case class QuotedAst(ast: Ast) extends StaticAnnotation
abstract class LiftUnlift(numQuatFields: Int) extends Liftables with Unliftables {
lazy val serializeQuats: Boolean = numQuatFields > Messages.maxQuatFields
}
trait Quotation extends Parsing with ReifyLiftings {
val c: Context
import c.universe._
private val quoted = TermName("quoted")
def quote[T](body: Tree)(implicit t: WeakTypeTag[T]) = {
val interp = new Interpolator(TraceType.Quotation, 1)
import interp._
val ast = BetaReduction(trace"Parsing Quotation Body" andReturn (astParser(body)))
val id = TermName(s"id${ast.hashCode.abs}")
val (reifiedAst, liftings) = reifyLiftings(ast)
val liftUnlift = new { override val mctx: c.type = c } with LiftUnlift(reifiedAst.countQuatFields)
// Technically can just put reifiedAst into the quasi-quote directly but this is more comprehensible
val liftedAst: c.Tree = liftUnlift.astLiftable(reifiedAst)
val quotation =
c.untypecheck {
q"""
new io.getquill.Quoted[$t] {
..${EnableReflectiveCalls(c)}
@${c.weakTypeOf[QuotedAst]}($liftedAst)
def $quoted = ast
override def ast = $liftedAst
def $id() = ()
$liftings
}
"""
}
if (IsDynamic(ast)) {
q"$quotation: io.getquill.Quoted[$t]"
} else {
quotation
}
}
def doubleQuote[T: WeakTypeTag](body: Expr[Any]) =
body.tree match {
case q"null" => c.fail("Can't quote null")
case tree => q"${c.prefix}.unquote($tree)"
}
def quotedFunctionBody(func: Expr[Any]) =
func.tree match {
case q"(..$p) => $b" => q"${c.prefix}.quote((..$p) => ${c.prefix}.unquote($b))"
}
protected def unquote[T](tree: Tree)(implicit ct: ClassTag[T]) = {
val unlift = new { override val mctx: c.type = c } with Unliftables
import unlift._
astTree(tree).flatMap(astUnliftable.unapply).map {
case ast: T => ast
}
}
private def astTree(tree: Tree) =
for {
method <- tree.tpe.decls.find(_.name == quoted)
annotation <- method.annotations.headOption
astTree <- annotation.tree.children.lastOption
} yield astTree
}
| getquill/quill | quill-core/src/main/scala/io/getquill/quotation/Quotation.scala | Scala | apache-2.0 | 2,601 |
package clean.tex
import clean.lib._
import ml.classifiers.NoLearner
object tabMetaLeasConfia extends App with StratsTrait with LearnerTrait with CM {
Global.debug = 0
val context = this.getClass.getName
val ls = (args(0).split(",") map str2learner()).map(_.limp).toBuffer
val db = new Db("metanew", true)
val mcs = List("RoF500", "PCT", "RFw500", "ABoo500", "maj", "chu")
val sts = stratsTexForGraficoComplexo map (_(NoLearner()).limp)
db.open()
val tudo = Seq("f", "i") map { fi =>
sts map { st =>
val nome = st + (if (fi == "f") "¹" else "²")
val medidas = mcs map { mc =>
val sql = s"select p from prob where $fi='th' and st='$st' and mc='$mc' and ls='ArrayBuffer(5NNw, NB, C4.52, SVM)' and run=-1 and fold=-1 order by p desc"
print(db.readString(sql).map(_.head).mkString(",") + ",")
db.readString(sql).map(_.head.toDouble).sum / 90d
}
println
nome -> medidas.toList
}
}
val fla = tudo.flatten.toList
val txt = fla.sortBy(_._2.sum).reverse
val header = Seq("estratégia") ++ mcs mkString " "
println(header)
txt foreach { case (nome, meds) =>
println(s"$nome " + meds.mkString(" "))
}
db.close()
// val pairs = StatTests.friedmanNemenyi(tab, mcs.toVector)
// val fri = StatTests.pairTable(pairs, "stratsfriedpares", 2, "fried")
// println(s"${fri}")
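  // Small tuple-mapping helpers: e.g. t2map((1, 2))(_ * 10) == (10, 20).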
def t2map[A, B](as: (A, A))(f: A => B) = as match {
case (a1, a2) => (f(a1), f(a2))
}
def t3map[A, B](as: (A, A, A))(f: A => B) = as match {
case (a1, a2, a3) => (f(a1), f(a2), f(a3))
}
}
/*
active-learning-scala: Active Learning library for Scala
Copyright (c) 2014 Davi Pereira dos Santos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
| active-learning/active-learning-scala | src/main/scala/clean/tex/tabMetaLeasConfia.scala | Scala | gpl-2.0 | 2,323 |
package dhg.ccg.parse.pcfg.mcmc
import org.junit.Test
import org.junit.Assert._
import dhg.ccg.cat._
import dhg.ccg.prob._
import dhg.ccg.parse._
import dhg.ccg.parse.pcfg._
import dhg.ccg.tagdict.SimpleTagDictionary
import dhg.util._
import scala.collection.immutable.ListMap
class ScgInsideChartTests {
val S = cat"S".asInstanceOf[AtomCat]
val NP = cat"NP".asInstanceOf[AtomCat]
val N = cat"N".asInstanceOf[AtomCat]
val PP = cat"PP".asInstanceOf[AtomCat]
val STA = cat"<S>"
val END = cat"<E>"
val A = cat"A"
val B = cat"B"
val C = cat"C"
val D = cat"D"
val E = cat"E"
val F = cat"F"
@Test
def test_PcfgInsideChart() {
val t0 = Map(A -> LogDouble(0.1))
val t1 = Map(A -> LogDouble(0.2), B -> LogDouble(0.3))
val t2 = Map(C -> LogDouble(0.4))
val v02 = Map(D -> LogDouble(0.5))
val v03 = Map(E -> LogDouble(0.6))
val mockMatrix = Vector[Vector[Map[Cat, LogDouble]]](
Vector(Map(), t0, v02, v03),
Vector(Map(), Map(), t1, Map()),
Vector(Map(), Map(), Map(), t2))
val ic = new PcfgInsideChart(mockMatrix)
//ic.draw()
assertSame(mockMatrix(0), ic(0))
assertSame(mockMatrix(1), ic(1))
assertSame(mockMatrix(2), ic(2))
assertSame(mockMatrix(0)(0), ic(0, 0))
assertSame(mockMatrix(0)(1), ic(0, 1))
assertSame(mockMatrix(0)(2), ic(0, 2))
assertSame(mockMatrix(0)(3), ic(0, 3))
assertSame(mockMatrix(1)(0), ic(1, 0))
assertSame(mockMatrix(1)(1), ic(1, 1))
assertSame(mockMatrix(1)(2), ic(1, 2))
assertSame(mockMatrix(1)(3), ic(1, 3))
assertSame(mockMatrix(2)(0), ic(2, 0))
assertSame(mockMatrix(2)(1), ic(2, 1))
assertSame(mockMatrix(2)(2), ic(2, 2))
assertSame(mockMatrix(2)(3), ic(2, 3))
assertEquals(3, ic.length)
assertSame(mockMatrix(0)(3), ic.root)
val tdn: IndexedSeq[(Int, Int, Map[Cat, LogDouble])] = ic.topDownNodes
assertEquals(Vector(
(0, 3, v03),
(0, 2, v02), (1, 3, Map()),
(0, 1, t0), (1, 2, t1), (2, 3, t2)),
tdn)
}
@Test
def test_SimplePcfgInsideChartBuilder_buildInsideChart_withUnary {
// val sentence = "the dogs run".split("\\\\s+").toVector
// val tagdict = SimpleTagDictionary[Cat](Map(
// "the" -> Set(N / N, NP / N, N / S),
// "dogs" -> Set(N),
// "run" -> Set(S \\ N, S \\ NP, S)),
// "<S>", STA, "<E>", END)
// val builder = new SimpleCfgGuideChartBuilder(Vector(FA, BA, N2NP))
// val Some(table) = builder.build(sentence, Vector.empty, tagdict)
// table.draw()
// println(table.repr)
//println(f"${table.numPossibleParses}")
// +--------------------+--------------------+--------------------+
// | | (0,1) | (0,2) |
// | (N/N) -> "the" | N -> 1:[(N/N) N] | S -> 2:[N (S\\N)] |
// | (NP/N) -> "the" | NP -> 1:[(NP/N) N] | 2:[NP (S\\NP)] |
// | (N/S) -> "the" | N | N -> 1:[(N/S) S] |
// | | | NP -> N |
// +--------------------+--------------------+--------------------+
// | | | (1,2) |
// | | N -> "dogs" | S -> 2:[N (S\\N)] |
// | | NP -> N | 2:[NP (S\\NP)] |
// | | "dogs" | |
// | | | |
// +--------------------+--------------------+--------------------+
// | | | |
// | | | (S\\NP) -> "run" |
// | | | (S\\N) -> "run" |
// | | | |
// | | | |
// +--------------------+--------------------+--------------------+
val guideChart = CfgGuideChart("the dogs run".splitWhitespace, Vector[Vector[Map[Cat, Set[GuideChartEntry]]]](
Vector(ListMap(), ListMap((N / N) -> Set(TermGuideChartEntry(TermProd("the"))), (NP / N) -> Set(TermGuideChartEntry(TermProd("the"))), (N / S) -> Set(TermGuideChartEntry(TermProd("the")))), ListMap(N -> Set(BinaryGuideChartEntry(1, BinaryProd((N / N), N))), NP -> Set(BinaryGuideChartEntry(1, BinaryProd((NP / N), N)), UnaryGuideChartEntry(UnaryProd(N)))), ListMap(S -> Set(BinaryGuideChartEntry(2, BinaryProd(N, (S \\ N))), BinaryGuideChartEntry(2, BinaryProd(NP, (S \\ NP)))), N -> Set(BinaryGuideChartEntry(1, BinaryProd((N / S), S))), NP -> Set(UnaryGuideChartEntry(UnaryProd(N))))),
Vector(ListMap(), ListMap(), ListMap(N -> Set(TermGuideChartEntry(TermProd("dogs"))), NP -> Set(UnaryGuideChartEntry(UnaryProd(N)), TermGuideChartEntry(TermProd("dogs")))), ListMap(S -> Set(BinaryGuideChartEntry(2, BinaryProd(N, (S \\ N))), BinaryGuideChartEntry(2, BinaryProd(NP, (S \\ NP)))))),
Vector(ListMap(), ListMap(), ListMap(), ListMap((S \\ NP) -> Set(TermGuideChartEntry(TermProd("run"))), (S \\ N) -> Set(TermGuideChartEntry(TermProd("run")))))))
val icb = new SimplePcfgInsideChartBuilder()
val prodDist = new ConditionalLogProbabilityDistribution[Cat, Prod] {
def apply(x: Prod, given: Cat): LogDouble = (given, x) match {
case ((NP / N), /**/ TermProd("the")) /* */ => LogDouble(0.21)
case ((N / N), /* */ TermProd("the")) /* */ => LogDouble(0.02)
case ((N / S), /* */ TermProd("the")) /* */ => LogDouble(0.01)
case ((N), /* */ TermProd("dogs")) /**/ => LogDouble(0.05)
case ((NP), /* */ TermProd("dogs")) /**/ => LogDouble(0.04)
case ((S \\ NP), /**/ TermProd("run")) /* */ => LogDouble(0.06)
case ((S \\ N), /* */ TermProd("run")) /* */ => LogDouble(0.01)
case ((S), /* */ TermProd("run")) /* */ => LogDouble(0.03)
case ((NP), BinaryProd(NP / N, N)) /**/ => LogDouble(0.45)
case ((N), BinaryProd(N / N, N)) /* */ => LogDouble(0.25)
case ((N), BinaryProd(N / S, S)) /* */ => LogDouble(0.10)
case ((S), BinaryProd(NP, S \\ NP)) /**/ => LogDouble(0.65)
case ((S), BinaryProd(N, S \\ N)) /* */ => LogDouble(0.15)
case (NP, UnaryProd(N)) => LogDouble(0.07)
}
def sample(given: Cat): Prod = ???
}
/*
* v01(n/n): p(the|n/n) = 0.02 = 0.02
* v01(np/n): p(the|np/n) = 0.21 = 0.21
* v01(n/s): p(the|n/s) = 0.01 = 0.01
*
* v12(n): p(dogs|n) = 0.05 = 0.05
* v12(np): p(dogs|np) = 0.04 = 0.0435
* p(np -> n) * v12(n) = 0.0035
*
* v23(s\\np): p(run|s\\np) = 0.06 = 0.06
* v23(s\\n): p(run|s\\n) = 0.01 = 0.01
*
*
* v02(n): p(n -> n/n n) * v01(n/n) * v12(n) = 0.25 * 0.02 * 0.05 = 0.000250
* v02(np): p(np -> np/n n) * v01(np/n) * v12(n) = 0.45 * 0.21 * 0.05 = 0.004725 = 0.0047425
* p(np -> n) * v02(n) = 0.07 * 0.000250 = 0.0000175
*
* v13(s): p(s -> n s\\n) * v12(n) * v23(s\\n) = 0.15 * 0.05 * 0.01 = 0.000075 = 0.0017715
* p(s -> np s\\np) * v12(np) * v23(s\\np) = 0.65 * 0.0435 * 0.06 = 0.0001365
*
*
* v03(s): p(s -> np s\\np) * v02(np) * v23(s\\np) = 0.65 * 0.0047425 * 0.06 = 1.849575E-4 = 1.853325E-4
* p(s -> n s\\n ) * v02(n) * v23(s\\n) = 0.15 * 0.000250 * 0.01 = 3.75E-7
* v03(n): p(n -> n/s s) * v01(n/s) * v13(s) = 0.10 * 0.01 * 0.0017715 = 1.7715E-6 = 1.7715E-6
* v03(np): p(np -> n) * v03(n) = 0.07 * 1.7715E-6 = 1.24005E-7 = 1.24005E-7
*/
val ic = icb.buildInsideChart(guideChart, prodDist)
assertEqualsLog(LogDouble(0.02), ic(0, 1)(N / N), 1e-9)
assertEqualsLog(LogDouble(0.21), ic(0, 1)(NP / N), 1e-9)
assertEqualsLog(LogDouble(0.01), ic(0, 1)(N / S), 1e-9)
assertEqualsLog(LogDouble(0.05), ic(1, 2)(N), 1e-9)
assertEqualsLog(LogDouble(0.0435), ic(1, 2)(NP), 1e-9)
assertEqualsLog(LogDouble(0.06), ic(2, 3)(S \\ NP), 1e-9)
assertEqualsLog(LogDouble(0.01), ic(2, 3)(S \\ N), 1e-9)
assertEqualsLog(LogDouble(0.000250), ic(0, 2)(N), 1e-9)
assertEqualsLog(LogDouble(0.0047425), ic(0, 2)(NP), 1e-9)
assertEqualsLog(LogDouble(0.0017715), ic(1, 3)(S), 1e-9)
assertEqualsLog(LogDouble(1.853325E-4), ic(0, 3)(S), 1e-9)
assertEqualsLog(LogDouble(1.7715E-6), ic(0, 3)(N), 1e-9)
assertEqualsLog(LogDouble(1.24005E-7), ic(0, 3)(NP), 1e-9)
}
def assertEqualsLog(expected: LogDouble, actual: LogDouble, err: Double) {
assertEquals(expected.toDouble, actual.toDouble, err)
}
} | dhgarrette/2015-ccg-parsing | src/test/scala/dhg/ccg/parse/scg/mcmc/ScgInsideChartTests.scala | Scala | apache-2.0 | 8,773 |
package com.madsen.xcs.core.genetic
import java.nio.ByteBuffer
import com.madsen.xsc.interop.ParameterDto
/**
* Created by erikmadsen2 on 15/05/15.
*/
object Gene {
val GeneLength = 4098
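  // Integer parameters are serialised as 8-byte longs and floating-point
  // parameters as 8-byte doubles (see getLong/getDouble below), hence both
  // element sizes are 8 bytes.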
val SizeOfInt = 8
val SizeOfFloat = 8
val SizeOfHeaders = SizeOfInt + 1 + 1
def byteToBooleans(byte: Byte): Seq[Boolean] = 0 to 7 map { bit => ((byte >> bit) & 1) == 1 }
/**
Take sequence of bytes and parse out a gene. Layout of incoming bytes:
Key: <code>header name (size in bytes)</code>
<code>
+------------------------------------------------------------------------------------------+
|id(8)|intCount(1)|doubleCount(1)|intParams(0-2040)|doubleParams(0-2040)|boolParams(8-4088)| TOTAL: 4098 bytes
+------------------------------------------------------------------------------------------+
</code>
*/
def apply(bytes: Seq[Byte]): Gene = {
require(Option(bytes).isDefined)
require(bytes.size == Gene.GeneLength)
val array = bytes.toArray
val buffer: ByteBuffer = ByteBuffer.wrap(array).asReadOnlyBuffer()
val id = buffer.getLong
val numInts = buffer.get() & 0xFF
val numFloats = buffer.get() & 0xFF
val intParams = 1 to numInts map { a => buffer.getLong }
val floatParams = 1 to numFloats map { a => buffer.getDouble }
val booleansAsBytes = new Array[Byte](booleanBytesCount(numInts, numFloats))
buffer.get(booleansAsBytes)
val boolParams = booleansAsBytes.toSeq.flatMap(byteToBooleans)
apply(id, intParams, floatParams, boolParams)
}
private def booleanBytesCount(intCount: Int, numFloats: Int): Int = {
GeneLength -
SizeOfHeaders -
intCount * SizeOfInt -
numFloats * SizeOfFloat
}
}
case class Gene(id: Long, ints: Seq[Long], floats: Seq[Double], booleans: Seq[Boolean]) {
import com.madsen.util.JavaConversions._
def parameters: ParameterDto = new ParameterDto(ints, floats, booleans)
}
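
// Hedged usage sketch (added for illustration; not part of the original file):
// parsing a Gene from a zero-filled buffer of the documented length. All header
// counts read as zero, so the remaining 4088 bytes decode to 32704 booleans.
object GeneUsageSketch {
  val gene: Gene = Gene(Seq.fill(Gene.GeneLength)(0.toByte))
  assert(gene.ints.isEmpty && gene.floats.isEmpty && gene.booleans.size == 4088 * 8)
}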
| beatmadsen/xcs-main | src/main/scala/com/madsen/xcs/core/genetic/Gene.scala | Scala | mit | 1,934 |
import squants.time.Frequency
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
/**
* ==Squants==
* The Scala API for Quantities, Units of Measure and Dimensional Analysis
*
* ==Overview==
* Squants is a framework of data types and a domain specific language (DSL) for representing Quantities,
* their Units of Measure, and their Dimensional relationships.
* The API supports typesafe dimensional analysis, improved domain models and more.
* All types are immutable and thread-safe.
*
* Typedefs and implicits for common usages
*
* @author garyKeorkunian
* @version 0.1
* @since 0.1
*
*/
package object squants {
type QuantitySeries[A <: Quantity[A]] = IndexedSeq[QuantityRange[A]]
/* Quantity Types brought into scope with just squants._ */
/* SI Base Quantities and their Base Units */
type Length = squants.space.Length
val Meters = squants.space.Meters
type Mass = squants.mass.Mass
val Kilograms = squants.mass.Kilograms
type Time = squants.time.Time
val Seconds = squants.time.Seconds
type ElectricCurrent = squants.electro.ElectricCurrent
val Amperes = squants.electro.Amperes
type Temperature = squants.thermal.Temperature
val Kelvin = squants.thermal.Kelvin
type ChemicalAmount = squants.mass.ChemicalAmount
val Moles = squants.mass.Moles
type LuminousIntensity = squants.photo.LuminousIntensity
val Candelas = squants.photo.Candelas
/* Common Derived Quantities */
type Angle = squants.space.Angle
val Radians = squants.space.Radians
type SolidAngle = squants.space.SolidAngle
val SquareRadians = squants.space.SquaredRadians
type Area = squants.space.Area
type Volume = squants.space.Volume
type Density = squants.mass.Density
type Velocity = squants.motion.Velocity
type Acceleration = squants.motion.Acceleration
type Jerk = squants.motion.Jerk
type Momentum = squants.motion.Momentum
type Force = squants.motion.Force
type MassFlow = squants.motion.MassFlow
type VolumeFlow = squants.motion.VolumeFlow
type Energy = squants.energy.Energy
type Power = squants.energy.Power
type PowerRamp = squants.energy.PowerRamp
/* Market Quantities */
type Money = squants.market.Money
type Price[A <: Quantity[A]] = squants.market.Price[A]
/**
* Provides implicit conversions that allow Doubles to lead in * and / by Time operations
* {{{
* 1.5 * Kilometers(10) should be(Kilometers(15))
* }}}
*
* @param d Double
*/
implicit class SquantifiedDouble(d: Double) {
def *[A <: Quantity[A]](that: A): A = that * d
def *[A](that: SVector[A]): SVector[A] = that * d
def /(that: Time): Frequency = Each(d) / that
def per(that: Time): Frequency = /(that)
}
/**
* Provides implicit conversions that allow Longs to lead in * and / by Time operations
* {{{
   * 5 * Kilometers(10) should be(Kilometers(50))
* }}}
*
* @param l Long
*/
implicit class SquantifiedLong(l: Long) {
def *[A <: Quantity[A]](that: A): A = that * l.toDouble
def *[A](that: SVector[A]): SVector[A] = that * l.toDouble
def /(that: Time) = Each(l) / that
def per(that: Time): Frequency = /(that)
}
/**
* Provides implicit conversions that allow BigDecimals to lead in * and / by Time operations
* {{{
* BigDecimal(1.5) * Kilometers(10) should be(Kilometers(15))
* }}}
*
* @param bd BigDecimal
*/
implicit class SquantifiedBigDecimal(bd: BigDecimal) {
def *[A <: Quantity[A]](that: A): A = that * bd.toDouble
def *[A](that: SVector[A]): SVector[A] = that * bd.toDouble
def /(that: Time) = Each(bd) / that
def per(that: Time): Frequency = /(that)
}
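  /**
   * Hedged illustration (added): with the enrichments above in scope, plain
   * numeric literals can lead quantity arithmetic, and dividing a number by a
   * Time yields a Frequency. Uses only units aliased in this package object.
   */
  private[squants] object LeadingNumericExamples {
    val fifteenMeters: Length = 1.5 * Meters(10)
    val perSecond: Frequency = 3.0 / Seconds(1)
  }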
}
| underscorenico/squants | shared/src/main/scala/squants/package.scala | Scala | apache-2.0 | 4,129 |
package bootstrap.liftweb
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.sitemap.Loc._
import Helpers._
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
class Boot {
def boot {
// where to search snippet
LiftRules.addToPackages("demo.helloworld")
// Build SiteMap
val entries = Menu(Loc("Home", List("index"), "Home")) :: Nil
LiftRules.setSiteMap(SiteMap(entries:_*))
}
}
| scalatest/scalatest-maven-plugin | src/it/lift/src/main/scala/bootstrap/liftweb/Boot.scala | Scala | apache-2.0 | 548 |
/*
* Copyright ixias.net All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license
* For the full copyright and license information,
* please view the LICENSE file that was distributed with this source code.
*/
package ixias.play.api.mvc
import play.api.mvc._
import cats.data.EitherT
import cats.instances.future._
import scala.concurrent.Future
import scala.language.implicitConversions
trait BaseExtensionMethods { self: BaseControllerHelpers =>
val Cursor = ixias.persistence.model.Cursor
val JsonHelper = ixias.play.api.mvc.JsonHelper
val FormHelper = ixias.play.api.mvc.FormHelper
val AttrHelper = ixias.play.api.mvc.RequestHeaderAttrHelper
/** The ExecutionContext with using on Playframework. */
implicit lazy val executionContext = defaultExecutionContext
// --[ Methods ] -------------------------------------------------------------
// Either[Result, Result] -> Result
implicit def convEitherToResult(v: Either[Result, Result]): Result =
v match { case Right(r) => r case Left(l) => l }
// Future[Either[Result, Result]] -> Future[Result]
implicit def convEitherToResult(f: Future[Either[Result, Result]]): Future[Result] =
f.map(convEitherToResult(_))
// EitherT[Future, Result, Result] -> Future[Result]
implicit def convEitherToResult(t: EitherT[Future, Result, Result]): Future[Result] =
t.valueOr(v => v)
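  // Hedged illustration (added): with the conversions above, an Either-shaped
  // value can flow out of an action body as a plain Result.
  private def eitherToResultExample: Result =
    convEitherToResult(Right(Results.Ok): Either[Result, Result])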
// --[ Methods ] -------------------------------------------------------------
def DeviceDetection: ActionBuilder[Request, AnyContent] = DeviceDetectionBuilder(parse.default)
val DeviceDetectionAttrKey = ixias.play.api.mvc.DeviceDetectionAttrKey
}
| sp1rytus/ixias | framework/ixias-play-core/src/main/scala/ixias/play/api/mvc/BaseExtensionMethods.scala | Scala | mit | 1,670 |
package com.github.diegopacheco.scala.playground.spring
import scala.beans.BeanProperty
class SoccerTeam(@BeanProperty var address:String = "") extends Team {
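  // Explicit no-arg constructor so that Spring (and other Java frameworks that
  // instantiate beans reflectively) can construct this class.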
def this() = this(address = "")
  override def toString: String =
    s" name: ${name} Address: ${this.address} "
} | diegopacheco/scala-playground | scala-2.10-spring-3.2-playground/src/main/scala/com/github/diegopacheco/scala/playground/spring/SoccerTeam.scala | Scala | unlicense | 312 |
package slamdata.engine.analysis
import slamdata.engine.fp._
import scalaz.{Tree => ZTree, Node => _, _}
import Scalaz._
import Id.Id
import slamdata.engine.{RenderTree, Terminal, NonTerminal}
sealed trait term {
case class Term[F[_]](unFix: F[Term[F]]) {
def cofree(implicit f: Functor[F]): Cofree[F, Unit] =
      Cofree((), Functor[F].map(unFix)(_.cofree))
def isLeaf(implicit F: Foldable[F]): Boolean =
Tag.unwrap(F.foldMap(unFix)(Function.const(Tags.Disjunction(true))))
def children(implicit F: Foldable[F]): List[Term[F]] =
F.foldMap(unFix)(_ :: Nil)
def universe(implicit F: Foldable[F]): List[Term[F]] =
children.flatMap(_.universe)
def transform(f: Term[F] => Term[F])(implicit T: Traverse[F]): Term[F] =
transformM[Free.Trampoline]((v: Term[F]) => f(v).pure[Free.Trampoline]).run
def transformM[M[_]](f: Term[F] => M[Term[F]])(implicit M: Monad[M], TraverseF: Traverse[F]): M[Term[F]] = {
def loop(term: Term[F]): M[Term[F]] = {
for {
y <- TraverseF.traverse(unFix)(loop _)
z <- f(Term(y))
} yield z
}
loop(this)
}
def topDownTransform(f: Term[F] => Term[F])(implicit T: Traverse[F]): Term[F] = {
topDownTransformM[Free.Trampoline]((term: Term[F]) => f(term).pure[Free.Trampoline]).run
}
def topDownTransformM[M[_]](f: Term[F] => M[Term[F]])(implicit M: Monad[M], F: Traverse[F]): M[Term[F]] = {
def loop(term: Term[F]): M[Term[F]] = {
for {
x <- f(term)
y <- F.traverse(x.unFix)(loop _)
} yield Term(y)
}
loop(this)
}
def topDownCata[A](a: A)(f: (A, Term[F]) => (A, Term[F]))(implicit F: Traverse[F]): Term[F] = {
topDownCataM[Free.Trampoline, A](a)((a: A, term: Term[F]) => f(a, term).pure[Free.Trampoline]).run
}
def foldMap[Z](f: Term[F] => Z)(implicit F: Traverse[F], Z: Monoid[Z]): Z = {
(foldMapM[Free.Trampoline, Z] { (term: Term[F]) =>
f(term).pure[Free.Trampoline]
}).run
}
def foldMapM[M[_], Z](f: Term[F] => M[Z])(implicit F: Traverse[F], M: Monad[M], Z: Monoid[Z]): M[Z] = {
def loop(z0: Z, term: Term[F]): M[Z] = {
for {
z1 <- f(term)
z2 <- F.foldLeftM(term.unFix, Z.append(z0, z1))(loop(_, _))
} yield z2
}
loop(Z.zero, this)
}
def topDownCataM[M[_], A](a: A)(f: (A, Term[F]) => M[(A, Term[F])])(implicit M: Monad[M], F: Traverse[F]): M[Term[F]] = {
def loop(a: A, term: Term[F]): M[Term[F]] = {
for {
tuple <- f(a, term)
(a, tf) = tuple
rec <- F.traverse(tf.unFix)(loop(a, _))
} yield Term(rec)
}
loop(a, this)
}
def descend(f: Term[F] => Term[F])(implicit F: Functor[F]): Term[F] = {
Term(F.map(unFix)(f))
}
def descendM[M[_]](f: Term[F] => M[Term[F]])(implicit M: Monad[M], TraverseF: Traverse[F]): M[Term[F]] = {
TraverseF.traverse(unFix)(f).map(Term.apply _)
}
def rewrite(f: Term[F] => Option[Term[F]])(implicit T: Traverse[F]): Term[F] = {
rewriteM[Free.Trampoline]((term: Term[F]) => f(term).pure[Free.Trampoline]).run
}
def rewriteM[M[_]](f: Term[F] => M[Option[Term[F]]])(implicit M: Monad[M], TraverseF: Traverse[F]): M[Term[F]] = {
transformM[M] { term =>
for {
x <- f(term)
y <- Traverse[Option].traverse(x)(_ rewriteM f).map(_.getOrElse(term))
} yield y
}
}
def restructure[G[_]](f: F[Term[G]] => G[Term[G]])(implicit T: Traverse[F]): Term[G] = {
restructureM[Free.Trampoline, G]((term: F[Term[G]]) => f(term).pure[Free.Trampoline]).run
}
def restructureM[M[_], G[_]](f: F[Term[G]] => M[G[Term[G]]])(implicit M: Monad[M], T: Traverse[F]): M[Term[G]] = {
for {
x <- T.traverse(unFix)(_ restructureM f)
y <- f(x)
} yield Term(y)
}
def trans[G[_]](f: F ~> G)(implicit G: Functor[G]): Term[G] = Term[G](G.map(f(unFix))(_.trans(f)(G)))
def cata[A](f: F[A] => A)(implicit F: Functor[F]): A = f(F.map(unFix)(_.cata(f)(F)))
def para[A](f: F[(Term[F], A)] => A)(implicit F: Functor[F]): A = f(F.map(unFix)(t => t -> t.para(f)(F)))
def para2[A](f: (Term[F], F[A]) => A)(implicit F: Functor[F]): A = f(this, F.map(unFix)(_.para2(f)(F)))
def paraList[A](f: (Term[F], List[A]) => A)(implicit F: Functor[F], F2: Foldable[F]): A = {
f(this, F2.foldMap(unFix)(_.paraList(f)(F, F2) :: Nil))
}
override def toString = unFix.toString
}
sealed trait TermInstances {
implicit def TermShow[F[_]](implicit showF: Show[F[_]], foldF: Foldable[F]) = new Show[Term[F]] {
implicit val ShowF: Show[F[Term[F]]] = new Show[F[Term[F]]] {
override def show(fa: F[Term[F]]): Cord = showF.show(fa)
}
override def show(term: Term[F]): Cord = {
def toTree(term: Term[F]): ZTree[F[Term[F]]] = {
ZTree.node(term.unFix, term.children.toStream.map(toTree _))
}
Cord(toTree(term).drawTree)
}
}
implicit def TermRenderTree[F[_]](implicit F: Foldable[F], RF: RenderTree[F[_]]) = new RenderTree[Term[F]] {
override def render(v: Term[F]) = {
val t = RF.render(v.unFix)
NonTerminal(t.label, v.children.map(render(_)), t.nodeType)
}
}
implicit def TermEqual[F[_]](implicit equalF: EqualF[F]): Equal[Term[F]] = new Equal[Term[F]] {
implicit val EqualFTermF = new Equal[F[Term[F]]] {
def equal(v1: F[Term[F]], v2: F[Term[F]]): Boolean = {
equalF.equal(v1, v2)(TermEqual[F](equalF))
}
}
def equal(v1: Term[F], v2: Term[F]): Boolean = {
EqualFTermF.equal(v1.unFix, v2.unFix)
}
}
}
object Term extends TermInstances {
}
def apo[F[_], A](a: A)(f: A => F[Term[F] \\/ A])(implicit F: Functor[F]): Term[F] = {
Term(F.map(f(a)) {
case -\\/(term) => term
case \\/-(a) => apo(a)(f)
})
}
def ana[F[_], A](a: A)(f: A => F[A])(implicit F: Functor[F]): Term[F] = {
Term(F.map(f(a))(a => ana(a)(f)(F)))
}
def hylo[F[_], A, B](b: B)(f: F[A] => A, g: B => F[B])(implicit F: Functor[F]): A = ana(b)(g).cata(f)
def zygo_[F[_], A, B](t: Term[F])(f: F[B] => B, g: F[(B, A)] => A)(implicit F: Functor[F]): A = zygo(t)(f, g)(F)._2
def zygo[F[_], A, B](t: Term[F])(f: F[B] => B, g: F[(B, A)] => A)(implicit F: Functor[F]): (B, A) = {
val fba = F.map(t.unFix)(zygo(_)(f, g)(F))
val b = f(F.map(fba)(_._1))
val a = g(fba)
(b, a)
}
}
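
// Hedged illustration (added; not in the original source): exercising `cata`
// and `ana` through the `fixplate` object defined at the bottom of this file.
// The NatF functor below is invented purely for this example.
object TermUsageSketch {
  import fixplate._
  sealed trait NatF[+A]
  case object Zero extends NatF[Nothing]
  case class Succ[A](prev: A) extends NatF[A]
  implicit val natFunctor: Functor[NatF] = new Functor[NatF] {
    def map[A, B](fa: NatF[A])(f: A => B): NatF[B] = fa match {
      case Zero       => Zero
      case Succ(prev) => Succ(f(prev))
    }
  }
  // ana unfolds an Int into a Peano-style term; cata folds it back down.
  def fromInt(n: Int): Term[NatF] =
    ana[NatF, Int](n)(i => if (i <= 0) Zero else Succ(i - 1))
  def toInt(t: Term[NatF]): Int =
    t.cata[Int] { case Zero => 0; case Succ(prev) => prev + 1 }
  assert(toInt(fromInt(3)) == 3)
}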
sealed trait holes {
sealed trait Hole
val Hole = new Hole{}
def holes[F[_]: Traverse, A](fa: F[A]): F[(A, A => F[A])] = holes2(fa)(identity)
def holes2[F[_], A, B](fa: F[A])(f: A => B)(implicit F: Traverse[F]): F[(A, A => F[B])] = {
(F.mapAccumL(fa, 0) {
case (i, x) =>
val h: A => F[B] = { y =>
val g: (Int, A) => (Int, A) = (j, z) => (j + 1, if (i == j) y else z)
F.map(F.mapAccumL(fa, 0)(g)._2)(f)
}
(i + 1, (x, h))
})._2
}
def holesList[F[_]: Traverse, A](fa: F[A]): List[(A, A => F[A])] = Traverse[F].toList(holes(fa))
def transformChildren[F[_]: Traverse, A](fa: F[A])(f: A => A): F[F[A]] = {
val g: (A, A => F[A]) => F[A] = (x, replace) => replace(f(x))
Traverse[F].map(holes(fa))(g.tupled)
}
def transformChildren2[F[_]: Traverse, A, B](fa: F[A])(f: A => B): F[F[B]] = {
val g: (A, A => F[B]) => F[B] = (x, replace) => replace(x)
Traverse[F].map(holes2(fa)(f))(g.tupled)
}
def builder[F[_]: Traverse, A, B](fa: F[A], children: List[B]): F[B] = {
(Traverse[F].mapAccumL(fa, children) {
case (x :: xs, _) => (xs, x)
case _ => sys.error("Not enough children")
})._2
}
  def project[F[_]: Foldable, A](index: Int, fa: F[A]): Option[A] = {
    // Hedged completion of the original `???` stub: the index-th element of
    // the structure in fold order, if it exists.
    Foldable[F].toList(fa).lift(index)
  }
def sizeF[F[_]: Foldable, A](fa: F[A]): Int = Foldable[F].foldLeft(fa, 0)((a, b) => a + 1)
}
sealed trait zips {
def unzipF[F[_]: Functor, A, B](f: F[(A, B)]): (F[A], F[B]) = {
val F = Functor[F]
(F.map(f)(_._1), F.map(f)(_._2))
}
}
sealed trait ann extends term with zips {
case class Ann[F[_], A, B](attr: A, unAnn: F[B]) { ann =>
def trans[G[_]](f: F ~> G): Ann[G, A, B] = Ann(ann.attr, f(ann.unAnn))
}
sealed trait CoAnn[F[_], A, B] { coann =>
def trans[G[_]](f: F ~> G): CoAnn[G, A, B] = coann match {
case CoAnn.Pure(attr) => CoAnn.Pure(attr)
case CoAnn.UnAnn(unAnn) => CoAnn.UnAnn(f(unAnn))
}
}
object CoAnn {
case class Pure[F[_], A, B](attr: A) extends CoAnn[F, A, B]
case class UnAnn[F[_], A, B](unAnn: F[B]) extends CoAnn[F, A, B]
}
implicit def AnnShow[F[_], A](implicit S: Show[F[_]], A: Show[A]): Show[Ann[F, A, _]] = new Show[Ann[F, A, _]] {
override def show(v: Ann[F, A, _]): Cord = Cord("(" + A.show(v.attr) + ", " + S.show(v.unAnn) + ")")
}
implicit def AnnFoldable[F[_], A](implicit F: Foldable[F]): Foldable[({type f[X]=Ann[F, A, X]})#f] = new Foldable[({type f[X]=Ann[F, A, X]})#f] {
type AnnFA[X] = Ann[F, A, X]
def foldMap[A, B](fa: AnnFA[A])(f: A => B)(implicit B: Monoid[B]): B = F.foldMap(fa.unAnn)(f)
def foldRight[A, B](fa: AnnFA[A], z: => B)(f: (A, => B) => B): B = F.foldRight(fa.unAnn, z)(f)
}
// F: scalaz.Foldable[[b]attr.this.Ann[F,A,b]]
}
sealed trait attr extends ann with holes {
type Attr[F[_], A] = Term[({type f[b]=Ann[F, A, b]})#f]
object Attr {
// Helper functions to make it easier to NOT annotate constructors.
def apply[F[_], A](a: A, f: F[Attr[F, A]]): Term[({type f[X] = Ann[F, A, X]})#f] = {
type AnnFA[X] = Ann[F, A, X]
Term[AnnFA](Ann(a, f))
}
def unapply[F[_], A](a: Attr[F, A]): Option[(A, F[Attr[F, A]])] = Some((a.unFix.attr, a.unFix.unAnn))
}
type CoAttr[F[_], A] = Term[({type f[b]=CoAnn[F, A, b]})#f]
def attr[F[_], A](attr: Attr[F, A]): A = attr.unFix.attr
def attrUnit[F[_]: Functor](term: Term[F]): Attr[F, Unit] = attrK(term, ())
def attrK[F[_]: Functor, A](term: Term[F], k: A): Attr[F, A] = {
Attr(k, Functor[F].map(term.unFix)(attrK(_, k)(Functor[F])))
}
def attrSelf[F[_]: Functor](term: Term[F]): Attr[F, Term[F]] = {
type AnnFTermF[X] = Ann[F, Term[F], X]
Term[AnnFTermF](Ann(term, Functor[F].map(term.unFix)(attrSelf(_)(Functor[F]))))
}
def forget[F[_], A](attr: Attr[F, A])(implicit F: Functor[F]): Term[F] = Term(F.map(attr.unFix.unAnn)(forget[F, A](_)))
// TODO: Do the low-priority, high-priority implicits thing to select for most powerful of
// functor, foldable, traverse
def AttrFunctor[F[_]: Functor]: Functor[({type f[a]=Attr[F, a]})#f] = new Functor[({type f[a]=Attr[F, a]})#f] {
def map[A, B](v: Attr[F, A])(f: A => B): Attr[F, B] = {
type AnnFB[X] = Ann[F, B, X]
Attr[F, B](f(v.unFix.attr), Functor[F].map(v.unFix.unAnn)(t => AttrFunctor[F].map(t)(f)))
}
}
def AttrFoldable[F[_]: Foldable] = {
type AttrF[A] = Attr[F, A]
new Foldable[AttrF] {
def foldMap[A, B](fa: AttrF[A])(f: A => B)(implicit F: Monoid[B]): B = {
val head = f(fa.unFix.attr)
val tail = Foldable[F].foldMap(fa.unFix.unAnn)(v => foldMap(v)(f))
Monoid[B].append(head, tail)
}
def foldRight[A, B](fa: AttrF[A], z: => B)(f: (A, => B) => B): B = {
f(fa.unFix.attr, Foldable[F].foldRight(fa.unFix.unAnn, z)((a, z) => foldRight(a, z)(f)))
}
}
}
implicit def AttrTraverse[F[_]: Traverse]: Traverse[({type f[X] = Attr[F,X]})#f] = {
type AttrF[A] = Attr[F, A]
new Traverse[AttrF] {
def traverseImpl[G[_], A, B](fa: AttrF[A])(f: A => G[B])(implicit G: Applicative[G]): G[AttrF[B]] = {
type AnnF[X] = Ann[F, A, X]
type AnnF2[X] = Ann[F, B, X]
val gb: G[B] = f(fa.unFix.attr)
val gunAnn: G[F[AttrF[B]]] = Traverse[F].traverseImpl(fa.unFix.unAnn)((v: Term[AnnF]) => traverseImpl(v)(f))
G.apply2(gb, gunAnn)((b, unAnn) => Term[AnnF2](Ann(b, unAnn)))
}
}
}
implicit def AttrRenderTree[F[_], A](implicit F: Foldable[F], RF: RenderTree[F[_]], RA: RenderTree[A]) = new RenderTree[Attr[F, A]] {
override def render(attr: Attr[F, A]) = {
val t = RF.render(attr.unFix.unAnn)
NonTerminal(t.label,
RA.render(attr.unFix.attr).copy(label="<annotation>", nodeType=List("Annotation")) ::
t.children,
//attr.children.map(render(_))
t.nodeType)
}
}
implicit def AttrZip[F[_]: Traverse] = {
type AttrF[A] = Attr[F, A]
new Zip[AttrF] {
def zip[A, B](v1: => AttrF[A], v2: => AttrF[B]): AttrF[(A, B)] = unsafeZip2(v1, v2)
}
}
implicit def AttrComonad[F[_]: Functor] = {
type AttrF[X] = Attr[F, X]
new Comonad[AttrF] {
def cobind[A, B](fa: AttrF[A])(f: AttrF[A] => B): AttrF[B] = {
type AnnFB[X] = Ann[F, B, X]
Term[AnnFB](Ann(f(fa), Functor[F].map(fa.unFix.unAnn)(term => cobind(term)(f))))
}
def copoint[A](p: AttrF[A]): A = p.unFix.attr
def map[A, B](fa: AttrF[A])(f: A => B): AttrF[B] = attrMap(fa)(f)
}
}
def attrMap[F[_]: Functor, A, B](attr: Attr[F, A])(f: A => B): Attr[F, B] = {
AttrFunctor[F].map(attr)(f)
}
def attrMap2[F[_], A, B](attr: Attr[F, A])(f: Attr[F, A] => B)(implicit F: Functor[F]): Attr[F, B] = {
val b = f(attr)
Attr[F, B](b, F.map(attr.unFix.unAnn)(attrMap2(_)(f)(F)))
}
def duplicate[F[_]: Functor, A](attrfa: Attr[F, A]): Attr[F, Attr[F, A]] = attrMap2(attrfa)(identity)
def histo[F[_], A](t: Term[F])(f: F[Attr[F, A]] => A)(implicit F: Functor[F]): A = {
type AnnFA[X] = Ann[F, A, X]
def g: Term[F] => Attr[F, A] = { t =>
val a = histo(t)(f)(F)
Attr(a, F.map(t.unFix)(g))
}
f(F.map(t.unFix)(g))
}
def futu[F[_], A](a: A)(f: A => F[CoAttr[F, A]])(implicit F: Functor[F]): Term[F] = {
def g: CoAttr[F, A] => Term[F] = t => t.unFix match {
case CoAnn.Pure(attr) => futu(a)(f)(F)
case CoAnn.UnAnn(fcoattr) => Term(F.map(fcoattr)(g))
}
Term(F.map(f(a))(g))
}
def synthetize[F[_]: Functor, A](term: Term[F])(f: F[A] => A): Attr[F, A] = synthCata(term)(f)
def synthCata[F[_]: Functor, A](term: Term[F])(f: F[A] => A): Attr[F, A] = {
type AnnF[X] = Ann[F, A, X]
val fattr: F[Attr[F, A]] = Functor[F].map(term.unFix)(t => synthCata(t)(f))
val fa: F[A] = Functor[F].map(fattr)(attr _)
Attr(f(fa), fattr)
}
def scanCata[F[_]: Functor, A, B](attr0: Attr[F, A])(f: (A, F[B]) => B): Attr[F, B] = {
val a : A = attr0.unFix.attr
val unAnn = attr0.unFix.unAnn
val fattr: F[Attr[F, B]] = Functor[F].map(unAnn)(t => scanCata(t)(f))
val b : F[B] = Functor[F].map(fattr)(attr _)
Attr(f(a, b), fattr)
}
def synthPara2[F[_]: Functor, A](term: Term[F])(f: F[(Term[F], A)] => A): Attr[F, A] = {
scanPara(attrUnit(term))((_, ffab) => f(Functor[F].map(ffab) { case (tf, a, b) => (tf, b) }))
}
def synthPara3[F[_]: Functor, A](term: Term[F])(f: (Term[F], F[A]) => A): Attr[F, A] = {
scanPara(attrUnit(term))((attrfa, ffab) => f(forget(attrfa), Functor[F].map(ffab)(_._3)))
}
def scanPara0[F[_], A, B](term: Attr[F, A])(f: (Attr[F, A], F[Attr[F, (A, B)]]) => B)(implicit F: Functor[F]): Attr[F, B] = {
type AnnFAB[X] = Ann[F, (A, B), X]
def loop(term: Attr[F, A]): Attr[F, (A, B)] = {
val rec: F[Attr[F, (A, B)]] = F.map(term.unFix.unAnn)(loop _)
val a = term.unFix.attr
val b = f(term, rec)
Attr((a, b), rec)
}
AttrFunctor[F].map(loop(term))(_._2)
}
def scanPara[F[_], A, B](attr: Attr[F, A])(f: (Attr[F, A], F[(Term[F], A, B)]) => B)(implicit F: Functor[F]): Attr[F, B] = {
scanPara0[F, A, B](attr) {
case (attrfa, fattrfab) =>
val ftermab = F.map(fattrfab) { (attrfab: Attr[F, (A, B)]) =>
val (a, b) = attrfab.unFix.attr
(forget(attrfab), a, b)
}
f(attrfa, ftermab)
}
}
def scanPara2[F[_]: Functor, A, B](attr: Attr[F, A])(f: (A, F[(Term[F], A, B)]) => B): Attr[F, B] = {
scanPara(attr)((attrfa, ffab) => f(attrfa.unFix.attr, ffab))
}
def scanPara3[F[_]: Functor, A, B](attr: Attr[F, A])(f: (Attr[F, A], F[B]) => B): Attr[F, B] = {
scanPara(attr)((attrfa, ffab) => f(attrfa, Functor[F].map(ffab)(_._3)))
}
def synthZygo_[F[_]: Functor, A, B](term: Term[F])(f: F[B] => B, g: F[(B, A)] => A): Attr[F, A] = {
synthZygoWith[F, A, B, A](term)((b: B, a: A) => a, f, g)
}
def synthZygo[F[_]: Functor, A, B](term: Term[F])(f: F[B] => B, g: F[(B, A)] => A): Attr[F, (B, A)] = {
synthZygoWith[F, A, B, (B, A)](term)((b: B, a: A) => (b, a), f, g)
}
def synthZygoWith[F[_]: Functor, A, B, C](term: Term[F])(f: (B, A) => C, g: F[B] => B, h: F[(B, A)] => A): Attr[F, C] = {
type AnnFC[X] = Ann[F, C, X]
def loop(term: Term[F]): ((B, A), Attr[F, C]) = {
val (fba, s) : (F[(B, A)], F[Attr[F,C]]) = unzipF(Functor[F].map(term.unFix)(loop _))
val b : B = g(Functor[F].map(fba)(_._1))
val a : A = h(fba)
val c : C = f(b, a)
((b, a), Attr(c, s))
}
loop(term)._2
}
// synthAccumCata, synthAccumPara2, mapAccumCata, synthCataM, synthParaM, synthParaM2
// Inherited: inherit, inherit2, inherit3, inheritM, inheritM_
def inherit[F[_], A, B](tree: Attr[F, A], b: B)(f: (B, Attr[F, A]) => B)(implicit F: Functor[F]): Attr[F, B] = {
val b2 = f(b, tree)
Attr[F, B](b2, F.map(tree.unFix.unAnn)(inherit(_, b2)(f)(F)))
}
// TODO: Top down folds
def transform[F[_], A](attrfa: Attr[F, A])(f: A => Option[Attr[F, A]])(implicit F: Functor[F]): Attr[F, A] = {
lazy val fattrfa = F.map(attrfa.unFix.unAnn)(transform(_)(f)(F))
val a = attrfa.unFix.attr
f(a).map(transform(_)(f)(F)).getOrElse(Attr(a, fattrfa))
}
def swapTransform[F[_], A, B](attrfa: Attr[F, A])(f: A => B \\/ Attr[F, B])(implicit F: Functor[F]): Attr[F, B] = {
lazy val fattrfb = F.map(attrfa.unFix.unAnn)(swapTransform(_)(f)(F))
val a = attrfa.unFix.attr
f(a).fold(Attr(_, fattrfb), identity)
}
// Questionable value...
def circulate[F[_], A, B](tree: Attr[F, A])(f: A => B, up: (B, B) => B, down: (B, B) => B)(implicit F: Traverse[F]): Attr[F, B] = {
val pullup: Attr[F, B] = scanPara[F, A, B](tree) { (attr: Attr[F, A], fa: F[(Term[F], A, B)]) =>
F.foldLeft(fa, f(attr.unFix.attr))((acc, t) => up(up(f(t._2), t._3), acc))
}
def pushdown(attr: Attr[F, B]): Attr[F, B] = {
val b1 = attr.unFix.attr
Attr[F, B](b1, F.map(attr.unFix.unAnn) { attr =>
val b2 = attr.unFix.attr
val b3 = down(b1, b2)
pushdown(Attr[F, B](b3, attr.unFix.unAnn))
})
}
pushdown(pullup)
}
def sequenceUp[F[_], G[_], A](attr: Attr[F, G[A]])(implicit F: Traverse[F], G: Applicative[G]): G[Attr[F, A]] = {
type AnnGA[X] = Ann[F, G[A], X]
type AnnFA[X] = Ann[F, A, X]
val unFix = attr.unFix
val ga : G[A] = unFix.attr
val fgattr : F[G[Attr[F, A]]] = F.map(unFix.unAnn)(t => sequenceUp(t)(F, G))
val gfattr : G[F[Attr[F, A]]] = F.traverseImpl(fgattr)(identity)
G.apply2(gfattr, ga)((node, attr) => Attr(attr, node))
}
def sequenceDown[F[_], G[_], A](attr: Attr[F, G[A]])(implicit F: Traverse[F], G: Applicative[G]): G[Attr[F, A]] = {
type AnnGA[X] = Ann[F, G[A], X]
type AnnFA[X] = Ann[F, A, X]
val unFix = attr.unFix
val ga : G[A] = unFix.attr
val fgattr : F[G[Attr[F, A]]] = F.map(unFix.unAnn)(t => sequenceDown(t)(F, G))
val gfattr : G[F[Attr[F, A]]] = F.traverseImpl(fgattr)(identity)
G.apply2(ga, gfattr)((attr, node) => Attr(attr, node))
}
/**
* Zips two attributed nodes together. This is unsafe in the sense that the
* user is responsible for ensuring both left and right parameters have the
* same shape (i.e. represent the same tree).
*/
def unsafeZip2[F[_]: Traverse, A, B](left: Attr[F, A], right: Attr[F, B]): Attr[F, (A, B)] = {
type AnnFA[X] = Ann[F, A, X]
type AnnFB[X] = Ann[F, B, X]
type AnnFAB[X] = Ann[F, (A, B), X]
val lunFix = left.unFix
val lattr: A = lunFix.attr
val lunAnn: F[Term[AnnFA]] = lunFix.unAnn
val lunAnnL: List[Term[AnnFA]] = Foldable[F].toList(lunAnn)
val runFix = right.unFix
val rattr: B = runFix.attr
val runAnn: F[Term[AnnFB]] = runFix.unAnn
val runAnnL: List[Term[AnnFB]] = Foldable[F].toList(runAnn)
val abs: List[Term[AnnFAB]] = lunAnnL.zip(runAnnL).map { case ((a, b)) => unsafeZip2(a, b) }
val fabs : F[Term[AnnFAB]] = builder(lunAnn, abs)
Attr((lattr, rattr), fabs)
}
def context[F[_]](term: Term[F])(implicit F: Traverse[F]): Attr[F, Term[F] => Term[F]] = {
def loop(f: Term[F] => Term[F]): Attr[F, Term[F] => Term[F]] = {
//def g(y: Term[F], replace: Term[F] => Term[F]) = loop()
???
}
loop(identity[Term[F]])
}
}
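
// Hedged illustration (added; not in the original source): `synthCata`
// decorates every node with the value a catamorphism computes for the subtree
// rooted there. Reuses the invented NatF functor from the sketch above.
object AttrUsageSketch {
  import fixplate._
  import TermUsageSketch._
  val three: Term[NatF] = fromInt(3)
  // Each node carries the height of the Peano numeral below it.
  val sized: Attr[NatF, Int] =
    synthCata[NatF, Int](three) { case Zero => 0; case Succ(prev) => prev + 1 }
  assert(sized.unFix.attr == 3)
}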
sealed trait phases extends attr {
/**
* An annotation phase, represented as a monadic function from an attributed
* tree of one type (A) to an attributed tree of another type (B).
*
* This is a kleisli function, but specialized to transformations of
* attributed trees.
*
* The fact that a phase is monadic may be used to capture and propagate error
* information. Typically, error information is produced at the level of each
* node, but through sequenceUp / sequenceDown, the first error can be pulled
* out to yield a kleisli function.
*/
case class PhaseM[M[_], F[_], A, B](value: Attr[F, A] => M[Attr[F, B]]) extends (Attr[F, A] => M[Attr[F, B]]) {
def apply(x: Attr[F, A]) = value(x)
}
def liftPhase[M[_]: Monad, F[_], A, B](phase: Phase[F, A, B]): PhaseM[M, F, A, B] = {
PhaseM(attr => Monad[M].point(phase(attr)))
}
/**
* A non-monadic phase. This is only interesting for phases that cannot produce
* errors and don't need state.
*/
type Phase[F[_], A, B] = PhaseM[Id, F, A, B]
def Phase[F[_], A, B](x: Attr[F, A] => Attr[F, B]): Phase[F, A, B] = PhaseM[Id, F, A, B](x)
/**
* A phase that can produce errors. An error is captured using the left side of \\/.
*/
type PhaseE[F[_], E, A, B] = PhaseM[({type f[X] = E \\/ X})#f, F, A, B]
def PhaseE[F[_], E, A, B](x: Attr[F, A] => E \\/ Attr[F, B]): PhaseE[F, E, A, B] = {
type EitherE[X] = E \\/ X
PhaseM[EitherE, F, A, B](x)
}
def toPhaseE[F[_]: Traverse, E, A, B](phase: Phase[F, A, E \\/ B]): PhaseE[F, E, A, B] = {
type EitherE[X] = E \\/ X
PhaseE(attr => sequenceUp[F, EitherE, B](phase(attr)))
}
def liftPhaseE[F[_], E, A, B](phase: Phase[F, A, B]): PhaseE[F, E, A, B] = liftPhase[({type f[X] = E \\/ X})#f, F, A, B](phase)
/**
* A phase that requires state. State is represented using the state monad.
*/
type PhaseS[F[_], S, A, B] = PhaseM[({type f[X] = State[S, X]})#f, F, A, B]
def PhaseS[F[_], S, A, B](x: Attr[F, A] => State[S, Attr[F, B]]): PhaseS[F, S, A, B] = {
type StateS[X] = State[S, X]
PhaseM[StateS, F, A, B](x)
}
def toPhaseS[F[_]: Traverse, S, A, B](phase: Phase[F, A, State[S, B]]): PhaseS[F, S, A, B] = {
type StateS[X] = State[S, X]
PhaseS(attr => sequenceUp[F, StateS, B](phase(attr)))
}
def liftPhaseS[F[_], S, A, B](phase: Phase[F, A, B]): PhaseS[F, S, A, B] = liftPhase[({type f[X] = State[S, X]})#f, F, A, B](phase)
implicit def PhaseMArrow[M[_], F[_]](implicit F: Traverse[F], M: Monad[M]) = new Arrow[({type f[a, b] = PhaseM[M, F, a, b]})#f] {
type Arr[A, B] = PhaseM[M, F, A, B]
type AttrF[X] = Attr[F, X]
def arr[A, B](f: A => B): Arr[A, B] = PhaseM(attr => M.point(attrMap(attr)(f)))
def first[A, B, C](f: Arr[A, B]): Arr[(A, C), (B, C)] = PhaseM { (attr: Attr[F, (A, C)]) =>
val attrA = Functor[AttrF].map(attr)(_._1)
(f(attrA) |@| M.point(Functor[AttrF].map(attr)(_._2)))(unsafeZip2(_, _))
}
def id[A]: Arr[A, A] = PhaseM(attr => M.point(attr))
def compose[A, B, C](f: Arr[B, C], g: Arr[A, B]): Arr[A, C] =
PhaseM { (attr: Attr[F, A]) => g(attr).flatMap(f) }
}
implicit class ToPhaseMOps[M[_]: Monad, F[_]: Traverse, A, B](self: PhaseM[M, F, A, B]) {
def >>> [C](that: PhaseM[M, F, B, C]) = PhaseMArrow[M, F].compose(that, self)
def &&& [C](that: PhaseM[M, F, A, C]) = PhaseMArrow[M, F].combine(self, that)
def *** [C, D](that: PhaseM[M, F, C, D]) = PhaseMArrow[M, F].split(self, that)
def first[C]: PhaseM[M, F, (A, C), (B, C)] = PhaseMArrow[M, F].first(self)
def second[C]: PhaseM[M, F, (C, A), (C, B)] = PhaseM { (attr: Attr[F, (C, A)]) =>
first.map((t: (B, C)) => (t._2, t._1))(attrMap(attr)((t: (C, A)) => (t._2, t._1)))
}
def map[C](f: B => C): PhaseM[M, F, A, C] = PhaseM((attr: Attr[F, A]) => Functor[M].map(self(attr))(attrMap(_)(f)))
def dup: PhaseM[M, F, A, (B, B)] = map(v => (v, v))
def fork[C, D](left: PhaseM[M, F, B, C], right: PhaseM[M, F, B, D]): PhaseM[M, F, A, (C, D)] = PhaseM { (attr: Attr[F, A]) =>
(dup >>> (left.first) >>> (right.second))(attr)
}
}
implicit class ToPhaseEOps[F[_]: Traverse, E, A, B](self: PhaseE[F, E, A, B]) {
// This abomination exists because Scala has no higher-kinded type inference
// and I can't figure out how to make ToPhaseMOps work for PhaseE (despite
// the fact that PhaseE is just a type synonym for PhaseM). Revisit later.
type M[X] = E \\/ X
val ops = ToPhaseMOps[M, F, A, B](self)
def >>> [C](that: PhaseE[F, E, B, C]) = ops >>> that
def &&& [C](that: PhaseE[F, E, A, C]) = ops &&& that
def *** [C, D](that: PhaseE[F, E, C, D]) = ops *** that
def first[C]: PhaseE[F, E, (A, C), (B, C)] = ops.first[C]
def second[C]: PhaseE[F, E, (C, A), (C, B)] = ops.second[C]
def map[C](f: B => C): PhaseE[F, E, A, C] = ops.map[C](f)
def dup: PhaseE[F, E, A, (B, B)] = ops.dup
def fork[C, D](left: PhaseE[F, E, B, C], right: PhaseE[F, E, B, D]): PhaseE[F, E, A, (C, D)] = ops.fork[C, D](left, right)
}
implicit class ToPhaseOps[F[_]: Traverse, A, B](self: Phase[F, A, B]) {
// This abomination exists because Scala has no higher-kinded type inference
// and I can't figure out how to make ToPhaseMOps work for Phase (despite
// the fact that Phase is just a type synonym for PhaseM). Revisit later.
val ops = ToPhaseMOps[Id, F, A, B](self)
def >>> [C](that: Phase[F, B, C]) = ops >>> that
def &&& [C](that: Phase[F, A, C]) = ops &&& that
def *** [C, D](that: Phase[F, C, D]) = ops *** that
def first[C]: Phase[F, (A, C), (B, C)] = ops.first[C]
def second[C]: Phase[F, (C, A), (C, B)] = ops.second[C]
def map[C](f: B => C): Phase[F, A, C] = ops.map[C](f)
def dup: Phase[F, A, (B, B)] = ops.dup
def fork[C, D](left: Phase[F, B, C], right: Phase[F, B, D]): Phase[F, A, (C, D)] = ops.fork[C, D](left, right)
}
}
sealed trait binding extends phases {
trait Binder[F[_], G[_]] {
type AttrF[A] = Attr[F, A]
// The combination of an attributed node and the bindings valid in this scope.
type `AttrF * G`[A] = (AttrF[A], G[A])
// A function that can lift an attribute into an attributed node.
type Unsubst[A] = A => AttrF[A]
type Subst[A] = Option[(AttrF[A], Forall[Unsubst])]
// Extracts bindings from a node:
val bindings: AttrF ~> G
// Possibly binds a free term to its definition:
val subst: `AttrF * G` ~> Subst
def apply[M[_], A, B](phase: PhaseM[M, F, A, B])(implicit F: Traverse[F], G: Monoid[G[A]], M: Functor[M]): PhaseM[M, F, A, B] = PhaseM[M, F, A, B] { attrfa =>
def subst0(ga0: G[A], attrfa: AttrF[A]): AttrF[(A, Option[Forall[Unsubst]])] = {
// Possibly swap out this node for another node:
val optT: Option[(AttrF[A], Forall[Unsubst])] = subst((attrfa, ga0))
val (attrfa2, optF) = optT.map(tuple => tuple._1 -> Some(tuple._2)).getOrElse(attrfa -> None)
// Add any new bindings:
val ga: G[A] = G.append(ga0, bindings(attrfa2))
val Ann(a, node) = attrfa2.unFix
// Recursively apply binding:
Attr[F, (A, Option[Forall[Unsubst]])](a -> optF, F.map(node)(subst0(ga, _)))
}
val attrft: AttrF[(A, Option[Forall[Unsubst]])] = subst0(G.zero, attrfa)
val mattrfb: M[AttrF[B]] = phase(attrMap(attrft)(_._1))
M.map(mattrfb) { attrfb =>
val zipped = unsafeZip2(attrfb, attrMap(attrft)(_._2))
swapTransform(zipped) {
          case (b, None) => -\/ (b)
          case (b, Some(f)) => \/- (f[B](b))
}
}
}
}
def bound[M[_], F[_], G[_], A, B](phase: PhaseM[M, F, A, B])(implicit M: Functor[M], F: Traverse[F], G: Monoid[G[A]], B: Binder[F, G]): PhaseM[M, F, A, B] = {
B.apply[M, A, B](phase)
}
def boundE[F[_], E, G[_], A, B](phase: PhaseE[F, E, A, B])(implicit F: Traverse[F], G: Monoid[G[A]], B: Binder[F, G]): PhaseE[F, E, A, B] = {
    type EitherE[A] = E \/ A
B.apply[EitherE, A, B](phase)
}
}
object fixplate extends binding
| mossprescott/slamengine | src/main/scala/slamdata/engine/analysis/fixplate.scala | Scala | agpl-3.0 | 28,925 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.uuid
import org.opengis.feature.simple.{SimpleFeatureType, SimpleFeature}
/**
* Trait for generating feature ids based on attributes of a simple feature
*/
trait FeatureIdGenerator {
/**
* Create a unique feature ID
*/
def createId(sft: SimpleFeatureType, sf: SimpleFeature): String
}
| ddseapy/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/uuid/FeatureIdGenerator.scala | Scala | apache-2.0 | 804 |
package bad.robot.radiate.ui
import java.awt.GraphicsEnvironment.getLocalGraphicsEnvironment
trait FrameFactory {
def create: List[StatusFrame]
}
object FrameFactory {
def fullScreen = new FrameFactory {
def create = {
val screens = getLocalGraphicsEnvironment.getScreenDevices
val frames = (0 until screens.length).map(index => new StatusFrame(index, new FullScreen(screens(index).getDefaultConfiguration.getBounds)))
frames.toList
}
}
def desktopMode = new FrameFactory {
def create = {
val bounds = getLocalGraphicsEnvironment.getDefaultScreenDevice.getDefaultConfiguration.getBounds
val frames = Array(new StatusFrame(0, new DesktopMode(bounds)))
frames.toList
}
}
}
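// Illustrative usage sketch (assumption, not part of the original source):
//   val frames: List[StatusFrame] = FrameFactory.fullScreen.create  // one frame per screen
//   val single: List[StatusFrame] = FrameFactory.desktopMode.create // one windowed frame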
| tobyweston/radiate | src/main/scala/bad/robot/radiate/ui/FrameFactory.scala | Scala | apache-2.0 | 738 |
package com.arcusys.valamis.oauth
import java.io.Closeable
import java.net.URL
import net.oauth.client.httpclient4.HttpClientPool
import org.apache.http.client.HttpClient
import org.apache.http.impl.client.HttpClientBuilder
/**
* Created by mminin on 09.07.15.
*/
class HttpClientPoolImpl extends HttpClientPool with Closeable {
val client = HttpClientBuilder.create().build()
override def getHttpClient(url: URL): HttpClient = {
client
}
override def close(): Unit = {
client.close()
}
}
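// Minimal usage sketch (assumption, not part of the original source):
//   val pool = new HttpClientPoolImpl
//   val client = pool.getHttpClient(new URL("http://example.com")) // same client for any URL
//   // ... issue requests through `client` or hand `pool` to the net.oauth client ...
//   pool.close()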
| igor-borisov/valamis | valamis-lrssupport/src/main/scala/com/arcusys/valamis/oauth/HttpClientPoolImpl.scala | Scala | gpl-3.0 | 514 |
package com.seanshubin.uptodate.logic
import scala.collection.mutable.ArrayBuffer
import scala.util.matching.Regex
object DurationFormat {
object MillisecondsFormat extends TimeUnitFormat(TimeUnitAndQuantity.MillisecondToDay)
object NanosecondsFormat extends TimeUnitFormat(TimeUnitAndQuantity.NanosecondToDay)
private case class FormattedPartsAndRemainingValue(formattedParts: List[Option[String]], remainingValue: Long) {
private def divMod(numerator: Long, denominator: Long): (Long, Long) = (numerator / denominator, numerator % denominator)
def applyTimeUnit(timeUnitAndQuantity: TimeUnitAndQuantity): FormattedPartsAndRemainingValue = {
timeUnitAndQuantity match {
case TimeUnitAndQuantity(timeUnit, Some(quantity)) =>
val (newRemainingValue, partOfValueToFormat) = divMod(remainingValue, quantity)
val formattedPart = timeUnit.format(partOfValueToFormat)
copy(formattedPart :: formattedParts, newRemainingValue)
case TimeUnitAndQuantity(timeUnit, None) =>
val formattedPart = timeUnit.format(remainingValue)
copy(formattedPart :: formattedParts, remainingValue)
}
}
}
private case class QuantityAndName(quantity: Long, timeUnit: TimeUnit) {
def toUnitsAtScale(fullScale: List[TimeUnitAndQuantity]): Long = {
val scale = fullScale.takeWhile(_.timeUnit != timeUnit)
def accumulateByMultiply(soFar: Long, timeUnitAndQuantity: TimeUnitAndQuantity): Long = {
timeUnitAndQuantity.maybeQuantity match {
case Some(currentQuantity) => soFar * currentQuantity
case None => throw new RuntimeException(s"No multiplier for ${timeUnitAndQuantity.timeUnit.plural}")
}
}
val units = scale.foldLeft(quantity)(accumulateByMultiply)
units
}
}
sealed abstract case class TimeUnit(singular: String, plural: String) {
TimeUnit.valuesBuffer += this
def format(value: Long): Option[String] =
if (value == 0) None
else if (value == 1) Some(s"$value $singular")
else Some(s"$value $plural")
def matchesString(target: String): Boolean = {
singular.equalsIgnoreCase(target) || plural.equalsIgnoreCase(target)
}
}
private object TimeUnit {
private val valuesBuffer = new ArrayBuffer[TimeUnit]
lazy val values = valuesBuffer.toSeq
val Nanosecond = new TimeUnit("nanosecond", "nanoseconds") {}
val Microsecond = new TimeUnit("microsecond", "microseconds") {}
val Millisecond = new TimeUnit("millisecond", "milliseconds") {}
val Second = new TimeUnit("second", "seconds") {}
val Minute = new TimeUnit("minute", "minutes") {}
val Hour = new TimeUnit("hour", "hours") {}
val Day = new TimeUnit("day", "days") {}
}
private case class TimeUnitAndQuantity(timeUnit: TimeUnit, maybeQuantity: Option[Int])
private object TimeUnitAndQuantity {
val SecondToDay =
TimeUnitAndQuantity(TimeUnit.Second, Some(60)) ::
TimeUnitAndQuantity(TimeUnit.Minute, Some(60)) ::
TimeUnitAndQuantity(TimeUnit.Hour, Some(24)) ::
TimeUnitAndQuantity(TimeUnit.Day, None) ::
Nil
val MillisecondToDay =
TimeUnitAndQuantity(TimeUnit.Millisecond, Some(1000)) ::
SecondToDay
val NanosecondToDay =
TimeUnitAndQuantity(TimeUnit.Nanosecond, Some(1000)) ::
TimeUnitAndQuantity(TimeUnit.Microsecond, Some(1000)) ::
MillisecondToDay
}
class TimeUnitFormat(scale: List[TimeUnitAndQuantity]) {
import com.seanshubin.uptodate.logic.DurationFormat.TimeUnitFormat._
def format(smallestUnits: Long): String = {
def accumulateFormat(soFar: FormattedPartsAndRemainingValue, timeUnitAndQuantity: TimeUnitAndQuantity): FormattedPartsAndRemainingValue = {
soFar.applyTimeUnit(timeUnitAndQuantity)
}
val initialValue = FormattedPartsAndRemainingValue(Nil, smallestUnits)
val finalValue = scale.foldLeft(initialValue)(accumulateFormat)
val formattedParts = finalValue.formattedParts.flatten
if (formattedParts.isEmpty) "0 " + scale.head.timeUnit.plural
else formattedParts.mkString(" ")
}
def parse(asString: String): Long = {
if (asString.matches(NumberPattern)) {
parseSimpleNumber(asString)
} else if (asString.matches(OneOrMoreQuantifiedTimeUnitPattern)) {
parseStringWithUnits(asString)
} else {
throw new RuntimeException(s"'$asString' does not match a valid pattern: $OneOrMoreQuantifiedTimeUnitPattern")
}
}
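    // Worked round trip (illustrative, not part of the original source):
    //   MillisecondsFormat.format(90061000L)                        // "1 day 1 hour 1 minute 1 second"
    //   MillisecondsFormat.parse("1 day 1 hour 1 minute 1 second")  // 90061000L
    //   MillisecondsFormat.parse("90061000")                        // plain numbers are accepted too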
private def parseSimpleNumber(asString: String): Long = {
asString.toLong
}
private def parseStringWithUnits(asString: String): Long = {
val parts = for {
matchData <- QuantifiedTimeUnitCapturingRegex.findAllIn(asString).matchData
numberString = matchData.group("number")
nameString = matchData.group("name")
} yield {
val name = timeUnitFromString(nameString)
val number = numberString.toLong
val quantityAndName = QuantityAndName(number, name)
quantityAndName.toUnitsAtScale(scale)
}
val sum = parts.sum
sum
}
def timeUnitFromString(asString: String): TimeUnit = {
val pluralNames = scale.map(_.timeUnit.plural).mkString("(", ", ", ")")
def timeUnitMatches(timeUnit: TimeUnit): Boolean = timeUnit.matchesString(asString)
TimeUnit.values.find(timeUnitMatches) match {
case Some(timeUnit) => timeUnit
case None => throw new RuntimeException(s"'$asString' does not match a valid time unit $pluralNames")
}
}
}
private object TimeUnitFormat {
    private val NumberPattern = """\d+"""
private val NamePattern = """[a-zA-Z]+"""
    private val SpacesPattern = """\s+"""
private val QuantifiedTimeUnitPattern = NumberPattern + SpacesPattern + NamePattern
private val QuantifiedTimeUnitCapturingPattern = capturingGroup(NumberPattern) + SpacesPattern + capturingGroup(NamePattern)
private val OneOrMoreQuantifiedTimeUnitPattern = QuantifiedTimeUnitPattern + nonCapturingGroup(SpacesPattern + QuantifiedTimeUnitPattern) + "*"
private val QuantifiedTimeUnitCapturingRegex = new Regex(QuantifiedTimeUnitCapturingPattern, "number", "name")
private def nonCapturingGroup(s: String) = "(?:" + s + ")"
private def capturingGroup(s: String) = "(" + s + ")"
}
}
| SeanShubin/up-to-date | logic/src/main/scala/com/seanshubin/uptodate/logic/DurationFormat.scala | Scala | unlicense | 6,378 |
/**
* Copyright (c) 2012-2013, Tomasz Kaczmarzyk.
*
* This file is part of BeanDiff.
*
* BeanDiff is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* BeanDiff is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with BeanDiff; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.beandiff.core
class DeleteInsertMerger {
}
 | tkaczmarzyk/beandiff | src/main/scala/org/beandiff/core/DeleteInsertMerger.scala | Scala | lgpl-3.0 | 874 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package termination
import scala.collection.mutable.{Map => MutableMap, HashSet => MutableSet, ListBuffer => MutableList}
object DebugSectionMeasureInference extends inox.DebugSection("measure-inference")
class MeasureInference(override val s: Trees, override val t: Trees)(using override val context: inox.Context)
extends extraction.CachingPhase
with extraction.SimplyCachedSorts
with extraction.IdentitySorts { self =>
import s._
import context.{options, timers, reporter}
type Postconditions = MutableMap[Identifier, Lambda]
type Applications = MutableMap[(Identifier, Identifier, Identifier), Seq[ValDef] => Expr]
// Result type is transformed function + all inductive lemmas found
type FunctionResult = (t.FunDef, Postconditions)
given givenDebugSection: DebugSectionMeasureInference.type = DebugSectionMeasureInference
// Measure inference depends on functions that are mutually recursive with `fd`,
// so we include all dependencies in the key calculation
override protected final val funCache = new ExtractionCache[s.FunDef, FunctionResult]((fd, context) =>
getDependencyKey(fd.id)(using context.symbols)
)
val sizes: SizeFunctions { val trees: s.type } = {
class SizeFunctionsImpl(override val trees: s.type) extends SizeFunctions(trees)
new SizeFunctionsImpl(s)
}
override protected def getContext(symbols: s.Symbols) = TransformerContext(symbols, MutableMap.empty, MutableMap.empty, MutableMap.empty)
protected case class TransformerContext(symbols: Symbols,
measureCache: MutableMap[FunDef, Expr],
postconditionCache: MutableMap[Identifier, Postconditions],
applicationCache: Applications) {
import symbols.given
val program = inox.Program(s)(symbols)
val pipeline = TerminationChecker(program, self.context)(sizes)
val transformer = new TransformerImpl(self.s, self.t)
class TransformerImpl(override val s: self.s.type, override val t: self.t.type)
extends inox.transformers.ConcreteTreeTransformer(s, t) {
override def transform(e: s.Expr): t.Expr = e match {
case Decreases(v: Variable, body) if v.getType.isInstanceOf[ADTType] =>
t.Decreases(transform(size(v)), transform(body)).setPos(e)
case Decreases(tup @ Tuple(ts), body) =>
t.Decreases(t.Tuple(ts.map {
case v: Variable if v.getType.isInstanceOf[ADTType] => transform(size(v))
case e => transform(e)
}).copiedFrom(tup), transform(body)).setPos(e)
case _ =>
super.transform(e)
}
private def size(v: Variable): Expr = {
require(v.getType.isInstanceOf[ADTType])
val ADTType(id, tps) = v.getType
FunctionInvocation(sizes.fullSizeId(symbols.sorts(id)), tps, Seq(v)).setPos(v)
}
}
def needsMeasure(fd: FunDef): Boolean = symbols.isRecursive(fd.id) && {
val specced = exprOps.BodyWithSpecs(fd.fullBody)
!specced.specs.exists(_.kind == exprOps.MeasureKind)
}
def getPosts(id: Identifier): Postconditions =
postconditionCache.getOrElse(id, MutableMap())
def annotateApps(original: FunDef) = {
class Injector(override val s: self.s.type, override val t: self.s.type)
extends inox.transformers.ConcreteTreeTransformer(s, t) {
override def transform(e: Expr): Expr = e match {
case fi @ FunctionInvocation(_, _, args) =>
fi.copy(args = (symbols.getFunction(fi.id).params.map(_.id) zip args).map {
            case (id, l @ Lambda(largs, body)) if applicationCache.isDefinedAt(original.id, fi.id, id) =>
              val cnstr = applicationCache(original.id, fi.id, id)
body match {
case FunctionInvocation(lid,_,_) if lid == original.id =>
Lambda(largs, Assume(cnstr(largs), body))
case _ =>
                  /*
                    a) This avoids a problem detected in LawTypeArgsElim.scala:
                    annotating with an assume makes an undeclared variable appear
                    in the assumption, and type checking then fails.
                    b) This avoids annotating the lambda when it is not needed for
                    termination (the `lid == original.id` condition). Annotating in
                    that case can make some instances harder for the SMT solvers.
                  */
l
}
case (_, arg) => transform(arg)
}).copiedFrom(fi)
case _ =>
super.transform(e)
}
}
new Injector(self.s, self.s).transform(original)
}
  /* Annotation order matters: postconditions can
     introduce size functions that are not yet known
     in the symbols */
def annotate(original: FunDef) = annotateApps(original)
def inferMeasure(original: FunDef): FunDef = measureCache.get(original) match {
case Some(measure) =>
val annotated = annotate(original)
annotated.copy(fullBody = exprOps.withMeasure(annotated.fullBody, Some(measure.setPos(original))))
case None => try {
val guarantee = timers.evaluators.termination.inference.run {
reporter.debug(s" - Inferring measure for ${original.id.asString}...")
pipeline.terminates(original)
}
val result = guarantee match {
case pipeline.Terminates(_, Some(measure), Some(lemmas)) =>
reporter.debug(s" => Found measure for ${original.id.asString}.")
measureCache ++= pipeline.measureCache.get
          pipeline.measureCache.get.keys.foreach { fd =>
            postconditionCache(fd.id) = lemmas._1
          }
applicationCache ++= lemmas._2
val annotated = annotate(original)
annotated.copy(fullBody = exprOps.withMeasure(annotated.fullBody, Some(measure.setPos(original))))
.setPos(original)
case pipeline.Terminates(_, None, _) =>
reporter.debug(s" => No measure needed for ${original.id.asString}.")
original
case _ if exprOps.measureOf(original.fullBody).isDefined =>
reporter.debug(s" => Function ${original.id.asString} already has a measure.")
original
case nt: pipeline.NonTerminating =>
reporter.warning(original.getPos, nt.asString)
original
case _ =>
reporter.warning(original.getPos, s"Could not infer measure for function ${original.id.asString}")
original
}
annotate(result, guarantee)
} catch {
case FailedMeasureInference(fd, msg) =>
reporter.warning(fd.getPos, msg)
original
}
}
private def annotate(fd: FunDef, guarantee: pipeline.TerminationGuarantee): FunDef = {
fd.copy(flags = fd.flags :+ TerminationStatus(status(guarantee))).copiedFrom(fd)
}
private def status(g: pipeline.TerminationGuarantee): TerminationReport.Status = g match {
case pipeline.NoGuarantee => TerminationReport.Unknown
case pipeline.Terminates(_,_,_) => TerminationReport.Terminating
case _ => TerminationReport.NonTerminating
}
}
override protected def extractFunction(context: TransformerContext, fd: s.FunDef): FunctionResult = {
if (options.findOptionOrDefault(optInferMeasures) && context.needsMeasure(fd)) {
val tfd = context.transformer.transform(context.inferMeasure(fd))
val posts = context.getPosts(fd.id)
(tfd, posts)
} else {
(context.transformer.transform(fd), MutableMap.empty)
}
}
override def registerFunctions(symbols: t.Symbols, functions: Seq[FunctionResult]): t.Symbols =
symbols
override protected def extractSymbols(context: TransformerContext, symbols: s.Symbols): t.Symbols = {
    val results: Seq[(t.FunDef, MutableMap[Identifier, s.Lambda])] =
      symbols.functions.values.map(fd =>
        funCache.cached(fd, context)(extractFunction(context, fd))
      ).toSeq
    val posts: Map[Identifier, s.Lambda] = results.flatMap { case (tfd, post) => post }.toMap
def annotatePosts(original: t.FunDef) = {
class PostTransformer(override val s: self.s.type, override val t: self.t.type)
extends transformers.ConcreteTreeTransformer(s, t)
val postTransformer = new PostTransformer(self.s, self.t)
val postCache: Map[Identifier, t.Lambda] =
      posts.view.mapValues { (v: s.Lambda) =>
postTransformer.transform(v).asInstanceOf[t.Lambda]
}.toMap
postCache.get(original.id) match {
        case Some(nl @ t.Lambda(Seq(nlarg), nbody)) =>
val newVd = t.ValDef.fresh("arg", original.returnType)
val newMap: Map[t.ValDef, t.Expr] = Map((nlarg, newVd.toVariable))
val newNBody: t.Expr = t.exprOps.replaceFromSymbols(newMap, nbody)(using t.convertToVal)
val refinement = t.RefinementType(newVd, newNBody)
original.copy(returnType = refinement).copiedFrom(original)
case None => original
}
}
val sizeFunctions: Seq[t.FunDef] =
sizes.getFunctions(symbols).map(context.transformer.transform(_)).toSeq
val functions = results.map { case (tfd, post) =>
annotatePosts(tfd)
}.toSeq
val sorts = symbols.sorts.values.map { sort =>
sortCache.cached(sort, context)(extractSort(context, sort))
}.toSeq
t.NoSymbols.withSorts(sorts).withFunctions(functions ++ sizeFunctions)
}
}
object MeasureInference { self =>
def apply(tr: Trees)(using inox.Context): extraction.ExtractionPipeline {
val s: tr.type
val t: tr.type
} = {
class Impl(override val s: tr.type, override val t: tr.type) extends MeasureInference(s, t)
new Impl(tr, tr)
}
}
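// Illustrative wiring sketch (assumption, not part of the original source):
//   given inox.Context = ...
//   val phase = MeasureInference(myTrees) // an ExtractionPipeline with s = t = myTrees
//   // `phase` can then be composed with the other extraction phases as usual.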
| epfl-lara/stainless | core/src/main/scala/stainless/termination/MeasureInference.scala | Scala | apache-2.0 | 9,995 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.visor.commands.cache
import org.apache.ignite.cache.{CacheAtomicityMode, CacheMode}
import CacheAtomicityMode._
import CacheMode._
import org.apache.ignite.visor.{VisorRuntimeBaseSpec, visor}
import org.apache.ignite.Ignition
import org.apache.ignite.configuration.{CacheConfiguration, IgniteConfiguration}
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder
import org.jetbrains.annotations.NotNull
import org.apache.ignite.visor.commands.cache.VisorCacheCommand._
import scala.collection.JavaConversions._
/**
*
*/
class VisorCacheClearCommandSpec extends VisorRuntimeBaseSpec(2) {
/** IP finder. */
val ipFinder = new TcpDiscoveryVmIpFinder(true)
/**
* Creates grid configuration for provided grid host.
*
* @param name Ignite instance name.
* @return Grid configuration.
*/
override def config(name: String): IgniteConfiguration = {
val cfg = new IgniteConfiguration
cfg.setIgniteInstanceName(name)
cfg.setLocalHost("127.0.0.1")
cfg.setCacheConfiguration(cacheConfig("cache"))
val discoSpi = new TcpDiscoverySpi()
discoSpi.setIpFinder(ipFinder)
cfg.setDiscoverySpi(discoSpi)
cfg
}
/**
* @param name Cache name.
* @return Cache Configuration.
*/
def cacheConfig(@NotNull name: String): CacheConfiguration[Object, Object] = {
val cfg = new CacheConfiguration[Object, Object]
cfg.setCacheMode(REPLICATED)
cfg.setAtomicityMode(TRANSACTIONAL)
cfg.setName(name)
cfg
}
describe("An 'cclear' visor command") {
it("should show correct result for named cache") {
Ignition.ignite("node-1").cache[Int, Int]("cache").putAll(Map(1 -> 1, 2 -> 2, 3 -> 3))
val lock = Ignition.ignite("node-1").cache[Int, Int]("cache").lock(1)
lock.lock()
visor.cache("-clear -c=cache")
lock.unlock()
visor.cache("-clear -c=cache")
}
it("should show correct help") {
VisorCacheCommand
visor.help("cache")
}
it("should show empty projection error message") {
visor.cache("-clear -c=wrong")
}
}
}
| alexzaitzev/ignite | modules/visor-console/src/test/scala/org/apache/ignite/visor/commands/cache/VisorCacheClearCommandSpec.scala | Scala | apache-2.0 | 3,155 |
/**
* Created on February 17, 2011
* Copyright (c) 2011, Wei-ju Wu
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Wei-ju Wu nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY WEI-JU WU ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL WEI-JU WU BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dmpp.adf.app
import java.io._
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class UserVolumeFactorySpec extends FlatSpec with ShouldMatchers {
"UserVolumeFactory" should "create an empty volume" in {
val empty = UserVolumeFactory.createEmptyDoubleDensityDisk()
empty.name should be ("Empty")
}
it should "create an empty volume with a name" in {
val empty = UserVolumeFactory.createEmptyDoubleDensityDisk("MyDisk")
empty.name should be ("MyDisk")
}
it should "read a workbench" in {
val workbenchFile = new File(getClass.getResource("/wbench1.3.adf").getFile)
val workbench = UserVolumeFactory.readFromFile(workbenchFile)
workbench.name should be ("Workbench1.3")
}
}
| weiju/adf-tools | adf-core/src/test/scala/org/dmpp/adf/app/UserVolumeFactoryTest.scala | Scala | bsd-3-clause | 2,399 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ld.cmw
import akka.actor.{Actor, ActorPath, ActorRef, ActorSystem, Cancellable, Props}
import akka.pattern._
import cmwell.domain.{FString, Infoton}
import cmwell.util.BoxedFailure
import com.google.common.cache.{Cache, CacheBuilder}
import com.typesafe.scalalogging.LazyLogging
import logic.CRUDServiceFS
import wsutil.{FieldKey, NnFieldKey}
import cmwell.ws.Settings.{maxTypesCacheSize, minimumEntryRefreshRateMillis}
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.{Set => MSet}
import scala.collection.immutable
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
// TODO: indexTime based search for changes since last change?
object PassiveFieldTypesCache {
private[cmw] lazy val uniqueIdentifierForActorName = {
val n = cmwell.util.os.Props.machineName
if (ActorPath.isValidPathElement(n)) n
else cmwell.util.string.Hash.crc32(n)
}
private[cmw] case object UpdateCache
private[cmw] case object UpdateCompleted
private[cmw] case class RequestUpdateFor(field: FieldKey)
private[cmw] case class UpdateAndGet(field: FieldKey)
private[cmw] case class Put(field: String,
types: Set[Char],
reportWhenDone: Boolean = false,
reportTo: Option[ActorRef] = None)
private[cmw] class PassiveFieldTypesCacheActor(crudService: CRUDServiceFS,
cache: Cache[String, Either[Future[Set[Char]], (Long, Set[Char])]],
updatingExecutionContext: ExecutionContext)
extends Actor
with LazyLogging {
var requestedCacheUpdates: MSet[FieldKey] = _
var cancellable: Cancellable = _
override def preStart() = {
requestedCacheUpdates = MSet.empty[FieldKey]
cancellable =
context.system.scheduler.schedule(1.second, 2.minutes, self, UpdateCache)(updatingExecutionContext, self)
}
override def receive: Receive = {
case RequestUpdateFor(field) => requestedCacheUpdates += field
case UpdateCache =>
if (requestedCacheUpdates.nonEmpty) {
requestedCacheUpdates.foreach { fk =>
val maybe = cache.getIfPresent(fk.internalKey)
if (maybe eq null) {
val lefuture = Left(getMetaFieldInfoton(fk).map(infoptToChars)(updatingExecutionContext))
cache.put(fk.internalKey, lefuture)
} else
maybe.right.foreach {
case (oTime, chars) => {
getMetaFieldInfoton(fk).foreach { infopt =>
val types = infoptToChars(infopt)
if (types.diff(chars).nonEmpty) {
self ! Put(fk.internalKey, types.union(chars))
}
}(updatingExecutionContext)
}
}
}
requestedCacheUpdates.clear()
}
case Put(internalKey, types, reportWhenDone, reportTo) => {
lazy val sendr = reportTo.getOrElse(sender())
val maybe = cache.getIfPresent(internalKey)
if (maybe eq null) {
cache.put(internalKey, Right(System.currentTimeMillis() -> types))
if (reportWhenDone) {
sendr ! UpdateCompleted
}
} else
maybe match {
case Left(future) =>
future.onComplete {
case Failure(error) => self ! Put(internalKey, types, reportWhenDone, Some(sendr))
case Success(chars) => {
if (types.diff(chars).nonEmpty)
self ! Put(internalKey, types.union(chars), reportWhenDone, Some(sendr))
else if (reportWhenDone) sendr ! UpdateCompleted
}
}(updatingExecutionContext)
case Right((_, chars)) => {
if (types.diff(chars).nonEmpty) {
cache.put(internalKey, Right(System.currentTimeMillis() -> (chars.union(types))))
}
if (reportWhenDone) {
sendr ! UpdateCompleted
}
}
}
}
case UpdateAndGet(field: FieldKey) => {
val sndr = sender()
val rv = getMetaFieldInfoton(field).map(infoptToChars)(updatingExecutionContext)
      rv.onComplete { // field.metaPath is memoized in a lazy val, so it is already completed here even if it was computed asynchronously
case Failure(e) => logger.error(s"failed to update cache for: ${field.metaPath}", e)
case Success(types) => {
val nTime = System.currentTimeMillis()
lazy val right = Right(nTime -> types)
          // The provided cache should have been built with concurrencyLevel set to 1,
          // so we avoid useless updates. Nevertheless, it is okay to risk
          // blocking on the cache's write lock here, because writes are rare
          // (once every 2 minutes, and on first-time-asked fields).
val internalKey = field.internalKey
val maybe = cache.getIfPresent(internalKey)
if (maybe eq null) {
cache.put(internalKey, right)
sndr ! types
} else
maybe match {
case Right((oTime, chars)) => {
val allTypes = chars.union(types)
cache.put(internalKey, Right(math.max(oTime, nTime) → allTypes))
sndr ! allTypes
}
case Left(charsFuture) =>
charsFuture.onComplete {
case Failure(error) => {
logger.error("future stored in types cache failed", error)
self ! Put(internalKey, types)
sndr ! types // this could be only a subset of the types.
// maybe it is better to let the ask fail with timeout,
// or otherwise signal the failure?
}
case Success(chars) => {
sndr ! chars.union(types)
if (types.diff(chars).nonEmpty) {
self ! Put(internalKey, types.union(chars))
}
}
}(updatingExecutionContext)
}
}
}(updatingExecutionContext)
}
}
private def infoptToChars(infopt: Option[Infoton]) = {
val typesOpt = infopt.flatMap(_.fields.flatMap(_.get("mang")))
val rv = typesOpt.fold(Set.empty[Char])(_.collect {
case FString(t, _, _) if t.length == 1 => t.head
})
if (rv.isEmpty && infopt.isDefined) {
logger.error(s"got empty type set for $infopt")
}
rv
}
private def getMetaFieldInfoton(field: FieldKey): Future[Option[Infoton]] =
crudService
.getInfotonByPathAsync(field.metaPath)
.transform {
case Failure(err) => Failure(new Exception(s"failed to getMetaFieldInfoton($field)", err))
case Success(BoxedFailure(err)) =>
Failure(new Exception(s"failed to getMetaFieldInfoton($field) from IRW", err))
// logger.info(s"got empty type infoton for [$field], this means either someone searched a non-existing field,
// or that we were unable to load from cassandra.")
case success => success.map(_.toOption)
}(updatingExecutionContext)
}
}
trait PassiveFieldTypesCacheTrait {
def get(fieldKey: FieldKey, forceUpdateForType: Option[Set[Char]] = None)(
implicit ec: ExecutionContext
): Future[Set[Char]]
def update(fieldKey: FieldKey, types: Set[Char])(implicit ec: ExecutionContext): Future[Unit]
}
abstract class PassiveFieldTypesCache(val cache: Cache[String, Either[Future[Set[Char]], (Long, Set[Char])]])
extends PassiveFieldTypesCacheTrait { this: LazyLogging =>
import PassiveFieldTypesCache._
implicit val timeout = akka.util.Timeout(10.seconds)
private val cbf = implicitly[CanBuildFrom[MSet[FieldKey], (String, FieldKey), MSet[(String, FieldKey)]]]
def get(fieldKey: FieldKey, forceUpdateForType: Option[Set[Char]] = None)(
implicit ec: ExecutionContext
): Future[Set[Char]] = fieldKey match {
// TODO: instead of checking a `FieldKey` for `NnFieldKey(k) if k.startsWith("system.")` maybe it is better to add `SysFieldKey` ???
case NnFieldKey(k) if k.startsWith("system.") || k.startsWith("content.") || k.startsWith("link.") =>
Future.successful(Set.empty)
case field =>
Try {
val key = field.internalKey
val maybeEither = cache.getIfPresent(key)
if (maybeEither eq null) (actor ? UpdateAndGet(field)).mapTo[Set[Char]].transform {
case Success(s) if s.isEmpty =>
Failure(
new NoSuchElementException(s"(async) empty type set for [$field] ([$forceUpdateForType],[$maybeEither])")
)
case successOrFailure => successOrFailure
} else
maybeEither match {
case Right((ts, types)) =>
forceUpdateForType match {
case None =>
if (System.currentTimeMillis() - ts > minimumEntryRefreshRateMillis) {
actor ! RequestUpdateFor(field)
}
if (types.isEmpty)
Future.failed(
new NoSuchElementException(
s"empty type set for [$field] ([$forceUpdateForType],[$maybeEither],${types.mkString("[", ",", "]")})"
)
)
else Future.successful(types)
case Some(forceReCheckForTypes) =>
if (forceReCheckForTypes.diff(types).nonEmpty || (System
.currentTimeMillis() - ts > minimumEntryRefreshRateMillis))
(actor ? UpdateAndGet(field)).mapTo[Set[Char]].transform {
case Success(s) if s.isEmpty =>
Failure(
new NoSuchElementException(
s"(async) empty type set for [$field] ([$forceUpdateForType],[$maybeEither],${types
.mkString("[", ",", "]")})"
)
)
case successOrFailure => successOrFailure
} else if (types.isEmpty)
Future.failed(
new NoSuchElementException(
s"empty type set for [$field] ([$forceUpdateForType],[$maybeEither],${types.mkString("[", ",", "]")})"
)
)
else Future.successful(types)
}
case Left(fut) => fut
}
}.recover {
case t: Throwable =>
Future.failed[Set[Char]](new Exception(s"failed to get([$field], [$forceUpdateForType])", t))
}.get
}
def update(fieldKey: FieldKey, types: Set[Char])(implicit ec: ExecutionContext): Future[Unit] = fieldKey match {
case NnFieldKey(k) if k.startsWith("system.") || k.startsWith("content.") || k.startsWith("link.") =>
Future.successful(())
case field => {
val key = field.internalKey
lazy val doneFut = (actor ? Put(key, types, true)).map(_ => ())
val maybeEither = cache.getIfPresent(key)
if (maybeEither eq null) doneFut
else
maybeEither match {
case Right((_, set)) =>
if (types.diff(set).nonEmpty) doneFut
else Future.successful(())
case Left(future) =>
future
.flatMap { set =>
if (types.diff(set).nonEmpty) doneFut
else future.map(_ => ())
}
.recoverWith {
case err: Throwable => {
logger.error("cannot update cache. internalKey failure.", err)
doneFut
}
}
}
}
}
def getState: String = {
import scala.collection.JavaConverters._
val m = cache.asMap().asScala
val sb = new StringBuilder("{\n ")
var notFirst = false
m.foreach {
case (k, v) =>
if (notFirst) sb ++= ",\n "
else notFirst = true
sb += '"'
sb ++= k
sb ++= "\":{\"cooked\":"
v match {
case Left(f) =>
sb ++= "false,\"status\":\""
sb ++= f.value.toString
sb ++= "\"}"
case Right((ts, s)) =>
sb ++= "true,\"age\":"
sb ++= ts.toString
sb ++= ",\"types\":"
if (s.isEmpty) sb ++= "[]"
else sb ++= s.mkString("[\"", "\",\"", "\"]")
sb += '}'
}
}
sb.append("\n}").result()
}
protected def createActor: ActorRef = null.asInstanceOf[ActorRef]
private[this] lazy val actor: ActorRef = createActor
}
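// Illustrative call pattern (assumption, not part of the original source;
// requires an implicit ExecutionContext in scope):
//   val types: Future[Set[Char]] = cache.get(fieldKey)        // cached, or fetched and cached
//   cache.get(fieldKey, forceUpdateForType = Some(Set('s')))  // re-check if 's' is not yet known
//   cache.update(fieldKey, Set('i'))                          // merge newly seen types into the entry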
class passiveFieldTypesCacheImpl(crud: CRUDServiceFS, ec: ExecutionContext, sys: ActorSystem)
extends
    // The cache's concurrencyLevel is set to 1, so we avoid useless updates.
    // Nevertheless, it is okay to risk blocking on the cache's write lock here,
    // because writes are rare (once every 2 minutes, and on first-time-asked fields).
PassiveFieldTypesCache(
CacheBuilder
.newBuilder()
.concurrencyLevel(1)
.maximumSize(maxTypesCacheSize)
.build()
)
with LazyLogging {
private val props = Props(classOf[PassiveFieldTypesCache.PassiveFieldTypesCacheActor], crud, cache, ec)
override def createActor: ActorRef =
sys.actorOf(props, "passiveFieldTypesCacheImpl_" + PassiveFieldTypesCache.uniqueIdentifierForActorName)
}
| hochgi/CM-Well | server/cmwell-ws/app/ld/cmw/PassiveFieldTypesCache.scala | Scala | apache-2.0 | 14,367 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models
import play.api.libs.json.{Json, OFormat}
case class MandatoryRadioButton(selectionValue: String)
object MandatoryRadioButton {
implicit val mandatoryDecisionFormats: OFormat[MandatoryRadioButton] = Json.format[MandatoryRadioButton]
}
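// Illustrative JSON round trip (assumption, not part of the original source):
//   Json.toJson(MandatoryRadioButton("yes"))                        // {"selectionValue":"yes"}
//   Json.parse("""{"selectionValue":"yes"}""").as[MandatoryRadioButton]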
| hmrc/pbik-frontend | app/models/MandatoryRadioButton.scala | Scala | apache-2.0 | 860 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.stats
import com.beust.jcommander.{Parameter, ParameterException}
import org.geotools.data.DataStore
import org.locationtech.geomesa.index.stats.{GeoMesaStats, HasGeoMesaStats}
import org.locationtech.geomesa.tools.stats.StatsHistogramCommand.StatsHistogramParams
import org.locationtech.geomesa.tools.utils.Prompt
import org.locationtech.geomesa.tools.{Command, DataStoreCommand}
import org.locationtech.geomesa.utils.geotools.converters.FastConverter
import org.locationtech.geomesa.utils.stats.{Histogram, MinMax, Stat}
import org.locationtech.jts.geom.{Geometry, Point}
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import scala.reflect.ClassTag
import scala.util.Try
trait StatsHistogramCommand[DS <: DataStore with HasGeoMesaStats] extends DataStoreCommand[DS] {
override val name = "stats-histogram"
override def params: StatsHistogramParams
override def execute(): Unit = withDataStore(histogram)
protected def histogram(ds: DS): Unit = {
val sft = ds.getSchema(params.featureName)
if (sft == null) {
throw new ParameterException(s"Schema '${params.featureName}' does not exist")
}
val attributes = getAttributesFromParams(sft, params)
val filter = Option(params.cqlFilter).getOrElse(Filter.INCLUDE)
val bins = Option(params.bins).map(_.intValue)
val histograms = if (params.exact) {
val bounds = scala.collection.mutable.Map.empty[String, (Any, Any)]
attributes.foreach { attribute =>
ds.stats.getMinMax[Any](sft, attribute).foreach { b =>
bounds.put(attribute, if (b.min == b.max) Histogram.buffer(b.min) else b.bounds)
}
}
if (bounds.size != attributes.size) {
val noBounds = attributes.filterNot(bounds.contains)
Command.user.warn(s"Initial bounds are not available for attributes ${noBounds.mkString(", ")}.")
var response: Integer = null
        Command.user.info("Would you like to:\n" +
          " 1. Calculate bounds (may be slow)\n" +
          " 2. Use default bounds (may be less accurate)\n" +
          " 3. Manually enter bounds\n" +
          " 4. Cancel operation\n")
while (response == null) {
val in = Prompt.read("Please enter the number of your choice: ")
response = Try(in.toInt.asInstanceOf[Integer]).filter(r => r > 0 && r < 5).getOrElse(null)
if (response == null) {
Command.user.error("Invalid input. Please enter 1-4.")
}
}
if (response == 1) {
Command.user.info("Running bounds query...")
ds.stats.getSeqStat[MinMax[Any]](sft, noBounds.map(Stat.MinMax), filter, exact = true).foreach { mm =>
bounds.put(mm.property, mm.bounds)
}
} else if (response == 2) {
noBounds.foreach { attribute =>
val binding = sft.getDescriptor(attribute).getType.getBinding
bounds.put(attribute, GeoMesaStats.defaultBounds(binding))
}
} else if (response == 3) {
noBounds.foreach { attribute =>
val ct = sft.getDescriptor(attribute).getType.getBinding
var lower: Any = null
var upper: Any = null
while (lower == null) {
lower = FastConverter.convert(Prompt.read(s"Enter initial lower bound for '$attribute': "), ct)
if (lower == null) {
Command.user.error(s"Couldn't convert input to appropriate type: ${ct.getSimpleName}")
}
}
while (upper == null) {
upper = FastConverter.convert(Prompt.read(s"Enter initial upper bound for '$attribute': "), ct)
if (upper == null) {
Command.user.error(s"Couldn't convert input to appropriate type: ${ct.getSimpleName}")
}
}
if (lower == upper) {
bounds.put(attribute, Histogram.buffer(lower))
} else {
bounds.put(attribute, (lower, upper))
}
}
} else {
Command.user.info("Operation cancelled.")
return // cancel operation
}
}
Command.user.info("Running stat query...")
val length = bins.getOrElse(GeoMesaStats.DefaultHistogramSize)
val queries = attributes.map { attribute =>
val ct = ClassTag[Any](sft.getDescriptor(attribute).getType.getBinding)
val (lower, upper) = bounds(attribute)
Stat.Histogram[Any](attribute, length, lower, upper)(ct)
}
ds.stats.getSeqStat[Histogram[Any]](sft, queries, filter, exact = true)
} else {
if (filter != Filter.INCLUDE) {
Command.user.warn("Non-exact stat queries may not fully account for the specified CQL filter")
}
val queries = attributes.map(attribute => Stat.Histogram[AnyRef](attribute, 0, null, null))
ds.stats.getSeqStat[Histogram[Any]](sft, queries, filter).map {
case histogram: Histogram[Any] if bins.forall(_ == histogram.length) => histogram
case histogram: Histogram[Any] =>
val descriptor = sft.getDescriptor(histogram.property)
val ct = ClassTag[Any](descriptor.getType.getBinding)
val statString = Stat.Histogram[Any](histogram.property, bins.get, histogram.min, histogram.max)(ct)
val binned = Stat(sft, statString).asInstanceOf[Histogram[Any]]
binned.addCountsFrom(histogram)
binned
}
}
attributes.foreach { attribute =>
histograms.find(_.property == attribute) match {
case None => Command.user.info(s"No histogram available for attribute '$attribute'")
case Some(hist) =>
if (classOf[Geometry].isAssignableFrom(sft.getDescriptor(attribute).getType.getBinding)) {
Command.output.info(StatsHistogramCommand.geomHistToString(attribute, hist.asInstanceOf[Histogram[Geometry]]))
} else {
StatsHistogramCommand.printHist(hist, sft, attribute)
}
}
}
}
}
object StatsHistogramCommand {
// @Parameters(commandDescription = "View or calculate counts of attribute in a GeoMesa feature type, grouped by sorted values")
trait StatsHistogramParams extends StatsParams with AttributeStatsParams {
@Parameter(names = Array("--bins"), description = "How many bins the data will be divided into. " +
"For example, if you are examining a week of data, you may want to divide the date into 7 bins, one per day.")
var bins: Integer = _
}
/**
* Creates a readable string for the histogram.
*/
def printHist(stat: Histogram[Any], sft: SimpleFeatureType, attribute: String): Unit = {
Command.output.info(s"Binned histogram for '$attribute':")
if (stat.isEmpty) {
Command.output.info(" No values")
} else {
val stringify = Stat.stringifier(sft.getDescriptor(attribute).getType.getBinding)
(0 until stat.length).foreach { i =>
val (min, max) = stat.bounds(i)
Command.output.info(s" [ ${stringify(min)} to ${stringify(max)} ] ${stat.count(i)}")
}
}
}
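  // Example of the emitted shape (illustrative values only, not real output):
  //   Binned histogram for 'dtg':
  //     [ 2016-01-01T00:00:00.000Z to 2016-01-02T00:00:00.000Z ] 1200
  //     [ 2016-01-02T00:00:00.000Z to 2016-01-03T00:00:00.000Z ] 850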
/**
* Creates string containing an ASCII, color-coded map of densities.
*/
def geomHistToString(attribute: String, stat: Histogram[Geometry]): String = {
// grid of counts, corresponds to our world map dimensions
val counts = Array.fill[Array[Long]](AsciiWorldMapHeight)(Array.fill[Long](AsciiWorldMapLength)(0))
// translate histogram values into the grid and also calculate min/max for normalization
def putCountsInGrid(): Unit = {
var i = 0
while (i < stat.length) {
val count = stat.count(i)
if (count > 0) {
val point = stat.medianValue(i).asInstanceOf[Point]
val (x, y) = (point.getX, point.getY)
val xOffset = (x + 180) / 360 // normalize to 0-1
val yOffset = 1 - (y + 90) / 180 // normalize to 0-1 and invert axis
// index into map string
val j = math.floor(yOffset * AsciiWorldMapHeight).toInt
val k = math.floor(xOffset * AsciiWorldMapLength).toInt
counts(j)(k) += count
}
i += 1
}
}
putCountsInGrid()
// min/max to normalize our densities
val min = counts.map(_.min).min
val max = counts.map(_.max).max
// normalize a count to 0-1 based on our min/max values
def normalize(count: Long): Float = (count - min).toFloat / (max - min)
// reverse a normalized percent
def denormalize(percent: Float): Long = (percent * (max - min)).toLong + min
val sb = new StringBuilder
// build up our string - if we have data in a given cell, put a sized circle, else put the map char
// in addition to size of circle, use console ANSI colors to mark our densities
var i = 0
var currentColor: String = null
while (i < AsciiWorldMapHeight) {
val row = counts(i)
var j = 0
while (j < AsciiWorldMapLength) {
val normalized = normalize(row(j))
val (color, char) =
if (normalized < .1f) {
(Console.RESET, AsciiWorldMap(i)(j))
} else if (normalized < .3f) {
Threshold1
} else if (normalized < .5f) {
Threshold2
} else if (normalized < .8f) {
Threshold3
} else {
Threshold4
}
if (color != currentColor) {
sb.append(color)
currentColor = color
}
sb.append(char)
j += 1
}
      sb.append('\n')
i += 1
}
// write out a key of the actual count ranges represented by each marker
    sb.append(s"${Console.RESET}\nKey: ")
sb.append(s"[${Threshold1._1}${Threshold1._2}${Console.RESET} ${denormalize(0.1f)} to ${denormalize(0.3f)}] ")
sb.append(s"[${Threshold2._1}${Threshold2._2}${Console.RESET} ${denormalize(0.3f)} to ${denormalize(0.5f)}] ")
sb.append(s"[${Threshold3._1}${Threshold3._2}${Console.RESET} ${denormalize(0.5f)} to ${denormalize(0.8f)}] ")
sb.append(s"[${Threshold4._1}${Threshold4._2}${Console.RESET} ${denormalize(0.8f)} to $max]")
sb.toString
}
// ANSI colors and unicode values for various density counts
  private val Threshold1 = (Console.BOLD + Console.CYAN, '\u26AB') // 26AB - black circle
  private val Threshold2 = (Console.BOLD + Console.GREEN, '\u25CF') // 25CF - medium black circle
  private val Threshold3 = (Console.BOLD + Console.YELLOW, '\u25CF') // 25CF - medium black circle
  private val Threshold4 = (Console.BOLD + Console.RED, '\u2B24') // 2B24 - black circle large
// note: keep these height and length variables consistent with the map
private val AsciiWorldMapLength = 140
private val AsciiWorldMapHeight = 42
// map is spaced so that points project semi-accurately - see @StatsHistogramCommandTest for test cases
private val AsciiWorldMap = Array(
""" """,
""" """,
""" . . ,:,,: """,
""" . :,.:,,,::,.,,,.,__,__,,.,; :. """,
""" ,. ;:,__:,.,,,,.,,____________,, :,,: __ """,
""" ,.,::::, .::; . ,,____________. ,,. ;.,.,,__ ,;. """,
""" .,,:::;:,:: .,, . ,__________, , .,. ,,________.,.,,__ :: """,
""" ______,: :,,,/; ,.,,; ,; ,. ,,,. :______,,., ,____; __ , __.,__________________________.:;;;.,, """,
""" ,,____________; ________.,, .,,,. __.,./. ,.,,;__. ;,,,______.;__________________________________,,:,., """,
""" ,,__.,.,________.:,__,.,, .,,, , :.,. ,,,__:__.,:______________________________________.,,.,.,__; """,
""" .:,, .,,______;,,__.: ____, ,,., ;,__.,________________________________., :, """,
""" ; ,__________,,__,,.:,____,, . ,.,,,______.,:__________________.,:________\\__ ,, """,
""" ,,,________,.:__.,.,,:: , .:,________________________________,,,__________,, """,
""" __________.,.: .,__,/: __.,,,____,:;__.:,.,;__.,:,__________________,, : """,
""" .,__________,__., ,,.: : , .::;.,:.,__:,.,__________________,:__ __ """,
""" ,,,__________.,, :,,__. :,;__________________________,, ,.:,, """,
""" :,____.,.,,: ______.,.:____,,__,,,____________________.,,, """,
""" ,,,., ,,________________.,__:.: ,,,______,________.,; """,
""" ,,, :, ;:. ________________,. ,__,,, ,.,,. ,,__ : """,
""" ,:. ,________.,,______.,,,, ,., ,,__. :, """,
""" , .,,,,,, ,________________.,,, ,: , , """,
""" ,,____.,,., ,__________.: .,; :,,. ,: """,
""" __________.,__: ,,____,,., . __: . :,__, """,
""" ,,____________ ________, . : . :. """,
""" ,,,________. __________ ,,. :__., ,, """,
""" :________,: ,____.,. .: ,__________, ,. """,
""" ,,____., ,____,, ,__________,, """,
""" ____.,: :__, ;.,.:__,____, """,
""" ,____. .,,. :;. """,
""" ;__/ ,, . """,
""" ,__ """,
""" ,, """,
""" """,
""" """,
""" / """,
""" ____ _ _/:\\_____/________________ ____________________________________. """,
""" :.,.________________________. ______________________________________________________________________ """,
""" .________________________________ .___/____________________________________________________________________________, """,
""" ________________________________________________________________________________________________________________________________ """,
""" """,
""" """,
""" """,
""" """
).map(_.toCharArray)
}
| aheyne/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/stats/StatsHistogramCommand.scala | Scala | apache-2.0 | 17,931 |
import javax.jms.{Message, MessageListener, Session, TextMessage}
import org.apache.activemq.ActiveMQConnectionFactory
class MessageBridgeListener(val callback: (TextMessage) => Boolean) extends MessageListener {
def onMessage(message: Message) = {
if (message.isInstanceOf[TextMessage]) {
val textMessage = message.asInstanceOf[TextMessage];
      if (callback(textMessage)) textMessage.acknowledge()
}
}
}
object MessageBridge {
def start(callback: (TextMessage) => Boolean) = {
RiddanceCore.log.info("Starting Riddance/JMS")
val connFactory = new ActiveMQConnectionFactory("tcp://localhost:61616")
val conn = connFactory.createConnection
conn.start
val session = conn.createSession(false, Session.CLIENT_ACKNOWLEDGE)
val inputQueue = session.createQueue("*.riddance.in")
val consumer = session.createConsumer(inputQueue)
consumer.setMessageListener(new MessageBridgeListener(callback))
}
}
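// Illustrative usage sketch (assumption, not part of the original source):
//   MessageBridge.start { msg =>
//     println(msg.getText)
//     true // returning true acknowledges the message
//   }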
| fmarani/riddance | src/main/scala/MessageBridge.scala | Scala | lgpl-3.0 | 1,351 |
package Integrals
object DoubleIntegral{
def leftRect(f:Double=>Double, a:Double, b:Double)=f(a)
//> leftRect: (f: Double => Double, a: Double, b: Double)Double
def midRect(f:Double=>Double, a:Double, b:Double)=f((a+b)/2)
//> midRect: (f: Double => Double, a: Double, b: Double)Double
def rightRect(f:Double=>Double, a:Double, b:Double)=f(b)
//> rightRect: (f: Double => Double, a: Double, b: Double)Double
def trapezoid(f:Double=>Double, a:Double, b:Double)=(f(a)+f(b))/2
//> trapezoid: (f: Double => Double, a: Double, b: Double)Double
  def simpson(f:Double=>Double, a:Double, b:Double)=(f(a)+4*f((a+b)/2)+f(b))/6
//> simpson: (f: Double => Double, a: Double, b: Double)Double
def fn1(x:Double)=x*x*x //> fn1: (x: Double)Double
def fn2(x:Double)=1/x //> fn2: (x: Double)Double
def fn3(x:Double)=x //> fn3: (x: Double)Double
type Method = (Double=>Double, Double, Double) => Double
def integrate(f:Double=>Double, a:Double, b:Double, steps:Double, m:Method)={
val delta:Double=(b-a)/steps
delta*(a until b by delta).foldLeft(0.0)((s,x) => s+m(f, x, x+delta))
} //> integrate: (f: Double => Double, a: Double, b: Double, steps: Double, m: (Do
//| uble => Double, Double, Double) => Double)Double
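  // Worked check (illustrative): Simpson's rule is exact for cubics, so
  // integrate(fn1, 0, 1, 100, simpson) ~= 0.25, the exact value of the
  // integral of x^3 on [0,1]; leftRect underestimates and rightRect
  // overestimates an increasing integrand such as fn1.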
def print(f:Double=>Double, a:Double, b:Double, steps:Double)={
println("rectangular left : %f".format(integrate(f, a, b, steps, leftRect)))
println("rectangular middle : %f".format(integrate(f, a, b, steps, midRect)))
println("rectangular right : %f".format(integrate(f, a, b, steps, rightRect)))
println("trapezoid : %f".format(integrate(f, a, b, steps, trapezoid)))
println("simpson : %f".format(integrate(f, a, b, steps, simpson)))
} //> print: (f: Double => Double, a: Double, b: Double, steps: Double)Unit
def main(args: Array[String]): Unit = {
print(fn1, 0, 1, 100)
println("------")
print(fn2, 1, 100, 1000)
println("------")
print(fn3, 0, 5000, 5000000)
println("------")
print(fn3, 0, 6000, 6000000)
} //> main: (args: Array[String])Unit
}
 | HaprianVlad/miniboxing-experiments | components/example/src/Integrals/DoubleIntegral.scala | Scala | bsd-3-clause | 2,580 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.api.impl.state
import com.comcast.xfinity.sirius.api.{SiriusConfiguration, RequestHandler}
import akka.agent.Agent
import com.comcast.xfinity.sirius.writeaheadlog.SiriusLog
import com.comcast.xfinity.sirius.api.impl._
import akka.event.Logging
import state.SiriusPersistenceActor.LogQuery
import akka.actor.{ActorContext, Props, ActorRef, Actor}
import com.comcast.xfinity.sirius.admin.MonitoringHooks
object StateSup {
/**
* Factory for creating children of StateSup.
*
* @param requestHandler the RequestHandler to apply updates to and perform Gets on
* @param siriusLog the SiriusLog to persist OrderedEvents to
* @param config SiriusConfiguration object for configuring children actors.
*/
private[state] class ChildProvider(requestHandler: RequestHandler, siriusLog: SiriusLog, config: SiriusConfiguration) {
def createStateActor()(implicit context: ActorContext): ActorRef =
context.actorOf(SiriusStateActor.props(requestHandler), "state")
def createPersistenceActor(stateActor: ActorRef)(implicit context: ActorContext): ActorRef =
context.actorOf(SiriusPersistenceActor.props(stateActor, siriusLog, config), "persistence")
}
/**
* Create a StateSup managing the state of requestHandler and persisting data to siriusLog.
*
* @param requestHandler the RequestHandler containing the callbacks for manipulating this instance's state
* @param siriusLog the log to be used for persisting events
* @param siriusStateAgent agent containing information on the state of the system
* @param config SiriusConfiguration object full of all kinds of configuration goodies, see SiriusConfiguration for
* more information
* @return Props for creating this actor, which can then be further configured
* (e.g. calling `.withDispatcher()` on it)
*/
def props(requestHandler: RequestHandler,
siriusLog: SiriusLog,
siriusStateAgent: Agent[SiriusState],
config: SiriusConfiguration): Props = {
val childProvider = new ChildProvider(requestHandler, siriusLog, config)
Props(classOf[StateSup], requestHandler, siriusLog, siriusStateAgent, childProvider, config)
}
}
/**
* Actor for supervising state related matters (in memory state and persistent state).
*
* @param requestHandler the RequestHandler to apply updates to and perform Gets on
* @param siriusLog the SiriusLog to persist OrderedEvents to
* @param siriusStateAgent agent containing information on the state of the system.
* @param childProvider factory for creating children of StateSup
* @param config SiriusCOnfiguration object for configuring children actors.
*/
// TODO rename this StateSupervisor
class StateSup(requestHandler: RequestHandler,
siriusLog: SiriusLog,
siriusStateAgent: Agent[SiriusState],
childProvider: StateSup.ChildProvider,
config: SiriusConfiguration)
extends Actor with MonitoringHooks {
val logger = Logging(context.system, "Sirius")
val stateActor = childProvider.createStateActor
val persistenceActor = childProvider.createPersistenceActor(stateActor)
// monitor stuff
var eventReplayFailureCount: Long = 0
// it would be cool to be able to observe this during boot...
var bootstrapTime: Option[Long] = None
override def preStart() {
registerMonitor(new StateInfo, config)
bootstrapState()
siriusStateAgent send (_.copy(stateInitialized = true))
}
override def postStop() {
unregisterMonitors(config)
}
def receive = {
case get: Get =>
stateActor forward get
case orderedEvent: OrderedEvent =>
persistenceActor ! orderedEvent
case logQuery: LogQuery =>
persistenceActor forward logQuery
}
// TODO perhaps this should be pulled out into a BootstrapActor. The StateSup should really only supervise.
private def bootstrapState() {
val start = System.currentTimeMillis
logger.info("Beginning SiriusLog replay at {}", start)
// TODO convert this to foreach
siriusLog.foldLeft(())(
(_, orderedEvent) =>
try {
orderedEvent.request match {
case Put(key, body) => requestHandler.handlePut(key, body)
case Delete(key) => requestHandler.handleDelete(key)
}
} catch {
case rte: RuntimeException =>
eventReplayFailureCount += 1
logger.error("Exception replaying {}: {}", orderedEvent, rte)
}
)
val totalBootstrapTime = System.currentTimeMillis - start
bootstrapTime = Some(totalBootstrapTime)
logger.info("Replayed SiriusLog in {}ms", totalBootstrapTime)
}
trait StateInfoMBean {
def getEventReplayFailureCount: Long
def getBootstrapTime: String
}
class StateInfo extends StateInfoMBean {
def getEventReplayFailureCount = eventReplayFailureCount
def getBootstrapTime = bootstrapTime.toString
}
}
| weggert/sirius | src/main/scala/com/comcast/xfinity/sirius/api/impl/state/StateSup.scala | Scala | apache-2.0 | 5,616 |
/*
* Copyright (c) 2015 Mind Eratosthenes Kft.
* License: AGPL v3
*/
package com.mind_era.zizized.util
import spire.math.UInt
import spire.math.ULong
/**
* various hashing methods for primitives & strings
*
* @author Szabolcs Ivan
* @since 1.0
*/
object Hash {
private def mix(a: UInt, b: UInt, c: UInt, da: Int, db: Int, dc: Int): (UInt, UInt, UInt) = {
val a2 = (a - b - c) ^ (c >> dc)
val b2 = (b - c - a2) ^ (a2 << da)
val c2 = (c - a2 - b2) ^ (b2 >> db)
(a2, b2, c2)
}
@SuppressWarnings(Array("org.brianmckenna.wartremover.warts.Throw" /*False positive*/ ))
def mix(a: UInt, b: UInt, c: UInt): (UInt, UInt, UInt) = {
val (a2, b2, c2) = mix(a, b, c, 8, 13, 13)
val (a3, b3, c3) = mix(a2, b2, c2, 16, 5, 12)
mix(a3, b3, c3, 1, 15, 3)
}
def hashUnsigned(a: UInt): UInt = {
val a2 = (a + UInt(0x7ed55d16)) + (a << 12)
val a3 = (a2 ^ UInt(0xc761c23c)) ^ (a2 >> 19)
val a4 = (a3 + UInt(0x165667b1)) + (a3 << 5)
val a5 = (a4 + UInt(0xd3a2646c)) ^ (a4 << 9)
val a6 = (a5 + UInt(0xfd7046c5)) + (a5 << 3)
(a6 ^ UInt(0xb55a4f09)) ^ (a6 >> 16)
}
def hashUnsignedLongLong(a: ULong): UInt = {
val a2 = (~a) + (a << 18)
val a3 = a2 ^ (a2 >> 31)
    val a4 = a3 + (a3 << 2) + (a3 << 4)
val a5 = a4 ^ (a4 >> 11)
val a6 = a5 + (a5 << 6)
UInt((a6 ^ (a6 >> 22)).toInt)
}
def combineHash(h1: UInt, h2: UInt): UInt = {
val h22 = (h2 - h1) ^ (h1 << 8)
val h12 = h1 - h22
((h22 ^ (h12 << 16)) - h12) ^ (h12 << 10)
}
def hashUU(a: UInt, b: UInt): UInt = combineHash(hashUnsigned(a), hashUnsigned(b))
private def stringHashCases(strIter: Iterator[Char], len: Int, _a: UInt, _b: UInt, _c: UInt): (UInt, UInt, UInt) = {
var a = _a
var b = _b
var c = _c
if (len == 11) c = c + (charToUInt(strIter.next()) << 24)
if (len >= 10) c = c + (charToUInt(strIter.next()) << 16)
if (len >= 9) c = c + (charToUInt(strIter.next()) << 8)
if (len >= 8) b = b + (charToUInt(strIter.next()) << 24)
if (len >= 7) b = b + (charToUInt(strIter.next()) << 16)
if (len >= 6) b = b + (charToUInt(strIter.next()) << 8)
if (len >= 5) b = b + charToUInt(strIter.next())
if (len >= 4) a = a + (charToUInt(strIter.next()) << 24)
if (len >= 3) a = a + (charToUInt(strIter.next()) << 16)
if (len >= 2) a = a + (charToUInt(strIter.next()) << 8)
if (len >= 1) a = a + charToUInt(strIter.next())
mix(a, b, c)
}
def stringHash(str: String, initValue: UInt): UInt = {
var a: UInt = UInt(0x9e3779b9)
var b: UInt = UInt(0x9e3779b9)
var c: UInt = initValue
var len: Int = str.length()
var iter: Iterator[Char] = str.iterator
while (len >= 12) {
a = a + (charToUInt(iter.next()) << 16) + charToUInt(iter.next())
b = b + (charToUInt(iter.next()) << 16) + charToUInt(iter.next())
c = c + (charToUInt(iter.next()) << 16) + charToUInt(iter.next())
@SuppressWarnings(Array("org.brianmckenna.wartremover.warts.Throw" /*False positive*/ ))
val (a1, b1, c1) = mix(a, b, c)
a = a1
b = b1
c = c1
len = len - 12
}
c = c + UInt(str.length())
@SuppressWarnings(Array("org.brianmckenna.wartremover.warts.Throw" /*False positive*/ ))
val (_, _, c2) = stringHashCases(str.reverseIterator, len, a, b, c)
c2
}
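  // Usage sketch (illustrative, not part of the original source):
  //
  //   val h1 = Hash.stringHash("zizized", UInt(0)) // deterministic 32-bit hash
  //   val h2 = Hash.hashUnsignedLongLong(ULong(42))
  //   val combined = Hash.combineHash(h1, h2)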
} | mind-era/zizized | src/main/scala/com/mind_era/zizized/util/Hash.scala | Scala | agpl-3.0 | 3,317 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool.test
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import pureconfig._
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.containerpool.ContainerArgsConfig
@RunWith(classOf[JUnitRunner])
class ContainerArgsConfigTest extends FlatSpec with Matchers {
it should "use defaults for container args map" in {
val config = loadConfigOrThrow[ContainerArgsConfig](ConfigKeys.containerArgs)
//check defaults
config.network shouldBe "bridge"
config.dnsServers shouldBe Seq[String]()
config.dnsSearch shouldBe Seq[String]()
config.dnsOptions shouldBe Seq[String]()
config.extraArgs shouldBe Map[String, Set[String]]()
}
it should "override defaults from system properties" in {
System.setProperty("whisk.container-factory.container-args.extra-args.label.0", "l1")
System.setProperty("whisk.container-factory.container-args.extra-args.label.1", "l2")
System.setProperty("whisk.container-factory.container-args.extra-args.label.3", "l3")
System.setProperty("whisk.container-factory.container-args.extra-args.env.0", "e1")
System.setProperty("whisk.container-factory.container-args.extra-args.env.1", "e2")
System.setProperty("whisk.container-factory.container-args.dns-servers.0", "google.com")
System.setProperty("whisk.container-factory.container-args.dns-servers.1", "1.2.3.4")
System.setProperty("whisk.container-factory.container-args.dns-search.0", "a.b.c")
System.setProperty("whisk.container-factory.container-args.dns-search.1", "a.b")
System.setProperty("whisk.container-factory.container-args.dns-options.0", "ndots:5")
val config = loadConfigOrThrow[ContainerArgsConfig](ConfigKeys.containerArgs)
//check defaults
config.network shouldBe "bridge"
config.dnsServers shouldBe Seq[String]("google.com", "1.2.3.4")
config.dnsSearch shouldBe Seq[String]("a.b.c", "a.b")
config.dnsOptions shouldBe Seq[String]("ndots:5")
//check map parsing of extra-args config
config.extraArgs.get("label") shouldBe Some(Set("l1", "l2", "l3"))
config.extraArgs.get("env") shouldBe Some(Set("e1", "e2"))
}
}
| csantanapr/incubator-openwhisk | tests/src/test/scala/org/apache/openwhisk/core/containerpool/test/ContainerArgsConfigTest.scala | Scala | apache-2.0 | 3,068 |
package org.http4s.util
object string extends CaseInsensitiveStringSyntax
| hvesalai/http4s | core/src/main/scala/org/http4s/util/String.scala | Scala | apache-2.0 | 75 |
val x = play {
RandSeed.ir(trig = 1, seed = 56789.0)
val lFDNoise3_0 = LFDNoise3.ar(0.93811005)
val in_0 = LeakDC.ar(0.93811005, coeff = 0.995)
val twoZero_0 = TwoZero.ar(in_0, freq = 10.0, radius = 1.0)
val sqrsum = twoZero_0 sqrsum 0.93811005
val in_1 = LeakDC.ar(0.93811005, coeff = 0.995)
val max_0 = twoZero_0 max 0.0
val radius_0 = max_0 min 1.0
val a_0 = TwoPole.ar(in_1, freq = 10.0, radius = radius_0)
val in_2 = LinCongN.ar(freq = 27.382282, a = a_0, c = 0.09420269, m = 6208.715, xi = 0.93811005)
val ring1 = a_0 ring1 in_2
val ring2 = ring1 ring2 6208.715
val difsqr = a_0 difsqr twoZero_0
val delay1_0 = Delay1.ar(difsqr)
val ring4 = a_0 ring4 0.0
val xi_0 = a_0 amclip 4321.0586
val freq_0 = RHPF.ar(in_2, freq = 95.52773, rq = 0.01)
val lFCub = LFCub.ar(freq = 0.01, iphase = 1.0)
val lFDNoise3_1 = LFDNoise3.ar(lFCub)
val in_3 = LeakDC.ar(0.1128317, coeff = 0.995)
val max_1 = lFCub max 0.0
val timeUp = max_1 min 30.0
val max_2 = freq_0 max 0.0
val timeDown = max_2 min 30.0
val yi_0 = LagUD.ar(in_3, timeUp = timeUp, timeDown = timeDown)
val in_4 = StandardN.ar(freq = 0.008342929, k = lFDNoise3_1, xi = 0.5, yi = yi_0)
val max_3 = lFDNoise3_1 max 0.8
val coeff_0 = max_3 min 0.99
val in_5 = LeakDC.ar(lFDNoise3_1, coeff = coeff_0)
val yi_1 = -0.7252769 & in_5
val max_4 = a_0 max 0.0
val width_0 = max_4 min 1.0
val lFGauss_0 = LFGauss.ar(dur = 0.1128317, width = width_0, phase = 0.0, loop = -0.30011576, doneAction = doNothing)
val max_5 = in_4 max -3.0
val a_1 = max_5 min 3.0
val max_6 = yi_1 max 0.5
val b = max_6 min 1.5
val freq_1 = LatoocarfianN.ar(freq = freq_0, a = a_1, b = b, c = 0.5, d = -25.763144, xi = 6208.715, yi = lFGauss_0)
val max_7 = yi_1 max -3.0
val a_2 = max_7 min 3.0
val latoocarfianC = LatoocarfianC.ar(freq = freq_1, a = a_2, b = 1.5, c = 0.5, d = 0.1128317, xi = xi_0, yi = lFCub)
val bRF = BRF.ar(in_2, freq = 10.0, rq = 95.52773)
val in_6 = LeakDC.ar(lFDNoise3_1, coeff = 0.995)
val decay = Decay.ar(in_6, time = 7.360578)
val delay1_1 = Delay1.ar(in_5)
val wrap2 = 409.47137 wrap2 in_5
val eq = 0.93811005 sig_== decay
val varSaw = VarSaw.ar(freq = 0.01, iphase = 0.0, width = 0.5)
val in_7 = LeakDC.ar(0.03103786, coeff = 0.995)
val max_8 = difsqr max 0.0
val maxDelayTime = max_8 min 20.0
val delayTime = Constant(203.33731f) min maxDelayTime
val freq_2 = CombL.ar(in_7, maxDelayTime = maxDelayTime, delayTime = delayTime, decayTime = -61.093445)
val in_8 = LFDNoise0.ar(freq_2)
val in_9 = LeakDC.ar(in_8, coeff = 0.995)
val lPZ2 = LPZ2.ar(in_9)
val geq = lPZ2 >= ring1
val in_10 = LeakDC.ar(in_4, coeff = 0.995)
val max_9 = wrap2 max 0.0
val radius_1 = max_9 min 1.0
val twoZero_1 = TwoZero.ar(in_10, freq = 10.0, radius = radius_1)
val in_11 = LeakDC.ar(0.03103786, coeff = 0.995)
val max_10 = in_5 max 0.0
val spread = max_10 min 43.0
val maxRoomSize = lFDNoise3_1 max 0.55
val roomSize = Constant(6.706537f) min maxRoomSize
val gVerb = GVerb.ar(in_11, roomSize = roomSize, revTime = 0.93811005, damping = 1.0, inputBW = 5.222734E-4, spread = spread, dryLevel = lFCub, earlyRefLevel = difsqr, tailLevel = sqrsum, maxRoomSize = maxRoomSize)
val gbmanN = GbmanN.ar(freq = 95.52773, xi = 145.82329, yi = sqrsum)
val neq = 4321.0586 sig_!= gbmanN
val in_12 = LeakDC.ar(4321.0586, coeff = 0.995)
val delayL = DelayL.ar(in_12, maxDelayTime = 0.0, delayTime = 0.0)
val loop_0 = 0.008342929 pow a_0
val max_11 = yi_0 max 0.0
val width_1 = max_11 min 1.0
val lFGauss_1 = LFGauss.ar(dur = 100.0, width = width_1, phase = 0.0, loop = loop_0, doneAction = doNothing)
val max_12 = a_0 max 0.0
val h = max_12 min 0.06
val in_13 = LorenzL.ar(freq = 1403.8345, s = 0.03103786, r = 145.82329, b = lFGauss_0, h = h, xi = 6.706537, yi = yi_1, zi = -0.6822276)
val lag = Lag.ar(in_13, time = 0.0)
val gt = 203.33731 > gbmanN
val in_14 = LeakDC.ar(4321.0586, coeff = 0.995)
val max_13 = bRF max 0.0
val radius_2 = max_13 min 1.0
val twoZero_2 = TwoZero.ar(in_14, freq = 10.0, radius = radius_2)
val mix = Mix(Seq[GE](lFDNoise3_0, ring2, delay1_0, ring4, latoocarfianC, delay1_1, eq, varSaw, geq, twoZero_1, gVerb, neq, delayL, lFGauss_1, lag, gt, twoZero_2))
val in_15 = Mix.Mono(mix)
val checkBadValues = CheckBadValues.ar(in_15, id = 0.0, post = 0.0)
val gate = checkBadValues sig_== 0.0
val in_16 = Gate.ar(in_15, gate = gate)
val pan2 = Pan2.ar(in_16, pos = 0.0, level = 1.0)
val sig = pan2 // Resonz.ar(pan2, "freq".kr(777), rq = 1)
Out.ar(0, Limiter.ar(LeakDC.ar(sig)) * "amp".kr(0.2))
}
/*---
*/
| Sciss/AnemoneActiniaria | kubus/DS_IO_iter20_no356.scala | Scala | gpl-3.0 | 5,452 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.testng
import org.scalatest._
import org.testng.TestNG
import org.testng.TestListenerAdapter
/**
* <p>
* Suite that wraps existing TestNG test suites, described by TestNG XML config files. This class allows
* existing TestNG tests written in Java to be run by ScalaTest.
* </p>
*
* <p>
* One way to use this class is to extend it and provide a list of one or more
* names of TestNG XML config file names to run. Here's an example:
* </p>
*
* <pre class="stHighlight">
* class MyWrapperSuite extends TestNGWrapperSuite(
* List("oneTest.xml", "twoTest.xml", "redTest.xml", "blueTest.xml")
* )
* </pre>
*
* <p>
* You can also specify TestNG XML config files on <code>Runner</code>'s command line with <code>-t</code> parameters. See
* the documentation for <code>Runner</code> for more information.
* </p>
*
* <p>
* To execute <code>TestNGWrapperSuite</code>s with ScalaTest's <code>Runner</code>, you must include TestNG's jar file on the class path or runpath.
* This version of <code>TestNGSuite</code> was tested with TestNG version 6.3.1.
* </p>
*
* @author Josh Cough
*/
class TestNGWrapperSuite(xmlSuiteFilenames: List[String]) extends TestNGSuite {
// Probably mention FileNotFoundException here
  // If any files named in xmlSuiteFilenames cannot be found, a FileNotFoundException will be thrown.
/**
* Runs TestNG with the XML config file or files provided to the primary constructor, passing reports to the specified <code>Reporter</code>.
*
* @param testName If present (Some), then only the method with the supplied name is executed and groups will be ignored.
* @param args the <code>Args</code> for this run
*/
override def run(testName: Option[String], args: Args): Status = {
import args._
val tagsToInclude =
filter.tagsToInclude match {
case None => Set[String]()
case Some(tti) => tti
}
val tagsToExclude = filter.tagsToExclude
val status = new ScalaTestStatefulStatus
runTestNG(reporter, tagsToInclude, tagsToExclude, tracker, status)
status.setCompleted()
status
}
/**
* Runs all tests in the xml suites.
* @param reporter the reporter to be notified of test events (success, failure, etc)
*/
override private[testng] def runTestNG(reporter: Reporter, tracker: Tracker, status: ScalaTestStatefulStatus) {
runTestNG(reporter, Set[String](), Set[String](), tracker, status)
}
/**
* Executes the following:
*
* 1) Calls the super class to set up groups with the given groups Sets.
* 2) Adds the xml suites to TestNG
* 3) Runs TestNG
*
* @param reporter the reporter to be notified of test events (success, failure, etc)
* @param groupsToInclude contains the names of groups to run. only tests in these groups will be executed
* @param groupsToExclude tests in groups in this Set will not be executed
* @param status Run status.
*/
private[testng] def runTestNG(reporter: Reporter, groupsToInclude: Set[String],
groupsToExclude: Set[String], tracker: Tracker, status: ScalaTestStatefulStatus) {
val testng = new TestNG
handleGroups(groupsToInclude, groupsToExclude, testng)
addXmlSuitesToTestNG(testng)
run(testng, reporter, tracker, status)
}
// TODO: We should probably do this checking in the constructor.
  /**
   * TestNG allows users to programmatically tell it which xml suites to run via the setTestSuites method.
   * This method takes a java.util.List of Strings, where each String is the path of a TestNG xml suite.
   * TestNGWrapperSuite takes xmlSuiteFilenames, a List of one or more xml suite paths, in its constructor.
   * This method simply creates a java.util.List containing each xml suite named in xmlSuiteFilenames and
   * calls the setTestSuites method on the given TestNG object.
   *
   * @param testng the TestNG object to set the suites on
   *
   * @throws FileNotFoundException if a file named in xmlSuiteFilenames does not exist.
   *
   */
private def addXmlSuitesToTestNG(testng: TestNG) {
import java.io.File
import java.io.FileNotFoundException
val files = new java.util.ArrayList[String]
xmlSuiteFilenames.foreach( { name =>
val f = new File( name )
if( ! f.exists ) throw new FileNotFoundException( f.getAbsolutePath )
files add name
}
)
testng.setTestSuites(files)
}
}
| travisbrown/scalatest | src/main/scala/org/scalatest/testng/TestNGWrapperSuite.scala | Scala | apache-2.0 | 5,128 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import com.twitter.zipkin.storage.Storage
import com.twitter.zipkin.common._
import com.twitter.zipkin.common.Annotation
import com.twitter.zipkin.common.BinaryAnnotation
import com.twitter.zipkin.util.Util
import com.twitter.util.{Duration, Future}
import anorm._
import anorm.SqlParser._
import java.nio.ByteBuffer
import java.sql.Connection
/**
* Retrieve and store span information.
*
* This is one of two places where Zipkin interacts directly with the database,
* the other one being AnormIndex.
*
* NOTE: We're ignoring TTL for now since unlike Cassandra and Redis, SQL
* databases don't have that built in and it shouldn't be a big deal for most
* sites. Several methods in this class deal with TTL and we just assume that
* all spans will live forever.
*/
case class AnormStorage(db: DB, openCon: Option[Connection] = None) extends Storage {
// Database connection object
private implicit val conn = openCon match {
case None => db.getConnection()
case Some(con) => con
}
/**
* Close the storage
*/
def close() { conn.close() }
/**
* Store the span in the underlying storage for later retrieval.
* @return a future for the operation
*/
def storeSpan(span: Span): Future[Unit] = {
val createdTs: Option[Long] = span.firstAnnotation match {
case Some(anno) => Some(anno.timestamp)
case None => None
}
SQL(
"""INSERT INTO zipkin_spans
| (span_id, parent_id, trace_id, span_name, debug, duration, created_ts)
|VALUES
| ({span_id}, {parent_id}, {trace_id}, {span_name}, {debug}, {duration}, {created_ts})
""".stripMargin)
.on("span_id" -> span.id)
.on("parent_id" -> span.parentId)
.on("trace_id" -> span.traceId)
.on("span_name" -> span.name)
.on("debug" -> (if (span.debug) 1 else 0))
.on("duration" -> span.duration)
.on("created_ts" -> createdTs)
.execute()
span.annotations.foreach(a =>
SQL(
"""INSERT INTO zipkin_annotations
| (span_id, trace_id, span_name, service_name, value, ipv4, port,
| a_timestamp, duration)
|VALUES
| ({span_id}, {trace_id}, {span_name}, {service_name}, {value},
| {ipv4}, {port}, {timestamp}, {duration})
""".stripMargin)
.on("span_id" -> span.id)
.on("trace_id" -> span.traceId)
.on("span_name" -> span.name)
.on("service_name" -> a.serviceName)
.on("value" -> a.value)
.on("ipv4" -> a.host.map(_.ipv4))
.on("port" -> a.host.map(_.port))
.on("timestamp" -> a.timestamp)
.on("duration" -> a.duration.map(_.inNanoseconds))
.execute()
)
span.binaryAnnotations.foreach(b =>
SQL(
"""INSERT INTO zipkin_binary_annotations
| (span_id, trace_id, span_name, service_name, annotation_key,
| annotation_value, annotation_type_value, ipv4, port)
|VALUES
| ({span_id}, {trace_id}, {span_name}, {service_name}, {key}, {value},
| {annotation_type_value}, {ipv4}, {port})
""".stripMargin)
.on("span_id" -> span.id)
.on("trace_id" -> span.traceId)
.on("span_name" -> span.name)
.on("service_name" -> b.host.map(_.serviceName).getOrElse("Unknown service name")) // from Annotation
.on("key" -> b.key)
.on("value" -> Util.getArrayFromBuffer(b.value))
.on("annotation_type_value" -> b.annotationType.value)
.on("ipv4" -> b.host.map(_.ipv4))
.on("port" -> b.host.map(_.ipv4))
.execute()
)
Future.Unit
}
/**
* Set the ttl of a trace. Used to store a particular trace longer than the
* default. It must be oh so interesting!
*/
def setTimeToLive(traceId: Long, ttl: Duration): Future[Unit] = {
Future.Unit
}
/**
* Get the time to live for a specific trace.
* If there are multiple ttl entries for one trace, pick the lowest one.
*/
def getTimeToLive(traceId: Long): Future[Duration] = {
Future.value(Duration.Top)
}
/**
* Finds traces that have been stored from a list of trace IDs
*
* @param traceIds a List of trace IDs
* @return a Set of those trace IDs from the list which are stored
*/
def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] = {
Future {
SQL(
"SELECT trace_id FROM zipkin_spans WHERE trace_id IN (%s)".format(traceIds.mkString(","))
).as(long("trace_id") *).toSet
}
}
/**
* Get the available trace information from the storage system.
* Spans in trace should be sorted by the first annotation timestamp
* in that span. First event should be first in the spans list.
*/
def getSpansByTraceIds(traceIds: Seq[Long]): Future[Seq[Seq[Span]]] = {
val traceIdsString:String = traceIds.mkString(",")
val spans:List[DBSpan] =
SQL(
"""SELECT span_id, parent_id, trace_id, span_name, debug
|FROM zipkin_spans
|WHERE trace_id IN (%s)
""".stripMargin.format(traceIdsString))
.as((long("span_id") ~ get[Option[Long]]("parent_id") ~
long("trace_id") ~ str("span_name") ~ int("debug") map {
case a~b~c~d~e => DBSpan(a, b, c, d, e > 0)
}) *)
val annos:List[DBAnnotation] =
SQL(
"""SELECT span_id, trace_id, service_name, value, ipv4, port, a_timestamp, duration
|FROM zipkin_annotations
|WHERE trace_id IN (%s)
""".stripMargin.format(traceIdsString))
.as((long("span_id") ~ long("trace_id") ~ str("service_name") ~ str("value") ~
get[Option[Int]]("ipv4") ~ get[Option[Int]]("port") ~
long("a_timestamp") ~ get[Option[Long]]("duration") map {
case a~b~c~d~e~f~g~h => DBAnnotation(a, b, c, d, e, f, g, h)
}) *)
val binAnnos:List[DBBinaryAnnotation] =
SQL(
"""SELECT span_id, trace_id, service_name, annotation_key,
| annotation_value, annotation_type_value, ipv4, port
|FROM zipkin_binary_annotations
|WHERE trace_id IN (%s)
""".stripMargin.format(traceIdsString))
.as((long("span_id") ~ long("trace_id") ~ str("service_name") ~
str("annotation_key") ~ db.bytes("annotation_value") ~
int("annotation_type_value") ~ get[Option[Int]]("ipv4") ~
get[Option[Int]]("port") map {
case a~b~c~d~e~f~g~h => DBBinaryAnnotation(a, b, c, d, e, f, g, h)
}) *)
val results: Seq[Seq[Span]] = traceIds.map { traceId =>
spans.filter(_.traceId == traceId).map { span =>
        val spanAnnos = annos.filter(a => a.traceId == span.traceId && a.spanId == span.spanId).map { anno =>
val host:Option[Endpoint] = (anno.ipv4, anno.port) match {
case (Some(ipv4), Some(port)) => Some(Endpoint(ipv4, port.toShort, anno.serviceName))
case _ => None
}
val duration:Option[Duration] = anno.duration match {
case Some(nanos) => Some(Duration.fromNanoseconds(nanos))
case None => None
}
Annotation(anno.timestamp, anno.value, host, duration)
}
        val spanBinAnnos = binAnnos.filter(ba => ba.traceId == span.traceId && ba.spanId == span.spanId).map { binAnno =>
val host:Option[Endpoint] = (binAnno.ipv4, binAnno.port) match {
case (Some(ipv4), Some(port)) => Some(Endpoint(ipv4, port.toShort, binAnno.serviceName))
case _ => None
}
val value = ByteBuffer.wrap(binAnno.value)
val annotationType = AnnotationType.fromInt(binAnno.annotationTypeValue)
BinaryAnnotation(binAnno.key, value, annotationType, host)
}
Span(traceId, span.spanName, span.spanId, span.parentId, spanAnnos, spanBinAnnos, span.debug)
}
}
Future {
results.filter(!_.isEmpty)
}
}
def getSpansByTraceId(traceId: Long): Future[Seq[Span]] = {
getSpansByTraceIds(Seq(traceId)).map {
_.head
}
}
/**
* How long do we store the data before we delete it? In seconds.
*/
def getDataTimeToLive: Int = {
Int.MaxValue
}
case class DBSpan(spanId: Long, parentId: Option[Long], traceId: Long, spanName: String, debug: Boolean)
case class DBAnnotation(spanId: Long, traceId: Long, serviceName: String, value: String, ipv4: Option[Int], port: Option[Int], timestamp: Long, duration: Option[Long])
case class DBBinaryAnnotation(spanId: Long, traceId: Long, serviceName: String, key: String, value: Array[Byte], annotationTypeValue: Int, ipv4: Option[Int], port: Option[Int])
}
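// Usage sketch (illustrative, not part of the original source); `db: DB` and
// `span: Span` are assumptions:
//
//   val storage = AnormStorage(db)
//   storage.storeSpan(span)
//   val trace: Future[Seq[Span]] = storage.getSpansByTraceId(span.traceId)
//   storage.close()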
| rajatdutta/zipkin | zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/AnormStorage.scala | Scala | apache-2.0 | 9,180 |
package enumeratum.values
/** Created by Lloyd on 4/13/16.
*
* Copyright 2016
*/
sealed abstract class JsonDrinks(val value: Short, name: String) extends ShortEnumEntry
case object JsonDrinks extends ShortEnum[JsonDrinks] with ShortPlayJsonValueEnum[JsonDrinks] {
case object OrangeJuice extends JsonDrinks(value = 1, name = "oj")
case object AppleJuice extends JsonDrinks(value = 2, name = "aj")
case object Cola extends JsonDrinks(value = 3, name = "cola")
case object Beer extends JsonDrinks(value = 4, name = "beer")
val values = findValues
}
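// Round-trip sketch (illustrative, not part of the original source): a
// ShortPlayJsonValueEnum serializes entries by their Short value.
//
//   import play.api.libs.json._
//   Json.toJson(JsonDrinks.Cola: JsonDrinks) // JsNumber(3)
//   Json.parse("4").as[JsonDrinks]           // JsonDrinks.Beer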
| lloydmeta/enumeratum | enumeratum-play-json/src/test/scala/enumeratum/values/JsonDrinks.scala | Scala | mit | 583 |
package com.greencatsoft.angularjs.core
import scala.concurrent.Future
import scala.language.implicitConversions
import scala.scalajs.js
import scala.scalajs.js.Any.{ fromFunction1, fromFunction5 }
import scala.scalajs.js.UndefOr
import scala.scalajs.js.UndefOr.undefOr2ops
import scala.scalajs.js.annotation.JSExportAll
import com.greencatsoft.angularjs.Factory
import com.greencatsoft.angularjs.core.HttpStatus.int2HttpStatus
import com.greencatsoft.angularjs.injectable
import com.greencatsoft.angularjs.core.Defer.DeferredPromise
@js.native
@injectable("$http")
trait HttpService extends js.Object {
def get[T](url: String): HttpPromise[T] = js.native
def get[T](url: String, config: HttpConfig): HttpPromise[T] = js.native
def head[T](url: String): HttpPromise[T] = js.native
def head[T](url: String, config: HttpConfig): HttpPromise[T] = js.native
def post[T](url: String): HttpPromise[T] = js.native
def post[T](url: String, data: js.Any): HttpPromise[T] = js.native
def post[T](url: String, data: js.Any, config: HttpConfig): HttpPromise[T] = js.native
def jsonp[T](url: String, config: HttpConfig): HttpPromise[T] = js.native
def put[T](url: String): HttpPromise[T] = js.native
def put[T](url: String, data: js.Any): HttpPromise[T] = js.native
def put[T](url: String, data: js.Any, config: HttpConfig): HttpPromise[T] = js.native
def delete[T](url: String): HttpPromise[T] = js.native
def delete[T](url: String, data: js.Any): HttpPromise[T] = js.native
def delete[T](url: String, data: js.Any, config: HttpConfig): HttpPromise[T] = js.native
}
@js.native
trait HttpConfig extends js.Object {
var url: String = js.native
var params: js.Dictionary[js.Any] = js.native
var method: String = js.native
var timeout: Int = js.native
var withCredentials: Boolean = js.native
var cache: Boolean = js.native
var responseType: String = js.native
var headers: js.Dictionary[String] = js.native
var transformResponse: js.Array[js.Function3[js.Any, js.Any, js.Any, js.Any]] = js.native
var transformRequest: js.Array[js.Function2[js.Any, js.Any, js.Any]] = js.native
}
object HttpConfig {
def empty: HttpConfig = {
val config = new js.Object().asInstanceOf[HttpConfig]
config.headers = js.Dictionary()
config.transformRequest = js.Array()
config.transformResponse = js.Array()
config
}
def documentHandler: HttpConfig = {
val config = empty
config.responseType = "document"
config
}
def postHandler: HttpConfig = {
val config = empty
config.headers = js.Dictionary(
"method" -> "POST",
"Content-Type" -> "application/x-www-form-urlencoded")
config
}
}
@js.native
@injectable("$httpProvider")
trait HttpProvider extends js.Object {
var defaults: HttpConfig = js.native
var interceptors: js.Array[String] = js.native
}
@js.native
trait HttpPromise[T] extends Promise[T] {
def success(callback: js.Function1[T, Unit]): this.type = js.native
def success(callback: js.Function2[T, Int, Unit]): this.type = js.native
def success(callback: js.Function3[T, js.Any, Int, Unit]): this.type = js.native
def success(callback: js.Function4[T, Int, js.Any, js.Any, Unit]): this.type = js.native
def success(callback: js.Function5[T, Int, js.Any, js.Any, js.Any, Unit]): this.type = js.native
def error(callback: js.Function1[Any, Unit]): this.type = js.native
def error(callback: js.Function2[Any, Int, Unit]): this.type = js.native
def error(callback: js.Function3[Any, js.Any, Int, Unit]): this.type = js.native
def error(callback: js.Function4[Any, Int, js.Any, js.Any, Unit]): this.type = js.native
def error(callback: js.Function5[Any, Int, js.Any, js.Any, UndefOr[String], Unit]): this.type = js.native
}
trait HttpInterceptor {
def q: Q
def request(config: HttpConfig): HttpConfig = config
def requestError[T](rejection: HttpResult): Promise[T] = q.reject(rejection)
def response(response: HttpResult): HttpResult = response
def responseError[T](rejection: HttpResult): Promise[T] = q.reject(rejection)
}
@JSExportAll
case class HttpInterceptorFunctions(
request: js.Function1[HttpConfig, HttpConfig],
requestError: js.Function1[HttpResult, Promise[_]],
response: js.Function1[HttpResult, HttpResult],
responseError: js.Function1[HttpResult, Promise[_]])
trait HttpInterceptorFactory extends Factory[HttpInterceptorFunctions] {
implicit def toInterceptorFunctions(interceptor: HttpInterceptor): HttpInterceptorFunctions = {
import interceptor._
HttpInterceptorFunctions(request _, requestError _, response _, responseError _)
}
}
case class HttpStatus(code: Int)
object HttpStatus {
//From https://github.com/spray/spray/blob/master/spray-http/src/main/scala/spray/http/StatusCode.scala
val Continue = HttpStatus(100)
val SwitchingProtocols = HttpStatus(101)
val Processing = HttpStatus(102)
val Ok = HttpStatus(200)
val Created = HttpStatus(201)
val Accepted = HttpStatus(202)
val NonAuthoritativeInformation = HttpStatus(203)
val NoContent = HttpStatus(204)
val ResetContent = HttpStatus(205)
val PartialContent = HttpStatus(206)
val MultiStatus = HttpStatus(207)
val AlreadyReported = HttpStatus(208)
val IMUsed = HttpStatus(226)
val MultipleChoices = HttpStatus(300)
val MovedPermanently = HttpStatus(301)
val Found = HttpStatus(302)
val SeeOther = HttpStatus(303)
val NotModified = HttpStatus(304)
val UseProxy = HttpStatus(305)
val TemporaryRedirect = HttpStatus(307)
val PermanentRedirect = HttpStatus(308)
val BadRequest = HttpStatus(400)
val Unauthorized = HttpStatus(401)
val PaymentRequired = HttpStatus(402)
val Forbidden = HttpStatus(403)
val NotFound = HttpStatus(404)
val MethodNotAllowed = HttpStatus(405)
val NotAcceptable = HttpStatus(406)
val ProxyAuthenticationRequired = HttpStatus(407)
val RequestTimeout = HttpStatus(408)
val Conflict = HttpStatus(409)
val Gone = HttpStatus(410)
val LengthRequired = HttpStatus(411)
val PreconditionFailed = HttpStatus(412)
val EntityTooLarge = HttpStatus(413)
val RequestUriTooLong = HttpStatus(414)
val UnsupportedMediaType = HttpStatus(415)
val RequestedRangeNotSatisfiable = HttpStatus(416)
val ExpectationFailed = HttpStatus(417)
val EnhanceYourCalm = HttpStatus(420)
val UnprocessableEntity = HttpStatus(422)
val Locked = HttpStatus(423)
val FailedDependency = HttpStatus(424)
val UnorderedCollection = HttpStatus(425)
val UpgradeRequired = HttpStatus(426)
val PreconditionRequired = HttpStatus(428)
val TooManyRequests = HttpStatus(429)
val RequestHeaderFieldsTooLarge = HttpStatus(431)
val RetryWith = HttpStatus(449)
val BlockedByParentalControls = HttpStatus(450)
val UnavailableForLegalReasons = HttpStatus(451)
val InternalServerError = HttpStatus(500)
val NotImplemented = HttpStatus(501)
val BadGateway = HttpStatus(502)
val ServiceUnavailable = HttpStatus(503)
val GatewayTimeout = HttpStatus(504)
val HTTPVersionNotSupported = HttpStatus(505)
val VariantAlsoNegotiates = HttpStatus(506)
val InsufficientStorage = HttpStatus(507)
val LoopDetected = HttpStatus(508)
val BandwidthLimitExceeded = HttpStatus(509)
val NotExtended = HttpStatus(510)
val NetworkAuthenticationRequired = HttpStatus(511)
val NetworkReadTimeout = HttpStatus(598)
val NetworkConnectTimeout = HttpStatus(599)
implicit def int2HttpStatus(code: Int): HttpStatus = HttpStatus(code)
}
case class HttpException(status: HttpStatus, message: String) extends Exception(message)
object HttpPromise {
implicit def promise2future[A](promise: HttpPromise[A]): Future[A] = {
val p = concurrent.Promise[A]
    def onSuccess(data: A): Unit = p.success(data)
def onError(data: Any, status: Int, config: js.Any, headers: js.Any, statusText: UndefOr[String]): Unit =
p failure HttpException(status, statusText getOrElse s"Failed to process HTTP request: '$data'")
promise.success(onSuccess _).error(onError _)
p.future
}
}
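// Usage sketch (illustrative, not part of the original source): with the implicit
// conversion above in scope, an Angular $http call can be consumed as a Scala
// Future; `http: HttpService` and the URL are assumptions.
//
//   import HttpPromise.promise2future
//   import scala.concurrent.ExecutionContext.Implicits.global
//   import scala.util.{ Failure, Success }
//
//   val body: Future[js.Any] = http.get[js.Any]("/api/status")
//   body.onComplete {
//     case Success(data)                       => println(data)
//     case Failure(HttpException(status, msg)) => println(s"${status.code}: $msg")
//     case Failure(other)                      => println(other)
//   }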
@js.native
trait HttpResult extends js.Object {
val config: js.Any = js.native
val data: js.Any = js.native
val status: Int = js.native
val statusText: String = js.native
val headers: js.Function1[String, String] = js.native
}
| svenwiegand/scalajs-angular | src/main/scala/com/greencatsoft/angularjs/core/Http.scala | Scala | apache-2.0 | 8,291 |
package com.cloudray.scalapress.plugin.variations
import org.scalatest.{OneInstancePerTest, FlatSpec}
import org.scalatest.mock.MockitoSugar
import com.cloudray.scalapress.item.{Item, ItemType}
import com.cloudray.scalapress.plugin.variations.controller.DimensionEditController
/** @author Stephen Samuel */
class DimensionEditControllerTest extends FlatSpec with MockitoSugar with OneInstancePerTest {
val controller = new DimensionEditController
controller.dimensionDao = mock[DimensionDao]
val d1 = new Dimension
d1.objectType = new ItemType
d1.objectType.id = 54
val obj = new Item
obj.id = 1
obj.itemType = d1.objectType
"a dimension edit controller" should "redirect to the dimensions list page after saving" in {
val redirect = controller.save(d1)
assert("redirect:/backoffice/plugin/variations/dimensions?objectTypeId=54" === redirect)
}
}
| vidyacraghav/scalapress | src/test/scala/com/cloudray/scalapress/plugin/variations/DimensionEditControllerTest.scala | Scala | apache-2.0 | 882 |
package com.airbnb.scheduler.jobs
/**
* A stream of schedules.
* Calling tail will return a clipped schedule.
* The schedule consists of a string representation of an ISO8601 expression as well as a BaseJob.
* @author Florian Leibert ([email protected])
*/
class ScheduleStream(val schedule: String, val jobName: String) {
def head(): (String, String) = {
(schedule, jobName)
}
/**
* Returns a clipped schedule.
* @return
*/
def tail(): Option[ScheduleStream] = {
//TODO(FL) Represent the schedule as a data structure instead of a string.
val (rec, start, per) = Iso8601Expressions.parse(schedule)
    if (rec == -1)
      Some(new ScheduleStream(Iso8601Expressions.create(rec, start.plus(per), per), jobName))
    else if (rec > 0)
      Some(new ScheduleStream(Iso8601Expressions.create(rec - 1, start.plus(per), per), jobName))
    else
      None
}
}
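// Walkthrough sketch (illustrative, not part of the original source), assuming
// Iso8601Expressions renders schedules as "R<count>/<start>/<period>":
//
//   val s = new ScheduleStream("R2/2014-03-08T20:00:00.000Z/PT24H", "job1")
//   s.head() // ("R2/2014-03-08T20:00:00.000Z/PT24H", "job1")
//   s.tail() // Some(stream over "R1/2014-03-09T20:00:00.000Z/PT24H")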
| snegi/chronos | src/main/scala/com/airbnb/scheduler/jobs/ScheduleStream.scala | Scala | apache-2.0 | 913 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.collection
/**
* Provides synchronized or unsynchronized access to an underlying object. The main use case is to transition to
* a final desired state - if our initial state is our desired state, we can totally avoid synchronization.
*
* If the object is going to be updated, use `IsSynchronized` access. If the value is read-only,
* use `NotSynchronized` access.
*
* @tparam T type parameter
*/
trait MaybeSynchronized[T] {
/**
* Gets the current value
*
* @return
*/
def get: T
/**
* Atomic operation to conditionally set the current value. If the current value matches the expected value
* passed in, 'onMatch' will be executed and the current value will be updated. Otherwise, nothing will happen.
*
* @param value value to set
* @param expected expected current value
* @param onMatch will be executed if current value == expected
   * @return true if onMatch was executed
*/
def set(value: T, expected: T, onMatch: => Unit = {}): Boolean
}
/**
* Access to the underlying object is synchronized. Supports both get and set
*
* @param initial initial value for the underlying object
* @tparam T type parameter
*/
class IsSynchronized[T](initial: T) extends MaybeSynchronized[T] {
private var current = initial
override def get: T = synchronized(current)
override def set(value: T, expected: T, onMatch: => Unit): Boolean = synchronized {
if (current == expected) {
onMatch
current = value
true
} else {
false
}
}
}
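// Usage sketch (illustrative, not part of the original source): transition a
// flag to its final desired state; the onMatch block runs only on the first
// successful transition.
//
//   val state: MaybeSynchronized[Boolean] = new IsSynchronized(false)
//   state.set(true, expected = false, onMatch = println("transitioned")) // true
//   state.set(true, expected = false)                                    // false: already transitioned
//   state.get                                                            // true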
/**
* Access to the underlying value is not synchronized. Supports get but does not support set
*
* @param value underlying value
* @tparam T type parameter
*/
class NotSynchronized[T](value: T) extends MaybeSynchronized[T] {
override def get: T = value
override def set(ignored: T, expected: T, f: => Unit): Boolean =
if (value == expected) {
throw new NotImplementedError("NotSynchronized is read-only")
} else {
false
}
}
| tkunicki/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/collection/MaybeSynchronized.scala | Scala | apache-2.0 | 2,506 |
package com.socrata.datacoordinator
package secondary
import com.socrata.datacoordinator.id.DatasetId
import com.socrata.datacoordinator.secondary.Secondary.Cookie
import scala.concurrent.duration.FiniteDuration
import java.util.UUID
import org.joda.time.DateTime
case class SecondaryRecord(storeId: String,
claimantId: UUID,
datasetId: DatasetId,
startingDataVersion: Long,
endingDataVersion: Long,
retryNum: Int,
replayNum: Int,
initialCookie: Option[String],
pendingDrop: Boolean)
class DatasetAlreadyInSecondary(val storeId: String, val datasetId: DatasetId) extends Exception
/**
* Manages the manifest of replications to secondaries, persisted in a database.
*/
trait SecondaryManifest {
def readLastDatasetInfo(storeId: String, datasetId: DatasetId): Option[(Long, Option[String])]
@throws(classOf[DatasetAlreadyInSecondary])
def addDataset(storeId: String, datasetId: DatasetId): Unit
def dropDataset(storeId: String, datasetId: DatasetId): Unit
def datasets(storeId: String): Map[DatasetId, Long]
def stores(datasetId: DatasetId): Map[String, Long]
def brokenAts(datasetId: DatasetId): Map[String, DateTime]
def cleanOrphanedClaimedDatasets(storeId: String, claimantId: UUID): Unit
def claimDatasetNeedingReplication(storeId: String,
claimantId: UUID,
claimTimeout: FiniteDuration): Option[SecondaryRecord]
def releaseClaimedDataset(job: SecondaryRecord): Unit
def markSecondaryDatasetBroken(job: SecondaryRecord, cookie: Cookie): Unit
def completedReplicationTo(storeId: String,
claimantId: UUID,
datasetId: DatasetId,
dataVersion: Long,
newCookie: Option[String])
def updateRetryInfo(storeId: String, datasetId: DatasetId, retryNum: Int, nextRetryDelaySecs: Int): Unit
def updateReplayInfo(storeId: String, datasetId: DatasetId, cookie: Secondary.Cookie, replayNum: Int, nextReplayDelaySecs: Int): Unit
def markDatasetForDrop(storeId: String, datasetId: DatasetId): Boolean
def feedbackSecondaries(datasetId: DatasetId): Set[String] // store IDs
def outOfDateFeedbackSecondaries(datasetId: DatasetId): Set[String] // store IDs
def lockResync(datasetId: DatasetId, storeId: String, groupName: String): Unit
def unlockResync(datasetId: DatasetId, storeId: String, groupName: String): Unit
}
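// Claim lifecycle sketch (illustrative, not part of the original source): a
// replication worker claims a pending dataset, replicates the version range,
// records completion, and releases the claim. `manifest`, `storeId`, and
// `claimantId` are assumptions.
//
//   import scala.concurrent.duration._
//   manifest.claimDatasetNeedingReplication(storeId, claimantId, 30.seconds).foreach { rec =>
//     // ... replicate versions rec.startingDataVersion to rec.endingDataVersion ...
//     manifest.completedReplicationTo(storeId, claimantId, rec.datasetId, rec.endingDataVersion, rec.initialCookie)
//     manifest.releaseClaimedDataset(rec)
//   }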
case class NamedSecondary[CT, CV](storeId: String, store: Secondary[CT, CV], groupName: String)
| socrata-platform/data-coordinator | coordinatorlib/src/main/scala/com/socrata/datacoordinator/secondary/SecondaryManifest.scala | Scala | apache-2.0 | 2,805 |
package com.imaginea.activegrid.core.models
/**
* Created by sivag on 28/10/16.
*/
trait ViewFilter[T <: BaseEntity] {
def filterInstance(t: T, viewLevel: ViewLevel): T
}
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/ViewFilter.scala | Scala | apache-2.0 | 179 |
package com.wincom.dcim.sharded
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings}
import com.wincom.dcim.domain.Settings
import com.wincom.dcim.message.common.Command
import com.wincom.dcim.message.signal._
import com.wincom.dcim.signal.FunctionRegistry
import scala.collection.convert.ImplicitConversions._
/**
* Created by wangxy on 17-8-17.
*/
object ShardedSignals {
def props = Props(new ShardedSignals)
def name = "sharded-signals"
}
class ShardedSignals extends Actor with ActorLogging {
val settings = Settings(context.system)
ShardedSignal.numberOfShards = settings.actor.numberOfShards
val registry: FunctionRegistry = (new FunctionRegistry(log)).initialize()
ClusterSharding(context.system).start(
ShardedSignal.shardName,
ShardedSignal.props(shardedDriver, registry),
ClusterShardingSettings(context.system),
ShardedSignal.extractEntityId,
ShardedSignal.extractShardId)
def shardedDriver(): ActorRef = {
ClusterSharding(context.system).shardRegion(ShardedDriver.shardName)
}
def shardedSignal: ActorRef = {
ClusterSharding(context.system).shardRegion(ShardedSignal.shardName)
}
override def receive: Receive = {
case _: GetSupportedFuncsCmd =>
sender() ! SupportedFuncsVo(registry.names().toMap)
case GetFuncParamsCmd(modelName) =>
sender() ! FuncParamsVo(registry.paramOptions(modelName).toSeq)
case cmd: Command =>
shardedSignal forward cmd
case x => log.info("COMMAND: {} {}", this, x)
}
}
| xtwxy/mysc | dcim-cluster/cluster/src/main/scala/com/wincom/dcim/sharded/ShardedSignals.scala | Scala | apache-2.0 | 1,583 |
package com.sksamuel.elastic4s.requests.mappings
import com.sksamuel.elastic4s.fields.builders.ElasticFieldBuilderFn
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.{DynamicMapping, DynamicTemplateBodyFn}
object MappingBuilderFn {
def build(d: MappingDefinitionLike): XContentBuilder =
d.rawSource match {
      // use raw source if provided, ignoring other mapping settings
case Some(rs) => XContentFactory.parse(rs)
case None =>
val builder = XContentFactory.jsonBuilder()
build(d, builder)
builder.endObject()
}
// returns the mapping json wrapped in the mapping type name, eg "mytype" : { mapping }
def buildWithName(d: MappingDefinitionLike, tpe: String): XContentBuilder =
d.rawSource match {
      // use raw source if provided, ignoring other mapping settings
case Some(rs) =>
val builder = XContentFactory.jsonBuilder
builder.rawField(tpe, XContentFactory.parse(rs))
builder
case None =>
val builder = XContentFactory.jsonBuilder
builder.startObject(tpe)
build(d, builder)
builder.endObject()
builder.endObject()
}
def build(d: MappingDefinitionLike, builder: XContentBuilder): Unit = {
for (all <- d.all) builder.startObject("_all").field("enabled", all).endObject()
(d.source, d.sourceExcludes) match {
case (_, l) if l.nonEmpty => builder.startObject("_source").array("excludes", l.toArray).endObject()
case (Some(source), _) => builder.startObject("_source").field("enabled", source).endObject()
case _ =>
}
if (d.dynamicDateFormats.nonEmpty)
builder.array("dynamic_date_formats", d.dynamicDateFormats.toArray)
for (dd <- d.dateDetection) builder.field("date_detection", dd)
for (nd <- d.numericDetection) builder.field("numeric_detection", nd)
d.dynamic.foreach(
dynamic =>
builder.field("dynamic", dynamic match {
case DynamicMapping.Strict => "strict"
case DynamicMapping.False => "false"
case _ => "true"
})
)
d.boostName.foreach(
x =>
builder.startObject("_boost").field("name", x).field("null_value", d.boostNullValue.getOrElse(0D)).endObject()
)
d.analyzer.foreach(x => builder.startObject("_analyzer").field("path", x).endObject())
d.parent.foreach(x => builder.startObject("_parent").field("type", x).endObject())
d.size.foreach(x => builder.startObject("_size").field("enabled", x).endObject())
if (d.fields.nonEmpty) {
builder.startObject("properties")
for (field <- d.fields)
builder.rawField(field.name, FieldBuilderFn(field))
builder.endObject() // end properties
}
if (d.properties.map(_.name).distinct.size != d.properties.size)
throw new RuntimeException("Mapping contained properties with the same name")
if (d.properties.nonEmpty) {
builder.startObject("properties")
for (property <- d.properties)
builder.rawField(property.name, ElasticFieldBuilderFn(property))
builder.endObject() // end properties
}
if (d.meta.nonEmpty) {
builder.startObject("_meta")
for (meta <- d.meta)
meta match {
case (name, s: String) => builder.field(name, s)
case (name, s: Double) => builder.field(name, s)
case (name, s: Boolean) => builder.field(name, s)
case (name, s: Long) => builder.field(name, s)
case (name, s: Float) => builder.field(name, s)
case (name, s: Int) => builder.field(name, s)
}
builder.endObject()
}
d.routing.foreach(routing => {
builder.startObject("_routing").field("required", routing.required)
routing.path.foreach(path => builder.field("path", path))
builder.endObject()
})
if (d.templates.nonEmpty) {
builder.startArray("dynamic_templates")
d.templates.foreach { template =>
builder.rawValue(DynamicTemplateBodyFn.build(template))
}
builder.endArray()
}
}
}
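// Usage sketch (illustrative, not part of the original source): render any
// MappingDefinitionLike `m` as JSON, bare or wrapped in a type name;
// XContentBuilder#string() is assumed to serialize the accumulated content.
//
//   val body: String = MappingBuilderFn.build(m).string()
//   val named: String = MappingBuilderFn.buildWithName(m, "mytype").string()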
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/mappings/MappingBuilderFn.scala | Scala | apache-2.0 | 4,170 |
package com.countrygamer.arcanacraft.common
import com.countrygamer.arcanacraft.client.KeyHandler
import com.countrygamer.arcanacraft.common.extended.ArcanePlayer
import com.countrygamer.arcanacraft.common.init.{Quoms, ACBlocks, ACItems}
import com.countrygamer.arcanacraft.common.network.PacketKeyPress
import com.countrygamer.cgo.common.RegisterHelper
import com.countrygamer.cgo.wrapper.common.PluginWrapper
import cpw.mods.fml.common.event.{FMLInitializationEvent, FMLPostInitializationEvent, FMLPreInitializationEvent}
import cpw.mods.fml.common.{FMLCommonHandler, Mod, SidedProxy}
import cpw.mods.fml.relauncher.Side
/**
*
*
* @author CountryGamer
*/
@Mod(modid = ArcanaCraft.pluginID, name = ArcanaCraft.pluginName, version = "@PLUGIN_VERSION@",
guiFactory = "com.countrygamer.arcanacraft.client.gui.configFactory.ArcanaCraftFactory",
modLanguage = "scala",
dependencies = "required-after:Forge@[10.13,);required-after:cgo@[3.0.2,);required-after:Baubles@[1.0.1.3,)"
)
object ArcanaCraft extends PluginWrapper {
final val pluginID = "arcanacraft"
final val pluginName = "ArcanaCraft"
@SidedProxy(serverSide = "com.countrygamer.arcanacraft.common.CommonProxy",
clientSide = "com.countrygamer.arcanacraft.client.ClientProxy")
var proxy: CommonProxy = null
@Mod.EventHandler
def preInit(event: FMLPreInitializationEvent): Unit = {
super.preInitialize(this.pluginID, this.pluginName, event, proxy, ACOptions, ACItems,
ACBlocks)
RegisterHelper.registerExtendedPlayer("ArcanaPlayer", classOf[ArcanePlayer], deathPersistance = true)
RegisterHelper.registerPacketHandler(this.pluginID, classOf[PacketKeyPress])
RegisterHelper.registerHandler(DiscoveryHandler, null)
Quoms.register()
if (event.getSide == Side.CLIENT) {
FMLCommonHandler.instance().bus().register(KeyHandler)
}
}
@Mod.EventHandler
def init(event: FMLInitializationEvent): Unit = {
super.initialize(event)
}
@Mod.EventHandler
def postInit(event: FMLPostInitializationEvent): Unit = {
super.postInitialize(event)
}
}
| TheTemportalist/ArcanaCraft | src/main/scala/com/countrygamer/arcanacraft/common/ArcanaCraft.scala | Scala | apache-2.0 | 2,041 |
package xyz.discretezoo.web.db.v1
import xyz.discretezoo.web.db.ZooPostgresProfile.api._
case class GraphTypes(
isBipartite: Boolean,
// isCartesianProduct: Option[Boolean],
// isChordal: Option[Boolean],
// isCirculant: Option[Boolean],
// isCircularPlanar: Option[Boolean],
isDistanceRegular: Boolean,
isDistanceTransitive: Boolean,
isEulerian: Boolean,
isForest: Boolean,
// isGallaiTree: Option[Boolean],
isHamiltonian: Option[Boolean],
// isInterval: Option[Boolean],
// isLineGraph: Option[Boolean],
isPartialCube: Boolean,
// isPerfect: Option[Boolean],
// isPlanar: Option[Boolean],
// isPrime: Option[Boolean],
// isRegular: Boolean,
isSplit: Boolean,
isStronglyRegular: Boolean,
isTree: Option[Boolean]
)
case class OtherProperties(
// averageDistance: Option[Float],
chromaticIndex: Option[Int],
// chromaticNumber: Option[Int],
// clusterTransitivity: Option[Float],
// clusteringAverage: Option[Float],
// fractionalChromaticIndex: Option[Int],
// isAsteroidalTripleFree: Option[Boolean],
// isEvenHoleFree: Option[Boolean],
// isLongAntiholeFree: Option[Boolean],
// isLongHoleFree: Option[Boolean],
// isOddHoleFree: Option[Boolean],
isOverfull: Boolean,
// lovaszTheta: Option[Float],
// maximumAverageDegree: Option[Float],
// spanningTreesCount: Option[Int],
// szegedIndex: Option[Int],
// treewidth: Option[Int],
// wienerIndex: Option[Int],
// zagreb1Index: Option[Int],
// zagreb2Index: Option[Int]
)
case class SymmetryProperties(
isArcTransitive: Boolean,
isCayley: Boolean,
isEdgeTransitive: Boolean
// isVertexTransitive: Boolean
)
case class Graph(
zooid: Int,
data: String,
name: Option[String],
order: Int,
averageDegree: Float,
cliqueNumber: Int,
connectedComponentsNumber: Int,
diameter: Option[Int],
// edgeConnectivity: Option[Int],
// genus: Option[Int],
girth: Option[Int],
hasMultipleEdges: Boolean,
numberOfLoops: Int,
oddGirth: Option[Int],
// radius: Option[Int],
size: Int,
trianglesCount: Int,
// vertexConnectivity: Option[Int],
graphTypes: GraphTypes,
symmetryProperties: SymmetryProperties,
otherProperties: OtherProperties
)
class Graphs(tag: Tag) extends Table[Graph](tag, "graph") {
def zooid: Rep[Int] = column[Int]("zooid", O.PrimaryKey)
def data = column[String]("data")
def averageDegree = column[Float]("average_degree")
// def averageDistance = column[Option[Float]]("average_distance")
def name = column[Option[String]]("name")
def order = column[Int]("order")
def chromaticIndex = column[Option[Int]]("chromatic_index")
// def chromaticNumber = column[Option[Int]]("chromatic_number")
def cliqueNumber = column[Int]("clique_number")
// def clusterTransitivity = column[Option[Float]]("cluster_transitivity")
// def clusteringAverage = column[Option[Float]]("clustering_average")
def connectedComponentsNumber = column[Int]("connected_components_number")
def diameter = column[Option[Int]]("diameter")
// def edgeConnectivity = column[Option[Int]]("edge_connectivity")
// def fractionalChromaticIndex = column[Option[Int]]("fractional_chromatic_index")
// def genus = column[Option[Int]]("genus")
def girth = column[Option[Int]]("girth")
def hasMultipleEdges = column[Boolean]("has_multiple_edges")
def isArcTransitive = column[Boolean]("is_arc_transitive")
// def isAsteroidalTripleFree = column[Option[Boolean]]("is_asteroidal_triple_free")
def isBipartite = column[Boolean]("is_bipartite")
// def isCartesianProduct = column[Option[Boolean]]("is_cartesian_product")
def isCayley = column[Boolean]("is_cayley")
// def isChordal = column[Option[Boolean]]("is_chordal")
// def isCirculant = column[Option[Boolean]]("is_circulant")
// def isCircularPlanar = column[Option[Boolean]]("is_circular_planar")
def isDistanceRegular = column[Boolean]("is_distance_regular")
def isDistanceTransitive = column[Boolean]("is_distance_transitive")
def isEdgeTransitive = column[Boolean]("is_edge_transitive")
def isEulerian = column[Boolean]("is_eulerian")
// def isEvenHoleFree = column[Option[Boolean]]("is_even_hole_free")
def isForest = column[Boolean]("is_forest")
// def isGallaiTree = column[Option[Boolean]]("is_gallai_tree")
def isHamiltonian = column[Option[Boolean]]("is_hamiltonian")
// def isInterval = column[Option[Boolean]]("is_interval")
// def isLineGraph = column[Option[Boolean]]("is_line_graph")
// def isLongAntiholeFree = column[Option[Boolean]]("is_long_antihole_free")
// def isLongHoleFree = column[Option[Boolean]]("is_long_hole_free")
// def isOddHoleFree = column[Option[Boolean]]("is_odd_hole_free")
def isOverfull = column[Boolean]("is_overfull")
def isPartialCube = column[Boolean]("is_partial_cube")
// def isPerfect = column[Option[Boolean]]("is_perfect")
// def isPlanar = column[Option[Boolean]]("is_planar")
// def isPrime = column[Option[Boolean]]("is_prime")
def isRegular = column[Boolean]("is_regular")
def isSplit = column[Boolean]("is_split")
def isStronglyRegular = column[Boolean]("is_strongly_regular")
def isTree = column[Option[Boolean]]("is_tree")
def isVertexTransitive = column[Boolean]("is_vertex_transitive")
// def lovaszTheta = column[Option[Float]]("lovasz_theta")
// def maximumAverageDegree = column[Option[Float]]("maximum_average_degree")
def numberOfLoops = column[Int]("number_of_loops")
def oddGirth = column[Option[Int]]("odd_girth")
// def radius = column[Option[Int]]("radius")
def size = column[Int]("size")
// def spanningTreesCount = column[Option[Int]]("spanning_trees_count")
// def szegedIndex = column[Option[Int]]("szeged_index")
// def treewidth = column[Option[Int]]("treewidth")
def trianglesCount = column[Int]("triangles_count")
// def vertexConnectivity = column[Option[Int]]("vertex_connectivity")
// def wienerIndex = column[Option[Int]]("wiener_index")
// def zagreb1Index = column[Option[Int]]("zagreb1_index")
  // def zagreb2Index = column[Option[Int]]("zagreb2_index")
def graphTypesProjection = (
isBipartite,
// isCartesianProduct,
// isChordal,
// isCirculant,
// isCircularPlanar,
isDistanceRegular,
isDistanceTransitive,
isEulerian,
isForest,
// isGallaiTree,
isHamiltonian,
// isInterval,
// isLineGraph,
isPartialCube,
// isPerfect,
// isPlanar,
// isPrime,
// isRegular,
isSplit,
isStronglyRegular,
isTree
) <> ((GraphTypes.apply _).tupled, GraphTypes.unapply)
def otherPropertiesProjection = (
// averageDistance,
chromaticIndex,
// chromaticNumber,
// clusterTransitivity,
// clusteringAverage,
// fractionalChromaticIndex,
// isAsteroidalTripleFree,
// isEvenHoleFree,
// isLongAntiholeFree,
// isLongHoleFree,
// isOddHoleFree,
isOverfull,
// lovaszTheta,
// maximumAverageDegree,
// spanningTreesCount,
// szegedIndex,
// treewidth,
// wienerIndex,
// zagreb1Index,
// zagreb2Index
) <> ((OtherProperties.apply _).tupled, OtherProperties.unapply)
def symmetryPropertiesProjection = (
isArcTransitive,
isCayley,
isEdgeTransitive
// isVertexTransitive
) <> ((SymmetryProperties.apply _).tupled, SymmetryProperties.unapply)
def * = (
zooid,
data,
name,
order,
averageDegree,
cliqueNumber,
connectedComponentsNumber,
diameter,
// edgeConnectivity,
// genus,
girth,
hasMultipleEdges,
numberOfLoops,
oddGirth,
// radius,
size,
trianglesCount,
// vertexConnectivity,
graphTypesProjection,
symmetryPropertiesProjection,
otherPropertiesProjection
) <> ((Graph.apply _).tupled, Graph.unapply)
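  // Query sketch (illustrative, not part of the original source); `db` is an
  // assumed Slick Database instance:
  //
  //   val graphs = TableQuery[Graphs]
  //   db.run(graphs.filter(g => g.isArcTransitive && g.order <= 100).result)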
} | DiscreteZOO/DiscreteZOO-web | src/main/scala/xyz/discretezoo/web/db/v1/Graph.scala | Scala | mit | 9,202 |
package poker.core
/**
* Cards that do not themselves take part in determining the rank of the hand,
* but that may be used to break ties between hands of the same rank
*/
final case class Kickers(cs: Traversable[Card]) extends Ordered[Kickers] {
/** Compare the strongest of the kickers */
// TODO Do we need to consider more than just the strongest kicker cards?
override def compare(that: Kickers): Int = {
cs.map(_.rankAsInt).max - that.cs.map(_.rankAsInt).max
}
override def toString: String = {
val sOpt = if (cs.size > 1) "s" else ""
s"kicker$sOpt ${cs.mkString(", ")}"
}
}
object Kickers {
def apply(card: Card): Kickers = Kickers(Vector(card))
def apply(strings: String*): Kickers = Kickers(strings.map(Card(_)))
def apply(hand: Hand): Kickers = Kickers(hand.cards)
}
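// Tie-breaking sketch (illustrative, not part of the original source); the exact
// string syntax accepted by Card(_) is an assumption:
//
//   val k1 = Kickers("Ah", "9c")
//   val k2 = Kickers("Kd", "9s")
//   k1 > k2 // true: the ace kicker outranks the king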
| kyuksel/poker | src/main/scala/poker/core/Kickers.scala | Scala | mit | 818 |
package com.etsy.conjecture.scalding
import org.apache.commons.math3.linear._
import cascading.pipe.Pipe
import cascading.pipe.joiner.InnerJoin
import cascading.tuple.Fields
import scala.util.Random
object SVD extends Serializable {
import com.twitter.scalding.Dsl._
/**
* based on http://amath.colorado.edu/faculty/martinss/Pubs/2012_halko_dissertation.pdf
* page 121.
*
* generic parameters:
* R: the type of the row name variable.
* C: the type of the column name variable.
*
* input:
* X: a sparse matrix in the form ('row, 'col, 'val), with tuples of type (R, C, Double).
* d: number of principle components / singular values to compute
* extra_power: whether to take the second power of XX' in order to improve the approximation quality.
* reducers: how many reducers to use in the map-reduce stages.
*
* output:
* (U, E, V) with
* U : pipe of ('row, 'vec) where vec is a RealVector
   *  E : pipe with a single field 'eigs holding an Array[Double] of singular values.
   *  V : pipe of ('col, 'vec) where vec is a RealVector
   *  note that each 'vec is a row of U or V; the left and right singular vectors are the columns of those matrices.
*/
def apply[R, C](X : Pipe, d : Int, extra_power : Boolean = true, reducers : Int = 500) : (Pipe, Pipe, Pipe) = {
    // Sample the columns into a thin matrix.
val XS = X.groupBy('row){_.toList[(C, Double)](('col, 'val) -> 'list).reducers(reducers)}
.map('list -> 'vec){l : List[(C, Double)] =>
val a = new Array[Double](d+10)
l.foreach{i =>
val r = new Random(i._1.hashCode.toLong)
(0 until (d+10)).foreach{j =>
a(j) += r.nextGaussian * i._2
}
}
MatrixUtils.createRealVector(a)
}
.project('row, 'vec)
// Multiply by powers of XX'. This improves the approximation quality.
val XXXS = X
.joinWithSmaller('row -> 'row_, XS.rename('row -> 'row_), new InnerJoin(), reducers)
.map(('val, 'vec) -> 'vec){x : (Double, RealVector) => x._2.mapMultiply(x._1)}
.groupBy('col){_.reduce('vec -> 'vec){(a : RealVector, b : RealVector) => a.add(b)}.forceToReducers.reducers(reducers)}
.joinWithSmaller('col -> 'col_, X.rename('col -> 'col_), new InnerJoin(), reducers)
.map(('val, 'vec) -> 'vec){x : (Double, RealVector) => x._2.mapMultiply(x._1)}
.groupBy('row){_.reduce('vec -> 'vec2){(a : RealVector, b : RealVector) => a.add(b)}.forceToReducers.reducers(reducers)}
val Y = (if(extra_power) {
val XXXXXS = X
.joinWithSmaller('row -> 'row_, XXXS.rename('row -> 'row_), new InnerJoin(), reducers)
.map(('val, 'vec2) -> 'vec2){x : (Double, RealVector) => x._2.mapMultiply(x._1)}
.groupBy('col){_.reduce('vec2 -> 'vec2){(a : RealVector, b : RealVector) => a.add(b)}.forceToReducers.reducers(reducers)}
.joinWithSmaller('col -> 'col_, X.rename('col -> 'col_), new InnerJoin(), reducers)
.map(('val, 'vec2) -> 'vec2){x : (Double, RealVector) => x._2.mapMultiply(x._1)}
.groupBy('row){_.reduce('vec2 -> 'vec2){(a : RealVector, b : RealVector) => a.add(b)}.forceToReducers.reducers(reducers)}
XS
.joinWithSmaller('row -> 'row, XXXS, new InnerJoin(), reducers)
.map(('vec, 'vec2) -> 'vec){x : (RealVector, RealVector) => x._1.append(x._2)}
.project('row, 'vec)
.joinWithSmaller('row -> 'row, XXXXXS, new InnerJoin(), reducers)
.map(('vec, 'vec2) -> 'vec){x : (RealVector, RealVector) => x._1.append(x._2)}
.project('row, 'vec)
} else {
XS
.joinWithSmaller('row -> 'row, XXXS, new InnerJoin(), reducers)
.map(('vec, 'vec2) -> 'vec){x : (RealVector, RealVector) => x._1.append(x._2)}
.project('row, 'vec)
})
// What follows is a QR decomposition of Y.
// Note: Y = QR means Y'Y = R'R so R = chol(Y'Y)
val YY = Y.mapTo('vec -> 'mat){x : RealVector => x.outerProduct(x)}
.groupAll{_.reduce('mat -> 'mat){(a : RealMatrix, b : RealMatrix) => a.add(b)}}
.mapTo('mat -> 'mat){m : RealMatrix =>
val chol = new CholeskyDecomposition(m)
new LUDecomposition(chol.getL).getSolver.getInverse
}
// Determine Q = YR^{-1}
val Q = Y.crossWithTiny(YY)
.map(('vec, 'mat) -> 'vec){x : (RealVector, RealMatrix) => x._2.operate(x._1)}
.project('row, 'vec)
// B = Q'X
val B = X.joinWithSmaller('row -> 'row, Q, new InnerJoin(), reducers)
.map(('val, 'vec) -> 'vec){x : (Double, RealVector) => x._2.mapMultiply(x._1)}
.groupBy('col){_.reduce('vec -> 'vec){(a : RealVector, b : RealVector) => a.add(b)}.reducers(reducers).forceToReducers}
val EB = B.mapTo('vec -> 'mat){x : RealVector => x.outerProduct(x)}
.groupAll{_.reduce('mat -> 'mat){(a : RealMatrix, b : RealMatrix) => a.add(b)}}
.mapTo('mat -> ('eigs, 'eigmat, 'orthomat)){m : RealMatrix =>
val e = new EigenDecomposition(m)
(e.getRealEigenvalues,
e.getVT,
e.getVT.multiply(MatrixUtils.createRealDiagonalMatrix(e.getRealEigenvalues.map{v => if(v < 0.00000001) 0.0 else 1.0 / math.sqrt(v)})))
}
val E = EB.project('eigs).map('eigs -> 'eigs){x : Array[Double] => (0 until d).map{i => math.sqrt(x(i))}.toArray}
val U = Q.crossWithTiny(EB.project('eigmat))
.map(('vec, 'eigmat) -> 'vec){x : (RealVector, RealMatrix) => x._2.operate(x._1).getSubVector(0,d)}
.project('row, 'vec)
val V = B.crossWithTiny(EB.project('orthomat))
.map(('vec, 'orthomat) -> 'vec){x : (RealVector, RealMatrix) => x._2.operate(x._1).getSubVector(0,d)}
.project('col, 'vec)
(U, E, V)
}
}
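/**
 * Minimal usage sketch (not part of the original file): running the randomized
 * SVD from a Scalding job. The Tsv source, field names and Long ids are
 * assumptions; any pipe carrying ('row, 'col, 'val) tuples will do.
 */
class SVDExampleJob(args : com.twitter.scalding.Args) extends com.twitter.scalding.Job(args) {
  import com.twitter.scalding._
  val ratings = Tsv(args("input"), ('row, 'col, 'val)).read
  val (u, e, v) = SVD[Long, Long](ratings, d = 20, extra_power = true, reducers = 100)
  // Serialize the row factors; 'vec holds a commons-math RealVector.
  u.mapTo(('row, 'vec) -> ('row, 'vecStr)) { x : (Long, org.apache.commons.math3.linear.RealVector) =>
    (x._1, x._2.toArray.mkString(","))
  }.write(Tsv(args("output")))
}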
| zviri/Conjecture | src/main/scala/com/etsy/conjecture/scalding/SVD.scala | Scala | mit | 5,647 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.io.File
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.internal.SQLConf.BUCKETING_MAX_BUCKETS
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.util.Utils
class CreateTableAsSelectSuite extends DataSourceTest with SharedSparkSession {
import testImplicits._
override def sparkConf: SparkConf =
super.sparkConf
.setAppName("test")
.set("spark.sql.parquet.columnarReaderBatchSize", "4096")
.set("spark.sql.sources.useV1SourceList", "avro")
.set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
.set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
//.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "false")
.set("spark.sql.columnar.window", "false")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
protected override lazy val sql = spark.sql _
private var path: File = null
override def beforeAll(): Unit = {
super.beforeAll()
val ds = (1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}""").toDS()
spark.read.json(ds).createOrReplaceTempView("jt")
}
override def afterAll(): Unit = {
try {
spark.catalog.dropTempView("jt")
Utils.deleteRecursively(path)
} finally {
super.afterAll()
}
}
override def beforeEach(): Unit = {
super.beforeEach()
path = Utils.createTempDir()
path.delete()
}
override def afterEach(): Unit = {
Utils.deleteRecursively(path)
super.afterEach()
}
test("CREATE TABLE USING AS SELECT") {
withTable("jsonTable") {
sql(
s"""
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
| path '${path.toURI}'
|) AS
|SELECT a, b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
sql("SELECT a, b FROM jt"))
}
}
ignore("CREATE TABLE USING AS SELECT based on the file without write permission") {
// setWritable(...) does not work on Windows. Please refer JDK-6728842.
assume(!Utils.isWindows)
val childPath = new File(path.toString, "child")
path.mkdir()
path.setWritable(false)
val e = intercept[SparkException] {
sql(
s"""
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
| path '${childPath.toURI}'
|) AS
|SELECT a, b FROM jt
""".stripMargin)
sql("SELECT a, b FROM jsonTable").collect()
}
assert(e.getMessage().contains("Job aborted"))
path.setWritable(true)
}
ignore("create a table, drop it and create another one with the same name") {
withTable("jsonTable") {
sql(
s"""
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
| path '${path.toURI}'
|) AS
|SELECT a, b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
sql("SELECT a, b FROM jt"))
// Creates a table of the same name with flag "if not exists", nothing happens
sql(
s"""
|CREATE TABLE IF NOT EXISTS jsonTable
|USING json
|OPTIONS (
| path '${path.toURI}'
|) AS
|SELECT a * 4 FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT * FROM jsonTable"),
sql("SELECT a, b FROM jt"))
// Explicitly drops the table and deletes the underlying data.
sql("DROP TABLE jsonTable")
if (path.exists()) Utils.deleteRecursively(path)
// Creates a table of the same name again, this time we succeed.
sql(
s"""
|CREATE TABLE jsonTable
|USING json
|OPTIONS (
| path '${path.toURI}'
|) AS
|SELECT b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT * FROM jsonTable"),
sql("SELECT b FROM jt"))
}
}
test("disallows CREATE TEMPORARY TABLE ... USING ... AS query") {
withTable("t") {
val error = intercept[ParseException] {
sql(
s"""
|CREATE TEMPORARY TABLE t USING PARQUET
|OPTIONS (PATH '${path.toURI}')
|PARTITIONED BY (a)
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
)
}.getMessage
assert(error.contains("Operation not allowed") &&
error.contains("CREATE TEMPORARY TABLE ... USING ... AS query"))
}
}
test("disallows CREATE EXTERNAL TABLE ... USING ... AS query") {
withTable("t") {
val error = intercept[ParseException] {
sql(
s"""
|CREATE EXTERNAL TABLE t USING PARQUET
|OPTIONS (PATH '${path.toURI}')
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
)
}.getMessage
assert(error.contains("Operation not allowed") &&
error.contains("CREATE EXTERNAL TABLE ..."))
}
}
ignore("create table using as select - with partitioned by") {
val catalog = spark.sessionState.catalog
withTable("t") {
sql(
s"""
|CREATE TABLE t USING PARQUET
|OPTIONS (PATH '${path.toURI}')
|PARTITIONED BY (a)
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
)
val table = catalog.getTableMetadata(TableIdentifier("t"))
assert(table.partitionColumnNames == Seq("a"))
}
}
ignore("create table using as select - with valid number of buckets") {
val catalog = spark.sessionState.catalog
withTable("t") {
sql(
s"""
|CREATE TABLE t USING PARQUET
|OPTIONS (PATH '${path.toURI}')
|CLUSTERED BY (a) SORTED BY (b) INTO 5 BUCKETS
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
)
val table = catalog.getTableMetadata(TableIdentifier("t"))
assert(table.bucketSpec == Option(BucketSpec(5, Seq("a"), Seq("b"))))
}
}
test("create table using as select - with invalid number of buckets") {
withTable("t") {
Seq(0, 100001).foreach(numBuckets => {
val e = intercept[AnalysisException] {
sql(
s"""
|CREATE TABLE t USING PARQUET
|OPTIONS (PATH '${path.toURI}')
|CLUSTERED BY (a) SORTED BY (b) INTO $numBuckets BUCKETS
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
)
}.getMessage
assert(e.contains("Number of buckets should be greater than 0 but less than"))
})
}
}
ignore("create table using as select - with overriden max number of buckets") {
def createTableSql(numBuckets: Int): String =
s"""
|CREATE TABLE t USING PARQUET
|OPTIONS (PATH '${path.toURI}')
|CLUSTERED BY (a) SORTED BY (b) INTO $numBuckets BUCKETS
|AS SELECT 1 AS a, 2 AS b
""".stripMargin
val maxNrBuckets: Int = 200000
val catalog = spark.sessionState.catalog
withSQLConf(BUCKETING_MAX_BUCKETS.key -> maxNrBuckets.toString) {
// Within the new limit
Seq(100001, maxNrBuckets).foreach(numBuckets => {
withTable("t") {
sql(createTableSql(numBuckets))
val table = catalog.getTableMetadata(TableIdentifier("t"))
assert(table.bucketSpec == Option(BucketSpec(numBuckets, Seq("a"), Seq("b"))))
}
})
// Over the new limit
withTable("t") {
val e = intercept[AnalysisException](sql(createTableSql(maxNrBuckets + 1)))
assert(
e.getMessage.contains("Number of buckets should be greater than 0 but less than "))
}
}
}
test("SPARK-17409: CTAS of decimal calculation") {
withTable("tab2") {
withTempView("tab1") {
spark.range(99, 101).createOrReplaceTempView("tab1")
val sqlStmt =
"SELECT id, cast(id as long) * cast('1.0' as decimal(38, 18)) as num FROM tab1"
sql(s"CREATE TABLE tab2 USING PARQUET AS $sqlStmt")
checkAnswer(spark.table("tab2"), sql(sqlStmt))
}
}
}
test("specifying the column list for CTAS") {
withTable("t") {
val e = intercept[ParseException] {
sql("CREATE TABLE t (a int, b int) USING parquet AS SELECT 1, 2")
}.getMessage
assert(e.contains("Schema may not be specified in a Create Table As Select (CTAS)"))
}
}
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala | Scala | apache-2.0 | 9,923 |
package akka.persistence.cassandra.journal
import scala.concurrent.duration._
import akka.actor._
import akka.persistence._
import akka.persistence.cassandra.CassandraLifecycle
import akka.testkit._
import com.typesafe.config.ConfigFactory
import org.scalatest._
object CassandraIntegrationSpec {
val config = ConfigFactory.parseString(
"""
|akka.persistence.snapshot-store.plugin = "cassandra-snapshot-store"
|akka.persistence.journal.plugin = "cassandra-journal"
|akka.persistence.journal.max-deletion-batch-size = 3
|akka.persistence.publish-confirmations = on
|akka.persistence.publish-plugin-commands = on
|akka.test.single-expect-default = 10s
|cassandra-journal.max-partition-size = 5
|cassandra-journal.max-result-size = 3
|cassandra-journal.port = 9142
|cassandra-snapshot-store.port = 9142
""".stripMargin)
var testSnapshotStore: ActorRef = _
case class DeleteTo(snr: Long, permanent: Boolean)
class ProcessorA(val persistenceId: String) extends PersistentActor {
def receiveRecover: Receive = handle
def receiveCommand: Receive = {
case DeleteTo(sequenceNr, permanent) =>
deleteMessages(sequenceNr, permanent)
case payload: String =>
persist(payload)(handle)
}
def handle: Receive = {
case payload: String =>
sender ! payload
sender ! lastSequenceNr
sender ! recoveryRunning
}
override private[persistence] lazy val snapshotStore: ActorRef = testSnapshotStore
}
class ProcessorC(val persistenceId: String, val probe: ActorRef) extends PersistentActor {
var last: String = _
def receiveRecover: Receive = {
case SnapshotOffer(_, snapshot: String) =>
last = snapshot
probe ! s"offered-${last}"
case payload: String =>
handle(payload)
}
def receiveCommand: Receive = {
case "snap" =>
saveSnapshot(last)
case SaveSnapshotSuccess(_) =>
probe ! s"snapped-${last}"
case payload: String =>
persist(payload)(handle)
case DeleteTo(sequenceNr, permanent) =>
deleteMessages(sequenceNr, permanent)
}
def handle: Receive = {
case payload: String =>
last = s"${payload}-${lastSequenceNr}"
probe ! s"updated-${last}"
}
override private[persistence] lazy val snapshotStore: ActorRef = testSnapshotStore
}
class ProcessorCNoRecover(override val persistenceId: String, probe: ActorRef) extends ProcessorC(persistenceId, probe) {
override def preStart() = ()
}
class ViewA(val viewId: String, val persistenceId: String, probe: ActorRef) extends PersistentView {
def receive = {
case payload =>
probe ! payload
}
override def autoUpdate: Boolean = false
override def autoUpdateReplayMax: Long = 0
}
}
import CassandraIntegrationSpec._
class CassandraIntegrationSpec extends TestKit(ActorSystem("test", config)) with ImplicitSender with WordSpecLike with Matchers with CassandraLifecycle {
testSnapshotStore = Persistence(system).snapshotStoreFor("cassandra-snapshot-store")
def subscribeToRangeDeletion(probe: TestProbe): Unit =
system.eventStream.subscribe(probe.ref, classOf[JournalProtocol.DeleteMessagesTo])
def awaitRangeDeletion(probe: TestProbe): Unit =
probe.expectMsgType[JournalProtocol.DeleteMessagesTo]
def testIndividualDelete(persistenceId: String, permanent: Boolean): Unit = {
val deleteProbe = TestProbe()
subscribeToRangeDeletion(deleteProbe)
val processor1 = system.actorOf(Props(classOf[ProcessorA], persistenceId))
1L to 16L foreach { i =>
processor1 ! s"a-${i}"
expectMsgAllOf(s"a-${i}", i, false)
}
// delete single message in partition
processor1 ! DeleteTo(12L, permanent)
awaitRangeDeletion(deleteProbe)
system.actorOf(Props(classOf[ProcessorA], persistenceId))
1L to 16L foreach { i =>
if (i != 12L) expectMsgAllOf(s"a-${i}", i, true)
}
// delete whole partition
6L to 10L foreach { i =>
processor1 ! DeleteTo(i, permanent)
awaitRangeDeletion(deleteProbe)
}
system.actorOf(Props(classOf[ProcessorA], persistenceId))
1L to 5L foreach { i =>
expectMsgAllOf(s"a-${i}", i, true)
}
11L to 16L foreach { i =>
if (i != 12L) expectMsgAllOf(s"a-${i}", i, true)
}
}
def testRangeDelete(persistenceId: String, permanent: Boolean): Unit = {
val deleteProbe = TestProbe()
subscribeToRangeDeletion(deleteProbe)
val processor1 = system.actorOf(Props(classOf[ProcessorA], persistenceId))
1L to 16L foreach { i =>
processor1 ! s"a-${i}"
expectMsgAllOf(s"a-${i}", i, false)
}
processor1 ! DeleteTo(3L, permanent)
awaitRangeDeletion(deleteProbe)
system.actorOf(Props(classOf[ProcessorA], persistenceId))
4L to 16L foreach { i =>
expectMsgAllOf(s"a-${i}", i, true)
}
processor1 ! DeleteTo(7L, permanent)
awaitRangeDeletion(deleteProbe)
system.actorOf(Props(classOf[ProcessorA], persistenceId))
8L to 16L foreach { i =>
expectMsgAllOf(s"a-${i}", i, true)
}
}
"A Cassandra journal" should {
"write and replay messages" in {
val processor1 = system.actorOf(Props(classOf[ProcessorA], "p1"))
1L to 16L foreach { i =>
processor1 ! s"a-${i}"
expectMsgAllOf(s"a-${i}", i, false)
}
val processor2 = system.actorOf(Props(classOf[ProcessorA], "p1"))
1L to 16L foreach { i =>
expectMsgAllOf(s"a-${i}", i, true)
}
processor2 ! "b"
expectMsgAllOf("b", 17L, false)
}
// "not replay messages marked as deleted" in {
// testIndividualDelete("p3", false)
// }
// "not replay permanently deleted messages" in {
// testIndividualDelete("p4", true)
// }
"not replay messages marked as range-deleted" in {
testRangeDelete("p5", false)
}
"not replay permanently range-deleted messages" in {
testRangeDelete("p6", true)
}
"replay messages incrementally" in {
val probe = TestProbe()
val processor1 = system.actorOf(Props(classOf[ProcessorA], "p7"))
1L to 6L foreach { i =>
processor1 ! s"a-${i}"
expectMsgAllOf(s"a-${i}", i, false)
}
val view = system.actorOf(Props(classOf[ViewA], "p7-view", "p7", probe.ref))
probe.expectNoMsg(200.millis)
view ! Update(true, replayMax = 3L)
probe.expectMsg(s"a-1")
probe.expectMsg(s"a-2")
probe.expectMsg(s"a-3")
probe.expectNoMsg(200.millis)
view ! Update(true, replayMax = 3L)
probe.expectMsg(s"a-4")
probe.expectMsg(s"a-5")
probe.expectMsg(s"a-6")
probe.expectNoMsg(200.millis)
}
}
"A processor" should {
"recover from a snapshot with follow-up messages" in {
val processor1 = system.actorOf(Props(classOf[ProcessorC], "p10", testActor))
processor1 ! "a"
expectMsg("updated-a-1")
processor1 ! "snap"
expectMsg("snapped-a-1")
processor1 ! "b"
expectMsg("updated-b-2")
system.actorOf(Props(classOf[ProcessorC], "p10", testActor))
expectMsg("offered-a-1")
expectMsg("updated-b-2")
}
"recover from a snapshot with follow-up messages and an upper bound" in {
val processor1 = system.actorOf(Props(classOf[ProcessorCNoRecover], "p11", testActor))
processor1 ! Recover()
processor1 ! "a"
expectMsg("updated-a-1")
processor1 ! "snap"
expectMsg("snapped-a-1")
2L to 7L foreach { i =>
processor1 ! "a"
expectMsg(s"updated-a-${i}")
}
val processor2 = system.actorOf(Props(classOf[ProcessorCNoRecover], "p11", testActor))
processor2 ! Recover(toSequenceNr = 3L)
expectMsg("offered-a-1")
expectMsg("updated-a-2")
expectMsg("updated-a-3")
processor2 ! "d"
expectMsg("updated-d-8")
}
"recover from a snapshot without follow-up messages inside a partition" in {
val processor1 = system.actorOf(Props(classOf[ProcessorC], "p12", testActor))
processor1 ! "a"
expectMsg("updated-a-1")
processor1 ! "snap"
expectMsg("snapped-a-1")
val processor2 = system.actorOf(Props(classOf[ProcessorC], "p12", testActor))
expectMsg("offered-a-1")
processor2 ! "b"
expectMsg("updated-b-2")
}
"recover from a snapshot without follow-up messages at a partition boundary (where next partition is invalid)" in {
val processor1 = system.actorOf(Props(classOf[ProcessorC], "p13", testActor))
1L to 5L foreach { i =>
processor1 ! "a"
expectMsg(s"updated-a-${i}")
}
processor1 ! "snap"
expectMsg("snapped-a-5")
val processor2 = system.actorOf(Props(classOf[ProcessorC], "p13", testActor))
expectMsg("offered-a-5")
processor2 ! "b"
expectMsg("updated-b-6")
}
"recover from a snapshot without follow-up messages at a partition boundary (where next partition contains a message marked as deleted)" in {
val deleteProbe = TestProbe()
subscribeToRangeDeletion(deleteProbe)
val processor1 = system.actorOf(Props(classOf[ProcessorC], "p14", testActor))
1L to 5L foreach { i =>
processor1 ! "a"
expectMsg(s"updated-a-${i}")
}
processor1 ! "snap"
expectMsg("snapped-a-5")
processor1 ! "a"
expectMsg("updated-a-6")
processor1 ! DeleteTo(6L, false)
awaitRangeDeletion(deleteProbe)
val processor2 = system.actorOf(Props(classOf[ProcessorC], "p14", testActor))
expectMsg("offered-a-5")
processor2 ! "b"
expectMsg("updated-b-7")
}
"recover from a snapshot without follow-up messages at a partition boundary (where next partition contains a permanently deleted message)" in {
val deleteProbe = TestProbe()
subscribeToRangeDeletion(deleteProbe)
val processor1 = system.actorOf(Props(classOf[ProcessorC], "p15", testActor))
1L to 5L foreach { i =>
processor1 ! "a"
expectMsg(s"updated-a-${i}")
}
processor1 ! "snap"
expectMsg("snapped-a-5")
processor1 ! "a"
expectMsg("updated-a-6")
processor1 ! DeleteTo(6L, true)
awaitRangeDeletion(deleteProbe)
val processor2 = system.actorOf(Props(classOf[ProcessorC], "p15", testActor))
expectMsg("offered-a-5")
processor2 ! "b"
expectMsg("updated-b-6") // sequence number of permanently deleted message can be re-used
}
"properly recover after all messages have been deleted" in {
val deleteProbe = TestProbe()
subscribeToRangeDeletion(deleteProbe)
val p = system.actorOf(Props(classOf[ProcessorA], "p16"))
p ! "a"
expectMsgAllOf("a", 1L, false)
p ! DeleteTo(1L, true)
awaitRangeDeletion(deleteProbe)
val r = system.actorOf(Props(classOf[ProcessorA], "p16"))
r ! "b"
expectMsgAllOf("b", 1L, false)
}
}
}
| bjgbeelen/AkkaPersistenceCassandraWithAkka2.4 | src/test/scala/akka/persistence/cassandra/journal/CassandraIntegrationSpec.scala | Scala | apache-2.0 | 11,017 |
package pl.touk.nussknacker.engine.lite.kafka
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.lang3.concurrent.BasicThreadFactory
import org.apache.kafka.common.errors.InterruptException
import pl.touk.nussknacker.engine.lite.kafka.TaskStatus.{DuringDeploy, Restarting, Running, TaskStatus}
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.{Executors, TimeUnit}
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
//Runs a task in a loop, in several parallel copies, restarting on errors
//TODO: probably there is some util for that? :)
class TaskRunner(taskName: String,
taskParallelCount: Int,
singleRun: String => Task,
terminationTimeout: Duration,
waitAfterFailureDelay: FiniteDuration) extends AutoCloseable with LazyLogging {
def status(): TaskStatus = Option(tasks).filterNot(_.isEmpty)
.map(_.maxBy(_.status)).map(_.status)
.getOrElse(Running)
private val threadFactory = new BasicThreadFactory.Builder()
.namingPattern(s"worker-$taskName-%d")
.build()
private val threadPool = Executors.newFixedThreadPool(taskParallelCount, threadFactory)
private val tasks: List[LoopUntilClosed] = (0 until taskParallelCount).map(idx => new LoopUntilClosed(() => singleRun(s"task-$idx"), waitAfterFailureDelay)).toList
def run(implicit ec: ExecutionContext): Future[Unit] = {
Future.sequence(runAllTasks()).map(_ => ())
}
/*
   This is a bit tricky: we split the run method because we have to use two different ExecutionContexts:
   - one is backed by the fixed threadPool and runs the Tasks
   - the other (passed into run()) is used to sequence over the list of Futures and do the final mapping
*/
private def runAllTasks(): List[Future[Unit]] = {
val ecForRunningTasks = ExecutionContext.fromExecutor(threadPool)
tasks.map { task =>
Future {
task.run()
}(ecForRunningTasks)
}
}
override def close(): Unit = {
tasks.foreach(_.close())
logger.debug("Tasks notified of closure, closing thread pool...")
threadPool.shutdownNow()
val terminatedSuccessfully = threadPool.awaitTermination(terminationTimeout.toSeconds, TimeUnit.SECONDS)
if (terminatedSuccessfully) {
logger.info("Thread pool terminated successfully")
} else {
logger.error("Thread pool termination timeout")
}
}
}
object TaskStatus extends Enumeration {
type TaskStatus = Value
// Value.id determines the precedence of statuses (i.e. if one of the tasks is Restarting while others are During Deploy, Restarting status should be displayed)
val Running: Value = Value(0, "RUNNING")
val DuringDeploy: Value = Value(1, "DURING_DEPLOY")
val Restarting: Value = Value(2, "RESTARTING")
}
//Assumptions: run will be invoked only after successful init, close will be invoked if init fails
trait Task extends Runnable with AutoCloseable {
def init(): Unit
}
class LoopUntilClosed(prepareSingleRunner: () => Task, waitAfterFailureDelay: FiniteDuration) extends Runnable with AutoCloseable with LazyLogging {
private val closed = new AtomicBoolean(false)
@volatile var status: TaskStatus = DuringDeploy
override def run(): Unit = {
//we recreate runner until closed
var attempt = 1
var previousErrorWithTimestamp = Option.empty[(Throwable, Long)]
while (!closed.get()) {
val wasFailureDuringSleep = handleSleepBeforeRestart(previousErrorWithTimestamp).exists(_.isFailure)
// in case of failure during sleep we should check main loop condition again instead of initializing run again
if (!wasFailureDuringSleep) {
logger.info(s"Starting runner, attempt: $attempt")
previousErrorWithTimestamp = handleOneRunLoop().failed.toOption.map((_, System.currentTimeMillis()))
attempt += 1
}
}
logger.info("Finishing runner")
}
private def handleSleepBeforeRestart(previousErrorWithTimestamp: Option[(Throwable, Long)]): Option[Try[Unit]] = {
previousErrorWithTimestamp.map {
case (e, failureTimestamp) =>
val delayToWait = failureTimestamp + waitAfterFailureDelay.toMillis - System.currentTimeMillis()
if (delayToWait > 0) {
logger.warn(s"Failed to run. Waiting: $delayToWait millis to restart...", e)
tryWithInterruptedHandle {
status = Restarting
Thread.sleep(delayToWait)
} { }
} else {
logger.warn(s"Failed to run. Restarting...", e)
          Success(())
}
}
}
//We don't use Using.resources etc. because we want to treat throwing in .close() differently - this should be propagated
//and handled differently as it leads to resource leak, so we'll let uncaughtExceptionHandler deal with that
private def handleOneRunLoop(): Try[Unit] = {
val singleRun = prepareSingleRunner()
tryWithInterruptedHandle {
singleRun.init()
status = Running
//we loop until closed or exception occurs, then we close ourselves
while (!closed.get()) {
singleRun.run()
}
} {
singleRun.close()
}
}
private def tryWithInterruptedHandle(runWithSomeWaiting: => Unit)
(handleFinally: => Unit): Try[Unit] = {
try {
runWithSomeWaiting
      Success(())
} catch {
/*
After setting closed = true, we close pool, which interrupts all threads.
In most cases Interrupt(ed)Exception will be thrown - either from Await.result or consumer.poll (in second case it'll be wrapped)
We want to ignore it and proceed with normal closing - otherwise there will be errors in closing consumer
*/
case _: InterruptedException | _: InterruptException if closed.get() =>
//This is important - as it's the only way to clear interrupted flag...
val wasInterrupted = Thread.interrupted()
logger.debug(s"Interrupted: $wasInterrupted, finishing normally")
        Success(())
case NonFatal(e) =>
Failure(e)
} finally {
handleFinally
}
}
override def close(): Unit = {
closed.set(true)
}
}
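/**
 * Minimal usage sketch (not part of the original file): driving a trivial Task
 * with the runner. Pool size, timeouts and the global ExecutionContext are
 * arbitrary choices for illustration.
 */
object TaskRunnerExample {
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.concurrent.duration._

  private class NoopTask(name: String) extends Task {
    def init(): Unit = println(s"$name: init")
    // One unit of work per loop iteration; run() is invoked until close().
    def run(): Unit = Thread.sleep(100)
    def close(): Unit = println(s"$name: closed")
  }

  def main(args: Array[String]): Unit = {
    val runner = new TaskRunner("example", taskParallelCount = 2,
      singleRun = name => new NoopTask(name),
      terminationTimeout = 10.seconds, waitAfterFailureDelay = 1.second)
    runner.run // the returned future completes only once the runner is closed
    Thread.sleep(1000)
    runner.close() // interrupts the workers and awaits pool termination
  }
}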
| TouK/nussknacker | engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/LoopUntilClosed.scala | Scala | apache-2.0 | 6,285 |
package org.openapitools.models
import io.circe._
import io.finch.circe._
import io.circe.generic.semiauto._
import io.circe.java8.time._
import org.openapitools._
import org.openapitools.models.FavoriteImpllinks
import org.openapitools.models.PipelineImpl
/**
*
* @param Underscoreclass
* @param Underscorelinks
* @param item
*/
case class FavoriteImpl(Underscoreclass: Option[String],
Underscorelinks: Option[FavoriteImpllinks],
item: Option[PipelineImpl]
)
object FavoriteImpl {
/**
* Creates the codec for converting FavoriteImpl from and to JSON.
*/
implicit val decoder: Decoder[FavoriteImpl] = deriveDecoder
implicit val encoder: ObjectEncoder[FavoriteImpl] = deriveEncoder
}
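/**
 * Minimal usage sketch (not part of the original file): exercising the derived
 * codec. Requires circe-parser on the classpath; the payload is made up, and a
 * missing optional key simply decodes to None.
 */
object FavoriteImplExample {
  def main(args: Array[String]): Unit = {
    val decoded = io.circe.parser.decode[FavoriteImpl]("""{"Underscoreclass":"favorite"}""")
    println(decoded) // Right(FavoriteImpl(Some(favorite),None,None))
  }
}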
| cliffano/swaggy-jenkins | clients/scala-finch/generated/src/main/scala/org/openapitools/models/FavoriteImpl.scala | Scala | mit | 765 |
/*
* Exceptions.scala
*
* Updated: Dec 3, 2014
*
* Copyright (c) 2014, CodeMettle
*/
package com.codemettle.akkasnmp4j.transport
import java.net.InetSocketAddress
import scala.util.control.NoStackTrace
/**
* @author steven
*
*/
case class BindFailedException(bindAddr: InetSocketAddress)
extends Exception(s"Binding failed to $bindAddr") with NoStackTrace
case class TransportShuttingDownException() extends Exception("Transport is stopping") with NoStackTrace
case class TransportRequestTimeout(method: String) extends Exception(s"Timed out in $method") with NoStackTrace
| CodeMettle/akka-snmp4j | src/main/scala/com/codemettle/akkasnmp4j/transport/Exceptions.scala | Scala | apache-2.0 | 591 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.master
import scala.collection.mutable
import tdb.Constants.TaskId
class Scheduler {
private val workers = mutable.Buffer[TaskId]()
def addWorker(workerId: TaskId) {
workers += workerId
}
def removeWorker(workerId: TaskId) {
workers -= workerId
}
  /** Round-robin: rotate the current head to the back and hand it out. */
  def nextWorker(): TaskId = {
val w = workers.head
workers -= w
workers += w
w
}
}
| twmarshall/tdb | core/src/main/scala/tdb/master/Scheduler.scala | Scala | apache-2.0 | 1,005 |
package com.rydgel.scalagram.responses
import play.api.libs.json.Json
/** Instagram returns the id as a String in some endpoints, hence the String-typed id here. */
case class LocationSearch(id: Option[String], latitude: Option[Double], longitude: Option[Double], name: Option[String])
object LocationSearch {
implicit val LocationSearchReads = Json.reads[LocationSearch]
implicit val LocationSearchWrites = Json.writes[LocationSearch]
}
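/**
 * Minimal usage sketch (not part of the original file): round-tripping a
 * location through the Play JSON formats above. The payload is made up, but
 * mirrors the string-typed id this case class works around.
 */
object LocationSearchExample {
  import play.api.libs.json.Json
  def main(args: Array[String]): Unit = {
    val json = Json.parse("""{"id":"12345","latitude":48.85,"longitude":2.35,"name":"Paris"}""")
    val loc = json.as[LocationSearch]
    println(loc.name)         // Some(Paris)
    println(Json.toJson(loc)) // back to JSON via LocationSearchWrites
  }
}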
| Rydgel/scalagram | src/main/scala/com/rydgel/scalagram/responses/LocationSearch.scala | Scala | mit | 425 |
package com.kodekutters.collector
import akka.actor.{ActorLogging, Actor}
import scala.xml.PrettyPrinter
import com.scalakml.io.KmlPrintWriter
import com.scalakml.kml._
import com.kodekutters.gpsd4scala.collector.Collector
import com.kodekutters.gpsd4scala.protocol.{TPV, Report}
import com.kodekutters.gpsd4scala.messages.Collect
/**
* Author: Ringo Wathelet
* Date: 30/03/13
* Version: 1
*/
/**
* another example collector.
*
* collect TPV location messages, create a NetworkLink file and a kml file
* with a placemark at the GPS location.
*
* Launch Google Earth by double clicking on the created NetworkLink file.
 * The location in the kml file is polled by the NetworkLink every 2 seconds in this example.
*
*/
object GoogleEarthCollector {
def apply(fileName: String): GoogleEarthCollector = new GoogleEarthCollector(fileName)
}
/**
* An example collector showing the gps location in Google Earth
* @param testFile the kml file name containing the location,
* an associated NetworkLink file will also be created
*/
class GoogleEarthCollector(val testFile: String) extends Actor with Collector with ActorLogging {
// create a kml network link
val kml = new Kml(new NetworkLink("TestLink", new Link(testFile, OnInterval, 2)))
// print the network link to file
new KmlPrintWriter("NetworkLink_" + testFile).write(kml, new PrettyPrinter(80, 3))
def receive = {
case Collect(info) => collect(info)
}
def collect(info: Report) {
info match {
case tpv: TPV =>
        // proceed only when gpsd reports a 2D or 3D fix; mode may be absent
        if (tpv.mode.exists(_ > 1)) {
          // must have at least some values for the lat/lon
          if (tpv.lon.isDefined && tpv.lat.isDefined) {
            val alt = if (tpv.alt.isDefined) tpv.alt.get else 0.0
            val kml = new Kml(new Placemark("test", new Point(RelativeToGround, tpv.lon.get, tpv.lat.get, alt)))
            // write the placemark to the kml file
            new KmlPrintWriter(testFile).write(kml, new PrettyPrinter(80, 3))
          }
        }
      case _ => // other report types carry no position; nothing to do
    }
}
} | workingDog/Gpsd4Scala | src/main/scala/com/kodekutters/gpsd4scala/collector/GoogleEarthCollector.scala | Scala | bsd-3-clause | 2,064 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset}
/**
* A wrapper for org.apache.spark.ml.Transformer.
* Extends MlTransformer and override process to gain compatibility with
* both spark 1.5 and spark 2.0.
*/
private[ml] abstract class DLTransformerBase[M <: DLTransformerBase[M]]
extends Model[M] {
/**
   * Performs the actual transformation, converting feature columns (MLlib Vectors or Arrays) into the Seq format the underlying model expects.
*/
protected def internalTransform(dataFrame: DataFrame): DataFrame
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
internalTransform(dataset.toDF())
}
override def copy(extra: ParamMap): M = defaultCopy(extra)
}
| qiuxin2012/BigDL | spark/spark-version/2.0/src/main/scala/org/apache/spark/ml/DLTransformerBase.scala | Scala | apache-2.0 | 1,379 |
package io.kaitai.struct.format
import io.kaitai.struct.{JavaScriptImporter, Main, RuntimeConfig}
import scala.concurrent.Future
import scala.scalajs.js
import scala.concurrent.ExecutionContext.Implicits.global
object JavaScriptKSYParser {
/**
   * Converts the first YAML file (given as a JavaScript object) into a
   * ClassSpecs object, fully imported and precompiled.
* @param yaml first KSY file (YAML), given as JavaScript object
* @return future of ClassSpecs object
*/
def yamlToSpecs(yaml: Any, importer: JavaScriptImporter, config: RuntimeConfig): Future[ClassSpecs] = {
val yamlScala = yamlJavascriptToScala(yaml)
val firstSpec = ClassSpec.fromYaml(yamlScala, None)
val specs = new JavaScriptClassSpecs(importer, firstSpec)
Main.importAndPrecompile(specs, config).map((_) => specs)
}
def yamlJavascriptToScala(src: Any): Any = {
src match {
case array: js.Array[AnyRef] =>
array.toList.map(yamlJavascriptToScala)
case _: String | _: Int | _: Double | _: Boolean =>
src
case dict =>
dict.asInstanceOf[js.Dictionary[AnyRef]].toMap.mapValues(yamlJavascriptToScala)
}
}
}
| kaitai-io/kaitai_struct_compiler | js/src/main/scala/io/kaitai/struct/format/JavaScriptKSYParser.scala | Scala | gpl-3.0 | 1,162 |
package outer
package inner
object Foo {
// val a: Int = 1
}
| som-snytt/dotty | tests/pos/false-companion/01_outerinnerFoo_2.scala | Scala | apache-2.0 | 63 |
package com.antiparagon.cvexperimenter.chessscanner
import java.awt.Rectangle
import java.util
import com.typesafe.scalalogging.Logger
import org.opencv.core._
import org.opencv.imgcodecs.Imgcodecs
import org.opencv.imgproc.Imgproc
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* Created by wmckay on 11/7/16.
*/
object ChessboardFinderContoursAlgorithm {
def apply(): ChessboardFinderContoursAlgorithm = {
new ChessboardFinderContoursAlgorithm
}
def apply(debugImagePrefix: String): ChessboardFinderContoursAlgorithm = {
val chessboardFinder = new ChessboardFinderContoursAlgorithm
chessboardFinder.outputDebugImgs = true
chessboardFinder.debugImgPrefix = debugImagePrefix + chessboardFinder.debugImgPrefix
chessboardFinder
}
}
/**
* Uses the OpenCV Imgproc.findContours() function to find rectangles. It looks
* for a rectangle that contains more than the rectangles inside it that are
* about 1/64 the area of the inclosing rectangle.
*
* Created by wmckay on 10/12/16.
*/
class ChessboardFinderContoursAlgorithm {
val log = Logger(LoggerFactory.getLogger("ChessboardFinderContoursAlgorithm"))
/*
For debugging of the algorithm. Outputs intermediate stage images.
*/
var outputDebugImgs = false
// Prefix for debug images
var debugImgPrefix = "ChessboardFinderContoursAlgorithm"
/**
* Finds a chessboard in an image and returns a cropped image of
* just the chessboard.
*
* @param inImg with a chessboard
* @return Option cropped image of only the chessboard
*/
def getChessboard(inImg: Mat): Option[Mat] = {
if(inImg == null) {
log.debug("Input image null")
return None
}
if(inImg.empty()) {
log.debug("Input image empty")
return None
}
val bbox = findChessboard(inImg)
bbox match {
case Some(bbox) => {
Some(new Mat(inImg, bbox))
}
case None => {
log.debug("No chessboard found")
None
}
}
}
/**
* Finds a chessboard in an image and returns the rectangle of the found chessboard.
*
* @param inImg that contains a chessboard
* @return Option rectangle coordinates of the chessboard
*/
def findChessboard(inImg: Mat): Option[Rect] = {
import scala.collection.JavaConversions._
if (inImg == null || inImg.empty()) {
return None
}
val tempImg = new Mat
val debugImg = inImg.clone()
Imgproc.cvtColor(inImg, tempImg, Imgproc.COLOR_BGR2GRAY)
//Imgproc.threshold(tempImg, tempImg, 0, 255, Imgproc.THRESH_BINARY + Imgproc.THRESH_OTSU)
Imgproc.Canny(tempImg, tempImg, 0, 0)
Imgproc.threshold(tempImg, tempImg, 0, 255, Imgproc.THRESH_BINARY + Imgproc.THRESH_OTSU)
val contours = new util.ArrayList[MatOfPoint]()
val rectangles = ArrayBuffer[Rect]()
val hierarchy = new Mat
Imgproc.findContours(tempImg, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE)
for (contour <- contours) {
val area = Imgproc.contourArea(contour)
//println(s"Area: $area")
if (area > 100.0) {
//contourSave = contour.clone().asInstanceOf[MatOfPoint]
contour.convertTo(contour, CvType.CV_32FC2)
val contour2f = new MatOfPoint2f(contour)
val peri = Imgproc.arcLength(contour2f, true)
val approx = new MatOfPoint2f
Imgproc.approxPolyDP(contour2f, approx, 0.02 * peri, true)
//println(s"Size: ${approx.size()}")
if(approx.rows == 4) {
val rectPoints = new MatOfPoint
approx.convertTo(rectPoints, CvType.CV_32S)
val rect = getBoundingRect(rectPoints)
rectangles += rect
Imgproc.rectangle(debugImg, rect.tl, rect.br, new Scalar(0.0, 255.0, 0.0), 3)
}
}
}
if(outputDebugImgs) {
Imgcodecs.imwrite(debugImgPrefix + "_Rectangles.png", debugImg)
}
return scanRectList(rectangles)
}
/**
 * Scans the list of rectangles and returns the one enclosing the most smaller
 * rectangles. A candidate qualifies if it contains at least 3 rectangles, each
 * at most 1/50th the area of the containing rectangle.
*
* @param rectList
* @return Option rectangle coordinates of the rectangle
*/
def scanRectList(rectList: ArrayBuffer[Rect]): Option[Rect] = {
if (rectList == null || rectList.size < 4) {
return None
}
//println(s"Number of rectangles: ${rectList.size}")
val rectMap = mutable.Map[Rectangle, ArrayBuffer[Rectangle]]()
for (rect <- rectList) {
// Convert to Java Rectangle
val rectangle = rect2Rectangle(rect)
// Check if the lookup contains the Rectangle yet
if(!rectMap.contains(rectangle)) {
rectMap.put(rectangle, ArrayBuffer[Rectangle]())
}
}
for (rect <- rectList) {
// Convert to Java Rectangle
val rectangle = rect2Rectangle(rect)
// Calculate the area of the rectangle
val area = rectangle.width.toDouble * rectangle.height.toDouble
// Loop through the lookup of Rectangles
for((jRect, rList) <- rectMap) {
if(jRect.contains(rectangle)) {
val ja = jRect.width.toDouble * jRect.height.toDouble
val multiple = ja / area
//log.debug(s"Rectangle area $ja has rectangle with $multiple multiple inside")
if(multiple > 50.0) {
rectMap(jRect) += rectangle
}
}
}
}
var board: Rectangle = null
var max = 0
for((jRect, rList) <- rectMap) {
      val squaresInside = rList.size
//log.debug(s"Rectangle $jRect has $squaresInside rectangles inside")
if(squaresInside >= 3) {
if(squaresInside > max) {
board = jRect
max = squaresInside
//log.debug(s"Found a new rectangle with $max rectangles inside: $board")
}
}
}
if(board == null)
return None
else
return Some(new Rect(board.x, board.y, board.width, board.height))
}
def rect2Rectangle(rect: Rect): Rectangle = {
val rectangle = new Rectangle(rect.x, rect.y, rect.width, rect.height)
rectangle
}
def getBoundingRect(rect: MatOfPoint): Rect = {
val bbox = new Rect
var minX = Double.MaxValue
var maxX = Double.MinValue
var minY = Double.MaxValue
var maxY = Double.MinValue
for(point <- rect.toArray) {
if(point.x > maxX) maxX = point.x
if(point.x < minX) minX = point.x
if(point.y > maxY) maxY = point.y
if(point.y < minY) minY = point.y
}
bbox.x = minX.toInt
bbox.y = minY.toInt
bbox.width = (maxX - minX).toInt
bbox.height = (maxY - minY).toInt
bbox
}
}
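/**
 * Minimal usage sketch (not part of the original file): cropping a board out
 * of a photo. The image path is an assumption, and the OpenCV native library
 * must be loaded once per JVM before any Mat is created.
 */
object ChessboardFinderExample {
  def main(args: Array[String]): Unit = {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME)
    val img = Imgcodecs.imread("board.png")
    ChessboardFinderContoursAlgorithm().getChessboard(img) match {
      case Some(board) => Imgcodecs.imwrite("board_cropped.png", board)
      case None => println("no chessboard found")
    }
  }
}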
| antiparagon/CVExperimenter | src/main/scala/com/antiparagon/cvexperimenter/chessscanner/ChessboardFinderContoursAlgorithm.scala | Scala | mit | 6,780 |
package com.supergloo.ml.streaming
import org.apache.spark._
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types._
import org.apache.spark.sql.functions._
import org.apache.spark.sql._
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.ml.clustering.KMeans
/**
* Refactor of
* https://github.com/caroljmcdonald/spark-ml-kmeans-uber/blob/master/ClusterUber.scala
* Not that the original is bad or incorrect, I just wanted to play with it and
* adjust it so I can learn from it.
*
* Consider this refactor a hat tip to the original
*
* Assumes the training data csv has been downloaded from
* https://github.com/caroljmcdonald/spark-ml-kmeans-uber/tree/master/data
* and saved in /tmp/csv as you'll see in code below
*/
object UberTrain {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("Uber Train")
conf.setIfMissing("spark.master", "local[*]")
val spark = SparkSession
.builder()
.config(conf)
.getOrCreate()
import spark.sqlContext.implicits._
import org.apache.spark.sql.functions._
val df = spark.read.option("header", "false")
.csv("file:///tmp/csv/*.csv")
.withColumnRenamed("_c0", "dt")
.withColumnRenamed("_c1", "lat")
.withColumnRenamed("_c2", "lon")
.withColumnRenamed("_c3", "base")
.withColumn("dt", to_date($"dt"))
.withColumn("lat", $"lat".cast("decimal"))
.withColumn("lon", $"lon".cast("decimal"))
.withColumn("base", $"base")
.as[Uber]
df.cache
df.show
df.schema
val featureCols = Array("lat", "lon")
val assembler = new VectorAssembler().setInputCols(featureCols).setOutputCol("features")
val df2 = assembler.transform(df)
val Array(trainingData, testData) = df2.randomSplit(Array(0.7, 0.3), 5043)
val kmeans = new KMeans().setK(10).setFeaturesCol("features").setMaxIter(3)
val model = kmeans.fit(trainingData)
println("Final Centers: ")
model.clusterCenters.foreach(println)
val categories = model.transform(testData)
categories.show
categories.createOrReplaceTempView("uber")
//Which cluster had highest number of pickups by month, day, hour?
categories.select(month($"dt").alias("month"), dayofmonth($"dt")
.alias("day"), hour($"dt").alias("hour"), $"prediction")
.groupBy("month", "day", "hour", "prediction").
agg(count("prediction").alias("count")).orderBy("day", "hour", "prediction").show
//Which cluster had highest number of pickups by hour?
categories.select(hour($"dt").alias("hour"), $"prediction")
.groupBy("hour", "prediction").agg(count("prediction")
.alias("count")).orderBy(desc("count")).show
// number of pickups per cluster
categories.groupBy("prediction").count().show()
// pick your preference DataFrame API above or can use SQL directly
spark.sql(" select prediction, count(prediction) as count from uber group by prediction").show
spark.sql("SELECT hour(uber.dt) as hr,count(prediction) as ct FROM uber group By hour(uber.dt)").show
// to save the categories dataframe as json data
// categories.select("dt", "base", "prediction").write.format("json").save("uberclusterstest")
// to save the model
// model.write.overwrite().save("/user/user01/data/savemodel")
// to re-load the model
// val sameModel = KMeansModel.load("/user/user01/data/savemodel")
}
}
case class Uber(dt: java.sql.Timestamp, lat: BigDecimal,
lon: BigDecimal, base: String)
| tmcgrath/spark-2-streaming | src/main/scala/com/supergloo/ml/streaming/UberTrain.scala | Scala | apache-2.0 | 3,569 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.log.LogManager
import java.util.concurrent.CountDownLatch
import java.util.concurrent.atomic.AtomicBoolean
import kafka.utils.{Mx4jLoader, Utils, SystemTime, KafkaScheduler, Logging}
import kafka.network.{SocketServerStats, SocketServer}
import java.io.File
/**
* Represents the lifecycle of a single Kafka broker. Handles all functionality required
* to start up and shutdown a single Kafka node.
*/
class KafkaServer(val config: KafkaConfig) extends Logging {
val CLEAN_SHUTDOWN_FILE = ".kafka_cleanshutdown"
private val isShuttingDown = new AtomicBoolean(false)
private val shutdownLatch = new CountDownLatch(1)
private val statsMBeanName = "kafka:type=kafka.SocketServerStats"
var socketServer: SocketServer = null
val scheduler = new KafkaScheduler(1, "kafka-logcleaner-", false)
private var logManager: LogManager = null
/**
* Start up API for bringing up a single instance of the Kafka server.
* Instantiates the LogManager, the SocketServer and the request handlers - KafkaRequestHandlers
*/
def startup() {
info("Starting Kafka server...")
var needRecovery = true
val cleanShutDownFile = new File(new File(config.logDir), CLEAN_SHUTDOWN_FILE)
if (cleanShutDownFile.exists) {
needRecovery = false
cleanShutDownFile.delete
}
logManager = new LogManager(config,
scheduler,
SystemTime,
1000L * 60 * config.logCleanupIntervalMinutes,
1000L * 60 * 60 * config.logRetentionHours,
needRecovery)
val handlers = new KafkaRequestHandlers(logManager)
socketServer = new SocketServer(config.port,
config.numThreads,
config.monitoringPeriodSecs,
handlers.handlerFor,
config.socketSendBuffer,
config.socketReceiveBuffer,
config.maxSocketRequestSize)
Utils.registerMBean(socketServer.stats, statsMBeanName)
socketServer.startup()
Mx4jLoader.maybeLoad
/**
* Registers this broker in ZK. After this, consumers can connect to broker.
* So this should happen after socket server start.
*/
logManager.startup()
info("Kafka server started.")
}
/**
* Shutdown API for shutting down a single instance of the Kafka server.
* Shuts down the LogManager, the SocketServer and the log cleaner scheduler thread
*/
def shutdown() {
val canShutdown = isShuttingDown.compareAndSet(false, true);
if (canShutdown) {
info("Shutting down Kafka server")
scheduler.shutdown()
if (socketServer != null)
socketServer.shutdown()
Utils.unregisterMBean(statsMBeanName)
if (logManager != null)
logManager.close()
val cleanShutDownFile = new File(new File(config.logDir), CLEAN_SHUTDOWN_FILE)
cleanShutDownFile.createNewFile
shutdownLatch.countDown()
info("Kafka server shut down completed")
}
}
/**
* After calling shutdown(), use this API to wait until the shutdown is complete
*/
def awaitShutdown(): Unit = shutdownLatch.await()
def getLogManager(): LogManager = logManager
def getStats(): SocketServerStats = socketServer.stats
}
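/**
 * Minimal usage sketch (not part of the original file): embedding a broker.
 * KafkaConfig in this version is built from java.util.Properties; the property
 * names below are assumptions based on the fields this class reads.
 */
object KafkaServerExample {
  def main(args: Array[String]): Unit = {
    val props = new java.util.Properties
    props.put("brokerid", "0")
    props.put("port", "9092")
    props.put("log.dir", "/tmp/kafka-logs")
    props.put("enable.zookeeper", "false") // standalone, no ZK registration
    val server = new KafkaServer(new KafkaConfig(props))
    server.startup()
    Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
      def run() { server.shutdown() } // writes the clean-shutdown marker file
    }))
    server.awaitShutdown()
  }
}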
| tnachen/kafka | core/src/main/scala/kafka/server/KafkaServer.scala | Scala | apache-2.0 | 4,354 |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.model
import cc.factorie._
import cc.factorie.variable.{BooleanVariable, DiffList, RealVariable, Var, _}
import org.junit.Test
import org.scalatest.junit._
/**
* @author sriedel
*/
class TestTemplates extends JUnitSuite with cc.factorie.util.FastLogging {
@Test
def testFactorsOfDiffList() {
val template = new DotTemplateWithStatistics1[BooleanVariable] with Parameters { val weights = Weights(new la.DenseTensor1(BooleanDomain.size)) }
val b = new BooleanVariable(true)
val diff = new DiffList
b.set(false)(diff)
val factors = template.factors(diff)
assert(factors.head.asInstanceOf[DotTemplateWithStatistics1[BooleanVariable]#FactorType].family === template)
assert(factors.head.variables.head == b)
// TODO Uncomment this next line
//assert(factors.head.statistics.asInstanceOf[TemplateWithDotStatistics1[BooleanVariable]#Stat]._1 == false)
}
@Test
def testCascadeUnroll() {
object Aggregate extends BooleanVariable {
val b1 = new BooleanVariable {
//override def unrollCascade: scala.Iterable[Var] = Seq(Aggregate)
}
}
val diff = new DiffList
val template = new DotTemplateWithStatistics1[BooleanVariable] with Parameters {
val weights = Weights(new la.DenseTensor1(BooleanDomain.size))
override def unroll(v:Var) = v match { case Aggregate.b1 => Factor(Aggregate); case _ => Nil }
}
Aggregate.b1.set(true)(diff)
val factors = template.factors(diff)
assert(factors.exists(factor => factor.variables.head == Aggregate.b1))
assert(factors.exists(factor => factor.variables.head == Aggregate))
}
@Test
def testVarArgs() {
class Aggregate extends BooleanVariable {
class Member extends BooleanVariable {
def owner = Aggregate.this
}
val members = for (i <- 0 until 10) yield new Member
}
val aggregate = new Aggregate
val template = new DotTemplate2[Aggregate,Vars[Aggregate#Member]] with Parameters {
val weights = Weights(new la.DenseTensor1(1))
def unroll2(v: Vars[Aggregate#Member]) = sys.error("Not needed")
def unroll1(v: Aggregate) = Factor(v,Vars(v.members))
//override def unroll2s(v: Aggregate#Member) = Factor(v.owner,Vars(v.owner.members))
override def unroll(v:Var) = v match { case v:Aggregate#Member => Factor(v.owner, Vars(v.owner.members)); case _ => Nil }
override def statistics(v1:Aggregate#Value, v2:Vars[Aggregate#Member]#Value) =
new RealVariable(v2.count(_.booleanValue)).value // TODO Just create a RealValue; don't bother with a RealVariable
}
val diff = new DiffList
aggregate.members(0).set(true)(diff)
aggregate.members(2).set(true)(diff)
val factors = template.factors(diff).toSeq
assert(factors.size === 1)
assert(factors(0).variables(0) === aggregate)
assert(factors(0).variables(1) === Vars(aggregate.members))
}
}
//class SettingIteratorTests extends TestCase {
// val v1 = new BooleanVariable(true)
// val v2 = new BooleanVariable(true)
// val v3 = new BooleanVariable(true)
//
// //TODO: test fixed assignments
//
// def testLimitedSettingsIterator1 {
// val template = new TemplateWithDotStatistics1[BooleanVariable] { def statisticsDomains = Tuple1(BooleanDomain) }
// val factor = template.unroll1(v1).head
// assert(factor.valuesIterator(Set(factor._1.asInstanceOf[Variable])).size == 2)
// logger.debug("Template1 valuesIterator:")
// factor.valuesIterator(Set(factor._1.asInstanceOf[Variable])).foreach(logger.debug(_))
// logger.debug("--------------------------------")
//
// template.addLimitedDiscreteValues(Seq(BooleanDomain.head.intValue))
// template.isLimitingValuesIterator = true
// assert(factor.valuesIterator(Set(factor._1.asInstanceOf[Variable])).size == 1)
// logger.debug("Template1 limitedValuesIterator:")
// factor.valuesIterator(Set(factor._1.asInstanceOf[Variable])).foreach(logger.debug(_))
// logger.debug("--------------------------------")
// }
//
// def testLimitedSettingsIterator2 {
// val template = new TemplateWithDotStatistics2[BooleanVariable, BooleanVariable] {
// def statisticsDomains = ((BooleanDomain, BooleanDomain))
// def unroll1(v: BooleanVariable) = Factor(v1, v2)
// def unroll2(v: BooleanVariable) = sys.error("Not needed")
// }
//
// val factor = template.unroll1(v1).head
// assert(factor.valuesIterator(factor.variables.toSet).size == 4)
// logger.debug("Template2 valuesIterator:")
// factor.valuesIterator(factor.variables.toSet).foreach(logger.debug(_))
// logger.debug("--------------------------------")
//
// template.addLimitedDiscreteValues(Seq((0,0),(1,1)))
// template.isLimitingValuesIterator = true
//
// assert(factor.valuesIterator(factor.variables.toSet).size == 2)
// logger.debug("Template2 limitedValuesIterator:")
// factor.valuesIterator(factor.variables.toSet).foreach(logger.debug(_))
// logger.debug("--------------------------------")
// }
//
// def testLimitedSettingsIterator3 {
// val template = new TemplateWithDotStatistics3[BooleanVariable, BooleanVariable, BooleanVariable] {
// def statisticsDomains = ((BooleanDomain, BooleanDomain, BooleanDomain))
// def unroll1(v: BooleanVariable) = Factor(v1, v2, v3)
// def unroll2(v: BooleanVariable) = sys.error("Not needed")
// def unroll3(v: BooleanVariable) = sys.error("Not needed")
// }
//
// var factor = template.unroll1(v1).head
// logger.debug("Template3 valuesIterator:")
// factor.valuesIterator(factor.variables.toSet).foreach(logger.debug(_))
// assert(factor.valuesIterator(factor.variables.toSet).size == 8)
// logger.debug("--------------------------------")
//
// template.addLimitedDiscreteValues(Seq((0,0,0),(1,1,1)))
// template.isLimitingValuesIterator = true
//
// logger.debug("limiting factor? : " + factor.isLimitingValuesIterator)
// logger.debug("Template3 limitedValuesIterator:")
// factor.valuesIterator(factor.variables.toSet).foreach(logger.debug(_))
// assert(factor.valuesIterator(factor.variables.toSet).size == 2)
// logger.debug("--------------------------------")
// }
//
//}
| Craigacp/factorie | src/test/scala/cc/factorie/model/TestTemplates.scala | Scala | apache-2.0 | 6,959 |
/**
* Copyright 2011-2012 eBusiness Information, Groupe Excilys (www.excilys.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.excilys.ebi.gatling.core.result.reader
import com.excilys.ebi.gatling.core.config.GatlingConfiguration.configuration
import com.excilys.ebi.gatling.core.result.Group
import com.excilys.ebi.gatling.core.result.message.{ RequestStatus, RunRecord }
import com.excilys.ebi.gatling.core.result.message.RequestStatus.RequestStatus
object DataReader {
val NO_PLOT_MAGIC_VALUE = -1
def newInstance(runOn: String) = Class.forName(configuration.data.dataReaderClass).asInstanceOf[Class[DataReader]].getConstructor(classOf[String]).newInstance(runOn)
}
abstract class DataReader(runUuid: String) {
def runRecord: RunRecord
def runStart: Long
def runEnd: Long
def groupsAndRequests: List[(Option[Group], Option[String])]
def scenarioNames: List[String]
def numberOfActiveSessionsPerSecond(scenarioName: Option[String] = None): Seq[(Int, Int)]
def numberOfRequestsPerSecond(status: Option[RequestStatus] = None, requestName: Option[String] = None, group: Option[Group] = None): Seq[(Int, Int)]
def numberOfTransactionsPerSecond(status: Option[RequestStatus] = None, requestName: Option[String] = None, group: Option[Group] = None): Seq[(Int, Int)]
def responseTimeDistribution(slotsNumber: Int, requestName: Option[String] = None, group: Option[Group] = None): (Seq[(Int, Int)], Seq[(Int, Int)])
def generalStats(status: Option[RequestStatus] = None, requestName: Option[String] = None, group: Option[Group] = None): GeneralStats
def groupStats(group: Option[Group]): Long
def numberOfRequestInResponseTimeRange(requestName: Option[String] = None, group: Option[Group] = None): Seq[(String, Int)]
def responseTimeGroupByExecutionStartDate(status: RequestStatus, requestName: Option[String] = None, group: Option[Group] = None): Seq[(Int, (Int, Int))]
def latencyGroupByExecutionStartDate(status: RequestStatus, requestName: Option[String] = None, group: Option[Group] = None): Seq[(Int, (Int, Int))]
def responseTimeAgainstGlobalNumberOfRequestsPerSec(status: RequestStatus.RequestStatus, requestName: Option[String] = None, group: Option[Group] = None): Seq[(Int, Int)]
}
| Tjoene/thesis | Case_Programs/gatling-1.4.0/gatling-core/src/main/scala/com/excilys/ebi/gatling/core/result/reader/DataReader.scala | Scala | gpl-2.0 | 2,751 |
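A side note on the reflective factory in DataReader.newInstance above: it resolves the concrete reader from the data.dataReaderClass configuration entry and invokes its single-String constructor. A minimal sketch of that pattern in isolation -- the class name here is a hypothetical stand-in, not a real Gatling reader:

import com.excilys.ebi.gatling.core.result.reader.DataReader

// Resolve an implementation by fully-qualified class name and call its (String) constructor,
// mirroring DataReader.newInstance above.
val readerClass = Class.forName("com.example.FileDataReader") // hypothetical FQCN from config
  .asInstanceOf[Class[DataReader]]
val reader: DataReader = readerClass
  .getConstructor(classOf[String])
  .newInstance("run-20120101") // the runOn/runUuid results-folder identifier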
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package json
import org.specs2.mutable.Specification
import java.util.UUID
object SerializationBugs extends Specification {
import Serialization.{read, write => swrite}
implicit val formats = Serialization.formats(NoTypeHints)
"plan1.Plan can be serialized (issue 341)" in {
import plan1._
val game = Game(Map("a" -> Plan(Some(Action(1, None)))))
val ser = swrite(game)
read[Game](ser) mustEqual game
}
"plan2.Plan can be serialized (issue 341)" in {
import plan2._
val g1 = Game(Map("a" -> Plan(Some(Action("f1", "s", Array(), None)),
Some("A"),
Some(Action("f2", "s2", Array(0, 1, 2), None)))))
val ser = swrite(g1)
val g2 = read[Game](ser)
val plan = g2.buy("a")
g2.buy.size mustEqual 1
val leftOp = plan.leftOperand.get
leftOp.functionName mustEqual "f1"
leftOp.symbol mustEqual "s"
leftOp.inParams.toList mustEqual Nil
leftOp.subOperand mustEqual None
plan.operator mustEqual Some("A")
val rightOp = plan.rightOperand.get
rightOp.functionName mustEqual "f2"
rightOp.symbol mustEqual "s2"
rightOp.inParams.toList mustEqual List(0, 1, 2)
rightOp.subOperand mustEqual None
}
"null serialization bug" in {
val x = new X(null)
val ser = swrite(x)
read[X](ser) mustEqual x
}
"StackOverflowError with large Lists" in {
val xs = LongList(List.fill(5000)(0).map(Num))
val ser = swrite(xs)
read[LongList](ser).xs.length mustEqual 5000
}
"Custom serializer should work with Option" in {
class UUIDFormat extends Serializer[UUID] {
val UUIDClass = classOf[UUID]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), UUID] = {
case (TypeInfo(UUIDClass, _), JString(x)) => UUID.fromString(x)
}
def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
case x: UUID => JString(x.toString)
}
}
implicit val formats = Serialization.formats(NoTypeHints) + new UUIDFormat
val o1 = OptionalUUID(None)
val o2 = OptionalUUID(Some(UUID.randomUUID))
read[OptionalUUID](swrite(o1)) mustEqual o1
read[OptionalUUID](swrite(o2)) mustEqual o2
}
"TypeInfo is not correctly constructed for customer serializer -- 970" in {
class SeqFormat extends Serializer[Seq[_]] {
val SeqClass = classOf[Seq[_]]
def serialize(implicit format: Formats) = {
case seq: Seq[_] => JArray(seq.toList.map(Extraction.decompose))
}
def deserialize(implicit format: Formats) = {
case (TypeInfo(SeqClass, parameterizedType), JArray(xs)) =>
val typeInfo = TypeInfo(parameterizedType
.map(_.getActualTypeArguments()(0))
.getOrElse(failure("No type parameter info for type Seq")).asInstanceOf[Class[_]], None)
xs.map(x => Extraction.extract(x, typeInfo))
}
}
implicit val formats = DefaultFormats + new SeqFormat
val seq = Seq(1, 2, 3)
val ser = Extraction.decompose(seq)
Extraction.extract[Seq[Int]](ser) mustEqual seq
}
"Serialization of an opaque value should not fail" in {
val o = Opaque(JObject(JField("some", JString("data")) :: Nil))
val ser = Serialization.write(o)
ser mustEqual """{"x":{"some":"data"}}"""
}
"Map with Map value" in {
val a = Map("a" -> Map("a" -> 5))
val b = Map("b" -> 1)
val str = Serialization.write(MapWithMap(a, b))
read[MapWithMap](str) mustEqual MapWithMap(a, b)
}
"Either can't be deserialized with type hints" in {
implicit val formats = DefaultFormats + FullTypeHints(classOf[Either[_, _]] :: Nil)
val x = Eith(Left("hello"))
val s = Serialization.write(x)
read[Eith](s) mustEqual x
}
"Custom serializer should work as Map key (scala 2.9) (issue #1077)" in {
class SingleOrVectorSerializer extends Serializer[SingleOrVector[Double]] {
private val singleOrVectorClass = classOf[SingleOrVector[Double]]
def deserialize(implicit format: Formats) = {
case (TypeInfo(`singleOrVectorClass`, _), json) => json match {
case JObject(List(JField("val", JDouble(x)))) => SingleValue(x)
case JObject(List(JField("val", JArray(xs: List[JDouble])))) => VectorValue(xs.map(_.num).toIndexedSeq)
case x => throw new MappingException("Can't convert " + x + " to SingleOrVector")
}
}
def serialize(implicit format: Formats) = {
case SingleValue(x: Double) => JObject(List(JField("val", JDouble(x))))
case VectorValue(x: Vector[Double]) => JObject(List(JField("val", JArray(x.toList.map(JDouble(_))))))
}
}
implicit val formats = DefaultFormats + new SingleOrVectorSerializer
val ser = swrite(MapHolder(Map("hello" -> SingleValue(2.0))))
read[MapHolder](ser) mustEqual MapHolder(Map("hello" -> SingleValue(2.0)))
}
}
case class Eith(x: Either[String, Int])
case class MapWithMap(a: Map[String, Map[String, Int]], b: Map[String, Int])
case class LongList(xs: List[Num])
case class Num(x: Int)
case class X(yy: Y)
case class Y(ss: String)
case class OptionalUUID(uuid: Option[UUID])
package plan1 {
case class Plan(plan: Option[Action])
case class Game(game: Map[String, Plan])
case class Action(id: Int, subAction: Option[Action])
}
package plan2 {
case class Plan(leftOperand: Option[Action], operator: Option[String],
rightOperand: Option[Action])
case class Game(buy: Map[String, Plan])
case class Action(functionName: String, symbol: String,
inParams: Array[Number], subOperand: Option[Action])
}
case class Opaque(x: JValue)
sealed trait SingleOrVector[A]
case class SingleValue[A](value: A) extends SingleOrVector[A]
case class VectorValue[A](value: IndexedSeq[A]) extends SingleOrVector[A]
case class MapHolder(a: Map[String, SingleOrVector[Double]])
| pbrant/framework | core/json/src/test/scala/net/liftweb/json/SerializationBugs.scala | Scala | apache-2.0 | 6,557 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.linker
import scala.concurrent._
import org.junit.Test
import org.scalajs.ir.Names._
import org.scalajs.ir.Trees._
import org.scalajs.ir.Types._
import org.scalajs.logging._
import org.scalajs.junit.async._
import org.scalajs.linker.interface._
import org.scalajs.linker.testutils._
import org.scalajs.linker.testutils.TestIRBuilder._
/** Basic backwards compatibility test.
*
* This does not replace the usual two-commit tests we do when introducing
 * backwards compatibility hacks. Rather, it serves as additional defense
* in depth.
*/
class BackwardsCompatTest {
import scala.concurrent.ExecutionContext.Implicits.global
@Test
def testHelloWorld(): AsyncResult = await {
val classDefs = Seq(
mainTestClassDef(predefPrintln(str("Hello world!")))
)
test(classDefs, MainTestModuleInitializers)
}
@Test // #3976
def testSystemIdentityHashCode(): AsyncResult = await {
val classDefs = Seq(
mainTestClassDef(
predefPrintln(Apply(EAF,
LoadModule("java.lang.System$"),
m("identityHashCode", List(O), I),
List(JSObjectConstr(Nil)))(IntType)))
)
test(classDefs, MainTestModuleInitializers)
}
@Test // #4391
def testClone(): AsyncResult = await {
val classDefs = Seq(
classDef("A",
superClass = Some(ObjectClass),
interfaces = List(CloneableClass),
memberDefs = List(trivialCtor("A"))),
mainTestClassDef(
predefPrintln(Apply(EAF,
New("A", NoArgConstructorName, Nil),
m("clone", Nil, O), Nil)(AnyType)))
)
test(classDefs, MainTestModuleInitializers)
}
private def test(classDefs: Seq[ClassDef],
moduleInitializers: Seq[ModuleInitializer]): Future[_] = {
val classDefFiles = classDefs.map(MemClassDefIRFile(_))
val logger = new ScalaConsoleLogger(Level.Error)
Future.traverse(TestIRRepo.previousLibs.toSeq) { case (version, libFuture) =>
libFuture.flatMap { lib =>
val config = StandardConfig().withCheckIR(true)
val linker = StandardImpl.linker(config)
val out = MemOutputDirectory()
linker.link(lib ++ classDefFiles, moduleInitializers, out, logger)
}.recover {
case e: Throwable =>
throw new AssertionError(
s"linking stdlib $version failed: ${e.getMessage()}", e)
}
}
}
}
| scala-js/scala-js | linker/shared/src/test/scala/org/scalajs/linker/BackwardsCompatTest.scala | Scala | apache-2.0 | 2,699 |
package com.clank.tests
import org.scalacheck._
import Gen._
import Arbitrary.arbitrary
import com.clank.spellers._
object Helper {
class DumbSpeller(val words: List[String]) extends Speller {
def spell(word: String): List[String] = words
override def toString = "DumbSpeller(" + words.mkString(", ") + ")"
}
type NiceChar = Char
type NiceString = String
type NiceSet[T] = Set[T]
type NiceMap[K, V] = Map[K, V]
implicit val arbNiceChar: Arbitrary[NiceChar] =
Arbitrary(oneOf(alphaUpperChar, alphaLowerChar))
implicit val arbNiceString: Arbitrary[NiceString] =
Arbitrary((for {
h <- arbitrary[NiceChar]
n <- choose(2, 20)
t <- listOfN(n, alphaLowerChar)
} yield (h :: t).mkString) suchThat (!_.isEmpty))
implicit def arbNiceSet[T](implicit ae: Arbitrary[T]): Arbitrary[NiceSet[T]] =
Arbitrary(for {
n <- choose(1, 3)
l <- containerOfN[Set, T](n, ae.arbitrary)
} yield l.toSet)
implicit def arbNiceMap[K, V](implicit ak: Arbitrary[K],
av: Arbitrary[V]): Arbitrary[NiceMap[K, V]] =
Arbitrary(for {
n <- choose(1, 6)
keys <- listOfN(n, ak.arbitrary)
vals <- listOfN(n, av.arbitrary)
} yield Map(keys zip vals: _*))
def mkSimpleDumbSpellerArb(constructor: List[NiceString] => DumbSpeller) =
Arbitrary(listOf(Arbitrary.arbitrary[NiceString]) map {
l => constructor(l.distinct)
})
}
| brx/clank | src/test/scala/Helper.scala | Scala | gpl-3.0 | 1,437 |
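The implicit Arbitrary instances above slot straight into ScalaCheck properties. A small sketch -- the property itself is illustrative, not part of the suite -- exercising the guarantees the NiceString generator encodes (alphabetic head, 2 to 20 lowercase letters, never empty):

import org.scalacheck.Prop.forAll
import com.clank.tests.Helper._

// The imported arbNiceString takes lexical precedence over ScalaCheck's default Arbitrary[String].
val niceStringProp = forAll { (s: NiceString) =>
  s.nonEmpty && s.head.isLetter && s.tail.forall(_.isLower)
}
// niceStringProp.check() should report: + OK, passed 100 tests.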
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.models.des.ihtReturn
import play.api.libs.json.Json
case class TransferOfNilRateBand(totalNilRateBandTransferred: Option[BigDecimal] = None,
deceasedSpouses: Set[TNRBForm])
object TransferOfNilRateBand {
implicit val formats = Json.format[TransferOfNilRateBand]
}
| hmrc/iht-frontend | app/iht/models/des/ihtReturn/TransferOfNilRateBand.scala | Scala | apache-2.0 | 920 |
package blended.streams.jms
import java.util.concurrent.atomic.{AtomicLong, AtomicReference}
import akka.actor.ActorSystem
import akka.pattern.after
import akka.stream.stage.{AsyncCallback, TimerGraphStageLogic}
import blended.jms.utils.{IdAwareConnectionFactory, JmsSession}
import blended.util.logging.Logger
import javax.jms._
import scala.concurrent.{ExecutionContext, Future, TimeoutException}
object JmsConnector {
private[this] val sessionIdCounter : AtomicLong = new AtomicLong(0L)
def nextSessionId : String = {
if (sessionIdCounter.get() == Long.MaxValue) {
sessionIdCounter.set(0L)
}
s"${sessionIdCounter.incrementAndGet()}"
}
}
trait JmsConnector[S <: JmsSession] { this: TimerGraphStageLogic =>
private case object RecreateSessions
implicit protected var ec : ExecutionContext = _
implicit protected var system : ActorSystem = _
@volatile protected var jmsConnection: Future[Connection] = _
protected var jmsSessions : Map[String, S] = Map.empty
protected def jmsSettings: JmsSettings
protected def onSessionOpened(jmsSession: S): Unit
// Just to identify the Source stage in log statements
protected val id: String = jmsSettings.connectionFactory match {
case idAware: IdAwareConnectionFactory => idAware.id
case cf => cf.toString()
}
protected val fail: AsyncCallback[Throwable] = getAsyncCallback[Throwable]{e =>
jmsSettings.log.warn(s"Failing stage [$id]")
failStage(e)
}
private val onSession: AsyncCallback[S] = getAsyncCallback[S] { session =>
jmsSettings.log.debug(s"Session of type [${session.getClass().getSimpleName()}] with id [${session.sessionId}] has been created.")
jmsSessions += (session.sessionId -> session)
onSessionOpened(session)
}
private val onSessionClosed : AsyncCallback[S] = getAsyncCallback { s =>
if (isTimerActive(RecreateSessions)) {
// do nothing as we have already scheduled to recreate the sessions
} else {
scheduleOnce(RecreateSessions, jmsSettings.sessionRecreateTimeout)
}
}
protected def handleTimer : PartialFunction[Any, Unit] = {
case RecreateSessions =>
initSessionAsync()
}
override protected def onTimer(timerKey: Any): Unit = handleTimer(timerKey)
protected def nextSessionId() : String = s"$id-${JmsConnector.nextSessionId}"
protected def createSession(connection: Connection): S
protected[this] def closeSession(session: S) : Unit = {
try {
jmsSettings.log.debug(s"Closing session [${session.sessionId}]")
session.closeSessionAsync().onComplete { _ =>
jmsSessions -= session.sessionId
onSessionClosed.invoke(session)
}
} catch {
case _ : Throwable =>
jmsSettings.log.error(s"Error closing session with id [${session.sessionId}]")
}
}
sealed trait ConnectionStatus
case object Connecting extends ConnectionStatus
case object Connected extends ConnectionStatus
case object TimedOut extends ConnectionStatus
protected def initSessionAsync(): Unit = {
def failureHandler(ex: Throwable) = {
jmsSettings.log.warn(s"Session creation failed [${ex.getMessage()}]")
fail.invoke(ex)
}
val allSessions = openSessions(failureHandler)
allSessions.failed.foreach(failureHandler)
// wait for all sessions to successfully initialize before invoking the onSession callback.
// reduces flakiness (start, consume, then crash) at the cost of increased latency of startup.
allSessions.foreach(_.foreach{ s =>
onSession.invoke(s)
})(ec)
}
def openSessions(onConnectionFailure: JMSException => Unit): Future[Seq[S]] =
openConnection(startConnection = true, onConnectionFailure).flatMap { connection =>
val toBeCreated = jmsSettings.sessionCount - jmsSessions.size
jmsSettings.log.debug(s"Trying to create [$toBeCreated] sessions ...")
val sessionFutures =
for (_ <- 0 until toBeCreated) yield Future {
val s = createSession(connection)
s
}
Future.sequence(sessionFutures)
}
private def openConnection(startConnection: Boolean) : Future[Connection] = {
val factory = jmsSettings.connectionFactory
val connectionRef = new AtomicReference[Option[Connection]](None)
// status is also the decision point between the two futures below which one will win.
val status = new AtomicReference[ConnectionStatus](Connecting)
val connectionFuture = Future {
val connection = factory.createConnection()
if (status.get == Connecting) { // `TimedOut` can be set at any point. So we have to check whether to continue.
connectionRef.set(Some(connection))
if (startConnection) {
connection.start()
}
}
// ... and close if the connection is not to be used, don't return the connection
if (!status.compareAndSet(Connecting, Connected)) {
connectionRef.get.foreach(_.close())
connectionRef.set(None)
throw new TimeoutException("Received timed out signal trying to establish connection")
} else connection
}
val connectTimeout = jmsSettings.connectionTimeout
val timeoutFuture = after(connectTimeout, system.scheduler) {
// Even if the timer goes off, the connection may already be good. We use the
// status field and an atomic compareAndSet to see whether we should indeed time out, or just return
// the connection. In this case it does not matter which future returns. Both will have the right answer.
if (status.compareAndSet(Connecting, TimedOut)) {
connectionRef.get.foreach(_.close())
connectionRef.set(None)
Future.failed(
new TimeoutException(
s"Timed out after [$connectTimeout] trying to establish connection. " +
"Please see ConnectionRetrySettings.connectTimeout"
)
)
} else
connectionRef.get match {
case Some(connection) =>
Future.successful(connection)
case None =>
Future.failed(new IllegalStateException("BUG: Connection reference not set when connected"))
}
}
Future.firstCompletedOf(Iterator(connectionFuture, timeoutFuture))(ec)
}
private[jms] def openConnection(
startConnection: Boolean,
onConnectionFailure: JMSException => Unit
): Future[Connection] = {
jmsConnection = openConnection(startConnection).map { connection =>
connection.setExceptionListener(new ExceptionListener {
override def onException(ex: JMSException) = {
try {
connection.close() // best effort closing the connection.
} catch {
case _: Throwable =>
}
jmsSessions = Map.empty
onConnectionFailure(ex)
}
})
connection
}
jmsConnection
}
}
| lefou/blended | blended.streams/src/main/scala/blended/streams/jms/JMSConnector.scala | Scala | apache-2.0 | 6,860 |
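The two-future race inside openConnection above deserves a distilled view: a single AtomicReference plus compareAndSet guarantees exactly one of the competing futures wins, and the loser releases the resource. A condensed, JMS-free sketch of the same idea -- it simplifies the original by letting only the opening future own and close the resource, and the Status names mirror ConnectionStatus:

import java.util.concurrent.atomic.AtomicReference
import scala.concurrent.{Future, TimeoutException}
import scala.concurrent.ExecutionContext.Implicits.global

sealed trait Status
case object Connecting extends Status
case object Connected extends Status
case object TimedOut extends Status

def raceWithTimeout[A](open: () => A, close: A => Unit, timer: Future[Unit]): Future[A] = {
  val status = new AtomicReference[Status](Connecting)
  val attempt = Future {
    val a = open()
    // The CAS picks the winner; a late opener must discard what it just opened.
    if (status.compareAndSet(Connecting, Connected)) a
    else { close(a); throw new TimeoutException("opened too late, resource discarded") }
  }
  val timeout = timer.flatMap { _ =>
    if (status.compareAndSet(Connecting, TimedOut))
      Future.failed[A](new TimeoutException("timed out while opening"))
    else
      attempt // the attempt already won; surface its result
  }
  Future.firstCompletedOf(Seq(attempt, timeout))
}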
package com.komanov.mysql.streaming.tests
import com.komanov.mysql.streaming._
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope
import org.specs2.specification.core.Fragments
import scala.collection.mutable
class QueryTest extends SpecificationWithJUnit {
sequential
MysqlRunner.run()
Fragments.foreach(Drivers.list) { driver =>
s"${driver.name}" should {
s"prepare/select/clear database" in new ctx {
Query.clearTable(driver)
Query.prepareTable(driver)
Query.selectAtOnce(driver) must be_===(Query.TestData)
Query.selectViaStreaming(driver) must be_===(Query.TestData)
Query.clearTable(driver)
Query.selectAtOnce(driver) must beEmpty
}
s"stream everything" in new ctx {
Query.clearTable(driver)
Query.prepareTable(driver)
val result = mutable.ListBuffer[TestTableRow]()
Query.forEach(driver, row => result += row)
result.toList must be_===(Query.TestData)
}
}
}
class ctx extends Scope {
}
}
| dkomanov/stuff | src/com/komanov/mysql/streaming/tests/QueryTest.scala | Scala | mit | 1,072 |
package org.workcraft
package object services {
type Model = ModelServiceProvider
type ModelService[T] = Service[ModelScope, T]
type GlobalService[T] = Service[GlobalScope, T]
}
| tuura/workcraft-2.2 | Core/src/main/scala/org/workcraft/services/Model.scala | Scala | gpl-3.0 | 184 |
/*-
* #%L
* FWAPP Framework
* %%
* Copyright (C) 2016 - 2017 Open Design Flow
* %%
* This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.odfi.wsb.fwapp.lib.chart.plotly
import org.odfi.wsb.fwapp.module.jquery.JQueryView
import org.odfi.wsb.fwapp.module.semantic.SemanticView
import org.w3c.dom.html.HTMLElement
import com.idyria.osi.vui.html.Div
import com.idyria.osi.tea.listeners.ListeningSupport
import com.idyria.osi.ooxoo.core.buffers.structural.ElementBuffer
import com.idyria.osi.ooxoo.core.buffers.structural.xelement
import com.idyria.osi.ooxoo.core.buffers.datatypes.IntegerBuffer
import com.idyria.osi.ooxoo.core.buffers.structural.XList
import com.idyria.osi.ooxoo.core.buffers.datatypes.XSDStringBuffer
import com.idyria.osi.ooxoo.lib.json.JSonUtilTrait
import com.idyria.osi.ooxoo.core.buffers.datatypes.DoubleBuffer
import org.odfi.wsb.fwapp.framework.websocket.WebsocketView
trait PlotlyView extends JQueryView with WebsocketView {
this.addLibrary("plotly") {
case (Some(l), targetNode) =>
case (None, targetNode) =>
onNode(targetNode) {
script(createAssetsResolverURI("/fwapp/external/plotly/plotly-latest.min.js")) {
}
script(createAssetsResolverURI("/fwapp/lib/plotly/plotly-update.js")) {
}
}
}
// Updates
//-----------------
class PlotlyLineChart extends ElementBuffer with JSonUtilTrait {
@xelement
var TargetID: XSDStringBuffer = _
@xelement
var yPoints: XList[DoubleBuffer] = XList { new DoubleBuffer }
}
class PlotlyAddPoint extends ElementBuffer with JSonUtilTrait {
@xelement
var TargetID: XSDStringBuffer = _
@xelement
var point: DoubleBuffer = 0.0
}
class PlotlyDiv(val d: Div[HTMLElement, Div[HTMLElement, _]]) extends ListeningSupport {
def onDataAvailable(cl: => Unit) = {
this.on("data"){
cl
}
}
def triggerData = {
this.@->("data")
}
def sendPoint(p:Double) = {
val ap = new PlotlyAddPoint
ap.TargetID = d.getId
ap.point = p
broadCastSOAPBackendMessage(ap)
}
def sendLineChart(points:Array[Double]) = {
var lineUpdate = new PlotlyLineChart
lineUpdate.TargetID = d.getId
lineUpdate.yPoints ++= points.map { p => DoubleBuffer(p)}.toList
broadCastSOAPBackendMessage(lineUpdate)
}
def makeLineChart(points: Array[Int]) : Unit = {
makeLineChart(points.map(_.toDouble))
}
def makeLineChart(points: Array[Double]) : Unit = {
onNode(d) {
var generator = jqueryGenerateOnLoad("plotly-linechart-"+d.getId).get
//x: ${(0 until points.size).map(_.toString).mkString("[", ",", "]")},
generator.print(s"""|
|var trace = {
|
| y: ${points.map(_.toString).mkString("[", ",", "]")},
| type: 'scatter'
|}
|
|Plotly.newPlot('${d.getId}', [trace]);
|
|
|
|""".stripMargin)
generator.close()
}
triggerData
}
}
def plotlyPlot(tid: String)(cl: => Any) = {
val targetId = currentNodeUniqueId(tid)
/*val d = new PlotlyDiv
d.id = tid
d*/
new PlotlyDiv(div {
id(targetId)
//+@("style"->"width:400px;height:400px")
cl
})
}
/* script(s"""
$$(function() {
var trace1 = {
x: [1, 2, 3, 4],
y: [10, 15, 13, 17],
type: 'scatter'
};
var trace2 = {
x: [1, 2, 3, 4],
y: [16, 5, 11, 9],
type: 'scatter'
};
var data = [trace1, trace2];
Plotly.newPlot('$tid', data);
});
""")
}*/
}
trait PlotlySemantic extends PlotlyView with SemanticView {
/**
*
*/
def semanticPlotly(tid: String, message: String, widthPercent: Int = 100)(cl: => Any) = {
val plot = plotlyPlot(tid) {
data("ui-load" -> "fwapp.ui.heightToRatio( this, 4.0 / 3.0)")
++@("style" -> s"with:$widthPercent%;")
}
/**val segmentd = "ui segment" :: div {
val dimmerDiv = s"ui active dimmer" :: div {
classes("")
//data("vui-size-ratio" -> 4.0 / 3.0)
"ui large text" :: message
plot.d.detach
add(plot.d)
cl
}
plot.onDataAvailable {
dimmerDiv.removeClass("active").removeClass("dimmer")
}
}
plot.d = segmentd*/
plot
}
}
| opendesignflow/fwapp | src/main/scala/org/odfi/wsb/fwapp/lib/chart/plotly/PlotlyView.scala | Scala | agpl-3.0 | 5,173 |
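A rough usage sketch for the pieces above. Assumptions are flagged: the trait must be mixed into a concrete view whose rendering context (div, onNode, the websocket plumbing) is live, and the chart id and data values are purely illustrative:

trait CpuDashboard extends PlotlyView {
  def renderCpuChart(): Unit = {
    val chart = plotlyPlot("cpu-load") {} // builds the target div; extra attributes could go in the block
    chart.onDataAvailable {
      // fires once makeLineChart has generated its plotting JS
    }
    chart.makeLineChart(Array(0.1, 0.4, 0.35)) // initial render via the jQuery onload hook
    chart.sendPoint(0.5) // later: stream a single point to connected clients
  }
}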
object test {
class annot extends scala.annotation.Annotation
def foo = {
def bar(i: Int): Int = i
@annot class Silly {} // used to be: not found, but now ok after backing out of 2b12868070be50fb70
bar(5)
}
}
| som-snytt/dotty | tests/pos/i3702.scala | Scala | apache-2.0 | 227 |
package one.murch.bitcoin.coinselection
/**
* Created by murch on 31.12.16.
*/
object Simulation {
def main(args: Array[String]) = {
//
// val testCases4 = new Simulator(TestCase500Euler.startingUtxoSet, TestCase500Euler.operations, TestCase500Euler.descriptor)
// val testCases5 = new Simulator(TestCaseJunkWallet.startingUtxoSet, TestCaseJunkWallet.operations, TestCaseJunkWallet.descriptor)
// val testCases6 = new Simulator(GaussianWallet.startingUtxoSet, GaussianWallet.operations, GaussianWallet.descriptor)
// val testCases7 = new Simulator(EmptyGaussianWallet.startingUtxoSet, EmptyGaussianWallet.operations, EmptyGaussianWallet.descriptor)
//
// val testCases9 = new Simulator(TestCaseMoneyPot50.startingUtxoSet, TestCaseMoneyPot50.operations, TestCaseMoneyPot50.descriptor)
// val testCases10 = new Simulator(TestCaseMoneyPot15.startingUtxoSet, TestCaseMoneyPot15.operations, TestCaseMoneyPot15.descriptor)
// val testCases11 = new Simulator(TestCaseMoneyPot15Coins.startingUtxoSet, TestCaseMoneyPot15Coins.operations, TestCaseMoneyPot15Coins.descriptor)
// val testCases12 = new Simulator(Set(), TestCaseMoneyPotEmpty.operations, TestCaseMoneyPotEmpty.descriptor)
val mpAfterLF = new Simulator(MoneyPotAfterLF.startingUtxoSet, MoneyPotAfterLF.operations, MoneyPotAfterLF.descriptor)
// // println("--------------------------------------------------------------------------------------")
// // println("---------------TEST CASE 1 STARTING---------------------------------------------------")
// // println("--------------------------------------------------------------------------------------")
// // testCases.simulate()
// // println("--------------------------------------------------------------------------------------")
// // println("---------------TEST CASE 2 STARTING---------------------------------------------------")
// // println("--------------------------------------------------------------------------------------")
// // testCases2.simulate()
// // println("--------------------------------------------------------------------------------------")
// // println("---------------TEST CASE 3 STARTING---------------------------------------------------")
// // println("--------------------------------------------------------------------------------------")
// // testCases3.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 4 STARTING---------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases4.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 5 STARTING---------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases5.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 6 STARTING---------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases6.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 7 STARTING---------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases7.simulate()
// // println("--------------------------------------------------------------------------------------")
// // println("---------------TEST CASE 8 STARTING---------------------------------------------------")
// // println("--------------------------------------------------------------------------------------")
// // testCases8.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 9 STARTING---------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases9.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 10 STARTING--------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases10.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 11 STARTING--------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases11.simulate()
// println("--------------------------------------------------------------------------------------")
// println("---------------TEST CASE 12 STARTING--------------------------------------------------")
// println("--------------------------------------------------------------------------------------")
// testCases12.simulate()
println("--------------------------------------------------------------------------------------")
println("---------------TEST CASE MP-after-LF STARTING--------------------------------------------------")
println("--------------------------------------------------------------------------------------")
mpAfterLF.simulate()
}
}
| Xekyo/CoinSelectionSimulator | src/main/scala/one/murch/bitcoin/coinselection/Simulation.scala | Scala | mit | 6,550 |
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp.annotators.common
import com.johnsnowlabs.nlp.{Annotation, AnnotatorType}
import scala.collection.Map
/**
* structure representing a sentence and its boundaries
*/
case class Sentence(content: String, start: Int, end: Int, index: Int, metadata: Option[Map[String, String]] = None)
object Sentence {
def fromTexts(texts: String*): Seq[Sentence] = {
var idx = 0
texts.zipWithIndex.map { case (text, textIndex) =>
val sentence = Sentence(text, idx, idx + text.length - 1, textIndex)
idx += text.length + 1
sentence
}
}
}
/**
 * Helper object to work with Sentence
*/
object SentenceSplit extends Annotated[Sentence] {
override def annotatorType: String = AnnotatorType.DOCUMENT
override def unpack(annotations: Seq[Annotation]): Seq[Sentence] = {
annotations.filter(_.annotatorType == annotatorType)
.zipWithIndex.map { case (annotation, index) =>
Sentence(annotation.result, annotation.begin, annotation.end, index, Option(annotation.metadata))
}
}
override def pack(items: Seq[Sentence]): Seq[Annotation] = {
items.sortBy(i => i.start).zipWithIndex.map { case (item, index) => Annotation(
annotatorType,
item.start,
item.end,
item.content,
Map("sentence" -> index.toString)
)
}
}
}
/**
 * Helper object to work with Chunks
*/
object ChunkSplit extends Annotated[Sentence] {
override def annotatorType: String = AnnotatorType.CHUNK
override def unpack(annotations: Seq[Annotation]): Seq[Sentence] = {
annotations.filter(_.annotatorType == annotatorType)
.map(annotation =>
Sentence(annotation.result, annotation.begin, annotation.end, annotation.metadata("sentence").toInt)
)
}
override def pack(items: Seq[Sentence]): Seq[Annotation] = {
items.map(item => Annotation(annotatorType, item.start, item.end, item.content, Map.empty[String, String]))
}
}
| JohnSnowLabs/spark-nlp | src/main/scala/com/johnsnowlabs/nlp/annotators/common/SentenceSplit.scala | Scala | apache-2.0 | 2,549 |
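The boundary arithmetic in Sentence.fromTexts above is inclusive on both ends, and each following sentence starts one extra character (the implied separator) past the previous end. A quick worked example:

import com.johnsnowlabs.nlp.annotators.common.Sentence

val ss = Sentence.fromTexts("Hello world.", "Bye.")
// ss(0) == Sentence("Hello world.", 0, 11, 0) -- length 12, so end = 0 + 12 - 1
// ss(1) == Sentence("Bye.", 13, 16, 1)        -- starts at 11 + 2 = 13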
package loader.core.context
import scala.collection.mutable.HashMap
import utils.RegexMap
/** Tag manager.
* It behaves roughly like a map.
*/
trait TagMap {
def fetch(name: String): Option[loader.core.context.Context#FieldMapping]
def put(mapping:(String,loader.core.context.Context#FieldMapping)):Unit
def asXml(tab:Int):String
def hasNonContig:Boolean
def size:Int
def values:Iterable[loader.core.context.Context#FieldMapping]
}
/**
* A TagMap that can handle regex.
*/
final protected class RegexTagMap extends TagMap { self=>
//fields or field/attribute definitions
//implementation choice: standard fields are expected to be much more common than partitioned ones (such as attributes)
//the following map is direct for standard fields and carries no overhead.
//partitioned fields are prefixed by their partition name followed by a reserved separator character
private var flds = RegexMap(HashMap.empty[String,Context#FieldMapping])
/** indicates sequences are expected */
lazy val hasSeqs = flds != null && flds.exists(_._2.annot.isSeq)
/** indicates that non contiguous sequences are expected */
lazy val hasNonContig = hasSeqs && (flds.exists ((x:(String,Context#FieldMapping))=>x._2.annot.isSeq && !x._2.annot.contiguous))
def size = flds.size
def values = flds.values
def fetch(name:String) = flds.get(name)
def put(mapping:(String,loader.core.context.Context#FieldMapping)) = flds.put(mapping._1,mapping._2)
def asXml(tab:Int):String = s"${flds.values.foldLeft("")((x,y)=>x+XmlHelper.t(tab)+y.annot.asXml)}"
}
| Y-P-/data-processing-binding | Core/src/loader/core/context/TagMap.scala | Scala | gpl-3.0 | 1,588 |
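A hedged usage sketch for the map above. RegexTagMap is package-protected, so access is assumed to come from within loader.core.context, and mapping stands in for a real Context#FieldMapping obtained elsewhere:

val tags: TagMap = new RegexTagMap
tags.put("user.*" -> mapping) // the key is treated as a regex by the underlying RegexMap
tags.fetch("userName")        // expected: Some(mapping), matched via the pattern
tags.fetch("address")         // expected: None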
/**
* Copyright (C) 2014 Reactibility Inc. <http://www.reactibility.com>
*/
package garde.security
import akka.actor._
import akka.persistence.{RecoveryCompleted, Recover, PersistentActor, SnapshotOffer}
import akka.util.Timeout
import scalaz._
import Scalaz._
/**
* Persistent actor representing an active module.
*/
class ActiveModule(id: String)(implicit val timeout: Timeout) extends PersistentActor {
import CommonValidations._
import ActiveModule._
override def persistenceId = id
var state: ModuleState = _
val receiveCommand: Receive = {
case cmd @ CreateModule(id, name) =>
validateCreate(cmd) match {
case Success(v) =>
persist(ModuleCreated(v.id, 0L, v.name)) { event =>
state = ModuleState(event.id, event.version, event.name)
sender ! event
}
case f @ Failure(v) =>
sender ! CreationException(id, f)
context stop self
}
case cmd @ ChangeModuleName(id, expectedVersion, name) =>
validateChangeName(cmd) match {
case Success(v) =>
persist(ModuleNameChanged(v.id, state.version + 1, v.name)) { event =>
state = state copy (version = event.version, name = event.name)
sender ! event.successNel
}
case f @ Failure(v) => sender ! f
}
case SaveModuleSnapshot => saveSnapshot(state)
case GetModuleState => sender ! state
case RecoveryCompleted => sender ! ModuleRecoveryComplete(recoveryFinished)
}
val receiveRecover: Receive = {
case evt: ModuleCreated => state = ModuleState(evt.id, evt.version, evt.name)
case evt: ModuleNameChanged => state = state.copy(name = evt.name, version = evt.version)
case SnapshotOffer(_, snapshot: ModuleState) => state = snapshot
}
def validateCreate(cmd: CreateModule): DomainValidation[CreateModule] =
(checkString(cmd.id, IdRequired).toValidationNel |@|
0L.successNel |@|
checkString(cmd.name, NameRequired).toValidationNel) { (i, v, n) =>
CreateModule(i, n)
}
def validateChangeName(cmd: ChangeModuleName): DomainValidation[ChangeModuleName] =
(checkId(cmd.id, state.id, IdMismatch).toValidationNel |@|
checkVersion(cmd.expectedVersion, state.version, IncorrectVersion).toValidationNel |@|
checkString(cmd.name, NameRequired).toValidationNel) { (i, v, n) =>
ChangeModuleName(state.id, v, n)
}
}
/**
* Companion object for ActiveModule.
*/
object ActiveModule {
sealed case class ModuleState(id: String, version: Long, name: String)
case object IncorrectVersion extends ValidationKey
case object IdRequired extends ValidationKey
case object IdMismatch extends ValidationKey
case object NameRequired extends ValidationKey
sealed trait ModuleCommand {
def id: String
def expectedVersion: Long
}
final case class CreateModule(id: String, name: String)
final case class ChangeModuleName(id: String, expectedVersion: Long, name: String) extends ModuleCommand
final case class ModuleCreated(id: String, version: Long, name: String)
final case class ModuleNameChanged(id: String, version: Long, name: String)
sealed case class CreationException(moduleId: String, validation: Failure[NonEmptyList[String], CreateModule])
case object SaveModuleSnapshot
case object GetModuleState
case class ModuleRecoveryComplete(completed: Boolean)
}
/**
 * An actor that serves as a factory for modules and supervises them.
*/
class ModuleSupervisor(implicit val timeout: Timeout) extends Actor {
import ActiveModule._
import ModuleSupervisor._
import CommonValidations._
def receive = {
case cmd @ CreateModule(id, name) =>
val v = checkString(id, IdRequired)
if (v.isSuccess) {
val client = sender
context actorOf(Props(new ModuleCreationSaga(client, cmd, context.actorOf(Props(new ActiveModule(cmd.id)), cmd.id))),
s"module-creation-saga-$id")
}
else sender ! v.toValidationNel
case msg @ RecoverModule(moduleId) =>
val m = context.actorOf(Props(new ActiveModule(moduleId)), moduleId)
m ! Recover
sender ! m
}
}
/**
* Companion object for ModuleSupervisor.
*/
object ModuleSupervisor {
final val ModuleSupervisorName = "module-supervisor"
sealed case class RecoverModule(moduleId: String)
}
/**
 * A saga-type actor that orchestrates the creation of a new module and keeps the
 * module supervisor in sync with the outcome of that creation.
* @param client ActorRef original command sender
* @param cmd CreateModule the module creation command
* @param actorRef ActorRef the persistence module actor ref
*/
class ModuleCreationSaga(client: ActorRef, cmd: ActiveModule.CreateModule, actorRef: ActorRef)(implicit val timeout: Timeout) extends Actor {
import ActiveModule._
private case object Start
context.setReceiveTimeout(timeout.duration)
override def preStart() = {
self ! Start
}
def receive = {
case Start =>
context become awaitCreation
actorRef ! cmd
}
def awaitCreation: Receive = {
case evt @ ModuleCreated(id, version, name) =>
context.system.eventStream.publish(evt)
client ! evt.success
context stop self
case msg @ CreationException(id, ex) =>
context.system.eventStream.publish(msg)
client ! ex
context stop self
case ReceiveTimeout =>
client ! s"CreateModule timed out after ${timeout.duration.toSeconds} seconds for module:${cmd.id}".failureNel
context stop self
}
}
/**
 * A saga actor that locates the persistent module actor, instantiating and
 * recovering it when no live instance can be identified.
* @param moduleId String the module id, matches the persistenceId
* @param timeout Timeout the timeout for any wait state
*/
class ModuleLocator(moduleId: String)(implicit val timeout: Timeout) extends Actor {
import ModuleLocator._
import ActiveModule._
import ModuleSupervisor._
context.setReceiveTimeout(timeout.duration)
var client: ActorRef = _
def receive = {
case LocateModule =>
context become awaitLookup
client = sender
context.actorSelection(s"/user/$ModuleSupervisorName/module-1") ! Identify(moduleId)
}
def awaitLookup: Receive = {
case ActorIdentity(identityId, Some(ref)) =>
client ! ref
context stop self
case ActorIdentity(identityId, None) =>
context become awaitInstantiation
context.actorSelection(s"/user/$ModuleSupervisorName") ! RecoverModule(moduleId)
case ReceiveTimeout =>
client ! s"Unable to establish identity for module $moduleId."
context stop self
}
def awaitInstantiation: Receive = {
case m: ActorRef =>
context become awaitRecovery
m ! RecoveryCompleted
case ReceiveTimeout =>
client ! s"Unable to instantiate module $moduleId."
context stop self
}
def awaitRecovery: Receive = {
case msg @ ModuleRecoveryComplete(completed) =>
if (completed) {
val module = sender
client ! module.path
context stop self
}
else
sender ! RecoveryCompleted
case ReceiveTimeout =>
client ! s"Unable to instantiate module $moduleId."
context stop self
}
}
/**
* Companion object for ModuleLocator.
*/
object ModuleLocator {
case object LocateModule
} | reactibility/garde | garde-security/src/main/scala/garde/security/Module.scala | Scala | apache-2.0 | 7,572 |
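The |@| applicative builder used in validateCreate and validateChangeName above accumulates every failure instead of stopping at the first. A small self-contained demonstration -- the nonEmpty check is illustrative, not taken from CommonValidations:

import scalaz._
import Scalaz._

def nonEmpty(s: String, msg: String): ValidationNel[String, String] =
  if (s != null && s.nonEmpty) s.successNel else msg.failureNel

// Both checks fail and both messages are collected in the NonEmptyList:
val bad = (nonEmpty("", "IdRequired") |@| nonEmpty("", "NameRequired")) { (id, name) => (id, name) }
// bad == Failure(NonEmptyList("IdRequired", "NameRequired"))

val good = (nonEmpty("m-1", "IdRequired") |@| nonEmpty("Billing", "NameRequired")) { (id, name) => (id, name) }
// good == Success(("m-1", "Billing"))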
package doc.jockey.horse
import org.scalatest.WordSpec
class StringsInPatternMatchingSpec extends WordSpec {
implicit class PatternMatchableUrlAdapter(val sc: StringContext) {
val url = sc.parts.mkString("(.+)").r
}
"We can pattern match in interpolated Strings" in {
def matcher: PartialFunction[String, String] = {
case url"this ${a} is a simple ${b}" => s"Matched: $a, $b"
case url"/dj/$a/$b" => s"Matched: $a, $b"
case x => "Did not match anything"
}
assert(matcher("this sentence is a simple string") === "Matched: sentence, string")
assert(matcher("/dj/investment/StockAnalyser") === "Matched: investment, StockAnalyser")
}
"We can pattern match in interpolated Strings inside Tim's HTTP DSL" in {
case class GET(x: String)
def matcher: PartialFunction[GET, String] = {
case GET(url"/trade/$tradeID/message/$messageID") => s"Matched: $tradeID, $messageID" //processTradeMEssage(tradeId, messageId)
case x => "It fucked up"
}
assert(matcher(GET("/trade/123/message/456")) === "Matched: 123, 456")
}
} | agmenc/doc-jockey | src/test/scala/doc/jockey/horse/StringsInPatternMatchingSpec.scala | Scala | mit | 1,091 |
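What makes the url"..." syntax legal in a pattern is that an interpolated string in pattern position desugars to an extractor call: the url method builds a scala.util.matching.Regex, and Regex.unapplySeq binds one value per (.+) capture group. Hand-desugared, the second match arm above is roughly:

// Equivalent in spirit to: case url"/dj/$a/$b" => ...
val r = new StringContext("/dj/", "/", "").parts.mkString("(.+)").r // "/dj/(.+)/(.+)"
val matched = "/dj/investment/StockAnalyser" match {
  case r(a, b) => s"Matched: $a, $b" // Regex.unapplySeq supplies the two groups
}
// matched == "Matched: investment, StockAnalyser"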
package com.github.romangrebennikov.columnize.protocol.cql.types
import java.math.{BigDecimal, BigInteger}
import java.net.InetAddress
import java.nio.ByteBuffer
import java.util.{UUID, Date}
import com.datastax.driver.core.DataType
import com.datastax.driver.core.exceptions.InvalidTypeException
import com.datastax.driver.core.utils.Bytes
import com.github.romangrebennikov.columnize.protocol.body.Body
import org.joda.time.DateTime
/**
* Created by shutty on 10/6/15.
*/
object CQL {
trait Type {
def deserialize(raw:ByteBuffer):Value
}
trait Value {
def serialize = ???
}
}
| shuttie/columnize | src/main/scala/com/github/romangrebennikov/columnize/protocol/cql/types/CQL.scala | Scala | bsd-2-clause | 615 |
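The two traits above only fix the contract; a concrete codec would pair a Value case class with a Type that decodes the wire bytes. A hypothetical sketch for a CQL int -- nothing in this file defines it, so the names and layout are assumptions:

import java.nio.ByteBuffer
import com.github.romangrebennikov.columnize.protocol.cql.types.CQL

case class IntValue(v: Int) extends CQL.Value
object IntCodec extends CQL.Type {
  // a CQL int is four big-endian bytes; ByteBuffer reads big-endian by default
  def deserialize(raw: ByteBuffer): CQL.Value = IntValue(raw.getInt(raw.position()))
}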
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData, TypeUtils}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.Platform
import org.apache.spark.unsafe.array.ByteArrayMethods
import org.apache.spark.unsafe.types.UTF8String
/**
* Returns an Array containing the evaluation of all children expressions.
*/
@ExpressionDescription(
usage = "_FUNC_(expr, ...) - Returns an array with the given elements.",
examples = """
Examples:
> SELECT _FUNC_(1, 2, 3);
[1,2,3]
""")
case class CreateArray(children: Seq[Expression]) extends Expression {
override def foldable: Boolean = children.forall(_.foldable)
override def checkInputDataTypes(): TypeCheckResult = {
TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), s"function $prettyName")
}
override def dataType: ArrayType = {
ArrayType(
children.headOption.map(_.dataType).getOrElse(StringType),
containsNull = children.exists(_.nullable))
}
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
new GenericArrayData(children.map(_.eval(input)).toArray)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val et = dataType.elementType
val evals = children.map(e => e.genCode(ctx))
val (preprocess, assigns, postprocess, arrayData) =
GenArrayData.genCodeToCreateArrayData(ctx, et, evals, false)
ev.copy(
code = preprocess + ctx.splitExpressions(ctx.INPUT_ROW, assigns) + postprocess,
value = arrayData,
isNull = "false")
}
override def prettyName: String = "array"
}
private [sql] object GenArrayData {
/**
* Return Java code pieces based on DataType and isPrimitive to allocate ArrayData class
*
* @param ctx a [[CodegenContext]]
* @param elementType data type of underlying array elements
* @param elementsCode a set of [[ExprCode]] for each element of an underlying array
* @param isMapKey if true, throw an exception when the element is null
* @return (code pre-assignments, assignments to each array elements, code post-assignments,
* arrayData name)
*/
def genCodeToCreateArrayData(
ctx: CodegenContext,
elementType: DataType,
elementsCode: Seq[ExprCode],
isMapKey: Boolean): (String, Seq[String], String, String) = {
val arrayName = ctx.freshName("array")
val arrayDataName = ctx.freshName("arrayData")
val numElements = elementsCode.length
if (!ctx.isPrimitiveType(elementType)) {
val genericArrayClass = classOf[GenericArrayData].getName
ctx.addMutableState("Object[]", arrayName,
s"$arrayName = new Object[$numElements];")
val assignments = elementsCode.zipWithIndex.map { case (eval, i) =>
val isNullAssignment = if (!isMapKey) {
s"$arrayName[$i] = null;"
} else {
"throw new RuntimeException(\\"Cannot use null as map key!\\");"
}
eval.code + s"""
if (${eval.isNull}) {
$isNullAssignment
} else {
$arrayName[$i] = ${eval.value};
}
"""
}
("",
assignments,
s"final ArrayData $arrayDataName = new $genericArrayClass($arrayName);",
arrayDataName)
} else {
val unsafeArraySizeInBytes =
UnsafeArrayData.calculateHeaderPortionInBytes(numElements) +
ByteArrayMethods.roundNumberOfBytesToNearestWord(elementType.defaultSize * numElements)
val baseOffset = Platform.BYTE_ARRAY_OFFSET
ctx.addMutableState("UnsafeArrayData", arrayDataName)
val primitiveValueTypeName = ctx.primitiveTypeName(elementType)
val assignments = elementsCode.zipWithIndex.map { case (eval, i) =>
val isNullAssignment = if (!isMapKey) {
s"$arrayDataName.setNullAt($i);"
} else {
"throw new RuntimeException(\\"Cannot use null as map key!\\");"
}
eval.code + s"""
if (${eval.isNull}) {
$isNullAssignment
} else {
$arrayDataName.set$primitiveValueTypeName($i, ${eval.value});
}
"""
}
(s"""
byte[] $arrayName = new byte[$unsafeArraySizeInBytes];
$arrayDataName = new UnsafeArrayData();
Platform.putLong($arrayName, $baseOffset, $numElements);
$arrayDataName.pointTo($arrayName, $baseOffset, $unsafeArraySizeInBytes);
""",
assignments,
"",
arrayDataName)
}
}
}
/**
* Returns a catalyst Map containing the evaluation of all children expressions as keys and values.
* The children are a flatted sequence of kv pairs, e.g. (key1, value1, key2, value2, ...)
*/
@ExpressionDescription(
usage = "_FUNC_(key0, value0, key1, value1, ...) - Creates a map with the given key/value pairs.",
examples = """
Examples:
> SELECT _FUNC_(1.0, '2', 3.0, '4');
{1.0:"2",3.0:"4"}
""")
case class CreateMap(children: Seq[Expression]) extends Expression {
lazy val keys = children.indices.filter(_ % 2 == 0).map(children)
lazy val values = children.indices.filter(_ % 2 != 0).map(children)
override def foldable: Boolean = children.forall(_.foldable)
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size % 2 != 0) {
TypeCheckResult.TypeCheckFailure(
s"$prettyName expects a positive even number of arguments.")
} else if (keys.map(_.dataType).distinct.length > 1) {
TypeCheckResult.TypeCheckFailure(
"The given keys of function map should all be the same type, but they are " +
keys.map(_.dataType.simpleString).mkString("[", ", ", "]"))
} else if (values.map(_.dataType).distinct.length > 1) {
TypeCheckResult.TypeCheckFailure(
"The given values of function map should all be the same type, but they are " +
values.map(_.dataType.simpleString).mkString("[", ", ", "]"))
} else {
TypeCheckResult.TypeCheckSuccess
}
}
override def dataType: DataType = {
MapType(
keyType = keys.headOption.map(_.dataType).getOrElse(StringType),
valueType = values.headOption.map(_.dataType).getOrElse(StringType),
valueContainsNull = values.exists(_.nullable))
}
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
val keyArray = keys.map(_.eval(input)).toArray
if (keyArray.contains(null)) {
throw new RuntimeException("Cannot use null as map key!")
}
val valueArray = values.map(_.eval(input)).toArray
new ArrayBasedMapData(new GenericArrayData(keyArray), new GenericArrayData(valueArray))
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val mapClass = classOf[ArrayBasedMapData].getName
val MapType(keyDt, valueDt, _) = dataType
val evalKeys = keys.map(e => e.genCode(ctx))
val evalValues = values.map(e => e.genCode(ctx))
val (preprocessKeyData, assignKeys, postprocessKeyData, keyArrayData) =
GenArrayData.genCodeToCreateArrayData(ctx, keyDt, evalKeys, true)
val (preprocessValueData, assignValues, postprocessValueData, valueArrayData) =
GenArrayData.genCodeToCreateArrayData(ctx, valueDt, evalValues, false)
val code =
s"""
final boolean ${ev.isNull} = false;
$preprocessKeyData
${ctx.splitExpressions(ctx.INPUT_ROW, assignKeys)}
$postprocessKeyData
$preprocessValueData
${ctx.splitExpressions(ctx.INPUT_ROW, assignValues)}
$postprocessValueData
final MapData ${ev.value} = new $mapClass($keyArrayData, $valueArrayData);
"""
ev.copy(code = code)
}
override def prettyName: String = "map"
}
/**
* An expression representing a not yet available attribute name. This expression is unevaluable
* and as its name suggests it is a temporary place holder until we're able to determine the
* actual attribute name.
*/
case object NamePlaceholder extends LeafExpression with Unevaluable {
override lazy val resolved: Boolean = false
override def foldable: Boolean = false
override def nullable: Boolean = false
override def dataType: DataType = StringType
override def prettyName: String = "NamePlaceholder"
override def toString: String = prettyName
}
/**
* Returns a Row containing the evaluation of all children expressions.
*/
object CreateStruct extends FunctionBuilder {
def apply(children: Seq[Expression]): CreateNamedStruct = {
CreateNamedStruct(children.zipWithIndex.flatMap {
case (e: NamedExpression, _) if e.resolved => Seq(Literal(e.name), e)
case (e: NamedExpression, _) => Seq(NamePlaceholder, e)
case (e, index) => Seq(Literal(s"col${index + 1}"), e)
})
}
/**
* Entry to use in the function registry.
*/
val registryEntry: (String, (ExpressionInfo, FunctionBuilder)) = {
val info: ExpressionInfo = new ExpressionInfo(
"org.apache.spark.sql.catalyst.expressions.NamedStruct",
null,
"struct",
"_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field values.",
"",
"",
"",
"")
("struct", (info, this))
}
}
/**
* Common base class for both [[CreateNamedStruct]] and [[CreateNamedStructUnsafe]].
*/
trait CreateNamedStructLike extends Expression {
lazy val (nameExprs, valExprs) = children.grouped(2).map {
case Seq(name, value) => (name, value)
}.toList.unzip
lazy val names = nameExprs.map(_.eval(EmptyRow))
override def nullable: Boolean = false
override def foldable: Boolean = valExprs.forall(_.foldable)
override lazy val dataType: StructType = {
val fields = names.zip(valExprs).map {
case (name, expr) =>
val metadata = expr match {
case ne: NamedExpression => ne.metadata
case _ => Metadata.empty
}
StructField(name.toString, expr.dataType, expr.nullable, metadata)
}
StructType(fields)
}
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length < 1) {
TypeCheckResult.TypeCheckFailure(
s"input to function $prettyName requires at least one argument")
} else if (children.size % 2 != 0) {
TypeCheckResult.TypeCheckFailure(s"$prettyName expects an even number of arguments.")
} else {
val invalidNames = nameExprs.filterNot(e => e.foldable && e.dataType == StringType)
if (invalidNames.nonEmpty) {
TypeCheckResult.TypeCheckFailure(
"Only foldable StringType expressions are allowed to appear at odd position, got:" +
s" ${invalidNames.mkString(",")}")
} else if (!names.contains(null)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure("Field name should not be null")
}
}
}
/**
* Returns Aliased [[Expression]]s that could be used to construct a flattened version of this
* StructType.
*/
def flatten: Seq[NamedExpression] = valExprs.zip(names).map {
case (v, n) => Alias(v, n.toString)()
}
override def eval(input: InternalRow): Any = {
InternalRow(valExprs.map(_.eval(input)): _*)
}
}
/**
* Creates a struct with the given field names and values
*
* @param children Seq(name1, val1, name2, val2, ...)
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values.",
examples = """
Examples:
> SELECT _FUNC_("a", 1, "b", 2, "c", 3);
{"a":1,"b":2,"c":3}
""")
// scalastyle:on line.size.limit
case class CreateNamedStruct(children: Seq[Expression]) extends CreateNamedStructLike {
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val rowClass = classOf[GenericInternalRow].getName
val values = ctx.freshName("values")
ctx.addMutableState("Object[]", values, s"$values = null;")
ev.copy(code = s"""
$values = new Object[${valExprs.size}];""" +
ctx.splitExpressions(
ctx.INPUT_ROW,
valExprs.zipWithIndex.map { case (e, i) =>
val eval = e.genCode(ctx)
eval.code + s"""
if (${eval.isNull}) {
$values[$i] = null;
} else {
$values[$i] = ${eval.value};
}"""
}) +
s"""
final InternalRow ${ev.value} = new $rowClass($values);
$values = null;
""", isNull = "false")
}
override def prettyName: String = "named_struct"
}
/**
* Creates a struct with the given field names and values. This is a variant that returns
* UnsafeRow directly. The unsafe projection operator replaces [[CreateStruct]] with
* this expression automatically at runtime.
*
* @param children Seq(name1, val1, name2, val2, ...)
*/
case class CreateNamedStructUnsafe(children: Seq[Expression]) extends CreateNamedStructLike {
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = GenerateUnsafeProjection.createCode(ctx, valExprs)
ExprCode(code = eval.code, isNull = "false", value = eval.value)
}
override def prettyName: String = "named_struct_unsafe"
}
/**
* Creates a map after splitting the input text into key/value pairs using delimiters
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(text[, pairDelim[, keyValueDelim]]) - Creates a map after splitting the text into key/value pairs using delimiters. Default delimiters are ',' for `pairDelim` and ':' for `keyValueDelim`.",
examples = """
Examples:
> SELECT _FUNC_('a:1,b:2,c:3', ',', ':');
map("a":"1","b":"2","c":"3")
> SELECT _FUNC_('a');
map("a":null)
""")
// scalastyle:on line.size.limit
case class StringToMap(text: Expression, pairDelim: Expression, keyValueDelim: Expression)
extends TernaryExpression with CodegenFallback with ExpectsInputTypes {
def this(child: Expression, pairDelim: Expression) = {
this(child, pairDelim, Literal(":"))
}
def this(child: Expression) = {
this(child, Literal(","), Literal(":"))
}
override def children: Seq[Expression] = Seq(text, pairDelim, keyValueDelim)
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
override def dataType: DataType = MapType(StringType, StringType)
override def checkInputDataTypes(): TypeCheckResult = {
if (Seq(pairDelim, keyValueDelim).exists(! _.foldable)) {
TypeCheckResult.TypeCheckFailure(s"$prettyName's delimiters must be foldable.")
} else {
super.checkInputDataTypes()
}
}
override def nullSafeEval(
inputString: Any,
stringDelimiter: Any,
keyValueDelimiter: Any): Any = {
val keyValues =
inputString.asInstanceOf[UTF8String].split(stringDelimiter.asInstanceOf[UTF8String], -1)
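    // walk the pairs lazily, splitting each on the first key/value delimiter only
    // (split limit 2); a pair with no value yields null for the value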
val iterator = new Iterator[(UTF8String, UTF8String)] {
var index = 0
val keyValueDelimiterUTF8String = keyValueDelimiter.asInstanceOf[UTF8String]
override def hasNext: Boolean = {
keyValues.length > index
}
override def next(): (UTF8String, UTF8String) = {
val keyValueArray = keyValues(index).split(keyValueDelimiterUTF8String, 2)
index += 1
(keyValueArray(0), if (keyValueArray.length < 2) null else keyValueArray(1))
}
}
ArrayBasedMapData(iterator, keyValues.size, identity, identity)
}
override def prettyName: String = "str_to_map"
}
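// Usage sketch (hedged; assumes a SparkSession named `spark` is in scope):
//   spark.sql("SELECT str_to_map('a:1,b:2,c:3')").head
//   // yields a single MapType value equal to Map("a" -> "1", "b" -> "2", "c" -> "3"),
//   // using the default ',' pair delimiter and ':' key/value delimiter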
| adrian-ionescu/apache-spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala | Scala | apache-2.0 | 16,474 |
package com.rxbytes.splitpal.providers.events
/**
* Created by pnagarjuna on 18/12/15.
*/
case class EventEntity(id: Int,
name: String)
| pamu/split-pal | src/main/scala/com/rxbytes/splitpal/providers/events/EventEntity.scala | Scala | apache-2.0 | 164 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
import scala.actors.Actor
import Actor._
object ParallelSimulation {
case class Ping(time: Int)
case class Pong(time: Int, from: Actor)
case object Start
case object Stop
case class AfterDelay(delay: Int, msg: Any, target: Actor)
class Clock extends Actor {
private var running = false
private var currentTime = 0
private var agenda: List[WorkItem] = List()
private var allSimulants: List[Actor] = List()
private var busySimulants: Set[Actor] = Set.empty
start()
def add(sim: Simulant) {
allSimulants = sim :: allSimulants
}
case class WorkItem(time: Int, msg: Any, target: Actor)
private def insert(ag: List[WorkItem],
item: WorkItem): List[WorkItem] = {
if (ag.isEmpty || item.time < ag.head.time) item :: ag
else ag.head :: insert(ag.tail, item)
}
def act() {
loop {
if (running && busySimulants.isEmpty)
advance()
reactToOneMessage()
}
}
def advance() {
if (agenda.isEmpty && currentTime > 0) {
println("** Agenda empty. Clock exiting at time "+
currentTime+".")
self ! Stop
return
}
currentTime += 1
println("Advancing to time "+currentTime)
processCurrentEvents()
for (sim <- allSimulants)
sim ! Ping(currentTime)
busySimulants = Set.empty ++ allSimulants
}
private def processCurrentEvents() {
val todoNow = agenda.takeWhile(_.time <= currentTime)
agenda = agenda.drop(todoNow.length)
for (WorkItem(time, msg, target) <- todoNow) {
assert(time == currentTime)
target ! msg
}
}
def reactToOneMessage() {
react {
case AfterDelay(delay, msg, target) =>
val item = WorkItem(currentTime + delay, msg, target)
agenda = insert(agenda, item)
case Pong(time, sim) =>
assert(time == currentTime)
assert(busySimulants contains sim)
busySimulants -= sim
case Start => running = true
case Stop =>
for (sim <- allSimulants)
sim ! Stop
exit()
}
}
}
trait Simulant extends Actor {
val clock: Clock
def handleSimMessage(msg: Any)
def simStarting() { }
def act() {
loop {
react {
case Stop => exit()
case Ping(time) =>
if (time == 1) simStarting()
clock ! Pong(time, self)
case msg => handleSimMessage(msg)
}
}
}
start()
}
}
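// A minimal wiring sketch (not from the book; names are illustrative): a single
// simulant that schedules one message to itself via the clock and prints what
// the clock delivers.
object ParallelSimulationDemo {
  import ParallelSimulation._

  def main(args: Array[String]) {
    val theClock = new Clock
    val echo = new Simulant {
      val clock = theClock
      def handleSimMessage(msg: Any) { println("delivered: " + msg) }
      // at simulated time 1, ask the clock to deliver a message back to us at time 3
      override def simStarting() { clock ! AfterDelay(2, "wake up", this) }
    }
    theClock.add(echo)
    theClock ! Start
  }
}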
| peachyy/scalastu | concurrency/ParallelSimulation.scala | Scala | apache-2.0 | 3,266 |
package reactor.core.scala.publisher
import org.reactivestreams.Subscription
import reactor.core.{Scannable, publisher}
import reactor.core.publisher.{UnicastProcessor => JUnicastProcessor}
/**
* A Processor implementation that takes a custom queue and allows
* only a single subscriber.
*
* <p>
* The implementation keeps the order of signals.
*
* @tparam T the input and output type
*/
class UnicastProcessor[T](val jUnicastProcessor: JUnicastProcessor[T]) extends SFlux[T] with FluxProcessor[T, T] {
override def onComplete(): Unit = jUnicastProcessor.onComplete()
override def onError(t: Throwable): Unit = jUnicastProcessor.onError(t)
override def onNext(t: T): Unit = jUnicastProcessor.onNext(t)
override def onSubscribe(s: Subscription): Unit = jUnicastProcessor.onSubscribe(s)
override protected def jFluxProcessor: publisher.FluxProcessor[T, T] = jUnicastProcessor
override def jScannable: Scannable = jFluxProcessor
override private[publisher] def coreFlux = jUnicastProcessor
}
object UnicastProcessor {
private[publisher] def apply[T](jUnicastProcessor: JUnicastProcessor[T]): UnicastProcessor[T] = new UnicastProcessor[T](jUnicastProcessor)
/**
* Create a unicast [[FluxProcessor]] that will buffer on a given queue in an
* unbounded fashion.
*
* @tparam T the relayed type
* @return a unicast [[FluxProcessor]]
*/
def create[T](): UnicastProcessor[T] = apply[T](JUnicastProcessor.create[T]())
}
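// Usage sketch (hedged; assumes SFlux exposes the single-consumer
// `subscribe(T => Unit)` overload):
//   val processor = UnicastProcessor.create[Int]()
//   processor.subscribe((i: Int) => println(s"received $i"))
//   processor.onNext(1)
//   processor.onNext(2)
//   processor.onComplete()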
| reactor/reactor-scala-extensions | src/main/scala/reactor/core/scala/publisher/UnicastProcessor.scala | Scala | apache-2.0 | 1,481 |
package scalaz.stream.merge
import scala.collection.immutable.Queue
import scalaz.\\/._
import scalaz.stream.Process._
import scalaz.stream.async.mutable.Signal
import scalaz.stream.merge.Junction._
import scalaz.stream.process1
import scalaz.{\\/, -\\/}
protected[stream] object JunctionStrategies {
/** Typed constructor helper to create Junction.Strategy */
def junction[W, I, O](f: JunctionSignal[W, I, O] => JunctionStrategy[W, I, O]): JunctionStrategy[W, I, O] =
receive1[JunctionSignal[W, I, O], JunctionAction[W, O]](f)
/**
   * Bounded queue strategy, where every `A` received is distributed to downstreams on a first-come,
   * first-served basis. The queue may have a max size defined. This restricts the size of the queue
   * and stops taking more `A` from upstreams once the internal queue reaches or exceeds the max size.
   * @param max when <= 0, the queue is unbounded; otherwise controls when upstreams are allowed to push more `A`
*/
def boundedQ[A](max: Int): JunctionStrategy[Int, A, A] = {
val bounded = max > 0
def drain(q: Queue[A], rsn: Throwable): JunctionStrategy[Int, A, A] =
junction[Int, A, A] {
case Open(jx, ref: UpRef) => jx.close(ref, rsn) fby drain(q, rsn)
case Open(jx, ref: DownRefW) => jx.writeW(q.size, ref) fby drain(q, rsn)
case Receive(jx, _, ref) => jx.close(ref, rsn) fby drain(q, rsn)
case Ready(jx, ref: DownRefO) =>
val (a, nq) = q.dequeue
val next = jx.writeO(a, ref) fby jx.broadcastW(nq.size)
if (nq.size > 0) next fby drain(nq, rsn)
else next fby Halt(rsn)
case o =>
drain(q, rsn)
}
def go(q: Queue[A]): JunctionStrategy[Int, A, A] =
junction[Int, A, A] {
case Open(jx, ref: UpRef) =>
if (bounded && q.size >= max) go(q)
else jx.more(ref) fby go(q)
case Open(jx, ref: DownRefW) =>
jx.writeW(q.size, ref) fby go(q)
case Receive(jx, sa, ref) =>
val (nq, distribute) = jx.distributeO(q ++ sa, jx.downReadyO)
val next = distribute fby jx.broadcastW(nq.size) fby go(nq)
if (!bounded || nq.size < max) jx.more(ref) fby next
else next
case Ready(jx, ref: DownRefO) =>
if (q.nonEmpty) {
val (a, nq) = q.dequeue
val next = jx.writeO(a, ref) fby jx.broadcastW(nq.size) fby go(nq)
if (bounded && nq.size < max && jx.upReady.nonEmpty) jx.moreAll fby next
else next
} else {
          if (jx.upReady.nonEmpty) jx.moreAll fby go(q)
else go(q)
}
case DoneDown(jx, rsn) =>
if (q.nonEmpty && jx.downO.nonEmpty) jx.closeAllUp(rsn) fby drain(q, rsn)
else Halt(rsn)
case o =>
go(q)
}
go(Queue())
}
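  // Behaviour sketch (hedged, illustrative only): wiring `boundedQ[Int](10)` into a
  // Junction yields a queue that stops signalling demand to its upstreams once ten
  // elements sit unconsumed, and resumes them as the downstreams drain the queue.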
/**
* Converts Writer1 to JunctionStrategy.
*
   * A publish-subscribe-like merging strategy backed by the supplied Writer1.
   * Any `I` received from upstream is published to _all_ downstreams on the `O` side if emitted by
   * the Writer1 as `O`, or to downstreams on the `W` side if emitted by the Writer1 as `W`.
   *
   * Additionally, all `W` downstreams will see the last `W` emitted by the Writer1. If no `W` has
   * been emitted yet, downstreams on the `W` side will wait until one becomes available.
   *
   * This strategy can also be used to feed sources from upstreams whenever at least one
   * downstream has started.
   *
   * Note: this strategy terminates when the Writer1 terminates or when the downstream is closed.
   *
   * @return
*/
def liftWriter1[W, I, O](w: Writer1[W, I, O]): JunctionStrategy[W, I, O] = {
def go(cur: Writer1[W, I, O], last: Option[W]): JunctionStrategy[W, I, O] = {
def lastW(swo:Seq[W\\/O]) : Option[W] = swo.collect({ case -\\/(w) => w }).lastOption
junction[W, I, O] {
case Open(jx, ref: UpRef) => emit(OpenNext) fby jx.more(ref) fby go(cur, last)
case Open(jx, ref: DownRefW) => last match {
case Some(w0) => jx.writeW(w0, ref) fby go(cur, last)
case None => cur.unemit match {
case (swo, next) =>
def goNext(ow: Option[W]) = next match {
case hlt@Halt(rsn) => hlt
case next => go(next, ow)
}
lastW(swo) match {
case s@Some(w) => jx.writeW(w,ref) fby goNext(s)
case None => goNext(None)
}
}
}
case Receive(jx, is, ref) =>
process1.feed(is)(cur).unemit match {
case (swo, hlt@Halt(rsn)) =>
jx.close(ref,rsn) fby jx.broadcastAllBoth(swo) fby hlt
case (swo, next) =>
jx.more(ref) fby jx.broadcastAllBoth(swo) fby go(next, lastW(swo) orElse last)
}
case DoneDown(jx, rsn) =>
val (swo, _) = cur.killBy(rsn).unemit
jx.broadcastAllBoth(swo) fby Halt(rsn)
case _ => go(cur, last)
}
}
emit(OpenNext) fby go(w, None)
}
/**
* MergeN strategy for mergeN combinator. Please see [[scalaz.stream.merge]] for more details.
*/
def mergeN[A](max:Int):JunctionStrategy[Nothing,A,A] = {
def openNextIfNeeded(current:Int) : JunctionStrategy[Nothing,A,A] =
if (max <= 0 || max > current) emit(OpenNext) else halt
def go(q:Queue[A],closedUp:Option[Throwable]) : JunctionStrategy[Nothing,A,A] = {
junction[Nothing,A,A] {
case Open(jx,ref:UpRef) =>
if (q.size < jx.up.size) openNextIfNeeded(jx.up.size) fby jx.more(ref) fby go(q,closedUp)
else openNextIfNeeded(jx.up.size) fby go(q,closedUp)
case Open(jx,ref:DownRefO) =>
if (jx.downO.size == 1) go(q,closedUp)
else jx.close(ref,new Exception("Only one downstream allowed for mergeN"))
case Receive(jx, as, ref) =>
if (jx.downReadyO.nonEmpty) {
jx.writeAllO(as,jx.downO.head) fby jx.more(ref) fby go(q,closedUp)
} else {
val nq = q.enqueue(scala.collection.immutable.Iterable.concat(as))
if (nq.size < jx.up.size) jx.more(ref) fby go(nq,closedUp)
else go(nq,closedUp)
}
case Ready(jx,ref:DownRefO) =>
if (q.nonEmpty) jx.writeAllO(q,ref) fby jx.moreAll fby go(Queue(),closedUp)
else if (jx.up.isEmpty && closedUp.isDefined) Halt(closedUp.get)
else jx.moreAll fby go(q,closedUp)
case DoneUp(jx,rsn) =>
if (jx.up.nonEmpty || q.nonEmpty) go(q,Some(rsn))
else Halt(rsn)
case Done(jx,_:UpRef,End) => closedUp match {
case Some(rsn) if jx.up.isEmpty && q.isEmpty => Halt(rsn)
case _ => openNextIfNeeded(jx.up.size) fby go(q,closedUp)
}
case Done(jx,_:UpRef,rsn) => Halt(rsn)
case Done(jx,_:DownRefO, rsn) =>
if (jx.downO.isEmpty) Halt(rsn)
else go(q,closedUp)
case _ => go(q, closedUp)
}
}
emit(OpenNext) fby go(Queue(),None)
}
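  // Usage sketch (hedged): this strategy backs the public `scalaz.stream.merge.mergeN`
  // combinator, which merges a stream of streams while keeping at most `max` inner
  // sources open at once, e.g. `merge.mergeN(4)(sources)` for a bound of four.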
/** various writers used in merge strategies **/
object writers {
/** writer that only echoes `A` on `O` side **/
def echoO[A]: Writer1[Nothing, A, A] = process1.id[A].map(right)
/** Writer1 that interprets the Signal messages to provide discrete source of `A` **/
def signal[A]: Writer1[A, Signal.Msg[A], Nothing] = {
def go(oa: Option[A]): Writer1[A, Signal.Msg[A], Nothing] = {
receive1[Signal.Msg[A], A \\/ Nothing] {
case Signal.Set(a) => emit(left(a)) fby go(Some(a))
case Signal.CompareAndSet(f: (Option[A] => Option[A])@unchecked) => f(oa) match {
case Some(a) => emit(left(a)) fby go(Some(a))
case None => go(oa)
}
case Signal.Fail(rsn) => Halt(rsn)
}
}
go(None)
}
}
/**
* Publish-subscribe merge strategy, where every `A` received from upstream is delivered to all downstream
* @tparam A
* @return
*/
def publishSubscribe[A]: JunctionStrategy[Nothing, A, A] = liftWriter1(writers.echoO[A])
/**
* Signal merge strategy, that interprets [[scalaz.stream.async.mutable.Signal]] algebra and produces discrete
* source of signal
* @tparam A
* @return
*/
def signal[A]: JunctionStrategy[A, Signal.Msg[A], Nothing] = liftWriter1(writers.signal[A])
}
| doctau/scalaz-stream | src/main/scala/scalaz/stream/merge/JunctionStrategies.scala | Scala | mit | 8,506 |
package io.chymyst.benchmark
import java.time.LocalDateTime
import java.time.temporal.ChronoUnit
object Common {
val warmupTimeMs = 50L
def elapsed(initTime: LocalDateTime): Long = initTime.until(LocalDateTime.now, ChronoUnit.MILLIS)
def elapsed(initTime: Long): Long = System.currentTimeMillis() - initTime
def timeThis(task: => Unit): Long = {
val initTime = LocalDateTime.now
task
elapsed(initTime)
}
def timeWithPriming(task: => Unit): Long = {
task // this is just priming, no measurement
val result1 = timeThis {
task
}
val result2 = timeThis {
task
}
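    // average of the two timed runs, rounded up to the nearest millisecond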
(result1 + result2 + 1) / 2
}
def waitSome(): Unit = Thread.sleep(warmupTimeMs)
}
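// Usage sketch (hypothetical workload; `timeWithPriming` runs the task once to warm
// up, then averages two timed runs):
//   val ms = Common.timeWithPriming { (1 to 1000000).map(math.sqrt(_)).sum }
//   println(s"primed average: $ms ms")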
| Chymyst/chymyst-core | benchmark/src/main/scala/io/chymyst/benchmark/Common.scala | Scala | apache-2.0 | 713 |
package raft
import scala.language.higherKinds
import akka.actor.ActorRef
case class Term(current: Int) extends Ordered[Term] {
def nextTerm: Term = this.copy(current = current + 1)
def compare(that: Term) = current.compare(that.current)
}
object Term {
def max(t1: Term, t2: Term): Term =
if (t1 > t2) t1
else t2
}
case class Votes(
votedFor: Option[NodeId] = None,
received: List[NodeId] = List()) {
def gotVoteFrom(ref: ActorRef): Votes = this.copy(received = ref :: received)
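  // true once at least ceil(size / 2) votes have been received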
def majority(size: Int): Boolean =
(this.received.length >= Math.ceil(size / 2.0))
def vote(ref: NodeId) = votedFor match {
case Some(vote) => this
case None => copy(votedFor = Some(ref)) // TODO: Persist this value before returning
}
}
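// Worked sketch (hypothetical ActorRefs a, b, c): in a five-node cluster, three
// collected votes form a majority, since 3 >= ceil(5 / 2.0) = 3:
//   Votes(received = List(a, b, c)).majority(5)  // true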
case class Meta(
var term: Term,
var log: Log,
rsm: TotalOrdering, // TODO: Make generic
var nodes: List[NodeId],
var votes: Votes = Votes(),
var leader: Option[NodeId] = None) {
def leaderAppend(ref: ActorRef, e: Vector[Entry]) = {
val entries = log.entries.append(e)
log = log.copy(entries = entries)
log = log.resetNextFor(ref)
log = log.matchFor(ref, Some(log.entries.lastIndex))
}
def append(e: Vector[Entry], at: Int) =
log = log.copy(entries = log.entries.append(e, at))
def selectTerm(other: Term) = {
if (other > term) {
term = other
votes = Votes()
}
}
// term = Term.max(this.term, other)
def nextTerm = {
votes = Votes()
term = term.nextTerm
}
def setLeader(ref: NodeId) =
leader = Some(ref)
}
object Meta {
def apply(nodes: List[NodeId]): Meta =
Meta(Term(0), Log(nodes, Vector[Entry]()), new TotalOrdering, nodes)
}
| archie/raft | src/main/scala/raft/State.scala | Scala | mit | 1,705 |
package models.product
import play.api.libs.json.Json
case class Products(activeProducts: Seq[ActiveProduct])
object Products {
implicit val jsonReads = Json.reads[Products]
}
| leanovate/microzon-web | app/models/product/Products.scala | Scala | mit | 181 |
package functionalops.systemz
import scalaz._
import Scalaz._
package object core extends CoreFunctions
| functionalops/systemz | core/src/main/scala/functionalops/package.scala | Scala | bsd-3-clause | 106 |