code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package com.github.andyglow.relaxed
import scala.language.experimental.macros
import scala.language.postfixOps
import scala.reflect.macros.blackbox
/** Compile-time helpers that derive a `fieldName -> fieldClass` map from a case class. */
private[relaxed] object Util {

  /** Macro entry point: expands to a literal `Map(name -> classOf[Type], ...)` built
    * from the case-class accessors of `T`; fields annotated with `@skip` are omitted.
    */
  def fieldMap[T]: Map[String, Class[_]] = macro fieldMapImpl[T]

  /** Macro implementation backing [[fieldMap]]: collects the updatable fields of `T`
    * and splices them into a `Map` construction via quasiquotes.
    */
  def fieldMapImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Map[String, Class[_]]] = {
    import c.universe._
    val tpe = weakTypeOf[T]
    val fields = Util.fieldMap(c)(tpe) map { case (n, t) =>
      val name = n.decodedName.toString
      // NOTE(review): shadows the outer `tpe` and uses only the simple (unqualified)
      // type name, so field types not visible by simple name at the expansion site
      // may fail to resolve — verify against call sites.
      val tpe = TypeName(t.typeSymbol.name.decodedName.toString)
      q"$name -> classOf[$tpe]"
    }
    val code = q"Map( ..$fields )"
    c.Expr[Map[String, Class[_]]](code)
  }

  /** Collects the case-accessor fields of `tpe` as (name, type) pairs, excluding any
    * field whose backing val carries a `skip` annotation.
    */
  def fieldMap(c: blackbox.Context)(tpe: c.universe.Type): Seq[(c.universe.TermName, c.universe.Type)] = {
    import c.universe._
    // name -> annotations of the backing val, for every case accessor.
    val annotations = tpe.decls.collect {
      case s: MethodSymbol if s.isCaseAccessor =>
        // workaround: force loading annotations
        s.typeSignature
        s.accessed.annotations.foreach(_.tree.tpe)
        s.name.toString.trim -> s.accessed.annotations
    }.toMap
    // True when the named field is annotated with @skip.
    def shouldSkip(name: String): Boolean = {
      val fieldAnnotations = annotations.getOrElse(name, List.empty)
      fieldAnnotations.exists(_.tree.tpe <:< typeOf[skip])
    }
    // Extractor matching case-accessor vals that are not skipped.
    object UpdatableField {
      def unapply(s: TermSymbol): Option[(TermName, Type)] = {
        val name = s.name.toString.trim
        if ( s.isVal
          && s.isCaseAccessor
          && !shouldSkip(name)) Some((TermName(name), s.typeSignature)) else None
      }
    }
    tpe.decls.collect {case UpdatableField(n, t) => (n, t)} toSeq
  }
}
| andyglow/relaxed-json-update | api/src/main/scala-2.11/com/github/andyglow/relaxed/Util.scala | Scala | lgpl-3.0 | 1,624 |
package app.server
import akka.actor.ActorRef
import akka.testkit.{TestActorRef, TestKit}
import app.adapters.database.{DbSpec, TaskDAO}
import app.models.Task
import app.services.TaskService
import org.joda.time.DateTime
import org.scalatest.{Matchers, WordSpec}
import play.api.libs.json.{JsNumber, JsArray, JsValue, JsString}
import spray.http.StatusCodes
import spray.httpx.PlayJsonSupport
import spray.testkit.ScalatestRouteTest
/**
* User: asalvadore
*/
/** Route-level tests for the task web service, backed by a real DAO over the test DB. */
class TaskWebServiceSpec extends WordSpec with ScalatestRouteTest with Matchers with DbSpec with PlayJsonSupport {
  import dbProfile.profile.simple._

  // Tear down the actor system once all examples have run.
  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  val dao = new TaskDAO(dbProfile)
  // TestActorRef makes message handling synchronous, so route checks are deterministic.
  val actorRef = TestActorRef(new TaskService(dao), "task-service")

  // Service under test, wired to the test actor system and the task-service actor.
  val taskWebService = new TaskWebService {
    override implicit def actorRefFactory = system
    override val taskService: ActorRef = actorRef
  }

  // Inserts the given tasks directly through the DAO (bypassing the HTTP layer).
  def loadTasks(tasks: List[Task]): Unit ={
    conn.withSession { implicit session =>
      dao.tasks ++= tasks
    }
  }

  "the taskWebService" should {
    "provide a method to retrieve all tasks" in {
      loadTasks(List(
        Task(0, content = "c1", created = new DateTime(), finished = false, assignee = "a1"),
        Task(0, content = "c2", created = new DateTime(), finished = false, assignee = "a2")
      ))
      Get("/tasks") ~> taskWebService.taskServiceRoutes ~> check {
        status should be(StatusCodes.OK)
        // NOTE(review): `\\` here may be a dump-escaping artifact of play-json's `\`
        // lookup operator — verify against the original file.
        (responseAs[JsValue] \\ "tasks")(0) \\ "content" should be(JsString("c1"))
      }
    }
    "provide a method to only get ids" in {
      loadTasks(List(
        Task(0, content = "c1", created = new DateTime(), finished = false, assignee = "a1"),
        Task(0, content = "c2", created = new DateTime(), finished = false, assignee = "a2")
      ))
      Get("/tasks/ids") ~> taskWebService.taskServiceRoutes ~> check {
        status should be(StatusCodes.OK)
        // Ids are assigned by the database sequence, hence 1 and 2.
        (responseAs[JsValue] \\ "ids") should be (JsArray(Seq(JsNumber(1),JsNumber(2))))
      }
    }
  }
}
| mericano1/spray-akka-slick-postgres | src/test/scala/app/server/TaskWebServiceSpec.scala | Scala | mit | 2,083 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin
import org.apache.zeppelin.AbstractFunctionalSuite.SERVER_ADDRESS
import org.openqa.selenium.WebDriver
import org.openqa.selenium.chrome.ChromeDriver
import org.openqa.selenium.firefox.{FirefoxBinary, FirefoxDriver, FirefoxProfile}
import org.openqa.selenium.safari.SafariDriver
import org.scalatest.concurrent.Eventually._
import org.scalatest.time._
import org.scalatest.selenium.WebBrowser
import org.scalatest.{BeforeAndAfterAll, FunSuite, Suite}
import scala.sys.process._
import scala.util.Try
/** Constants shared by the functional suites. */
object AbstractFunctionalSuite {
  // Address the locally started Zeppelin server listens on.
  val SERVER_ADDRESS = "http://localhost:8080"
}
/** Base suite: starts a local Zeppelin server, runs the nested UI suites against it,
  * and stops the server afterwards.
  */
class AbstractFunctionalSuite extends FunSuite with WebBrowser with BeforeAndAfterAll {

  implicit val webDriver = getDriver()

  // Start the Zeppelin daemon and wait (up to 20s) until the welcome page is served.
  override def beforeAll() = {
    "../bin/zeppelin-daemon.sh start" !

    eventually (timeout(Span(20, Seconds))) {
      go to SERVER_ADDRESS
      assert(find("welcome").isDefined)
    }
  }

  // Suites that reuse the server and driver started by this one.
  override def nestedSuites =
    List[Suite](new WelcomePageSuite).toIndexedSeq

  override def afterAll() = {
    "../bin/zeppelin-daemon.sh stop" !
  }

  /** Returns the first browser driver that can actually be instantiated on this
    * machine, trying Safari, then Chrome, then Firefox; throws if none works.
    */
  def getDriver(): WebDriver = {
    val possibleDrivers = List[() => WebDriver](safary, chrome, firefox)
    val createdDriver = possibleDrivers.map(driverFactory => Try(driverFactory.apply())).find(_.isSuccess)
    createdDriver match {
      case Some(driver) => driver.get
      case None => throw new RuntimeException("Could not initialize any driver")
    }
  }

  // NOTE(review): "safary" is a typo for "safari"; left unchanged to avoid breaking callers.
  def safary(): WebDriver = {
    new SafariDriver()
  }

  def chrome(): WebDriver = {
    new ChromeDriver()
  }

  // Under Travis CI, point Firefox at the Xvfb virtual display.
  def firefox(): WebDriver = {
    val ffox: FirefoxBinary = new FirefoxBinary
    if ("true" == System.getenv("TRAVIS")) {
      ffox.setEnvironmentProperty("DISPLAY", ":99")
    }
    val profile: FirefoxProfile = new FirefoxProfile
    new FirefoxDriver(ffox, profile)
  }
}
| rohit2b/incubator-zeppelin | zeppelin-server/src/test/scala/org/apache/zeppelin/AbstractFunctionalSuite.scala | Scala | apache-2.0 | 2,660 |
package org.yaqoob.scala.hackerearth
import scala.io.StdIn
//Given the size and the elements of array A, print all the elements in reverse order.
//
//Input:
//First line of input contains, N - size of the array.
//Following N lines, each contains one integer, i{th} element of the array i.e. A[i].
//
//Output:
//Print all the elements of the array in reverse order, each element in a new line.
object PrintArrayInReverse extends App {
  // First input line: number of elements that follow.
  val count = StdIn.readInt
  // Read the elements one per line, then emit them last-to-first.
  val elements = List.fill(count)(StdIn.readInt)
  elements.reverse.foreach(println)
}
| adnanyaqoobvirk/scala-experiments | src/main/scala/org/yaqoob/scala/hackerearth/PrintArrayInReverse.scala | Scala | mit | 545 |
import sbt._
import Keys._
import PlayProject._
/** sbt build definition (Play 2.0/2.1-era `PlayProject` style) for the module repository. */
object ApplicationBuild extends Build {

  val appName = "module-repo"
  val appVersion = "1.0-SNAPSHOT"

  // Third-party dependencies: authorization (Deadbolt), RSS/Atom feeds (ROME),
  // Markdown rendering (markdownj).
  val appDependencies = Seq(
    "be.objectify" %% "deadbolt-2" % "1.1.2",
    "rome" % "rome" % "1.0",
    "org.markdownj" % "markdownj" % "0.3.0-1.0.2b4"
  )

  // Main Java Play project; extra ivy-style resolvers host the Deadbolt and
  // Scala-Tools artifacts.
  val main = PlayProject(appName, appVersion, appDependencies, mainLang = JAVA).settings (
    resolvers += Resolver.url("Objectify Play Repository", url("http://schaloner.github.com/releases/"))(Resolver.ivyStylePatterns),
    resolvers += Resolver.url("Scala Tools", url("http://scala-tools.org/repo-releases/"))(Resolver.ivyStylePatterns)
  )
}
| playframework/modules.playframework.org | project/Build.scala | Scala | apache-2.0 | 692 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.mockito.Mockito._
import uk.gov.hmrc.ct.accounts.{MockAbridgedAccountsRetriever, AccountsMoneyValidationFixture}
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box.CtValidation
/** Validation tests for box AC125 (tangible assets note) on abridged FRS 102 accounts. */
class AC125AbridgedSpec extends AccountsMoneyValidationFixture[Frs102AccountsBoxRetriever] with MockAbridgedAccountsRetriever {

  // Baseline mocks: AC44 present on the balance sheet, note fields cleared,
  // then AC125 populated so the shared money-validation tests can run.
  override def setUpMocks() = {
    when(boxRetriever.ac44()).thenReturn(AC44(Some(99)))
    clearMockTangibleAssetsFields()
    when(boxRetriever.ac125()).thenReturn(AC125(Some(99)))
    super.setUpMocks()
  }

  // Stubs every field of the tangible-assets note as empty.
  private def clearMockTangibleAssetsFields() = {
    when(boxRetriever.ac124()).thenReturn(AC124(None))
    when(boxRetriever.ac125()).thenReturn(AC125(None))
    when(boxRetriever.ac126()).thenReturn(AC126(None))
    when(boxRetriever.ac212()).thenReturn(AC212(None))
    when(boxRetriever.ac213()).thenReturn(AC213(None))
    when(boxRetriever.ac128()).thenReturn(AC128(None))
    when(boxRetriever.ac219()).thenReturn(AC219(None))
    when(boxRetriever.ac130()).thenReturn(AC130(None))
    when(boxRetriever.ac214()).thenReturn(AC214(None))
    when(boxRetriever.ac5133()).thenReturn(AC5133(None))
  }

  // Shared fixture tests: AC125 is a money box with a minimum value of 0.
  testAccountsMoneyValidationWithMin("AC125", 0, AC125.apply)

  "AC125" should {
    // NOTE(review): description says "intangible" but the message under test refers
    // to the tangible assets note — likely a copy/paste slip in the test name.
    "fail validation when at least one intangible assets field is not populated" in {
      when(boxRetriever.ac125()).thenReturn(AC125(None))
      AC125(None).validate(boxRetriever) shouldBe Set(CtValidation(None, "error.tangible.assets.note.one.box.required"))
    }
    "fail validation when note cannot be populated" in {
      when(boxRetriever.ac44()).thenReturn(AC44(None))
      when(boxRetriever.ac45()).thenReturn(AC45(None))
      when(boxRetriever.ac128()).thenReturn(AC128(Some(123)))
      AC125(Some(10)).validate(boxRetriever) shouldBe Set(CtValidation(None, "error.balanceSheet.tangibleAssetsNote.cannot.exist"))
    }
    "pass validation if no fields populated and AC44 not populated" in {
      clearMockTangibleAssetsFields()
      when(boxRetriever.ac44()).thenReturn(AC44(None))
      AC125(None).validate(boxRetriever) shouldBe Set()
    }
    // Each field of the note, populated alone, should be enough for AC125 to pass.
    "pass validation if one field populated" in {
      when(boxRetriever.ac44()).thenReturn(AC44(Some(1)))
      when(boxRetriever.ac125()).thenReturn(AC125(None))
      when(boxRetriever.ac124()).thenReturn(AC124(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac125()).thenReturn(AC125(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac126()).thenReturn(AC126(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac212()).thenReturn(AC212(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac213()).thenReturn(AC213(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac128()).thenReturn(AC128(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac219()).thenReturn(AC219(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac130()).thenReturn(AC130(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac214()).thenReturn(AC214(Some(99)))
      AC125(None).validate(boxRetriever) shouldBe Set()

      clearMockTangibleAssetsFields()
      when(boxRetriever.ac5133()).thenReturn(AC5133(Some("hello")))
      AC125(None).validate(boxRetriever) shouldBe Set()
    }
  }
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC125AbridgedSpec.scala | Scala | apache-2.0 | 4,495 |
/*
* # Trove
*
* This file is part of Trove - A FREE desktop budgeting application that
* helps you track your finances, FREES you from complex budgeting, and
* enables you to build your TROVE of savings!
*
* Copyright © 2016-2021 Eric John Fredericks.
*
* Trove is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Trove is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Trove. If not, see <http://www.gnu.org/licenses/>.
*/
package trove.core.infrastructure.persist
import grizzled.slf4j.Logging
import slick.jdbc.SQLiteProfile.backend._
import trove.core.Project
import trove.core.accounts.AccountsServiceImpl
import trove.core.infrastructure.persist.lock.ProjectLock
/** Concrete [[Project]]: a named project backed by a database handle and guarded by
  * an on-disk [[ProjectLock]].
  */
private[persist] class ProjectImpl(
    val name: String,
    val lock: ProjectLock,
    val db: DatabaseDef)
  extends Project with Logging {

  override def toString: String = s"Project($name)"

  val accountsService = new AccountsServiceImpl

  /** Shuts the project down: closes the database first, then releases the project
    * lock (in that order, matching the code below).
    */
  def close(): Unit = {
    db.close()
    logger.debug(s"Database for project $name closed")
    lock.release()
    logger.debug(s"Lock for project $name released")
    logger.info(s"Closed project $name")
  }
}
| emanchgo/trove | src/main/scala/trove/core/infrastructure/persist/ProjectImpl.scala | Scala | gpl-3.0 | 1,610 |
package com.sksamuel.elastic4s.requests.searches.queries
import com.sksamuel.elastic4s.requests.script.ScriptBuilderFn
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
/** Serializes a [[ScriptQuery]] into its Elasticsearch JSON request body. */
object ScriptQueryBodyFn {

  /** Builds the `{"script": {"script": ..., "boost": ..., "_name": ...}}` fragment. */
  def apply(q: ScriptQuery): XContentBuilder = {
    val builder = XContentFactory.jsonBuilder()
    builder.startObject("script")
    // Inner script object is rendered by the shared script serializer.
    builder.rawField("script", ScriptBuilderFn(q.script))
    // Optional attributes are only emitted when present.
    q.boost.foreach(builder.field("boost", _))
    q.queryName.foreach(builder.field("_name", _))
    builder.endObject()
    builder.endObject()
    builder
  }
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/queries/ScriptQueryBodyFn.scala | Scala | apache-2.0 | 572 |
package controllers.save_for_later
import models.domain.Claim
import models.view.{CachedClaim, Navigable}
import play.api.Play._
import play.api.i18n._
import play.api.mvc.Controller
import services.EmailServices
import utils.helpers.CarersCrypto
import app.ConfigProperties._
import scala.language.reflectiveCalls
import play.api.mvc._
/** Controller for the "save for later" flow: persists the in-progress claim, emails
  * the claimant, and shows the success page with a resume link.
  */
object GSaveForLater extends Controller with CachedClaim with Navigable with I18nSupport {

  override val messagesApi: MessagesApi = current.injector.instanceOf[MMessages]

  // Success page for a saved claim, or the switched-off page when the feature flag is disabled.
  def present(resumePath: String) = claimingWithCheck { implicit claim => implicit request => implicit request2lang =>
    getBooleanProperty("saveForLaterSaveEnabled") match {
      case false => BadRequest(views.html.common.switchedOff("sfl-save", request2lang))
      case true => Ok(views.html.save_for_later.saveClaimSuccess(resumePath, request2lang))
    }
  }

  // Form submission entry point; same feature-flag gate as `present`.
  def submit = claimingWithCheck { implicit claim => implicit request => implicit request2lang =>
    getBooleanProperty("saveForLaterSaveEnabled") match {
      case false => BadRequest(views.html.common.switchedOff("sfl-save", request2lang))
      case true => processSaveForLater(request.body.asFormUrlEncoded.get, claim, request2lang, request)
    }
  }

  // Updates the claim from the submitted form, caches it with its resume path,
  // sends the save-for-later email and redirects to the success page.
  def processSaveForLater(parameters: Map[String, Seq[String]], claim: Claim, lang: Lang, request: Request[AnyContent]) = {
    val updatedClaim = claim.update(createSaveForLaterMap(parameters))
    val resumePath=claim.navigation.saveForLaterRoute(iterationResumePath(parameters)).toString
    saveForLaterInCache(updatedClaim, resumePath)
    EmailServices.sendSaveForLaterEmail(claim, request)
    updatedClaim -> Redirect(controllers.save_for_later.routes.GSaveForLater.present(resumePath))
  }

  // Resolves the in-iteration resume path (break vs employment iteration) from which
  // iteration-specific fields are present in the submitted form; "" when neither.
  def iterationResumePath(parameters: Map[String, Seq[String]]):String={
    (parameters.contains("iterationID"), parameters.contains("hasBreakEnded.date.year"), parameters.contains("employerName")) match{
      case( true, true, false ) => "/breaks/break/"+ parameters.get("iterationID").head(0).toString()
      case( true, false, true ) => "/your-income/employment/job-details/"+ parameters.get("iterationID").head(0).toString()
      case(_,_,_) => ""
    }
  }

  // Decrypts AES-encrypted form field names (CSRF/meta fields are passed through)
  // and flattens multi-valued parameters into single strings.
  def createSaveForLaterMap(parameters: Map[String, Seq[String]]) = {
    parameters.map { case (k, v) => {
      k match {
        case "csrfToken" | "action" | "jsEnabled" => k
        case _ => CarersCrypto.decryptAES(k)
      }
    } -> v.mkString
    }
  }
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/app/controllers/save_for_later/GSaveForLater.scala | Scala | mit | 2,470 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api.TableException
import org.apache.flink.table.api.scala._
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
/** Validation tests for inline table definitions. */
class InlineTableValidationTest extends TableTestBase {

  /** Registering a table whose field list contains duplicate names must be rejected. */
  @Test
  // Explicit result type instead of deprecated procedure syntax (`def f() { }`).
  def testFieldNamesDuplicate(): Unit = {
    thrown.expect(classOf[TableException])
    thrown.expectMessage("Field names must be unique.\\n" +
      "List of duplicate fields: [a].\\n" +
      "List of all fields: [a, a, b].")

    val util = batchTestUtil()
    // 'a appears twice: registration must throw the TableException expected above.
    util.addTable[(Int, Int, String)]("MyTable", 'a, 'a, 'b)
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/InlineTableValidationTest.scala | Scala | apache-2.0 | 1,437 |
package com.harrys.hyppo.worker.proto
import com.harrys.hyppo.executor.proto.com.ValidateIntegrationCommand
import com.harrys.hyppo.executor.proto.res.ValidateIntegrationResult
import com.harrys.hyppo.worker.{ProcessedDataStub, RawDataStub, TestObjects}
import org.apache.avro.Schema
import scala.concurrent.duration._
/**
* Created by jpetty on 7/23/15.
*/
/** End-to-end round trip of the executor's ValidateIntegration command. */
class ValidateIntegrationTest extends ExecutorCommandTest {

  // Integration implementation exercised by the executor under test.
  override def integrationClass = classOf[RawDataStub]

  "The validate integration test" must {
    // Command is issued once at spec construction; its result is shared by the
    // examples below, which therefore run in declaration order.
    val validateResult = commander.executeCommand(new ValidateIntegrationCommand(TestObjects.testIngestionSource())).result

    "produce a result of the correct type" in {
      validateResult shouldBe a [ValidateIntegrationResult]
    }

    val result = validateResult.asInstanceOf[ValidateIntegrationResult]

    "wait for the result object" in {
      // The returned Avro schema must match the processed-data stub's record schema.
      result.getSchema shouldBe a [Schema]
      result.getSchema shouldEqual new ProcessedDataStub().avroType().recordSchema()
    }

    "then exit cleanly" in {
      // Executor process must exit with status 0 within the timeout.
      commander.sendExitCommandAndWait(Duration(100, MILLISECONDS)) shouldEqual 0
    }
  }
}
| harrystech/hyppo-worker | worker/src/test/scala/com/harrys/hyppo/worker/proto/ValidateIntegrationTest.scala | Scala | mit | 1,129 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License,
*
* Contributors:
* Hao Jiang - initial API and implementation
*
*/
package edu.uchicago.cs.encsel.ptnmining.rule
/**
* Created by harper on 3/31/17.
*/
// NOTE(review): empty placeholder — presumably a pattern-mining rule for frequent
// similar words (per package name); no implementation yet.
class FreqSimilarWordRule {
}
| harperjiang/enc-selector | src/main/scala/edu/uchicago/cs/encsel/ptnmining/rule/FreqSimilarWordRule.scala | Scala | apache-2.0 | 997 |
/*
* Copyright 2016 Alexey Kardapoltsev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.kardapoltsev.astparser.parser.http
import com.github.kardapoltsev.astparser.parser.ParseException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
/** Test harness exposing a `parse` overload that runs a parser as a full phrase. */
trait HttpParserEnv extends HttpParser {
  // Wraps `p` in `phrase` so the entire input must be consumed; parse failures
  // surface as ParseException (source label "test_source").
  protected def parse[T](p: Parser[T], input: CharSequence): T = {
    parse(phrase(p), input, "test_source")
  }
}
/** Behavioural tests for [[HttpParser]]: HTTP methods, paths, and full requests. */
class HttpParserSpec extends AnyWordSpec with Matchers {
  "HttpParser" should {
    "parse method" in new HttpParserEnv {
      parse(method, "GET") shouldBe Get()
      parse(method, "POST") shouldBe Post()
      parse(method, "PATCH") shouldBe Patch()
      parse(method, "DELETE") shouldBe Delete()
      parse(method, "PUT") shouldBe Put()
    }
    "not parse invalid method" in new HttpParserEnv {
      a[ParseException] shouldBe thrownBy {
        parse(method, "GE")
      }
    }
    "parse path segments" in new HttpParserEnv {
      // A trailing slash is accepted and does not add a segment.
      parse(path, "/user/") shouldBe Seq(PathSegment("user"))
      parse(path, "/user") shouldBe Seq(PathSegment("user"))
      parse(path, "/users/me") shouldBe Seq(PathSegment("users"), PathSegment("me"))
    }
    "handle symbols in path segments" in new HttpParserEnv {
      parse(path, "/p-s/") shouldBe Seq(PathSegment("p-s"))
      parse(path, "/p_s/") shouldBe Seq(PathSegment("p_s"))
    }
    "parse path parameters" in new HttpParserEnv {
      // `{name}` segments are parsed as PathParam nodes.
      parse(path, "/users/{userId}") shouldBe Seq(PathSegment("users"), PathParam("userId"))
      parse(path, "/{param1}/{param2}/") shouldBe Seq(PathParam("param1"), PathParam("param2"))
    }
    "not parse invalid path" in new HttpParserEnv {
      a[ParseException] shouldBe thrownBy {
        parse(path, "/users/{userId}bug")
      }
      a[ParseException] shouldBe thrownBy {
        parse(path, "/users//")
      }
      a[ParseException] shouldBe thrownBy {
        parse(path, "/users{}/")
      }
    }
    "parse http request" in new HttpParserEnv {
      override protected val enableProfiling: Boolean = true
      parse(request, "GET /api/users/{userId}?{param1}&{param2}") shouldBe
        HttpRequest(
          Get(),
          Url(
            Seq(PathSegment("api"), PathSegment("users"), PathParam("userId")),
            Seq(QueryParam("param1"), QueryParam("param2"))
          ),
          cached = false
        )
    }
    "parse cached http request" in new HttpParserEnv {
      override protected val enableProfiling: Boolean = true
      // The CACHED prefix sets the `cached` flag on the parsed request.
      parse(request, "CACHED GET /api/users") shouldBe
        HttpRequest(
          Get(),
          Url(Seq(PathSegment("api"), PathSegment("users")), Nil),
          cached = true
        )
    }
    "not parse invalid http request" in new HttpParserEnv {
      a[ParseException] shouldBe thrownBy {
        parse("GET /api/users{bug}", "test_src")
      }
      a[ParseException] shouldBe thrownBy {
        parse("GET /api/users/{bug", "test_src")
      }
    }
  }
}
| kardapoltsev/astparser | src/test/scala/com/github/kardapoltsev/astparser/parser/http/HttpParserSpec.scala | Scala | apache-2.0 | 3,512 |
package slick.test.lifted
import org.junit.Test
import org.junit.Assert._
/** Test case for the SQL schema support in table definitions */
@deprecated("Using deprecated .simple API", "3.0")
class SchemaSupportTest {

  /** Verifies that SELECT/UPDATE/DELETE/DDL statements all qualify the table with
    * its schema name.
    */
  // Explicit result type instead of deprecated procedure syntax (`def f { }`).
  @Test def testSchemaSupport(): Unit = {
    import slick.driver.H2Driver.simple._

    // Table mapped into the "myschema" schema.
    class T(tag: Tag) extends Table[Int](tag, Some("myschema"), "mytable") {
      def id = column[Int]("id")
      def * = id
    }
    val ts = TableQuery[T]

    val s1 = ts.filter(_.id < 5).selectStatement
    println(s1)
    assertTrue("select ... from uses schema name", s1 contains """from "myschema"."mytable" """)

    //val s2 = ts.insertStatement
    //println(s2)

    val s3 = ts.filter(_.id < 5).updateStatement
    println(s3)
    assertTrue("update uses schema name", s3 contains """update "myschema"."mytable" """)

    val s4 = ts.filter(_.id < 5).deleteStatement
    println(s4)
    assertTrue("delete uses schema name", s4 contains """delete from "myschema"."mytable" """)

    val s5 = ts.schema.createStatements
    s5.foreach(println)
    s5.foreach(s => assertTrue("DDL (create) uses schema name", s contains """ "myschema"."mytable" """))

    val s6 = ts.schema.dropStatements
    s6.foreach(println)
    s6.foreach(s => assertTrue("DDL (drop) uses schema name", s contains """ "myschema"."mytable" """))
  }
}
| easel/slick | slick-testkit/src/test/scala/slick/test/lifted/SchemaSupportTest.scala | Scala | bsd-2-clause | 1,331 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.ml
// $example on$
import org.apache.spark.ml.feature.{HashingTF, IDF, Tokenizer}
// $example off$
import org.apache.spark.sql.SparkSession
/** Spark ML example: compute TF-IDF features for a small set of sentences.
  * The `$example on$`/`$example off$` markers delimit the snippet extracted
  * into the Spark documentation — keep them intact.
  */
object TfIdfExample {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("TfIdfExample")
      .getOrCreate()

    // $example on$
    val sentenceData = spark.createDataFrame(Seq(
      (0.0, "Hi I heard about Spark"),
      (0.0, "I wish Java could use case classes"),
      (1.0, "Logistic regression models are neat")
    )).toDF("label", "sentence")

    // Split each sentence into words.
    val tokenizer = new Tokenizer().setInputCol("sentence").setOutputCol("words")
    val wordsData = tokenizer.transform(sentenceData)

    // Hash words into fixed-size (20 buckets) term-frequency vectors.
    val hashingTF = new HashingTF()
      .setInputCol("words").setOutputCol("rawFeatures").setNumFeatures(20)

    val featurizedData = hashingTF.transform(wordsData)
    // alternatively, CountVectorizer can also be used to get term frequency vectors

    // Fit IDF on the corpus, then rescale the raw term frequencies.
    val idf = new IDF().setInputCol("rawFeatures").setOutputCol("features")
    val idfModel = idf.fit(featurizedData)

    val rescaledData = idfModel.transform(featurizedData)
    rescaledData.select("label", "features").show()
    // $example off$

    spark.stop()
  }
}
| lhfei/spark-in-action | spark-3.x/src/main/scala/org/apache/spark/examples/ml/TfIdfExample.scala | Scala | apache-2.0 | 2,102 |
package models.hkg
/** Wrapper around a list of [[Topic]] values; defaults to an empty list. */
case class Topics(topicList: List[Topic] = List())

/** Companion object — currently holds no extra members. */
object Topics {
}
/*
* Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich.common
package enrichments
// Java
import java.lang.{Integer => JInteger}
// Scalaz
import scalaz._
import Scalaz._
// UserAgentUtils
import eu.bitwalker.useragentutils._
/**
* Contains enrichments related to the
* client - where the client is the
* software which is using the SnowPlow
* tracker.
*
* Enrichments relate to the useragent,
* browser resolution, etc.
*/
object ClientEnrichments {

  import scala.util.control.NonFatal

  /**
   * The Tracker Protocol's pattern
   * for a screen resolution - for
   * details see:
   *
   * https://github.com/snowplow/snowplow/wiki/snowplow-tracker-protocol#wiki-browserandos
   */
  private val ResRegex = """(\\d+)x(\\d+)""".r

  /**
   * Case class to wrap everything we
   * can extract from the useragent
   * using UserAgentUtils.
   *
   * TODO: update this definition when
   * we swap out UserAgentUtils for
   * ua-parser
   */
  case class ClientAttributes(
    // Browser
    browserName: String,
    browserFamily: String,
    browserVersion: Option[String],
    browserType: String,
    browserRenderEngine: String,
    // OS the browser is running on
    osName: String,
    osFamily: String,
    osManufacturer: String,
    // Hardware the OS is running on
    deviceType: String,
    deviceIsMobile: Boolean)

  /**
   * Extracts view dimensions (e.g. screen resolution,
   * browser/app viewport) stored as per the Tracker
   * Protocol:
   *
   * https://github.com/snowplow/snowplow/wiki/snowplow-tracker-protocol#wiki-browserandos
   *
   * @param field The name of the field
   *        holding the screen dimensions
   * @param res The packed string
   *        holding the screen dimensions
   * @return the ResolutionTuple or an
   *         error message, boxed in a
   *         Scalaz Validation
   */
  val extractViewDimensions: (String, String) => Validation[String, ViewDimensionsTuple] = (field, res) =>
    res match {
      case ResRegex(width, height) =>
        try {
          (width.toInt: JInteger, height.toInt: JInteger).success
        } catch {
          // The regex guarantees digit-only input, so the only possible failure here
          // is an out-of-range number; catch it specifically rather than swallowing
          // every Throwable.
          case _: NumberFormatException => "Field [%s]: view dimensions [%s] exceed Integer's max range".format(field, res).fail
        }
      case _ => "Field [%s]: [%s] does not contain valid view dimensions".format(field, res).fail
    }

  /**
   * Extracts the client attributes
   * from a useragent string, using
   * UserAgentUtils.
   *
   * TODO: rewrite this when we swap
   * out UserAgentUtils for ua-parser
   *
   * @param useragent The useragent
   *        String to extract from.
   *        Should be encoded (i.e.
   *        not previously decoded).
   * @return the ClientAttributes or
   *         the message of the
   *         exception, boxed in a
   *         Scalaz Validation
   */
  def extractClientAttributes(useragent: String): Validation[String, ClientAttributes] =
    try {
      val ua = UserAgent.parseUserAgentString(useragent)
      val b = ua.getBrowser
      val v = Option(ua.getBrowserVersion)
      val os = ua.getOperatingSystem

      ClientAttributes(
        browserName = b.getName,
        browserFamily = b.getGroup.getName,
        browserVersion = v map { _.getVersion },
        browserType = b.getBrowserType.getName,
        browserRenderEngine = b.getRenderingEngine.toString,
        osName = os.getName,
        osFamily = os.getGroup.getName,
        osManufacturer = os.getManufacturer.getName,
        deviceType = os.getDeviceType.getName,
        deviceIsMobile = os.isMobileDevice).success
    } catch {
      // NonFatal keeps fatal VM errors (OutOfMemoryError, etc.) propagating instead
      // of silently converting them into a Failure.
      case NonFatal(e) => "Exception parsing useragent [%s]: [%s]".format(useragent, e.getMessage).fail
    }
}
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala
import scala.collection.JavaConverters._
import org.bson.BsonDocument
import org.bson.codecs.BsonValueCodecProvider
import org.bson.codecs.configuration.CodecRegistries.fromProviders
import com.mongodb.async.client.{ListCollectionsIterable, MongoDatabase => JMongoDatabase}
import org.mongodb.scala.bson.conversions.Bson
import org.mongodb.scala.model._
import org.scalamock.scalatest.proxy.MockFactory
import org.scalatest.{FlatSpec, Matchers}
/**
 * Verifies that the Scala `MongoDatabase` facade delegates every operation to
 * the wrapped Java async `MongoDatabase` with the expected arguments, using
 * ScalaMock proxy expectations.
 */
class MongoDatabaseSpec extends FlatSpec with Matchers with MockFactory {

  // Mocked Java driver database that the Scala wrapper delegates to.
  val wrapped = mock[JMongoDatabase]
  val clientSession = mock[ClientSession]
  val mongoDatabase = MongoDatabase(wrapped)
  val command = Document()
  val readPreference = ReadPreference.secondary()

  // Minimal subscriber used to start the underlying async operation;
  // requesting Long.MaxValue triggers execution of the deferred call.
  def observer[T] = new Observer[T]() {
    override def onError(throwable: Throwable): Unit = {}
    override def onSubscribe(subscription: Subscription): Unit = subscription.request(Long.MaxValue)
    override def onComplete(): Unit = {}
    override def onNext(doc: T): Unit = {}
  }

  // API-parity check: every method on the wrapped Java class must have a
  // counterpart on the Scala wrapper (allowing for the dropped `get` prefix).
  "MongoDatabase" should "have the same methods as the wrapped MongoDatabase" in {
    val wrapped = classOf[JMongoDatabase].getMethods.map(_.getName)
    val local = classOf[MongoDatabase].getMethods.map(_.getName)
    wrapped.foreach((name: String) => {
      val cleanedName = name.stripPrefix("get")
      assert(local.contains(name) | local.contains(cleanedName.head.toLower + cleanedName.tail))
    })
  }

  it should "return the underlying getCollection[T]" in {
    wrapped.expects(Symbol("getCollection"))("collectionName", classOf[Document]).once()
    wrapped.expects(Symbol("getCollection"))("collectionName", classOf[BsonDocument]).once()

    mongoDatabase.getCollection("collectionName")
    mongoDatabase.getCollection[BsonDocument]("collectionName")
  }

  it should "return the underlying getName" in {
    wrapped.expects(Symbol("getName"))().once()

    mongoDatabase.name
  }

  it should "return the underlying getCodecRegistry" in {
    wrapped.expects(Symbol("getCodecRegistry"))().once()

    mongoDatabase.codecRegistry
  }

  it should "return the underlying getReadPreference" in {
    wrapped.expects(Symbol("getReadPreference"))().once()

    mongoDatabase.readPreference
  }

  it should "return the underlying getWriteConcern" in {
    wrapped.expects(Symbol("getWriteConcern"))().once()

    mongoDatabase.writeConcern
  }

  it should "return the underlying getReadConcern" in {
    wrapped.expects(Symbol("getReadConcern"))().once()

    mongoDatabase.readConcern
  }

  it should "return the underlying withCodecRegistry" in {
    val codecRegistry = fromProviders(new BsonValueCodecProvider())
    wrapped.expects(Symbol("withCodecRegistry"))(codecRegistry).once()

    mongoDatabase.withCodecRegistry(codecRegistry)
  }

  it should "return the underlying withReadPreference" in {
    wrapped.expects(Symbol("withReadPreference"))(readPreference).once()

    mongoDatabase.withReadPreference(readPreference)
  }

  it should "return the underlying withWriteConcern" in {
    val writeConcern = WriteConcern.MAJORITY
    wrapped.expects(Symbol("withWriteConcern"))(writeConcern).once()

    mongoDatabase.withWriteConcern(writeConcern)
  }

  it should "return the underlying withReadConcern" in {
    val readConcern = ReadConcern.MAJORITY
    wrapped.expects(Symbol("withReadConcern"))(readConcern).once()

    mongoDatabase.withReadConcern(readConcern)
  }

  // Both the untyped (Document) and typed (BsonDocument) overloads are
  // exercised, with and without a client session.
  it should "call the underlying runCommand[T] when writing" in {
    wrapped.expects(Symbol("runCommand"))(command, classOf[Document], *).once()
    wrapped.expects(Symbol("runCommand"))(command, classOf[BsonDocument], *).once()
    wrapped.expects(Symbol("runCommand"))(clientSession, command, classOf[Document], *).once()
    wrapped.expects(Symbol("runCommand"))(clientSession, command, classOf[BsonDocument], *).once()

    mongoDatabase.runCommand(command).subscribe(observer[Document])
    mongoDatabase.runCommand[BsonDocument](command).subscribe(observer[BsonDocument])
    mongoDatabase.runCommand(clientSession, command).subscribe(observer[Document])
    mongoDatabase.runCommand[BsonDocument](clientSession, command).subscribe(observer[BsonDocument])
  }

  it should "call the underlying runCommand[T] when reading" in {
    wrapped.expects(Symbol("runCommand"))(command, readPreference, classOf[Document], *).once()
    wrapped.expects(Symbol("runCommand"))(command, readPreference, classOf[BsonDocument], *).once()
    wrapped.expects(Symbol("runCommand"))(clientSession, command, readPreference, classOf[Document], *).once()
    wrapped.expects(Symbol("runCommand"))(clientSession, command, readPreference, classOf[BsonDocument], *).once()

    mongoDatabase.runCommand(command, readPreference).subscribe(observer[Document])
    mongoDatabase.runCommand[BsonDocument](command, readPreference).subscribe(observer[BsonDocument])
    mongoDatabase.runCommand(clientSession, command, readPreference).subscribe(observer[Document])
    mongoDatabase.runCommand[BsonDocument](clientSession, command, readPreference).subscribe(observer[BsonDocument])
  }

  it should "call the underlying drop()" in {
    wrapped.expects(Symbol("drop"))(*).once()
    wrapped.expects(Symbol("drop"))(clientSession, *).once()

    mongoDatabase.drop().subscribe(observer[Completed])
    mongoDatabase.drop(clientSession).subscribe(observer[Completed])
  }

  it should "call the underlying listCollectionNames()" in {
    wrapped.expects(Symbol("listCollectionNames"))().once()
    wrapped.expects(Symbol("listCollectionNames"))(clientSession).once()

    mongoDatabase.listCollectionNames()
    mongoDatabase.listCollectionNames(clientSession)
  }

  // The Java driver returns iterables here, so the mocks must supply stubs.
  it should "call the underlying listCollections()" in {
    wrapped.expects(Symbol("listCollections"))(*).returns(stub[ListCollectionsIterable[Document]]).once()
    wrapped.expects(Symbol("listCollections"))(classOf[BsonDocument]).returns(stub[ListCollectionsIterable[BsonDocument]]).once()
    wrapped.expects(Symbol("listCollections"))(clientSession, *).returns(stub[ListCollectionsIterable[Document]]).once()
    wrapped.expects(Symbol("listCollections"))(clientSession, classOf[BsonDocument]).returns(stub[ListCollectionsIterable[BsonDocument]]).once()

    mongoDatabase.listCollections().subscribe(observer[Document])
    mongoDatabase.listCollections[BsonDocument]().subscribe(observer[BsonDocument])
    mongoDatabase.listCollections(clientSession).subscribe(observer[Document])
    mongoDatabase.listCollections[BsonDocument](clientSession).subscribe(observer[BsonDocument])
  }

  it should "call the underlying createCollection()" in {
    // Exercise a fully-populated options object to ensure it is passed through.
    val options = CreateCollectionOptions().capped(true).validationOptions(
      ValidationOptions().validator(Document("""{level: {$gte: 10}}"""))
        .validationLevel(ValidationLevel.MODERATE)
        .validationAction(ValidationAction.WARN)
    ).indexOptionDefaults(IndexOptionDefaults().storageEngine(Document("""{storageEngine: { mmapv1: {}}}""")))
      .storageEngineOptions(Document("""{ wiredTiger: {}}"""))
    wrapped.expects(Symbol("createCollection"))("collectionName", *).once()
    wrapped.expects(Symbol("createCollection"))("collectionName", options, *).once()
    wrapped.expects(Symbol("createCollection"))(clientSession, "collectionName", *).once()
    wrapped.expects(Symbol("createCollection"))(clientSession, "collectionName", options, *).once()

    mongoDatabase.createCollection("collectionName").subscribe(observer[Completed])
    mongoDatabase.createCollection("collectionName", options).subscribe(observer[Completed])
    mongoDatabase.createCollection(clientSession, "collectionName").subscribe(observer[Completed])
    mongoDatabase.createCollection(clientSession, "collectionName", options).subscribe(observer[Completed])
  }

  it should "call the underlying createView()" in {
    val options = CreateViewOptions().collation(Collation.builder().locale("en").build())
    val pipeline = List.empty[Bson]
    // Scala Lists must be converted to java.util.List before delegation.
    wrapped.expects(Symbol("createView"))("viewName", "collectionName", pipeline.asJava, *).once()
    wrapped.expects(Symbol("createView"))("viewName", "collectionName", pipeline.asJava, options, *).once()
    wrapped.expects(Symbol("createView"))(clientSession, "viewName", "collectionName", pipeline.asJava, *).once()
    wrapped.expects(Symbol("createView"))(clientSession, "viewName", "collectionName", pipeline.asJava, options, *).once()

    mongoDatabase.createView("viewName", "collectionName", pipeline).subscribe(observer[Completed])
    mongoDatabase.createView("viewName", "collectionName", pipeline, options).subscribe(observer[Completed])
    mongoDatabase.createView(clientSession, "viewName", "collectionName", pipeline).subscribe(observer[Completed])
    mongoDatabase.createView(clientSession, "viewName", "collectionName", pipeline, options).subscribe(observer[Completed])
  }

  it should "call the underlying watch" in {
    val pipeline = List(Document("$match" -> 1))

    wrapped.expects(Symbol("watch"))(classOf[Document]).once()
    wrapped.expects(Symbol("watch"))(pipeline.asJava, classOf[Document]).once()
    wrapped.expects(Symbol("watch"))(pipeline.asJava, classOf[BsonDocument]).once()
    wrapped.expects(Symbol("watch"))(clientSession, pipeline.asJava, classOf[Document]).once()
    wrapped.expects(Symbol("watch"))(clientSession, pipeline.asJava, classOf[BsonDocument]).once()

    mongoDatabase.watch() shouldBe a[ChangeStreamObservable[_]]
    mongoDatabase.watch(pipeline) shouldBe a[ChangeStreamObservable[_]]
    mongoDatabase.watch[BsonDocument](pipeline) shouldBe a[ChangeStreamObservable[_]]
    mongoDatabase.watch(clientSession, pipeline) shouldBe a[ChangeStreamObservable[_]]
    mongoDatabase.watch[BsonDocument](clientSession, pipeline) shouldBe a[ChangeStreamObservable[_]]
  }

  it should "call the underlying aggregate" in {
    val pipeline = List(Document("$match" -> 1))

    wrapped.expects(Symbol("aggregate"))(pipeline.asJava, classOf[Document]).once()
    wrapped.expects(Symbol("aggregate"))(pipeline.asJava, classOf[BsonDocument]).once()
    wrapped.expects(Symbol("aggregate"))(clientSession, pipeline.asJava, classOf[Document]).once()
    wrapped.expects(Symbol("aggregate"))(clientSession, pipeline.asJava, classOf[BsonDocument]).once()

    mongoDatabase.aggregate(pipeline) shouldBe a[AggregateObservable[_]]
    mongoDatabase.aggregate[BsonDocument](pipeline) shouldBe a[AggregateObservable[_]]
    mongoDatabase.aggregate(clientSession, pipeline) shouldBe a[AggregateObservable[_]]
    mongoDatabase.aggregate[BsonDocument](clientSession, pipeline) shouldBe a[AggregateObservable[_]]
  }
}
| rozza/mongo-scala-driver | driver/src/test/scala/org/mongodb/scala/MongoDatabaseSpec.scala | Scala | apache-2.0 | 11,239 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.optim
import org.apache.spark.ml.feature.Instance
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.util.OptionalInstrumentation
import org.apache.spark.rdd.RDD
/**
 * Model produced by a [[WeightedLeastSquares]] fit.
 *
 * @param coefficients model coefficients
 * @param intercept model intercept
 * @param diagInvAtWA diagonal of matrix (A^T * W * A)^-1
 * @param objectiveHistory objective function (scaled loss + regularization) at each iteration.
 */
private[ml] class WeightedLeastSquaresModel(
    val coefficients: DenseVector,
    val intercept: Double,
    val diagInvAtWA: DenseVector,
    val objectiveHistory: Array[Double]) extends Serializable {

  /** Predicts the label for `features` as the dot product with the
   *  coefficients plus the intercept. */
  def predict(features: Vector): Double = {
    val margin = BLAS.dot(coefficients, features)
    margin + intercept
  }
}
/**
* Weighted least squares solver via normal equation.
* Given weighted observations (w,,i,,, a,,i,,, b,,i,,), we use the following weighted least squares
* formulation:
*
* min,,x,z,, 1/2 sum,,i,, w,,i,, (a,,i,,^T^ x + z - b,,i,,)^2^ / sum,,i,, w,,i,,
* + lambda / delta (1/2 (1 - alpha) sum,,j,, (sigma,,j,, x,,j,,)^2^
* + alpha sum,,j,, abs(sigma,,j,, x,,j,,)),
*
* where lambda is the regularization parameter, alpha is the ElasticNet mixing parameter,
* and delta and sigma,,j,, are controlled by [[standardizeLabel]] and [[standardizeFeatures]],
* respectively.
*
* Set [[regParam]] to 0.0 and turn off both [[standardizeFeatures]] and [[standardizeLabel]] to
* match R's `lm`.
* Turn on [[standardizeLabel]] to match R's `glmnet`.
*
* @note The coefficients and intercept are always trained in the scaled space, but are returned
* on the original scale. [[standardizeFeatures]] and [[standardizeLabel]] can be used to
* control whether regularization is applied in the original space or the scaled space.
* @param fitIntercept whether to fit intercept. If false, z is 0.0.
* @param regParam Regularization parameter (lambda).
* @param elasticNetParam the ElasticNet mixing parameter (alpha).
* @param standardizeFeatures whether to standardize features. If true, sigma,,j,, is the
* population standard deviation of the j-th column of A. Otherwise,
* sigma,,j,, is 1.0.
* @param standardizeLabel whether to standardize label. If true, delta is the population standard
* deviation of the label column b. Otherwise, delta is 1.0.
* @param solverType the type of solver to use for optimization.
* @param maxIter maximum number of iterations. Only for QuasiNewton solverType.
* @param tol the convergence tolerance of the iterations. Only for QuasiNewton solverType.
*/
private[ml] class WeightedLeastSquares(
    val fitIntercept: Boolean,
    val regParam: Double,
    val elasticNetParam: Double,
    val standardizeFeatures: Boolean,
    val standardizeLabel: Boolean,
    val solverType: WeightedLeastSquares.Solver = WeightedLeastSquares.Auto,
    val maxIter: Int = 100,
    val tol: Double = 1e-6
  ) extends Serializable {
  import WeightedLeastSquares._

  // Validate constructor parameters eagerly so misconfiguration fails fast.
  require(regParam >= 0.0, s"regParam cannot be negative: $regParam")
  require(elasticNetParam >= 0.0 && elasticNetParam <= 1.0,
    s"elasticNetParam must be in [0, 1]: $elasticNetParam")
  require(maxIter > 0, s"maxIter must be a positive integer: $maxIter")
  require(tol >= 0.0, s"tol must be >= 0, but was set to $tol")

  /**
   * Creates a [[WeightedLeastSquaresModel]] from an RDD of [[Instance]]s.
   *
   * Fitting proceeds in one pass of summary-statistic aggregation followed by
   * a local solve: statistics are rescaled into the standardized space,
   * regularization is folded into the normal-equation matrix, a solver is
   * chosen (Cholesky or Quasi-Newton), and finally the solution is mapped
   * back to the original feature/label scale.
   *
   * @param instances training instances (label, weight, features)
   * @param instr optional instrumentation for logging
   * @param depth tree-aggregation depth for the statistics pass
   */
  def fit(
      instances: RDD[Instance],
      instr: OptionalInstrumentation = OptionalInstrumentation.create(
        classOf[WeightedLeastSquares]),
      depth: Int = 2
    ): WeightedLeastSquaresModel = {
    if (regParam == 0.0) {
      instr.logWarning("regParam is zero, which might cause numerical instability and overfitting.")
    }

    // Single distributed pass collecting all weighted sufficient statistics.
    val summary = instances.treeAggregate(new Aggregator)(_.add(_), _.merge(_), depth)
    summary.validate()
    instr.logInfo(s"Number of instances: ${summary.count}.")
    val k = if (fitIntercept) summary.k + 1 else summary.k
    val numFeatures = summary.k
    val triK = summary.triK
    val wSum = summary.wSum
    val rawBStd = summary.bStd
    val rawBBar = summary.bBar
    // if b is constant (rawBStd is zero), then b cannot be scaled. In this case
    // setting bStd=abs(rawBBar) ensures that b is not scaled anymore in l-bfgs algorithm.
    val bStd = if (rawBStd == 0.0) math.abs(rawBBar) else rawBStd

    if (rawBStd == 0) {
      // Degenerate label cases: training can be short-circuited or must be rejected.
      if (fitIntercept || rawBBar == 0.0) {
        if (rawBBar == 0.0) {
          instr.logWarning(s"Mean and standard deviation of the label are zero, so the " +
            s"coefficients and the intercept will all be zero; as a result, training is not " +
            s"needed.")
        } else {
          instr.logWarning(s"The standard deviation of the label is zero, so the coefficients " +
            s"will be zeros and the intercept will be the mean of the label; as a result, " +
            s"training is not needed.")
        }
        val coefficients = new DenseVector(Array.ofDim(numFeatures))
        val intercept = rawBBar
        val diagInvAtWA = new DenseVector(Array(0D))
        return new WeightedLeastSquaresModel(coefficients, intercept, diagInvAtWA, Array(0D))
      } else {
        require(!(regParam > 0.0 && standardizeLabel), "The standard deviation of the label is " +
          "zero. Model cannot be regularized when labels are standardized.")
        instr.logWarning(s"The standard deviation of the label is zero. Consider setting " +
          s"fitIntercept=true.")
      }
    }

    // From here on, all statistics are rescaled into the standardized space
    // (divide by bStd and/or per-feature aStd). Note the rescaling below
    // mutates the aggregator's arrays in place.
    val bBar = summary.bBar / bStd
    val bbBar = summary.bbBar / (bStd * bStd)
    val aStd = summary.aStd
    val aStdValues = aStd.values

    val aBar = {
      val _aBar = summary.aBar
      val _aBarValues = _aBar.values
      var i = 0
      // scale aBar to standardized space in-place
      while (i < numFeatures) {
        if (aStdValues(i) == 0.0) {
          // constant feature: zero it out rather than dividing by zero
          _aBarValues(i) = 0.0
        } else {
          _aBarValues(i) /= aStdValues(i)
        }
        i += 1
      }
      _aBar
    }
    val aBarValues = aBar.values

    val abBar = {
      val _abBar = summary.abBar
      val _abBarValues = _abBar.values
      var i = 0
      // scale abBar to standardized space in-place
      while (i < numFeatures) {
        if (aStdValues(i) == 0.0) {
          _abBarValues(i) = 0.0
        } else {
          _abBarValues(i) /= (aStdValues(i) * bStd)
        }
        i += 1
      }
      _abBar
    }
    val abBarValues = abBar.values

    val aaBar = {
      val _aaBar = summary.aaBar
      val _aaBarValues = _aaBar.values
      var j = 0
      var p = 0
      // scale aaBar to standardized space in-place
      // (aaBar is stored as a packed upper-triangular matrix, column by column)
      while (j < numFeatures) {
        val aStdJ = aStdValues(j)
        var i = 0
        while (i <= j) {
          val aStdI = aStdValues(i)
          if (aStdJ == 0.0 || aStdI == 0.0) {
            _aaBarValues(p) = 0.0
          } else {
            _aaBarValues(p) /= (aStdI * aStdJ)
          }
          p += 1
          i += 1
        }
        j += 1
      }
      _aaBar
    }
    val aaBarValues = aaBar.values

    val effectiveRegParam = regParam / bStd
    val effectiveL1RegParam = elasticNetParam * effectiveRegParam
    val effectiveL2RegParam = (1.0 - elasticNetParam) * effectiveRegParam

    // add L2 regularization to diagonals
    // i walks the diagonal entries of the packed triangular matrix
    // (indices 0, 2, 5, 9, ...); j - 2 is the corresponding feature index.
    var i = 0
    var j = 2
    while (i < triK) {
      var lambda = effectiveL2RegParam
      if (!standardizeFeatures) {
        // undo feature standardization so regularization acts in original space
        val std = aStdValues(j - 2)
        if (std != 0.0) {
          lambda /= (std * std)
        } else {
          lambda = 0.0
        }
      }
      if (!standardizeLabel) {
        lambda *= bStd
      }
      aaBarValues(i) += lambda
      i += j
      j += 1
    }

    val aa = getAtA(aaBarValues, aBarValues)
    val ab = getAtB(abBarValues, bBar)

    // Select the solver: any L1 (elastic-net) component requires the iterative
    // Quasi-Newton solver; otherwise the closed-form Cholesky solver is used.
    val solver = if ((solverType == WeightedLeastSquares.Auto && elasticNetParam != 0.0 &&
      regParam != 0.0) || (solverType == WeightedLeastSquares.QuasiNewton)) {
      val effectiveL1RegFun: Option[(Int) => Double] = if (effectiveL1RegParam != 0.0) {
        Some((index: Int) => {
          if (fitIntercept && index == numFeatures) {
            // do not regularize the intercept term
            0.0
          } else {
            if (standardizeFeatures) {
              effectiveL1RegParam
            } else {
              if (aStdValues(index) != 0.0) effectiveL1RegParam / aStdValues(index) else 0.0
            }
          }
        })
      } else {
        None
      }
      new QuasiNewtonSolver(fitIntercept, maxIter, tol, effectiveL1RegFun)
    } else {
      new CholeskySolver
    }

    val solution = solver match {
      case cholesky: CholeskySolver =>
        try {
          cholesky.solve(bBar, bbBar, ab, aa, aBar)
        } catch {
          // if Auto solver is used and Cholesky fails due to singular AtA, then fall back to
          // Quasi-Newton solver.
          case _: SingularMatrixException if solverType == WeightedLeastSquares.Auto =>
            instr.logWarning("Cholesky solver failed due to singular covariance matrix. " +
              "Retrying with Quasi-Newton solver.")
            // ab and aa were modified in place, so reconstruct them
            val _aa = getAtA(aaBarValues, aBarValues)
            val _ab = getAtB(abBarValues, bBar)
            val newSolver = new QuasiNewtonSolver(fitIntercept, maxIter, tol, None)
            newSolver.solve(bBar, bbBar, _ab, _aa, aBar)
        }
      case qn: QuasiNewtonSolver =>
        qn.solve(bBar, bbBar, ab, aa, aBar)
    }

    // When fitting an intercept, it is stored as the last solution coefficient.
    val (coefficientArray, intercept) = if (fitIntercept) {
      (solution.coefficients.slice(0, solution.coefficients.length - 1),
        solution.coefficients.last * bStd)
    } else {
      (solution.coefficients, 0.0)
    }

    // convert the coefficients from the scaled space to the original space
    var q = 0
    val len = coefficientArray.length
    while (q < len) {
      coefficientArray(q) *= { if (aStdValues(q) != 0.0) bStd / aStdValues(q) else 0.0 }
      q += 1
    }

    // aaInv is a packed upper triangular matrix, here we get all elements on diagonal
    val diagInvAtWA = solution.aaInv.map { inv =>
      new DenseVector((1 to k).map { i =>
        val multiplier = if (i == k && fitIntercept) {
          1.0
        } else {
          aStdValues(i - 1) * aStdValues(i - 1)
        }
        inv(i + (i - 1) * i / 2 - 1) / (wSum * multiplier)
      }.toArray)
    }.getOrElse(new DenseVector(Array(0D)))

    new WeightedLeastSquaresModel(new DenseVector(coefficientArray), intercept, diagInvAtWA,
      solution.objectiveHistory.getOrElse(Array(0D)))
  }

  /** Construct A^T^ A (append bias if necessary). */
  private def getAtA(aaBar: Array[Double], aBar: Array[Double]): DenseVector = {
    if (fitIntercept) {
      new DenseVector(Array.concat(aaBar, aBar, Array(1.0)))
    } else {
      // clone so later in-place edits by the solver do not corrupt the source
      new DenseVector(aaBar.clone())
    }
  }

  /** Construct A^T^ b (append bias if necessary). */
  private def getAtB(abBar: Array[Double], bBar: Double): DenseVector = {
    if (fitIntercept) {
      new DenseVector(Array.concat(abBar, Array(bBar)))
    } else {
      new DenseVector(abBar.clone())
    }
  }
}
private[ml] object WeightedLeastSquares {

  /**
   * In order to take the normal equation approach efficiently, [[WeightedLeastSquares]]
   * only supports the number of features is no more than 4096.
   */
  val MAX_NUM_FEATURES: Int = 4096

  // Enumeration of available solver strategies.
  sealed trait Solver
  case object Auto extends Solver
  case object Cholesky extends Solver
  case object QuasiNewton extends Solver

  val supportedSolvers = Array(Auto, Cholesky, QuasiNewton)

  /**
   * Aggregator to provide necessary summary statistics for solving [[WeightedLeastSquares]].
   *
   * Accumulates weighted first and second moments of features and labels in a
   * single pass; `aaSum` is stored as a packed upper-triangular matrix of
   * length k*(k+1)/2.
   */
  // TODO: consolidate aggregates for summary statistics
  private class Aggregator extends Serializable {
    var initialized: Boolean = false
    var k: Int = _
    var count: Long = _
    var triK: Int = _            // size of the packed upper-triangular buffer
    var wSum: Double = _         // sum of instance weights
    private var wwSum: Double = _
    private var bSum: Double = _
    private var bbSum: Double = _
    private var aSum: DenseVector = _
    private var abSum: DenseVector = _
    private var aaSum: DenseVector = _

    // Lazily sized from the first instance seen.
    private def init(k: Int): Unit = {
      require(k <= MAX_NUM_FEATURES, "In order to take the normal equation approach efficiently, " +
        s"we set the max number of features to $MAX_NUM_FEATURES but got $k.")
      this.k = k
      triK = k * (k + 1) / 2
      count = 0L
      wSum = 0.0
      wwSum = 0.0
      bSum = 0.0
      bbSum = 0.0
      aSum = new DenseVector(Array.ofDim(k))
      abSum = new DenseVector(Array.ofDim(k))
      aaSum = new DenseVector(Array.ofDim(triK))
      initialized = true
    }

    /**
     * Adds an instance.
     */
    def add(instance: Instance): this.type = {
      val Instance(l, w, f) = instance
      val ak = f.size
      if (!initialized) {
        init(ak)
      }
      assert(ak == k, s"Dimension mismatch. Expect vectors of size $k but got $ak.")
      count += 1L
      wSum += w
      wwSum += w * w
      bSum += w * l
      bbSum += w * l * l
      BLAS.axpy(w, f, aSum)
      BLAS.axpy(w * l, f, abSum)
      // rank-one update of the packed upper-triangular outer-product sum
      BLAS.spr(w, f, aaSum)
      this
    }

    /**
     * Merges another [[Aggregator]].
     */
    def merge(other: Aggregator): this.type = {
      if (!other.initialized) {
        this
      } else {
        if (!initialized) {
          init(other.k)
        }
        assert(k == other.k, s"dimension mismatch: this.k = $k but other.k = ${other.k}")
        count += other.count
        wSum += other.wSum
        wwSum += other.wwSum
        bSum += other.bSum
        bbSum += other.bbSum
        BLAS.axpy(1.0, other.aSum, aSum)
        BLAS.axpy(1.0, other.abSum, abSum)
        BLAS.axpy(1.0, other.aaSum, aaSum)
        this
      }
    }

    /**
     * Validates that we have seen observations.
     */
    def validate(): Unit = {
      assert(initialized, "Training dataset is empty.")
      assert(wSum > 0.0, "Sum of weights cannot be zero.")
    }

    /**
     * Weighted mean of features.
     */
    def aBar: DenseVector = {
      val output = aSum.copy
      BLAS.scal(1.0 / wSum, output)
      output
    }

    /**
     * Weighted mean of labels.
     */
    def bBar: Double = bSum / wSum

    /**
     * Weighted mean of squared labels.
     */
    def bbBar: Double = bbSum / wSum

    /**
     * Weighted population standard deviation of labels.
     */
    def bStd: Double = {
      // We prevent variance from negative value caused by numerical error.
      val variance = math.max(bbSum / wSum - bBar * bBar, 0.0)
      math.sqrt(variance)
    }

    /**
     * Weighted mean of (label * features).
     */
    def abBar: DenseVector = {
      val output = abSum.copy
      BLAS.scal(1.0 / wSum, output)
      output
    }

    /**
     * Weighted mean of (features * features^T^).
     */
    def aaBar: DenseVector = {
      val output = aaSum.copy
      BLAS.scal(1.0 / wSum, output)
      output
    }

    /**
     * Weighted population standard deviation of features.
     */
    def aStd: DenseVector = {
      val std = Array.ofDim[Double](k)
      var i = 0
      var j = 2
      val aaValues = aaSum.values
      // i walks the diagonal of the packed triangular matrix
      // (indices 0, 2, 5, ...); l = j - 2 is the feature index.
      while (i < triK) {
        val l = j - 2
        val aw = aSum(l) / wSum
        // We prevent variance from negative value caused by numerical error.
        std(l) = math.sqrt(math.max(aaValues(i) / wSum - aw * aw, 0.0))
        i += j
        j += 1
      }
      new DenseVector(std)
    }

    /**
     * Weighted population variance of features.
     */
    def aVar: DenseVector = {
      val variance = Array.ofDim[Double](k)
      var i = 0
      var j = 2
      val aaValues = aaSum.values
      // same diagonal traversal as in aStd
      while (i < triK) {
        val l = j - 2
        val aw = aSum(l) / wSum
        // We prevent variance from negative value caused by numerical error.
        variance(l) = math.max(aaValues(i) / wSum - aw * aw, 0.0)
        i += j
        j += 1
      }
      new DenseVector(variance)
    }
  }
}
| maropu/spark | mllib/src/main/scala/org/apache/spark/ml/optim/WeightedLeastSquares.scala | Scala | apache-2.0 | 16,868 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.compiler.codegenerator.opencl.fragments
/** OpenCL has support for various scalar and vector floating point types,
  * enumerated here.
  *
  * For a given CLType, it's often useful in HyperKernel code generation to
  * know a string representation of the type (i.e. 'name') and a string
  * representation of the zero of the type (i.e. 'zero').
  *
  * CLPixel is used to represent the Cog-standard representation of 2D and 3D
  * color images: RGBA with floats. Note that the 'zero' of CLPixel has
  * the alpha channel set to 1.
  *
  * @author Greg Snider
  */
private[cogx]
abstract class CLType(val name: String, val zero: String) {
  // Render as the OpenCL type name (e.g. "float4") when spliced into kernels.
  override def toString = name
}
// Scalar float and the fixed-width OpenCL float vector types, each paired
// with its zero-literal representation.
private[cogx]
case object CLFloat extends CLType("float", "0.0f")

private[cogx]
case object CLFloat2 extends CLType("float2", "(float2) (0.0f, 0.0f)")

private[cogx]
case object CLFloat3 extends CLType("float3", "(float3) (0.0f, 0.0f, 0.0f)")

private[cogx]
case object CLFloat4 extends CLType("float4", "(float4) (0.0f, 0.0f, 0.0f, 0.0f)")

private[cogx]
case object CLFloat8 extends CLType("float8", "(float8) (0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f)")

private[cogx]
case object CLFloat16 extends CLType("float16", "(float16) (0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f)")
// Arbitrary-width float vector type; the zero literal enumerates n "0.0f"
// components. Note the generated literal has no space between the cast and
// the component list (e.g. "(float4)(0.0f, ...)"), preserving the original
// formatting, which differs cosmetically from the fixed-width case objects.
private[cogx]
case class CLFloatN(n: Int) extends CLType(
  "float" + n,
  Seq.fill(n)("0.0f").mkString("(float" + n + ")" + "(", ", ", ")"))
// Old form of CLFloat2, CLFloat3 and CLFloat4 before CLFloatN(n:Int) existed
//object CLFloat2 extends CLType("float2", "(float2) (0.0f, 0.0f)")
//object CLFloat3 extends CLType("float3", "(float3) (0.0f, 0.0f, 0.0f)")
//object CLFloat4 extends CLType("float4", "(float4) (0.0f, 0.0f, 0.0f, 0.0f)")
// Scalar int and the fixed-width OpenCL int vector types.
private[cogx]
case object CLInt extends CLType("int", "0")

private[cogx]
case object CLInt2 extends CLType("int2", "(int2) (0,0)")

private[cogx]
case object CLInt3 extends CLType("int3", "(int3) (0,0,0)")

private[cogx]
case object CLInt4 extends CLType("int4", "(int4) (0,0,0,0)")

// Cog-standard pixel: RGBA floats; note the zero sets the alpha channel to 1.
private[cogx]
case object CLPixel extends CLType("float4", "(float4) (0.0f, 0.0f, 0.0f, 1.0f)")

// Complex number packed as (real, imaginary) in a float2.
private[cogx]
case object CLComplex extends CLType("float2", "(float2) (0.0f, 0.0f)")
| hpe-cct/cct-core | src/main/scala/cogx/compiler/codegenerator/opencl/fragments/CLType.scala | Scala | apache-2.0 | 2,864 |
package com.cerner.beadledom.avro
import com.cerner.beadledom.metadata.BuildInfo
import com.fasterxml.jackson.databind.{Module, ObjectMapper}
import com.google.inject._
import com.google.inject.multibindings.ProvidesIntoSet
import java.util.Properties
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FunSpec, Matchers}
import scala.collection.JavaConverters._
/**
* Spec tests for {@link AvroJacksonGuiceModule}.
*/
class AvroJacksonGuiceModuleSpec extends FunSpec with Matchers with MockitoSugar {
  // Guice injector wiring a BuildInfo instance, the AvroJacksonGuiceModule
  // under test, and an ObjectMapper assembled from the bound Jackson modules.
  val injector = Guice.createInjector(new AbstractModule() {
    override def configure(): Unit = {
      val buildInfo = BuildInfo.builder()
          .setVersion("1.0")
          .setArtifactId("artifactId")
          .setGroupId("groupId")
          .setScmRevision("ScmRevision")
          .setRawProperties(new Properties())
          .build()

      bind(classOf[BuildInfo]).toInstance(buildInfo)
      install(new AvroJacksonGuiceModule())
    }

    // Builds the ObjectMapper from every Jackson Module contributed to the
    // multibound set (the Avro module plus the TestModule below).
    @Provides
    @Singleton
    def provideObjectMapper(
        jacksonModules: java.util.Set[Module]): ObjectMapper = {
      val objectMapper: ObjectMapper = new ObjectMapper
      objectMapper.registerModules(jacksonModules)
      objectMapper
    }

    // Contributes a second module so the multibinding set has two members.
    @ProvidesIntoSet
    def provideAvroJacksonModule(): Module = {
      return new TestModule
    }
  })

  val mapper = injector.getInstance(classOf[ObjectMapper])

  describe("AvroSerializationModule") {
    describe("ObjectMapper") {
      // Round-trip: JSON produced from an Avro object must parse back equal.
      it("serializes and deserializes avro objects") {
        val model = OuterTestModel.newBuilder
            .setInnerModels(List(
              InnerTestModel.newBuilder
                  .setSomeField("what up")
                  .build,
              InnerTestModel.newBuilder
                  .setNullableWithDefault(null)
                  .setSomeField("howdy")
                  .build,
              InnerTestModel.newBuilder
                  .setNullableWithDefault("hi there")
                  .setSomeField("aloha")
                  .build).asJava)
            .setLongWithoutDefault(9)
            .setStringWithoutDefault("yo")
            .build

        val str = mapper.writeValueAsString(model)
        mapper.readValue(str, classOf[OuterTestModel]) should be(model)
      }

      // Fields omitted from the JSON must be filled from the Avro schema's
      // default values (e.g. nullableWithDefault -> null, longWithDefault -> 5).
      it("applies default field values when parsing into avro objects") {
        val input =
          """
            |{
            |  "innerModels": [
            |    {
            |      "nullableWithDefault": "hi there",
            |      "someField": "aloha"
            |    },
            |    {
            |      "nullableWithDefault": null,
            |      "someField": "sayonara"
            |    },
            |    {
            |      "someField": "toodles"
            |    }
            |  ],
            |  "longWithoutDefault": 9,
            |  "stringWithoutDefault": "yo"
            |}
          """.stripMargin
        val expected = OuterTestModel.newBuilder
            .setInnerModels(List(
              InnerTestModel.newBuilder
                  .setNullableWithDefault("hi there")
                  .setSomeField("aloha")
                  .build,
              InnerTestModel.newBuilder
                  .setNullableWithDefault(null)
                  .setSomeField("sayonara")
                  .build,
              InnerTestModel.newBuilder
                  .setNullableWithDefault(null)
                  .setSomeField("toodles")
                  .build).asJava)
            .setLongWithoutDefault(9)
            .setLongWithDefault(5)
            .setStringWithoutDefault("yo")
            .build

        mapper.readValue(input, classOf[OuterTestModel]) should be(expected)
      }
    }

    // Both the Avro module and the TestModule must appear in the bound set.
    it("creates map of dependencies") {
      val setType = new TypeLiteral[java.util.Set[Module]] {}
      val dependencies = injector.getInstance(Key.get(setType))

      dependencies should have size 2
    }
  }
}
| bbaugher/beadledom | avro/jackson/src/test/scala/com/cerner/beadledom/avro/AvroJacksonGuiceModuleSpec.scala | Scala | apache-2.0 | 3,812 |
/*
* Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package hadoop
package good
// Scala
import scala.collection.mutable.Buffer
// Specs2
import org.specs2.mutable.Specification
// Scalding
import com.twitter.scalding._
// Cascading
import cascading.tuple.TupleEntry
// This project
import JobSpecHelpers._
/**
* Holds the input and expected data
* for the test.
*/
object CljTomcatMailchimpEventSpec {

  // Raw Clojure-Tomcat access-log line: a POST to the MailChimp webhook
  // adapter path (/com.mailchimp/v1). The last field is the Base64-encoded,
  // form-urlencoded body of a MailChimp "subscribe" webhook event.
  val lines = Lines(
    "2014-10-09  16:28:31  -  13  255.255.255.255 POST 255.255.255.255 /com.mailchimp/v1 404 - -  aid=email&cv=clj-0.6.0-tom-0.0.4&nuid=- - - - application%2Fx-www-form-urlencoded dHlwZT1zdWJzY3JpYmUmZmlyZWRfYXQ9MjAxNC0xMS0wNCswOSUzQTQyJTNBMzEmZGF0YSU1QmlkJTVEPWU3Yzc3ZDM4NTImZGF0YSU1QmVtYWlsJTVEPWFnZW50c21pdGglNDBzbm93cGxvd3Rlc3QuY29tJmRhdGElNUJlbWFpbF90eXBlJTVEPWh0bWwmZGF0YSU1QmlwX29wdCU1RD04Mi4yMjUuMTY5LjIyMCZkYXRhJTVCd2ViX2lkJTVEPTIxMDgzMzgyNSZkYXRhJTVCbWVyZ2VzJTVEJTVCRU1BSUwlNUQ9YWdlbnRzbWl0aCU0MHNub3dwbG93dGVzdC5jb20mZGF0YSU1Qm1lcmdlcyU1RCU1QkZOQU1FJTVEPUFnZW50JmRhdGElNUJtZXJnZXMlNUQlNUJMTkFNRSU1RD1TbWl0aCZkYXRhJTVCbGlzdF9pZCU1RD1mMTI0M2EzYjEy"
    )

  // Expected enriched event, one entry per TSV output column, in the
  // canonical Snowplow enriched-event field order. `null` means the field
  // is expected to be empty in the output.
  val expected = List(
    "email",
    "srv",
    EtlTimestamp,
    "2014-10-09 16:28:31.000",
    null,
    "unstruct",
    null, // We can't predict the event_id
    null,
    null, // No tracker namespace
    "com.mailchimp-v1",
    "clj-0.6.0-tom-0.0.4",
    EtlVersion,
    null, // No user_id set
    "255.255.x.x",
    null,
    null,
    null,
    "-", // TODO: fix this, https://github.com/snowplow/snowplow/issues/1133
    null, // No geo-location for this IP address
    null,
    null,
    null,
    null,
    null,
    null,
    null, // No additional MaxMind databases used
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null, // Marketing campaign fields empty
    null, //
    null, //
    null, //
    null, //
    null, // No custom contexts
    null, // Structured event fields empty
    null, //
    null, //
    null, //
    null, //
    // The decoded webhook body, re-expressed as a Snowplow unstructured event
    // wrapping the iglu:com.mailchimp/subscribe self-describing JSON.
    """{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.mailchimp/subscribe/jsonschema/1-0-0","data":{"data":{"ip_opt":"82.225.169.220","merges":{"LNAME":"Smith","FNAME":"Agent","EMAIL":"[email protected]"},"email":"[email protected]","list_id":"f1243a3b12","email_type":"html","id":"e7c77d3852","web_id":"210833825"},"fired_at":"2014-11-04T09:42:31.000Z","type":"subscribe"}}}""",
    null, // Transaction fields empty
    null, //
    null, //
    null, //
    null, //
    null, //
    null, //
    null, //
    null, // Transaction item fields empty
    null, //
    null, //
    null, //
    null, //
    null, //
    null, // Page ping fields empty
    null, //
    null, //
    null, //
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null,
    null
    )
}
/**
* Integration test for the EtlJob:
*
* For details:
* https://forums.aws.amazon.com/thread.jspa?threadID=134017&tstart=0#
*/
class CljTomcatMailchimpEventSpec extends Specification {

  // Runs the Scalding EtlJob against the single raw log line above and
  // checks the good output, the exception trap and the bad-rows sink.
  "A job which processes a Clojure-Tomcat file containing a POST raw event representing 1 valid completed call" should {
    // "clj-tomcat" collector format, anonymization of 2 IP octets, geo lookup enabled.
    EtlJobSpec("clj-tomcat", "2", true, List("geo")).
      source(MultipleTextLineFiles("inputFolder"), CljTomcatMailchimpEventSpec.lines).
      sink[TupleEntry](Tsv("outputFolder")){ buf : Buffer[TupleEntry] =>
        "correctly output 1 completed call" in {
          buf.size must_== 1
          val actual = buf.head
          // Compare every output column against the expected field list.
          for (idx <- CljTomcatMailchimpEventSpec.expected.indices) {
            actual.getString(idx) must beFieldEqualTo(CljTomcatMailchimpEventSpec.expected(idx), withIndex = idx)
          }
        }
      }.
      sink[TupleEntry](Tsv("exceptionsFolder")){ trap =>
        "not trap any exceptions" in {
          trap must beEmpty
        }
      }.
      sink[String](Tsv("badFolder")){ error =>
        "not write any bad rows" in {
          error must beEmpty
        }
      }.
      run.
      finish
  }
} | mdavid/lessig-bigdata | lib/snowplow/3-enrich/scala-hadoop-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.hadoop/good/CljTomcatMailchimpEventSpec.scala | Scala | mit | 5,094 |
package fpinscala.laziness
import Stream._
trait Stream[+A] {

  /** Deconstructs this stream into its head and tail, if non-empty. */
  def uncons: Option[(A, Stream[A])]

  /** True when this stream contains no elements. */
  def isEmpty: Boolean = uncons.isEmpty

  /**
   * Lazily folds the stream from the right. The accumulator argument of `f`
   * is by-name, so `f` may terminate the traversal early by never forcing it.
   */
  def foldRight[B](z: => B)(f: (A, => B) => B): B =
    uncons match {
      case None               => z
      case Some((head, tail)) => f(head, tail.foldRight(z)(f))
    }

  /** True if any element satisfies `p`; stops at the first match. */
  def exists(p: A => Boolean): Boolean =
    foldRight(false)((elem, found) => p(elem) || found)
}

object Stream {

  /** The empty stream. */
  def empty[A]: Stream[A] =
    new Stream[A] { def uncons = None }

  /** Smart constructor: head and tail are each evaluated at most once. */
  def cons[A](hd: => A, tl: => Stream[A]): Stream[A] =
    new Stream[A] {
      lazy val uncons = Some((hd, tl))
    }

  /** Builds a stream from the given elements. */
  def apply[A](as: A*): Stream[A] =
    if (as.isEmpty) empty else cons(as.head, apply(as.tail: _*))

  /** An infinite stream of ones (safe to inspect thanks to laziness). */
  val ones: Stream[Int] = cons(1, ones)
} | ShokuninSan/fpinscala | chaptercode/src/main/scala/fpinscala/laziness/Stream.scala | Scala | mit | 737 |
package net.pointsgame.engine
/** Outcome of applying a move to the game engine. */
abstract sealed class MoveResult
// The move was rejected as invalid.
case object Error extends MoveResult
// The move was applied and produced no surroundings.
case object Normal extends MoveResult
// The move was applied and produced one or more surroundings; the Int values
// are engine-specific surrounding descriptors — NOTE(review): their exact
// meaning is not visible here, confirm against the engine implementation.
case class WithSurroundings(newSurroundings: Int*) extends MoveResult
| vn971/points-wip | modules/game-engine-experiments/src/main/scala/net/pointsgame/engine/AiEngineApi.scala | Scala | agpl-3.0 | 210 |
package spire.macros
import spire.algebra.{Field, Ring}
import spire.macros.compat.Context
import spire.math.{Rational, UByte, UShort, UInt, ULong}
object Macros {
case class LiteralUtil(c: Context) {
import c.universe._
def getString: String = {
val Apply(_, List(Apply(_, List(Literal(Constant(s: String)))))) = c.prefix.tree
s
}
}
def parseContext(c: Context, lower: BigInt, upper: BigInt): Either[String, BigInt] =
parseNumber(LiteralUtil(c).getString, lower, upper)
def parseNumber(s: String, lower: BigInt, upper: BigInt): Either[String, BigInt] =
try {
val n = BigInt(s)
if (n < lower || n > upper) Left("illegal constant: %s" format s) else Right(n)
} catch {
case _: Exception => Left("illegal constant: %s" format s)
}
def byte(c: Context)(): c.Expr[Byte] = {
import c.universe._
parseContext(c, BigInt(-128), BigInt(255)) match {
case Right(n) => c.Expr(q"${n.toByte}")
case Left(s) => throw new NumberFormatException(s)
}
}
def ubyte(c: Context)(): c.Expr[UByte] = {
import c.universe._
parseContext(c, BigInt(0), BigInt(255)) match {
case Right(n) => c.Expr(q"spire.math.UByte(${n.toByte})")
case Left(s) => throw new NumberFormatException(s)
}
}
def short(c: Context)(): c.Expr[Short] = {
import c.universe._
parseContext(c, BigInt(-32768), BigInt(65535)) match {
case Right(n) => c.Expr(q"${n.toShort}")
case Left(s) => throw new NumberFormatException(s)
}
}
def ushort(c: Context)(): c.Expr[UShort] = {
import c.universe._
parseContext(c, BigInt(0), BigInt(65535)) match {
case Right(n) => c.Expr(q"spire.math.UShort(${n.toShort})")
case Left(s) => throw new NumberFormatException(s)
}
}
def uint(c: Context)(): c.Expr[UInt] = {
import c.universe._
parseContext(c, BigInt(0), BigInt(4294967295L)) match {
case Right(n) => c.Expr(q"spire.math.UInt(${n.toInt})")
case Left(s) => throw new NumberFormatException(s)
}
}
def ulong(c: Context)(): c.Expr[ULong] = {
import c.universe._
parseContext(c, BigInt(0), BigInt("18446744073709551615")) match {
case Right(n) => c.Expr(q"spire.math.ULong(${n.toLong})")
case Left(s) => throw new NumberFormatException(s)
}
}
def rational(c: Context)(): c.Expr[Rational] = {
import c.universe._
val Apply(_, List(Apply(_, List(Literal(Constant(s:String)))))) = c.prefix.tree
val r = Rational(s)
val (n, d) = (r.numerator, r.denominator)
if (n.isValidLong && d.isValidLong)
c.Expr(q"spire.math.Rational(${n.toLong}, ${d.toLong})")
else
c.Expr(q"spire.math.Rational(BigInt(${n.toString}), BigInt(${d.toString}))")
}
def formatWhole(c: Context, sep: String): String = {
val regex = "0|-?[1-9][0-9]{0,2}(%s[0-9]{3})*" format sep
import c.universe._
val Apply(_, List(Apply(_, List(Literal(Constant(s:String)))))) = c.prefix.tree
if (!s.matches(regex)) c.error(c.enclosingPosition, "invalid whole number")
s.replace(sep, "")
}
def formatDecimal(c: Context, sep: String, dec: String): String = {
val regex = "0|-?[1-9][0-9]{0,2}(%s[0-9]{3})*(%s[0-9]+)?" format (sep, dec)
import c.universe._
val Apply(_, List(Apply(_, List(Literal(Constant(s:String)))))) = c.prefix.tree
if (!s.matches(regex)) c.error(c.enclosingPosition, "invalid decimal number")
s.replace(sep, "").replace(dec, ".")
}
def handleInt(c: Context, name: String, sep: String): c.Expr[Int] = {
import c.universe._
try {
c.Expr[Int](Literal(Constant(formatWhole(c, sep).toInt)))
} catch {
case e: Exception =>
throw new NumberFormatException("illegal %s Int constant" format name)
}
}
def handleLong(c: Context, name: String, sep: String): c.Expr[Long] = {
import c.universe._
try {
c.Expr[Long](Literal(Constant(formatWhole(c, sep).toLong)))
} catch {
case e: Exception =>
throw new NumberFormatException("illegal %s Long constant" format name)
}
}
def handleBigInt(c: Context, name: String, sep: String): c.Expr[BigInt] = {
import c.universe._
try {
val s = formatWhole(c, sep)
val b = BigInt(s) // make sure it's ok
c.Expr[BigInt](Apply(q"scala.math.BigInt.apply", List(Literal(Constant(s)))))
} catch {
case e: Exception =>
throw new NumberFormatException("illegal %s BigInt constant" format name)
}
}
def handleBigDecimal(c: Context, name: String, sep: String, dec: String): c.Expr[BigDecimal] = {
import c.universe._
try {
val s = formatDecimal(c, sep, dec)
val b = BigDecimal(s) // make sure it's ok
c.Expr[BigDecimal](Apply(q"scala.math.BigDecimal.apply", List(Literal(Constant(s)))))
} catch {
case e: Exception =>
throw new NumberFormatException("illegal %s BigInt constant" format name)
}
}
def siInt(c: Context)(): c.Expr[Int] = handleInt(c, "SI", " ")
def siLong(c: Context)(): c.Expr[Long] = handleLong(c, "SI", " ")
def siBigInt(c: Context)(): c.Expr[BigInt] = handleBigInt(c, "SI", " ")
def siBigDecimal(c: Context)(): c.Expr[BigDecimal] = handleBigDecimal(c, "SI", " ", ".")
def usInt(c: Context)(): c.Expr[Int] = handleInt(c, "US", ",")
def usLong(c: Context)(): c.Expr[Long] = handleLong(c, "US", ",")
def usBigInt(c: Context)(): c.Expr[BigInt] = handleBigInt(c, "US", ",")
def usBigDecimal(c: Context)(): c.Expr[BigDecimal] = handleBigDecimal(c, "US", ",", ".")
def euInt(c: Context)(): c.Expr[Int] = handleInt(c, "EU", ".")
def euLong(c: Context)(): c.Expr[Long] = handleLong(c, "EU", ".")
def euBigInt(c: Context)(): c.Expr[BigInt] = handleBigInt(c, "EU", ".")
def euBigDecimal(c: Context)(): c.Expr[BigDecimal] = handleBigDecimal(c, "EU", ".", ",")
def radix(c: Context)(): c.Expr[Int] = {
import c.universe._
val Apply(_, List(Apply(_, List(Literal(Constant(s:String)))))) = c.prefix.tree
val name = c.macroApplication.symbol.name.toString
val base = name.substring(1).toInt
if (base < 2 || 36 < base)
throw new NumberFormatException("invalid radix: %s" format base)
val n = java.lang.Integer.parseInt(s, base)
c.Expr[Int](Literal(Constant(n)))
}
def intAs[A : c.WeakTypeTag](c:Context)(ev : c.Expr[Ring[A]]):c.Expr[A] = {
import c.universe._
c.Expr[A](c.prefix.tree match {
case Apply((_, List(Literal(Constant(0))))) => q"$ev.zero"
case Apply((_, List(Literal(Constant(1))))) => q"$ev.one"
case Apply((_, List(n))) => q"$ev.fromInt($n)"
})
}
def dblAs[A : c.WeakTypeTag](c:Context)(ev : c.Expr[Field[A]]):c.Expr[A]= {
import c.universe._
c.Expr[A](c.prefix.tree match {
case Apply((_, List(Literal(Constant(0.0))))) => q"$ev.zero"
case Apply((_, List(Literal(Constant(1.0))))) => q"$ev.one"
case Apply((_, List(n))) => q"$ev.fromDouble($n)"
})
}
}
| guersam/spire | core/shared/src/main/scala/spire/macros/Macros.scala | Scala | mit | 6,947 |
package com.sample.app.model
class Post(val user: User, val header: String, val text: String) | nmilinkovic/Skadi | src/test/scala/com/sample/app/model/Post.scala | Scala | bsd-3-clause | 94 |
package org.orbeon.dom.io
import java.{lang ⇒ jl, util ⇒ ju}
import org.orbeon.dom._
import org.orbeon.dom.tree.{ConcreteElement, NamespaceStack}
import org.xml.sax._
import org.xml.sax.ext.LexicalHandler
import org.xml.sax.helpers.DefaultHandler
/**
 * `SAXContentHandler` builds an orbeon-dom tree from SAX events.
 *
 * Behaviour toggles:
 *  - `mergeAdjacentText`: buffer consecutive character events into a single text node
 *  - `stripWhitespaceText`: drop buffered text that is entirely whitespace
 *    (only consulted when `mergeAdjacentText` is enabled)
 *  - `ignoreComments`: discard comment events entirely
 *
 * `systemIdOpt`, when present, becomes the created document's name.
 */
class SAXContentHandler(
  systemIdOpt         : Option[String],
  mergeAdjacentText   : Boolean,
  stripWhitespaceText : Boolean,
  ignoreComments      : Boolean
) extends DefaultHandler with LexicalHandler {

  // Stack of open elements; the current element is always at the top (last index).
  protected val elementStack = new ju.ArrayList[Element](50)

  private val namespaceStack = new NamespaceStack

  private lazy val document = createDocument
  def getDocument = document

  // State
  private var locator: Locator = _
  // Index into namespaceStack of the first namespace not yet attached to an element.
  private var declaredNamespaceIndex = 0
  private var currentElement: Element = _
  // Whether textBuffer currently holds characters awaiting flushing into a text node.
  private var textInTextBuffer = false
  private var textBuffer: jl.StringBuilder = _

  override def setDocumentLocator(documentLocator: Locator): Unit =
    this.locator = documentLocator

  override def processingInstruction(target: String, data: String): Unit = {
    // Flush buffered text first so node ordering is preserved.
    if (mergeAdjacentText && textInTextBuffer)
      completeCurrentTextNode()
    if (currentElement ne null)
      currentElement.addProcessingInstruction(target, data)
    else
      getDocument.addProcessingInstruction(target, data)
  }

  override def startPrefixMapping(prefix: String, uri: String): Unit =
    namespaceStack.push(prefix, uri)

  override def endPrefixMapping(prefix: String): Unit = {
    namespaceStack.pop(prefix)
    declaredNamespaceIndex = namespaceStack.size
  }

  override def startDocument(): Unit = {
    // Reset all per-parse state so the handler can be reused.
    // document = null
    currentElement = null

    elementStack.clear()
    namespaceStack.clear()
    declaredNamespaceIndex = 0

    if (mergeAdjacentText && (textBuffer eq null))
      textBuffer = new jl.StringBuilder

    textInTextBuffer = false
  }

  override def endDocument(): Unit = {
    namespaceStack.clear()
    elementStack.clear()
    currentElement = null
    textBuffer = null
  }

  override def startElement(
    namespaceURI  : String,
    localName     : String,
    qualifiedName : String,
    attributes    : Attributes
  ): Unit = {
    if (mergeAdjacentText && textInTextBuffer)
      completeCurrentTextNode()

    val qName = namespaceStack.getQName(namespaceURI, localName, qualifiedName)
    // Root elements attach to the document, nested elements to their parent.
    val branch = Option(currentElement) getOrElse getDocument
    val element = branch.addElement(qName)

    // Add all declared namespaces
    addDeclaredNamespaces(element)

    // Now lets add all attribute values
    addAttributes(element, attributes)

    elementStack.add(element)
    currentElement = element
  }

  override def endElement(
    namespaceURI : String,
    localName    : String,
    qName        : String
  ): Unit = {
    if (mergeAdjacentText && textInTextBuffer)
      completeCurrentTextNode()

    elementStack.remove(elementStack.size - 1)
    currentElement = if (elementStack.isEmpty) null else elementStack.get(elementStack.size - 1)
  }

  // NOTE(review): per the SAX contract the third parameter is a *length*,
  // not an end offset; it is correctly used as a length below — only the
  // parameter name is misleading.
  override def characters(ch: Array[Char], start: Int, end: Int): Unit = {
    if (end == 0)
      return

    if (currentElement ne null) {
      if (mergeAdjacentText) {
        textBuffer.append(ch, start, end)
        textInTextBuffer = true
      } else {
        currentElement.addText(new String(ch, start, end))
      }
    }
  }

  override def warning(exception: SAXParseException)   : Unit = ()
  override def error(exception: SAXParseException)     : Unit = throw exception
  override def fatalError(exception: SAXParseException): Unit = throw exception

  // LexicalHandler: DTD/entity/CDATA boundaries are ignored.
  def startDTD(name: String, publicId: String, systemId: String) = ()
  def endDTD() = ()
  def startEntity(name: String) = ()
  def endEntity(name: String) = ()
  def startCDATA() = ()
  def endCDATA() = ()

  // NOTE(review): as with `characters`, `end` here is a length per the SAX
  // LexicalHandler contract, and is used as such.
  def comment(ch: Array[Char], start: Int, end: Int): Unit = {
    if (!ignoreComments) {
      if (mergeAdjacentText && textInTextBuffer) {
        completeCurrentTextNode()
      }
      val text = new String(ch, start, end)
      if (text.length > 0) {
        if (currentElement ne null) {
          currentElement.addComment(text)
        } else {
          getDocument.addComment(text)
        }
      }
    }
  }

  override def notationDecl(name: String, publicId: String, systemId: String) = ()
  override def unparsedEntityDecl(name: String, publicId: String, systemId: String, notationName: String) = ()

  /**
   * If the current text buffer contains any text then create a new text node
   * with it and add it to the current element.
   */
  private def completeCurrentTextNode(): Unit = {
    if (stripWhitespaceText) {
      // Only emit the buffered text if at least one character is non-whitespace.
      var whitespace = true
      val breaks = new scala.util.control.Breaks
      import breaks._
      breakable {
        for (i ← 0 until textBuffer.length if ! Character.isWhitespace(textBuffer.charAt(i))) {
          whitespace = false
          break()
        }
      }
      if (!whitespace) {
        currentElement.addText(textBuffer.toString)
      }
    } else {
      currentElement.addText(textBuffer.toString)
    }
    textBuffer.setLength(0)
    textInTextBuffer = false
  }

  private def createDocument: Document = {
    val doc = DocumentFactory.createDocument
    systemIdOpt foreach doc.setName
    doc
  }

  /**
   * Add all namespaces declared before the startElement() SAX event to the
   * current element so that they are available to child elements and
   * attributes.
   */
  private def addDeclaredNamespaces(element: Element): Unit = {
    val size = namespaceStack.size
    while (declaredNamespaceIndex < size) {
      element.add(namespaceStack.getNamespace(declaredNamespaceIndex))
      declaredNamespaceIndex += 1
    }
  }

  // TODO: Change once everything is a concrete `Element`.
  private def addAttributes(element: Element, attributes: Attributes): Unit =
    element match {
      case elem: ConcreteElement ⇒ elem.setAttributes(attributes, namespaceStack, noNamespaceAttributes = false)
      case _                     ⇒ throw new IllegalStateException
    }
}
| brunobuzzi/orbeon-forms | dom/src/main/scala/org/orbeon/dom/io/SAXContentHandler.scala | Scala | lgpl-2.1 | 6,235 |
package mypipe.snapshotter.splitter
/**
 * Boundaries of one snapshot input split — presumably SQL clause fragments
 * bounding the split's key range (NOTE(review): inferred from the field
 * names; confirm against the splitter that consumes this).
 */
case class InputSplit(lowClausePrefix: String, upperClausePrefix: String)
| mardambey/mypipe | mypipe-snapshotter/src/main/scala/mypipe/snapshotter/splitter/InputSplit.scala | Scala | apache-2.0 | 149 |
package avrohugger
package matchers
import avrohugger.matchers.custom.CustomDefaultParamMatcher
import avrohugger.stores.ClassStore
import avrohugger.types._
import org.apache.avro.Schema
import org.apache.avro.Schema.Type
import treehugger.forest._
import definitions._
import treehugger.forest
import treehuggerDSL._
object DefaultParamMatcher {

  // for SpecificRecord
  /**
   * Returns a treehugger AST for a sensible default value of the given Avro
   * schema type, honouring logical types (date, timestamp-millis, uuid,
   * decimal) and the user's configured Scala type mappings in `typeMatcher`.
   */
  def asDefaultParam(
    classStore: ClassStore,
    avroSchema: Schema,
    typeMatcher: TypeMatcher): Tree = {

    avroSchema.getType match {

      case Type.BOOLEAN => FALSE
      case Type.INT =>
        // Plain int defaults to 0; the `date` logical type gets a date default.
        LogicalType.foldLogicalTypes[Tree](
          schema = avroSchema,
          default = LIT(0)) {
            case Date =>
              CustomDefaultParamMatcher.checkCustomDateType(
                typeMatcher.avroScalaTypes.date)
        }
      case Type.LONG =>
        // Plain long defaults to 0L; `timestamp-millis` gets a timestamp default.
        LogicalType.foldLogicalTypes[Tree](
          schema = avroSchema,
          default = LIT(0L)) {
            case TimestampMillis =>
              CustomDefaultParamMatcher.checkCustomTimestampMillisType(
                typeMatcher.avroScalaTypes.timestampMillis)
        }
      case Type.FLOAT => LIT(0F)
      case Type.DOUBLE => LIT(0D)
      case Type.STRING =>
        // Plain string defaults to ""; the `uuid` logical type gets a random UUID.
        LogicalType.foldLogicalTypes[Tree](
          schema = avroSchema,
          default = LIT("")) {
            case UUID => REF("java.util.UUID.randomUUID")
        }
      case Type.NULL => NULL
      case Type.FIXED =>
        // Fixed types wrap a (possibly decimal) byte-array default in the generated class.
        REF(classStore.generatedClasses(avroSchema)).APPLY(CustomDefaultParamMatcher.checkCustomDecimalType(
          decimalType = typeMatcher.avroScalaTypes.decimal,
          schema = avroSchema,
          default = ArrayClass.APPLY()))
      case Type.ENUM =>
        CustomDefaultParamMatcher.checkCustomEnumType(typeMatcher.avroScalaTypes.`enum`)
      case Type.BYTES =>
        CustomDefaultParamMatcher.checkCustomDecimalType(
          decimalType = typeMatcher.avroScalaTypes.decimal,
          schema = avroSchema,
          default = ArrayClass.APPLY())
      case Type.RECORD => NEW(classStore.generatedClasses(avroSchema))
      case Type.UNION  => NONE
      case Type.ARRAY  =>
        CustomDefaultParamMatcher.checkCustomArrayType(typeMatcher.avroScalaTypes.array) DOT "empty"
      case Type.MAP    =>
        // Recursively build a default for the value type keyed by "".
        MAKE_MAP(LIT("") ANY_-> asDefaultParam(classStore, avroSchema.getValueType, typeMatcher))
    }
  }
}
| julianpeeters/avrohugger | avrohugger-core/src/main/scala/matchers/DefaultParamMatcher.scala | Scala | apache-2.0 | 2,412 |
package com.filez.astyanax.multientity.play2.impl
import scala.collection.mutable.ListMap
import scala.collection.mutable.SynchronizedMap
import com.filez.astyanax.multientity.MultiEntityManagerFactory
import com.filez.astyanax.multientity.MultiEntityManager
/**
 * Process-wide cache of `MultiEntityManager` instances, keyed by name.
 *
 * NOTE(review): `SynchronizedMap` is deprecated since Scala 2.11 and only
 * synchronizes individual operations; compound check-then-act sequences on
 * this cache can still race. Consider `java.util.concurrent.ConcurrentHashMap`
 * or `TrieMap` if this map is mutated from multiple threads.
 */
object MultiEntityManagerCache extends ListMap[String, MultiEntityManager]
                               with SynchronizedMap [String, MultiEntityManager] {
} | hsn10/multiplugin | src/main/scala/impl/MultiEntityManagerCache.scala | Scala | agpl-3.0 | 443 |
/*
Facsimile: A Discrete-Event Simulation Library
Copyright © 2004-2020, Michael J Allen.
This file is part of Facsimile.
Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
version.
Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see
http://www.gnu.org/licenses/lgpl.
The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the
project home page at:
http://facsim.org/
Thank you for your interest in the Facsimile project!
IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for inclusion
as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If your code
fails to comply with the standard, then your patches will be rejected. For further information, please visit the coding
standards at:
http://facsim.org/Documentation/CodingStandards/
========================================================================================================================
Scala source file from the org.facsim.anim package.
*/
package org.facsim.anim
import org.facsim.requireFinite
import scalafx.scene.transform.Translate
/**
Defines a point in 3D animation space.

The point's coordinates may be world coordinates, or local coordinates
relative to a parent object (with local ''X''-, ''Y''- and ''Z''-axes).

@constructor Create new 3D animation point.

@param x ''X''-axis coordinate. This value must be finite.

@param y ''Y''-axis coordinate. This value must be finite.

@param z ''Z''-axis coordinate. This value must be finite.

@throws IllegalArgumentException if `x`, `y` or `z` has a non-finite
value.
*/

private[anim] final case class Point3D(x: Double, y: Double, z: Double) {

/*
Reject non-finite (NaN or infinite) coordinates at construction time.
*/

  requireFinite(x)
  requireFinite(y)
  requireFinite(z)

/**
Coordinates of this point as a list, ordered ''x'', ''y'', ''z''.

@return List of coordinate values, with ''x'' coordinate first, ''y''
coordinate second, and ''z'' coordinate third.
*/

  private[anim] def toList = x :: y :: z :: Nil

/**
Coordinates of this point as single-precision values, ordered ''x'', ''y'',
''z''.

@return List of coordinate values as single-precision floating point values,
with ''x'' coordinate first, ''y'' coordinate second, and ''z'' coordinate
third.
*/

  private[anim] def toFloatList = toList.map(_.toFloat)

/**
Convert this point to a ''ScalaFX'' translation.

@return Translation along local axes by coordinate values.
*/

  private[anim] def toTranslate = new Translate(x, y, z)
}
/**
Point3D companion object.
*/

private[anim] object Point3D {

/**
The origin of the coordinate system: (0, 0, 0).
*/

  private[anim] val Origin = Point3D(0.0, 0.0, 0.0)
} | MichaelJAllen/facsimile | core/src/main/scala/org/facsim/anim/Point3D.scala | Scala | lgpl-3.0 | 3,154 |
package susuru.core
// Outcome of a resource request. NOTE(review): the semantics of the variants
// are inferred from their names — confirm against the consuming code.
sealed class Result[R]
// Nothing to report.
case class Void[R]() extends Result[R]
// The requested resource does not exist.
case class NotExists[R]() extends Result[R]
// Caller should wait until notified; carries the request value `r`.
case class WaitNotify[R](r: R) extends Result[R]
// Caller should wait until the given time (presumably a timestamp — confirm units).
case class Wait[R](until: Long) extends Result[R]
// A resource was successfully leased.
case class Lease[R](resource: Resource[R]) extends Result[R]
| tomingtoming/susuru | src/main/scala/susuru/core/Result.scala | Scala | mit | 292 |
package models.actor
import models.binding._
import org.specs2.mutable.Specification
import org.specs2.mock.Mockito
import play.api.test.Helpers._
import play.api.test._
import play.api.libs.concurrent._
import play.api.libs.json._
import akka.pattern.ask
import akka.testkit._
import scala.concurrent.duration._
import scala.concurrent.Await
import scala.xml.NodeSeq
import java.net.URI
import models.actor.ConfigService.{
GetConfig, GetDatabases,
GetRegistryDatabases,
GetDatabaseById,
GetDatabaseType
}
import models.actor.ConfigService.GetDatabaseType
// actor tests need empty onStart routine
// Each example spins up a fresh FakeApplication plus a ConfigService actor
// (registered as "user/config") and asks it one message. The TestActorRef is
// needed so the actor is created synchronously inside the test; actor tests
// need an empty onStart routine (see file header note).
object ConfigServiceSpecs extends Specification with Mockito {

  "ConfigService" should {

    "get config in XML" in {
      val app = new FakeApplication
      running(app) {
        implicit val actorSystem = Akka.system(app)
        val actorRef = TestActorRef(new ConfigService, name = "config")
        val actor = actorSystem.actorSelection("user/config")
        val future = actor ? GetConfig("xml")
        val config = Await.result(future.mapTo[NodeSeq], DurationInt(10) second)
        config must beAnInstanceOf[NodeSeq]
      }
    }

    "get config in JSON" in {
      val app = new FakeApplication
      running(app) {
        implicit val actorSystem = Akka.system(app)
        val actorRef = TestActorRef(new ConfigService, name = "config")
        val actor = actorSystem.actorSelection("user/config")
        val future = actor ? GetConfig("json")
        val config = Await.result(future.mapTo[JsValue], DurationInt(10) second)
        config must beAnInstanceOf[JsValue]
      }
    }

    "get database objects" in {
      val app = new FakeApplication
      running(app) {
        implicit val actorSystem = Akka.system(app)
        val actorRef = TestActorRef(new ConfigService, name = "config")
        val actor = actorSystem.actorSelection("user/config")
        val future = actor ? GetDatabases
        val databases = Await.result(future.mapTo[Seq[Database]], DurationInt(10) second)
        databases must beAnInstanceOf[Seq[Database]]
      }
    }

    "get database objects for registry" in {
      val app = new FakeApplication
      running(app) {
        implicit val actorSystem = Akka.system(app)
        val actorRef = TestActorRef(new ConfigService, name = "config")
        val actor = actorSystem.actorSelection("user/config")
        val future = actor ? GetRegistryDatabases
        val databases = Await.result(future.mapTo[Seq[Database]], DurationInt(10) second)
        databases must beAnInstanceOf[Seq[Database]]
      }
    }

    "get database objects by type" in {
      val app = new FakeApplication
      running(app) {
        implicit val actorSystem = Akka.system(app)
        val actorRef = TestActorRef(new ConfigService, name = "config")
        val actor = actorSystem.actorSelection("user/config")
        val future = actor ? GetDatabaseType(Simulation)
        val databases = Await.result(future.mapTo[Seq[Database]], DurationInt(10) second)
        // Every returned database must be of the requested type.
        databases.map(_.typeValue must beEqualTo(Simulation))
        databases must beAnInstanceOf[Seq[Database]]
      }
    }

    "get database object by id" in {
      val app = new FakeApplication
      running(app) {
        implicit val actorSystem = Akka.system(app)
        val actorRef = TestActorRef(new ConfigService, name = "config")
        val actor = actorSystem.actorSelection("user/config")
        val future = actor ? GetDatabaseById(new URI("spase://IMPEX/Repository/LATMOS"))
        val database = Await.result(future.mapTo[Database], DurationInt(10) second)
        database must beAnInstanceOf[Database]
      }
    }

  }
} | FlorianTopf/impex-portal | test/models/actor/ConfigServiceSpecs.scala | Scala | gpl-2.0 | 4,288 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import uk.gov.hmrc.ct.box.CtTypeConverters
import uk.gov.hmrc.ct.computations._
// Derives the summary boxes of a CT computation from their source boxes.
// Each method maps one or more input box values to a single summary box.
trait SummaryCalculator extends CtTypeConverters{

  // CP257 (losses brought forward for summary) mirrors CP238.
  def calculateTradingLossesBroughtForwardForSummary(cp238: CP238): CP257 = CP257(cp238.value)

  // Net profits = gross (CP256) minus losses brought forward (CP257, 0 when absent).
  def calculateNetTradingAndProfessionalProfits(cp256: CP256, cp257: CP257): CP258 = CP258(cp256 - cp257.value.getOrElse(0))

  // CP259 mirrors CP43, defaulting to 0 when absent.
  def calculateProfitsAndGainsFromNonTradingLoanRelationships(cp43: CP43): CP259 = CP259(cp43.value.getOrElse(0))

  // CP264 mirrors CP239.
  def calculateTradingLossesOfThisOrLaterAccountingPeriods(cp239: CP239): CP264 = CP264(cp239)

  // Charitable donations are the sum of CP301 and CP302.
  def calculateQualifyingCharitableDonations(cp301: CP301, cp302: CP302): CP305 = CP305(cp301 + cp302)

  // Net allowances = allowances (CP186 + CP668 + CP674) minus charges (CP91 + CP670), floored at 0.
  def calculateTradeNetAllowancesForSummary(cp186: CP186, cp668: CP668, cp674: CP674, cp91: CP91, cp670: CP670): CP99 = CP99((cp186 + cp668 + cp674 - cp91 - cp670).max(0))
}
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/calculations/SummaryCalculator.scala | Scala | apache-2.0 | 1,499 |
package flaky
import java.io.File
import org.apache.commons.vfs2.FileObject
import scala.collection.immutable.{Iterable, Seq}
import scala.language.postfixOps
import scala.util.Try
import scala.xml.{Elem, NodeSeq, XML}
/** Identity of a single test: fully-qualified class name plus test name. */
case class Test(clazz: String, test: String) {

  /** The simple class name (segment after the last dot), or "<?>" when none. */
  def classNameOnly(): String = {
    val lastSegment = clazz.split('.').lastOption
    lastSegment.getOrElse("<?>")
  }
}
// One execution of a test within a run. `failureDetails` is None for a
// passing test; `time` is the duration reported by the JUnit XML report.
case class TestCase(runName: String,
                    test: Test,
                    time: Float = 0f,
                    failureDetails: Option[FailureDetails] = None
                   )
// Failure information extracted from a JUnit <failure>/<error> element:
// the message, the exception type, and the raw stack-trace text.
case class FailureDetails(message: String, ftype: String, stacktrace: String) {

  // Returns a copy whose stacktrace has the leading message portion removed
  // (everything before the first line-break marker; unchanged when absent).
  def withoutStacktraceMessage(): FailureDetails = {
    val newStacktraceWithoutMessage = stacktrace.substring(Math.max(stacktrace.indexOf("\\n"), 0))
    copy(stacktrace = newStacktraceWithoutMessage)
  }

  // First "at ..." frame that is not part of a test framework, the JDK,
  // Scala or Akka internals — i.e. the first frame likely to point at the
  // code under test. None when every frame is filtered out.
  def firstNonAssertStacktrace(): Option[String] = {
    stacktrace
      .lines
      .filter(_.startsWith("\\tat"))
      .filter(!_.startsWith("\\tat org.junit"))
      .filter(!_.startsWith("\\tat org.testng"))
      .filter(!_.startsWith("\\tat org.scalatest"))
      .filter(!_.startsWith("\\tat java."))
      .filter(!_.startsWith("\\tat scala."))
      .filter(!_.startsWith("\\tat akka."))
      .find(_ => true)
  }
}
// One execution of the whole suite: the run's name plus every test case
// (passing and failing) parsed from its JUnit XML reports.
case class TestRun(
                    name: String,
                    testCases: List[TestCase]
                  )
// Aggregated view of one test across all runs: how many times it ran and
// the subset of cases in which it failed.
case class FlakyTest(test: Test,
                     totalRun: Int,
                     failedRuns: List[TestCase]
                    ) {
  // Number of failed runs.
  def failures(): Int = failedRuns.size

  // Failure rate as a percentage of total runs.
  def failurePercent(): Float = (100f * failures()) / totalRun

  // Groups the failed cases by their stacktrace (with the per-run message
  // stripped first, so identical traces with different messages group together).
  def groupByStacktrace(): List[List[TestCase]] = {
    failedRuns.map { tc =>
      tc.copy(failureDetails = tc.failureDetails.map(_.withoutStacktraceMessage()))
    }.groupBy(_.failureDetails.map(_.stacktrace))
      .values.toList
  }
}
// Start and end timestamps of the whole detection run — presumably epoch
// milliseconds (NOTE(review): units not visible here, confirm at call site).
case class TimeDetails(start: Long, end: Long) {
  // Elapsed time between start and end.
  def duration(): Long = end - start
}
// One distinct failure mode of a test: which runs it failed in, a common
// message derived from all observed messages (if any), a representative
// stack frame, and the full set of distinct failure messages.
case class FlakyCase(test: Test, runNames: List[String], message: Option[String], stacktrace: String, allMessages: Set[String])
// Full report: every run, plus the per-test aggregation in `flakyTests`.
case class FlakyTestReport(projectName: String, timeDetails: TimeDetails, testRuns: List[TestRun], flakyTests: List[FlakyTest]) {

  // Groups failing tests by class name; within each class, each distinct
  // stacktrace of each test becomes one FlakyCase.
  def groupFlakyCases(): Map[String, List[FlakyCase]] = {
    flakyTests
      .filter(_.failures > 0)
      .groupBy(t => t.test.clazz)
      .map { kv =>
        val clazzTestName = kv._1
        val list: Seq[FlakyTest] = kv._2
        val text: Iterable[List[FlakyCase]] = list
          .groupBy(_.test)
          .flatMap {
            case (test, listOfFlakyTests) =>
              listOfFlakyTests.map {
                _.groupByStacktrace()
                  .map { list =>
                    val runNames: List[String] = list.map(_.runName).sorted
                    val messages: Seq[String] = list.flatMap(_.failureDetails).map(_.message)
                    // findCommonString is defined elsewhere in this package —
                    // presumably derives a shared message template; confirm there.
                    val msg: Option[String] = findCommonString(messages.toList)
                    val stacktrace = list.headOption.flatMap(_.failureDetails.flatMap(_.firstNonAssertStacktrace())).getOrElse("")
                    FlakyCase(test, runNames, msg, stacktrace, messages.toSet)
                  }.toList
              }
          }
        (clazzTestName, text.flatten.toList)
      }
  }

  // Probability (in percent) that a full run of all tests succeeds, assuming
  // independent per-test failure rates: product over tests of (1 - rate).
  def successProbabilityPercent(): Float = {
    val totalRuns = testRuns.size
    val successProbability = flakyTests
      .filter(_.test.test != "(It is not a test)")
      // Issue #38: take into account that some tests fail as test "(It is not a test)"
      .map(t => (t.failedRuns.length + totalRuns - t.totalRun).toFloat / totalRuns)
      .foldLeft(1f)((acc, x) => acc * (1 - x))
    100 * successProbability
  }
}
object Flaky {

  /**
   * Parses one JUnit XML report into the test cases it contains.
   *
   * @param runName name of the run the report belongs to (used to tag cases)
   * @param xml     parsed JUnit report document
   * @return one [[TestCase]] per testcase element; a case carries
   *         [[FailureDetails]] when the element has a failure or error child
   */
  def parseJunitXmlReport(runName: String, xml: Elem): List[TestCase] = {
    val testCases = xml \\\\ "testcase"
    testCases.map { testcase =>
      val className = testcase \\ "@classname"
      val name = testcase \\ "@name"
      val time = testcase \\ "@time"
      val fail: NodeSeq = testcase \\ "failure"
      val error = testcase \\ "error"
      // A <failure> child wins over an <error> child when both are present.
      val failureDetails: Option[FailureDetails] = fail.headOption
        .orElse(error.headOption)
        .map { head =>
          FailureDetails(
            head \\ "@message" text,
            head \\ "@type" text,
            head.text)
        }
      val test = Test(className.text, name.text)
      TestCase(
        runName,
        test,
        time.text.toFloat,
        failureDetails
      )
    }.toList
  }

  /**
   * Reads every JUnit XML report directly inside `dir` (one run). Reports
   * that fail to parse are silently skipped (best-effort behaviour).
   */
  def processFolder(dir: File): List[TestCase] = {
    // File.listFiles returns null when the path does not exist or is not a
    // directory; treat that as "no reports" instead of throwing an NPE.
    Option(dir.listFiles).map(_.toList).getOrElse(List.empty)
      .map(_.getAbsolutePath)
      .filter(_.endsWith("xml"))
      .map(x => Try {
        XML.loadFile(x)
      })
      .filter(_.isSuccess)
      .map(_.get)
      .flatMap { xml => parseJunitXmlReport(dir.getName, xml) }
  }

  /** Same as the File overload, but for a (possibly archived) VFS folder. */
  def processFolder(dir: FileObject): List[TestCase] = {
    dir.getChildren
      .filter(_.getName.getBaseName.endsWith(".xml"))
      .map(x => Try {
        XML.load(x.getContent.getInputStream)
      })
      .filter(_.isSuccess)
      .map(_.get)
      .flatMap { xml => parseJunitXmlReport(dir.getName.getBaseName, xml) }.toList
  }

  /**
   * Groups all test cases by test identity and pairs every test with its
   * failed runs, producing one [[FlakyTest]] per distinct test.
   */
  def findFlakyTests(list: List[TestRun]): List[FlakyTest] = {
    val map = list.flatMap(tr => tr.testCases)
      .groupBy(tc => tc.test)
    map.keySet.map { key =>
      val testCases: List[TestCase] = map(key)
      val failures = testCases.filter(tc => tc.failureDetails.nonEmpty)
      val t = testCases.head.test
      FlakyTest(t, testCases.length, failures)
    }.toList
  }

  /**
   * Builds a report from the run sub-directories of `flakyDir` whose names
   * appear in `iterationNames`.
   */
  def createReport(projectName: String,
                   timeDetails: TimeDetails,
                   iterationNames: List[String],
                   flakyDir: File = new File("target/flaky-report")): FlakyTestReport = {
    // Guard against a null from listFiles (missing directory) — previously an NPE.
    val testRunDirs = Option(flakyDir.listFiles).map(_.toList).getOrElse(List.empty)
      .filter(_.isDirectory)
      .filter(f => iterationNames.contains(f.getName))
    val testRuns = testRunDirs.map { dir =>
      val testCases = processFolder(dir)
      TestRun(s"${dir.getName}", testCases)
    }
    val flakyTests = findFlakyTests(testRuns)
    FlakyTestReport(projectName, timeDetails, testRuns, flakyTests)
  }

  /** Builds a report from an archived history folder (project name and times unknown). */
  def createReportFromHistory(zippedFolder: FileObject): FlakyTestReport = {
    val testRunDirs = zippedFolder
      .getChildren
      .filter(_.isFolder)
      .toList
    val testRuns = testRunDirs.map { dir =>
      val testCases = processFolder(dir)
      TestRun(s"${dir.getName}", testCases)
    }
    val flakyTests = findFlakyTests(testRuns)
    FlakyTestReport("", TimeDetails(0, 0), testRuns, flakyTests)
  }

  /** True when `dir` exists and any report inside it contains a failed test case. */
  def isFailed(dir: File): Boolean = {
    if (dir.exists()) {
      val testCases: List[TestCase] = processFolder(dir)
      testCases.exists(tc => tc.failureDetails.nonEmpty)
    } else {
      false
    }
  }
}
| otrebski/sbt-flaky | src/main/scala/flaky/Flaky.scala | Scala | apache-2.0 | 6,779 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.shared.unstable.model.stats.assertion
import io.gatling.commons.shared.unstable.model.stats
import io.gatling.commons.shared.unstable.model.stats._
import io.gatling.commons.stats._
import io.gatling.commons.stats.assertion._
object AssertionValidator {

  // Resolves the stats for an optional status filter (None = all statuses).
  type StatsByStatus = Option[Status] => GeneralStats

  /** Evaluates every assertion declared on the source and collects results. */
  def validateAssertions(dataReader: GeneralStatsSource): List[AssertionResult] =
    dataReader.assertions.flatMap(validateAssertion(_, dataReader))

  /**
   * Evaluates one assertion. Global / empty-Details paths use the overall
   * request stats; ForAll expands into one assertion per known request path
   * and recurses; a concrete Details path is resolved against known stats
   * paths, failing with an explanatory result when no path matches.
   */
  @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
  private def validateAssertion(assertion: Assertion, source: GeneralStatsSource): List[AssertionResult] = {
    val printablePath = assertion.path.printable
    assertion.path match {
      case Global =>
        List(resolveTarget(assertion, status => source.requestGeneralStats(None, None, status), printablePath))

      case ForAll =>
        val detailedAssertions = source.statsPaths.collect { case RequestStatsPath(request, group) =>
          assertion.copy(path = Details(group.map(_.hierarchy).getOrElse(Nil) ::: List(request)))
        }
        detailedAssertions.flatMap(validateAssertion(_, source))

      case Details(parts) if parts.isEmpty =>
        List(resolveTarget(assertion, status => source.requestGeneralStats(None, None, status), printablePath))

      case Details(parts) =>
        findPath(parts, source) match {
          case Some(RequestStatsPath(request, group)) =>
            List(resolveTarget(assertion, source.requestGeneralStats(Some(request), group, _), printablePath))

          case Some(GroupStatsPath(group)) =>
            List(resolveTarget(assertion, source.groupCumulatedResponseTimeGeneralStats(group, _), printablePath))

          case _ =>
            List(AssertionResult(assertion, result = false, s"Could not find stats matching assertion path $parts", None))
        }
    }
  }

  /** Finds the stats path whose full hierarchy (group segments + request) equals parts. */
  @SuppressWarnings(Array("org.wartremover.warts.ListAppend"))
  private def findPath(parts: List[String], source: GeneralStatsSource): Option[StatsPath] =
    source.statsPaths.find { statsPath =>
      val path = statsPath match {
        case RequestStatsPath(request, group) =>
          group.map(_.hierarchy :+ request).getOrElse(List(request))

        case GroupStatsPath(group) =>
          group.hierarchy
      }
      path == parts
    }

  /** Extracts the actual metric value for the assertion's target, then checks the condition. */
  private def resolveTarget(assertion: Assertion, stats: StatsByStatus, path: String) = {

    val printableTarget = assertion.target.printable

    assertion.target match {
      case MeanRequestsPerSecondTarget =>
        val actualValue = stats(None).meanRequestsPerSec
        resolveCondition(assertion, path, printableTarget, actualValue)

      case target: CountTarget =>
        val actualValue = resolveCountTargetActualValue(target, stats)
        resolveCondition(assertion, path, printableTarget, actualValue.toDouble)

      case target: PercentTarget =>
        val actualValue = resolvePercentTargetActualValue(target, stats)
        resolveCondition(assertion, path, printableTarget, actualValue)

      case target: TimeTarget =>
        val actualValue = resolveTimeTargetActualValue(target, stats)
        resolveCondition(assertion, path, printableTarget, actualValue)
    }
  }

  /** Count of requests matching the target's metric (all / KO / OK). */
  private def resolveCountTargetActualValue(target: CountTarget, stats: StatsByStatus): Long = {

    val resolvedStats = target.metric match {
      case AllRequests        => stats(None)
      case FailedRequests     => stats(Some(KO))
      case SuccessfulRequests => stats(Some(OK))
    }

    resolvedStats.count
  }

  /**
   * Percentage of requests matching the metric. With zero requests overall:
   * 0% successful, 100% failed (pessimistic defaults); any other metric is 100.
   */
  private def resolvePercentTargetActualValue(target: PercentTarget, stats: StatsByStatus): Double = {

    val allCount = stats(None).count

    target.metric match {
      case SuccessfulRequests =>
        if (allCount == 0) {
          0
        } else {
          stats(Some(OK)).count.toDouble / allCount * 100
        }
      case FailedRequests =>
        if (allCount == 0) {
          100
        } else {
          stats(Some(KO)).count.toDouble / allCount * 100
        }
      case _ => 100
    }
  }

  /** Response-time statistic (min/max/mean/stddev/percentile) for the target. */
  private def resolveTimeTargetActualValue(target: TimeTarget, stats: StatsByStatus): Int = {

    val resolvedStats = target.metric match {
      case ResponseTime => stats(None)
    }

    target.selection match {
      case Min                => resolvedStats.min
      case Max                => resolvedStats.max
      case Mean               => resolvedStats.mean
      case StandardDeviation  => resolvedStats.stdDev
      case Percentiles(value) => resolvedStats.percentile(value)
    }
  }

  /** Applies the assertion's comparison condition and builds the final result. */
  private def resolveCondition(assertion: Assertion, path: String, printableTarget: String, actualValue: Double): AssertionResult = {

    val (result, expectedValueMessage) =
      assertion.condition match {
        case Lt(upper)                    => (actualValue < upper, upper.toString)
        case Lte(upper)                   => (actualValue <= upper, upper.toString)
        case Gt(lower)                    => (actualValue > lower, lower.toString)
        case Gte(lower)                   => (actualValue >= lower, lower.toString)
        case Is(exactValue)               => (actualValue == exactValue, exactValue.toString)
        case Between(lower, upper, true)  => (actualValue >= lower && actualValue <= upper, s"$lower and $upper")
        case Between(lower, upper, false) => (actualValue > lower && actualValue < upper, s"$lower and $upper")
        case In(elements)                 => (elements.contains(actualValue), elements.toString)
      }

    stats.assertion.AssertionResult(assertion, result, s"$path: $printableTarget ${assertion.condition.printable} $expectedValueMessage", Some(actualValue))
  }
}
| gatling/gatling | gatling-commons-shared-unstable/src/main/scala/io/gatling/commons/shared/unstable/model/stats/assertion/AssertionValidator.scala | Scala | apache-2.0 | 6,336 |
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2015 <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* ___====-_ _-====___
* _--^^^#####// \\\\#####^^^--_
* _-^##########// ( ) \\\\##########^-_
* -############// |\\^^/| \\\\############-
* _/############// (@::@) \\\\############\\_
* /#############(( \\\\// ))#############\\
* -###############\\\\ (oo) //###############-
* -#################\\\\ / VV \\ //#################-
* -###################\\\\/ \\//###################-
* _#/|##########/\\######( /\\ )######/\\##########|\\#_
* |/ |#/\\#/\\#/\\/ \\#/\\##\\ | | /##/\\#/ \\/\\#/\\#/\\#| \\|
* ` |/ V V ` V \\#\\| | | |/#/ V ' V V \\| '
* ` ` ` ` / | | | | \\ ' ' ' '
* ( | | | | )
* __\\ | | | | /__
* (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks.utils
import java.io.{File, IOException}
import java.nio.{ByteBuffer, ByteOrder}
import java.util.concurrent.Executors
import android.content.Context
import android.net.{LocalServerSocket, LocalSocket, LocalSocketAddress}
import android.util.Log
/**
 * Listens on a filesystem Unix-domain socket for 16-byte traffic-stat
 * messages pushed by the native process and forwards them to TrafficMonitor.
 * Stop with stopThread(), which also unblocks the accept loop.
 */
class TrafficMonitorThread(context: Context) extends Thread {

  val TAG = "TrafficMonitorThread"

  // Socket file the native side connects to.
  lazy val PATH = context.getApplicationInfo.dataDir + "/stat_path"

  @volatile var serverSocket: LocalServerSocket = null
  @volatile var isRunning: Boolean = true

  /** Closes the server socket (ignoring errors) and clears the reference. */
  def closeServerSocket() {
    if (serverSocket != null) {
      try {
        serverSocket.close()
      } catch {
        case _: Exception => // ignore
      }
      serverSocket = null
    }
  }

  /** Signals the accept loop to stop and unblocks it by closing the socket. */
  def stopThread() {
    isRunning = false
    closeServerSocket()
  }

  override def run() {

    // Remove any stale socket file left over from a previous run.
    try {
      new File(PATH).delete()
    } catch {
      case _: Exception => // ignore
    }

    try {
      val localSocket = new LocalSocket
      localSocket.bind(new LocalSocketAddress(PATH, LocalSocketAddress.Namespace.FILESYSTEM))
      serverSocket = new LocalServerSocket(localSocket.getFileDescriptor)
    } catch {
      case e: IOException =>
        Log.e(TAG, "unable to bind", e)
        return
    }

    val pool = Executors.newFixedThreadPool(1)
    try {
      while (isRunning) {
        try {
          val socket = serverSocket.accept()
          pool.execute(() => {
            try {
              val input = socket.getInputStream
              val output = socket.getOutputStream
              val buffer = new Array[Byte](16)
              // NOTE(review): a single read is assumed to deliver the whole
              // 16-byte message — confirm the local-socket sender writes it
              // in one datagram-like chunk.
              if (input.read(buffer) != 16) throw new IOException("Unexpected traffic stat length")
              // Two little-endian longs: tx at offset 0, rx at offset 8.
              val stat = ByteBuffer.wrap(buffer).order(ByteOrder.LITTLE_ENDIAN)
              TrafficMonitor.update(stat.getLong(0), stat.getLong(8))
              output.write(0)
              input.close()
              output.close()
            } catch {
              case e: Exception =>
                Log.e(TAG, "Error when recv traffic stat", e)
            }

            // close socket
            try {
              socket.close()
            } catch {
              case _: Exception => // ignore
            }
          })
        } catch {
          case e: IOException =>
            Log.e(TAG, "Error when accept socket", e)
            return
        }
      }
    } finally {
      // Fix: the worker pool used to be leaked when the loop exited; release
      // its thread whether we stop normally or bail out on an accept error.
      pool.shutdown()
    }
  }
}
| otoil/shadowsocks-android | src/main/scala/com/github/shadowsocks/utils/TrafficMonitorThread.scala | Scala | gpl-3.0 | 4,104 |
/*
* Copyright 2018 CJWW Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cjwwdev.auth.backend
import com.cjwwdev.http.headers.HeaderPackage
import com.cjwwdev.implicits.ImplicitDataSecurity._
import com.cjwwdev.testing.unit.UnitTestSpec
import play.api.test.FakeRequest
/**
 * Unit tests for BaseAuth.validateAppId: requests are authenticated only when
 * the encrypted "cjww-headers" header carries a recognised application id.
 */
class BaseAuthSpec extends UnitTestSpec {

  // NOTE(review): appId here is "testAppId" but the passing case below sends
  // "testSessionStoreId" — confirm which id validateAppId actually matches.
  class Setup extends BaseAuth {
    override val appId: String = "testAppId"
  }

  "validateAppId" should {
    "return NotAuthorised" when {
      "No appId is found in the request headers" in new Setup {
        val request = FakeRequest()

        val result = validateAppId(request)
        result mustBe NotAuthorised
      }

      "An unknown appId is found in the request header" in new Setup {
        val request = FakeRequest()
          .withHeaders("cjww-headers" -> HeaderPackage("unknown app id", Some("testCookieId")).encrypt)

        val result = validateAppId(request)
        result mustBe NotAuthorised
      }
    }

    "return Authenticated" when {
      "a valid appId is found in the headers" in new Setup {
        val request = FakeRequest()
          .withHeaders("cjww-headers" -> HeaderPackage("testSessionStoreId", Some("testCookieId")).encrypt)

        val result = validateAppId(request)
        result mustBe Authenticated
      }
    }
  }
}
| cjww-development/authorisation | src/test/scala/com/cjwwdev/auth/backend/BaseAuthSpec.scala | Scala | apache-2.0 | 1,841 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.commands
import com.flaminem.flamy.Launcher
import com.flaminem.flamy.exec.utils.ReturnSuccess
import org.scalatest.FreeSpec
import scala.language.implicitConversions
/**
* Created by fpin on 2/13/17.
*/
/**
 * Smoke tests for the `show tables` command, driven through the CLI launcher.
 */
class ShowTest extends FreeSpec {

  // Implicitly split a whitespace-separated command line into launcher args.
  implicit def stringToSeq(commandLine: String): Seq[String] = commandLine.split("\\\\s+")

  "show tables should work" in {
    val returnCode = Launcher.launch("--conf flamy.model.dir.paths=src/test/resources/test show tables")
    assert(returnCode === ReturnSuccess)
  }

  "show tables should work when there is a cycle in the graph" in {
    val returnCode = Launcher.launch("--conf flamy.model.dir.paths=src/test/resources/Graph show tables")
    assert(returnCode === ReturnSuccess)
  }
}
| flaminem/flamy | src/test/scala/com/flaminem/flamy/commands/ShowTest.scala | Scala | apache-2.0 | 1,310 |
package org.mitre.mandolin.mx
import com.typesafe.config.Config
import net.ceedubs.ficus.Ficus._
import org.mitre.mandolin.config.ConfigGeneratedCommandOptions
import org.mitre.mandolin.mlp.MandolinMLPSettings

import scala.util.control.NonFatal
class MxModelSettings(_confOptions: Option[ConfigGeneratedCommandOptions], _conf: Option[Config])
extends MandolinMLPSettings(_confOptions, _conf) {
def this(s: String) = this(None,Some(com.typesafe.config.ConfigFactory.parseString(s)))
def this(args: Seq[String]) = this(Some(new ConfigGeneratedCommandOptions(args)),None)
def this() = this(Seq())
import scala.collection.JavaConversions._
override val trainFile = asStrOpt("mandolin.mx.train-file")
override val testFile = asStrOpt("mandolin.mx.test-file")
override val testFreq = asIntOpt("mandolin.mx.eval-freq").getOrElse(10)
override val modelFile = asStrOpt("mandolin.mx.model-file")
override val labelFile = asStrOpt("mandolin.mx.label-file")
override val denseVectorSize = asIntOpt("mandolin.mx.dense-vector-size").getOrElse(0)
override val scaleInputs = asBoolean("mandolin.mx.scale-inputs")
override val miniBatchSize = asIntOpt("mandolin.mx.mini-batch-size").getOrElse(32)
// input type: 1) mlp, 2) ndarray, 3) recordio ... others?
val inputType = asStrOpt("mandolin.mx.input-type").getOrElse("mlp")
val numberOfClasses = asIntOpt("mandolin.mx.num-classes").getOrElse(2)
override val numEpochs = asIntOpt("mandolin.mx.num-epochs").getOrElse(21)
val mxSpecification = try config.getAnyRefList("mandolin.mx.specification") catch {case _: Throwable => null}
val gpus = try config.getAnyRefList("mandolin.mx.gpus") catch {case _: Throwable => null}
val cpus = try config.getAnyRefList("mandolin.mx.cpus") catch {case _: Throwable => null}
def getGpus = try config.as[List[Int]]("mandolin.mx.gpus") catch {case _: Throwable => Nil}
def getCpus = try config.as[List[Int]]("mandolin.mx.cpus") catch {case _: Throwable => Nil}
val saveFreq = asIntOpt("mandolin.mx.save-freq").getOrElse(10)
// this should override mandolin global learning rate
val mxInitialLearnRate = asFloatOpt("mandolin.mx.train.initial-learning-rate").getOrElse(0.01f)
val mxRescaleGrad = asFloatOpt("mandolin.mx.train.rescale-gradient").getOrElse(0f)
val mxMomentum = asFloatOpt("mandolin.mx.train.momentum").getOrElse(0f)
val mxGradClip = asFloatOpt("mandolin.mx.train.gradient-clip").getOrElse(0f)
val mxRho = asFloatOpt("mandolin.mx.train.rho").getOrElse(0.01f)
val mxWd = asFloatOpt("mandolin.mx.train.wd").getOrElse(0.00001f) // L2 weight decay
val mxInitializer = asStrOpt("mandolin.mx.train.initializer").getOrElse("uniform")
val mxTrainLabels = asStrOpt("mandolin.mx.train-labels")
val mxTestLabels = asStrOpt("mandolin.mx.test-labels")
// values are: sgd, adam, rmsprop, adadelta, nag, adagrad, sgld
val mxOptimizer = asStrOpt("mandolin.mx.train.optimizer").getOrElse("sgd")
val channels = asIntOpt("mandolin.mx.img.channels").getOrElse(0)
val xdim = asIntOpt("mandolin.mx.img.xdim").getOrElse(0)
val ydim = asIntOpt("mandolin.mx.img.ydim").getOrElse(0)
val meanImgFile = asStrOpt("mandolin.mx.img.mean-image").getOrElse("mean-img")
val preProcThreads = asIntOpt("mandolin.mx.img.preprocess-threads").getOrElse(8)
val mxResizeShortest = asIntOpt("mandolin.mx.img.resize-shortest").getOrElse(0)
// this allows GPU hosts to be specified in the configuration
val gpuHosts = try config.as[List[String]]("mandolin.mx.gpu-hosts") catch {case _:Throwable => Nil}
// set this up to have a device mapping
// gpu-host1 => 0,1,2,3, gpu-host2 => 0,1, etc.
val gpuHostMapping = try config.getAnyRefList("mandolin.mx.gpu-host-map") catch {case _:Throwable => null}
override def withSets(avs: Seq[(String, Any)]) : MxModelSettings = {
val nc = avs.foldLeft(this.config){case (ac, (v1,v2)) =>
v2 match {
case v2: List[_] =>
if (v2 != null) ac.withValue(v1,com.typesafe.config.ConfigValueFactory.fromIterable(v2)) else ac
case v2: Any =>
ac.withValue(v1, com.typesafe.config.ConfigValueFactory.fromAnyRef(v2))}
}
new MxModelSettings(None,Some(nc))
}
} | project-mandolin/mandolin | mandolin-mx/src/main/scala/org/mitre/mandolin/mx/MxModelSettings.scala | Scala | apache-2.0 | 4,271 |
package core.services
import core.collection.OrganizationCollection
import core.{ OrganizationCollectionLookupService, CultureHubPlugin }
import models.OrganizationConfiguration
/**
*
* @author Manuel Bernhardt <[email protected]>
*/
class AggregatingOrganizationCollectionLookupService extends OrganizationCollectionLookupService {
def organizationCollectionLookups(implicit configuration: OrganizationConfiguration): Seq[OrganizationCollectionLookupService] = CultureHubPlugin.getServices(classOf[OrganizationCollectionLookupService])
def findAll(implicit configuration: OrganizationConfiguration): Seq[OrganizationCollection] = organizationCollectionLookups.flatMap(lookup => lookup.findAll)
def findBySpecAndOrgId(spec: String, orgId: String)(implicit configuration: OrganizationConfiguration): Option[OrganizationCollection] = organizationCollectionLookups.flatMap(lookup => lookup.findBySpecAndOrgId(spec, orgId)).headOption
} | delving/culture-hub | web-core/app/core/services/AggregatingOrganizationCollectionLookupService.scala | Scala | apache-2.0 | 957 |
import com.zaxxer.hikari.HikariDataSource
import gplume.scala.jdbc._
import gplume.scala.jdbc.SQLAux._
import gplume.scala.jdbc.SQLOperation._
import org.junit.{Test, Before}
import scala.Seq
import scala.util.Try
/**
* Created by Bowen Cai on 12/27/2014.
*/
/**
 * Exercises the sql"..." string interpolator against an in-memory H2
 * database: batch inserts, scalar/row reads, and nested transactions.
 * Note: these tests print results rather than asserting on them.
 */
class SQLInterpTest {

  val ds = new HikariDataSource()

  // Creates the shared `data` table before each test.
  @Before
  def t0: Unit ={
    ds.setDriverClassName("org.h2.Driver")
    ds.setJdbcUrl("jdbc:h2:mem:gscala_test")
    val con = ds.getConnection()
    con.createStatement().executeUpdate("""
CREATE TABLE data (
key VARCHAR(255) PRIMARY KEY,
value1 VARCHAR(1023), value2 VARCHAR(1023) )""")
    con.close()
  }

  // Batch insert + various read styles (scalar, auto-mapped row, product).
  @Test
  def t12: Unit = {
    val db = new DB(ds)
    db.newSession{implicit session =>
//      sql"insert into DATA(key,VALUE1, VALUE2)VALUES ('k1','v11','v12'),('k1,'v21','v22')".execute(session)
      sql"insert into DATA(key,VALUE1, VALUE2)VALUES (?,?,?)".batchInsert(
        Seq(Seq("kk1", "111","222"),
          Seq("kk2", "222","333"),
          Seq("kk3", "333", "444")
        ))
      val kk3 = "kk3"
      println(sql"SELECT COUNT(1) FROM `DATA` WHERE key=$kk3 ".first(colInt))
      println(sql"SELECT VALUE2 FROM `DATA` WHERE key=$kk3".first(colStr))
      println(sql"SELECT * FROM `DATA` WHERE key=$kk3".autoMap())
      // NOTE(review): unchecked cast — relies on the row having exactly
      // three string columns.
      val tp = (sql"SELECT * FROM `DATA` WHERE key=$kk3".product()).asInstanceOf[Tuple3[String, String, String]]
      println(tp)
      println(tp._3)
//      println(tp.productElement(1))
//      println(sql"SELECT COUNT(1) FROM `DATA` ".int(1))
    }
  }

  // Interpolation of values and fragments into the generated statement.
  @Test
  def t1: Unit = {
    val p1 = 5
    val p2 = "data"
    val p3 = List(1)
    val so = sql"SELECT * FROM $p2 WHERE id = $p1 AND $p3"
//    println(so.stmt)
  }

  // Batch insert on a raw session, then scalar and list reads.
  @Test
  def q: Unit ={
    val k1 = "key 111"
    val v1 = "value 111"
    implicit val session = new DBSession(ds.getConnection, false)
    val ins = sql"INSERT INTO `data` (key,value)VALUES($k1, $v1)".batchInsert(
      Seq(Seq("111","222"),
        Seq("222","333"),
        Seq("333", "444")
      )
    )
    val count = sql"SELECT COUNT (1) FROM `data`".first(colInt)
    println(count)
    val k1q = sql"SELECT value from data where key = $k1 OR key = '333'".first(colStr)
    println(k1q)
    val lsv = sql"SELECT value from data".array(colStr)
    println(lsv)
  }

  // Nested transactions: the outer exception must roll back both levels.
  @Test
  def tnx: Unit = {
    val db = new DB(ds)
    val r = Try {
      db.transactional {implicit session=>
        val ins = sql"INSERT INTO `data` (key,value)VALUES(?, ?)".batchInsert(
          Seq(Seq("111", "222"),
            Seq("222", "333"),
            Seq("333", "444")
          )
        )
        db.transactional {implicit session =>
          val ins2 = sql"INSERT INTO `data` (key,value)VALUES(?, ?)".batchInsert(
            Seq(Seq("444", "555"),
              Seq("555", "666")
            )
          )
        }
        // Force a rollback of the whole transaction tree.
        throw new RuntimeException
      }
    }
    db.newSession{ implicit session =>
      val count = sql"SELECT COUNT (1) FROM `data`".first(colInt)
      println(count)
    }
    db.execute("DELETE FROM `data`")
    db.newSession{ implicit session =>
      val count = sql"SELECT COUNT (1) FROM `data`".first(colInt)
      println(count)
    }
    println(r)
  }
}
| xkommando/Gplume-Scala | jdbc/src/test/scala/SQLInterpTest.scala | Scala | apache-2.0 | 3,195 |
// Compiler regression fixture (tests/pos/i11968.scala): must simply compile.
// Presumably built with explicit nulls plus unsafe-null member selection
// enabled — TODO confirm the test's flags.
class C {
  def get(): Int = 0
}

def g = {
  val s: String | Null = ???
  val l = s.length // member selection on a nullable String — expected to compile here
  val c: C | Null = ???
  c.get() // same for a nullable user-defined class
}
| lampepfl/dotty | tests/pos/i11968.scala | Scala | apache-2.0 | 145 |
package org.apache.spark.ml.mleap.converter.runtime
import com.truecar.mleap.runtime.transformer
import org.apache.spark.ml.PipelineModel
import org.apache.spark.ml.classification.RandomForestClassificationModel
import org.apache.spark.ml.feature.{IndexToString, StandardScalerModel, StringIndexerModel, VectorAssembler}
import org.apache.spark.ml.mleap.classification.SVMModel
import org.apache.spark.ml.mleap.converter.runtime.classification.{RandomForestClassificationModelToMleap, SupportVectorMachineModelToMleap}
import org.apache.spark.ml.mleap.converter.runtime.feature.{IndexToStringToMleap, StandardScalerModelToMleap, StringIndexerModelToMleap, VectorAssemblerModelToMleap}
import org.apache.spark.ml.mleap.converter.runtime.regression.{LinearRegressionModelToMleap, RandomForestRegressionModelToMleap}
import org.apache.spark.ml.regression.{LinearRegressionModel, RandomForestRegressionModel}
/**
* Created by hollinwilkins on 4/17/16.
*/
/**
 * Registers the standard Spark -> MLeap transformer converters. Each implicit
 * val both adds the converter to the registry (via addConverter) and makes it
 * available for implicit resolution.
 */
trait BaseTransformerConverter extends SparkTransformerConverter {
  // regression
  implicit val mleapLinearRegressionModelToMleap: TransformerToMleap[LinearRegressionModel, transformer.LinearRegressionModel] =
    addConverter(LinearRegressionModelToMleap)
  implicit val mleapRandomForestRegressionModelToMleap: TransformerToMleap[RandomForestRegressionModel, transformer.RandomForestRegressionModel] =
    addConverter(RandomForestRegressionModelToMleap)

  // classification
  implicit val mleapRandomForestClassificationModelToMleap: TransformerToMleap[RandomForestClassificationModel, transformer.RandomForestClassificationModel] =
    addConverter(RandomForestClassificationModelToMleap)
  implicit val mleapSupportVectorMachineModelToMleap: TransformerToMleap[SVMModel, transformer.SupportVectorMachineModel] =
    addConverter(SupportVectorMachineModelToMleap)

  //feature
  implicit val mleapIndexToStringToMleap: TransformerToMleap[IndexToString, transformer.ReverseStringIndexerModel] =
    addConverter(IndexToStringToMleap)
  implicit val mleapStandardScalerModelToMleap: TransformerToMleap[StandardScalerModel, transformer.StandardScalerModel] =
    addConverter(StandardScalerModelToMleap)
  implicit val mleapStringIndexerModelToMleap: TransformerToMleap[StringIndexerModel, transformer.StringIndexerModel] =
    addConverter(StringIndexerModelToMleap)
  implicit val mleapVectorAssemblerToMleap: TransformerToMleap[VectorAssembler, transformer.VectorAssemblerModel] =
    addConverter(VectorAssemblerModelToMleap)

  // other
  // PipelineModel recursively converts its stages using this converter set.
  implicit val mleapPipelineModelToMleap: TransformerToMleap[PipelineModel, transformer.PipelineModel] =
    addConverter(PipelineModelToMleap(this))
}
/** Ready-to-use instance bundling all default converters. */
object BaseTransformerConverter extends BaseTransformerConverter
| TrueCar/mleap | mleap-spark/src/main/scala/org/apache/spark/ml/mleap/converter/runtime/BaseTransformerConverter.scala | Scala | apache-2.0 | 2,720 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.builder.serializers
import java.util.concurrent.TimeUnit
import com.outworkers.phantom.builder.primitives.Primitive
import com.outworkers.phantom.builder.query.QueryBuilderTest
import com.outworkers.phantom.builder.syntax.CQLSyntax
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.tables.TestDatabase
import java.nio.ByteBuffer
import org.joda.time.Seconds
import scala.concurrent.duration._
/**
 * Serialization tests for the ALTER TABLE query builder: ADD/DROP columns,
 * WITH table options (compaction, compression, caching, TTL, ...), and
 * column type changes / renames.
 */
class AlterQueryBuilderTest extends QueryBuilderTest {

  private[this] val basicTable = TestDatabase.basicTable
  private[this] val ssTableSize = 50

  final val DefaultTtl = 500
  final val OneDay = 86400

  "The ALTER query builder" - {

    "should serialise ALTER .. ADD queries" - {

      "serialise an ADD query for a column without a STATIC modifier" in {
        val qb = basicTable.alter.add("test_big_decimal", CQLSyntax.Types.Decimal).queryString
        qb shouldEqual s"ALTER TABLE phantom.basicTable ADD test_big_decimal ${CQLSyntax.Types.Decimal};"
      }

      "serialise an ADD query for a column with a STATIC modifier" in {
        val qb = basicTable.alter.add("test_big_decimal", CQLSyntax.Types.Decimal, static = true).queryString
        qb shouldEqual s"ALTER TABLE phantom.basicTable ADD test_big_decimal ${CQLSyntax.Types.Decimal} STATIC;"
      }

      "serialise an ADD query for a column without a STATIC modifier from a CQLQuery" in {
        val qb = basicTable.alter.add(basicTable.placeholder.qb).queryString
        qb shouldEqual s"ALTER TABLE phantom.basicTable ADD placeholder ${CQLSyntax.Types.Text};"
      }

      "serialise an ADD query for a column with a STATIC modifier from a CQLQuery" in {
        val qb = basicTable.alter.add(TestDatabase.staticTable.staticTest.qb).queryString
        // NOTE(review): leftover debug output in a test.
        Console.println(qb)
        qb shouldEqual s"ALTER TABLE phantom.basicTable ADD staticTest ${CQLSyntax.Types.Text} STATIC;"
      }
    }

    "should serialise ALTER .. DROP queries" - {

      "should serialise a DROP query based based on a column select" in {
        val qb = basicTable.alter.drop(_.placeholder).queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable DROP placeholder;"
      }

      "should serialise a DROP query with no arguments to DROP a table" in {
        val qb = basicTable.alter().drop().queryString
        qb shouldEqual "DROP TABLE phantom.basicTable;"
      }

      "should serialise a DROP query based on string value" in {
        val qb = basicTable.alter.drop("test").queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable DROP test;"
      }

      // Dropping an indexed column must be rejected at compile time.
      "should not compile DROP queries on INDEX fields" in {
        """
          | basicTable.alter.drop(_.id).queryString
        """ shouldNot compile
      }
    }

    "should serialise ALTER .. WITH queries" - {

      "serialise a simple create query with a SizeTieredCompactionStrategy and no compaction strategy options set" in {
        val qb = basicTable.alter.option(compaction eqs SizeTieredCompactionStrategy).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH compaction = {'class': 'SizeTieredCompactionStrategy'}"
      }

      "serialise a simple create query with a SizeTieredCompactionStrategy and 1 compaction strategy options set" in {
        val qb = basicTable.alter.option(compaction eqs LeveledCompactionStrategy.sstable_size_in_mb(ssTableSize)).qb.queryString
        qb shouldEqual s"ALTER TABLE phantom.basicTable WITH compaction = {'class': 'LeveledCompactionStrategy', 'sstable_size_in_mb': $ssTableSize}"
      }

      "serialise a simple create query with a SizeTieredCompactionStrategy and 1 compaction strategy options set and a compression strategy set" in {
        val qb = basicTable.alter
          .option(compaction eqs LeveledCompactionStrategy.sstable_size_in_mb(ssTableSize))
          .and(compression eqs LZ4Compressor.crc_check_chance(0.5))
          .qb.queryString

        qb shouldEqual s"""ALTER TABLE phantom.basicTable WITH compaction = {'class': 'LeveledCompactionStrategy', 'sstable_size_in_mb': $ssTableSize} AND compression = {'sstable_compression': 'LZ4Compressor', 'crc_check_chance': 0.5}"""
      }

      "add a comment option to a create query" in {
        val qb = basicTable.alter
          .option(comment eqs "testing")
          .qb.queryString

        qb shouldEqual "ALTER TABLE phantom.basicTable WITH comment = 'testing'"
      }

      "allow specifying a read_repair_chance clause" in {
        val qb = basicTable.alter.option(read_repair_chance eqs 5D).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH read_repair_chance = 5.0"
      }

      "allow specifying a dclocal_read_repair_chance clause" in {
        val qb = basicTable.alter.option(dclocal_read_repair_chance eqs 5D).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH dclocal_read_repair_chance = 5.0"
      }

      "allow specifying a replicate_on_write clause" in {
        val qb = basicTable.alter.option(replicate_on_write eqs true).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH replicate_on_write = true"
      }

      "allow specifying a custom gc_grace_seconds clause" in {
        val qb = basicTable.alter.option(gc_grace_seconds eqs 5.seconds).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH gc_grace_seconds = 5"
      }

      "allow specifying larger custom units as gc_grace_seconds" in {
        val qb = basicTable.alter.option(gc_grace_seconds eqs 1.day).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH gc_grace_seconds = 86400"
      }

      "allow specifying custom gc_grade_seconds using the Joda Time ReadableInstant and Second API" in {
        val qb = basicTable.alter.option(gc_grace_seconds eqs Seconds.seconds(OneDay)).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH gc_grace_seconds = 86400"
      }

      "allow specifying a bloom_filter_fp_chance using a Double param value" in {
        val qb = basicTable.alter.option(bloom_filter_fp_chance eqs 5D).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH " +
          "bloom_filter_fp_chance = 5.0"
      }
    }

    "should allow specifying cache strategies " - {

      // Caching syntax differs between protocol versions, hence the branch.
      "specify Cache.None as a cache strategy" in {
        val qb = basicTable.alter.option(caching eqs Cache.None()).qb.queryString

        if (session.v4orNewer) {
          qb shouldEqual "ALTER TABLE phantom.basicTable WITH caching = {'keys': 'none', 'rows_per_partition': 'none'}"
        } else {
          qb shouldEqual "ALTER TABLE phantom.basicTable WITH caching = 'none'"
        }
      }

      "specify Cache.KeysOnly as a caching strategy" in {
        val qb = basicTable.alter.option(caching eqs Cache.KeysOnly()).qb.queryString

        if (session.v4orNewer) {
          qb shouldEqual "ALTER TABLE phantom.basicTable WITH caching = {'keys': 'all', 'rows_per_partition': 'none'}"
        } else {
          qb shouldEqual "ALTER TABLE phantom.basicTable WITH caching = 'keys_only'"
        }
      }
    }

    "should allow specifying a default_time_to_live" - {

      "specify a default time to live using a Long value" in {
        val qb = basicTable.alter.option(default_time_to_live eqs DefaultTtl).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH default_time_to_live = 500"
      }

      "specify a default time to live using a Long value and a with clause" in {
        val qb = basicTable.alter.`with`(default_time_to_live eqs DefaultTtl).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH default_time_to_live = 500"
      }

      "specify a default time to live using a org.joda.time.Seconds value" in {
        val qb = basicTable.alter.option(default_time_to_live eqs Seconds.seconds(DefaultTtl)).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH default_time_to_live = 500"
      }

      "specify a default time to live using a org.joda.time.Seconds value and a `with` clause" in {
        val qb = basicTable.alter.`with`(default_time_to_live eqs Seconds.seconds(DefaultTtl)).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH default_time_to_live = 500"
      }

      "specify a default time to live using a scala.concurrent.duration.FiniteDuration value" in {
        val qb = basicTable.alter.option(default_time_to_live eqs FiniteDuration(DefaultTtl, TimeUnit.SECONDS)).qb.queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable WITH default_time_to_live = 500"
      }
    }

    "should allow altering the type of a column" - {
      "alter column type from text to blob" in {
        val qb = basicTable.alter(_.placeholder)(Primitive[ByteBuffer]).queryString
        qb shouldEqual s"ALTER TABLE phantom.basicTable ALTER placeholder TYPE ${Primitive[ByteBuffer].dataType};"
      }

      "alter a column type from placedholder to test" in {
        val qb = basicTable.alter.rename(_.placeholder, "test").queryString
        qb shouldEqual "ALTER TABLE phantom.basicTable RENAME placeholder TO test;"
      }
    }
  }
}
| outworkers/phantom | phantom-dsl/src/test/scala/com/outworkers/phantom/builder/serializers/AlterQueryBuilderTest.scala | Scala | apache-2.0 | 9,795 |
package com.eevolution.context.dictionary.infrastructure.repository
import com.eevolution.context.dictionary.domain.model.WorkflowNodeNext
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
* Created by [email protected] , www.e-evolution.com on 31/10/17.
*/
/**
* Workflow Node Next Mapping
*/
/**
 * Quill mapping between the [[WorkflowNodeNext]] entity and the
 * AD_WF_NodeNext table: binds each case class field to its physical
 * column name so queries can be written against the scala model.
 */
trait WorkflowNodeNextMapping {
  // Compile-time query schema; the string literals must match the ADempiere
  // column names exactly.
  val queryWorkflowNodeNext = quote {
    querySchema[WorkflowNodeNext]("AD_WF_NodeNext",
      _.workflowNodeNextId-> "AD_WF_NodeNext_ID",
      _.tenantId-> "AD_Client_ID",
      _.organizationId -> "AD_Org_ID" ,
      _.isActive-> "IsActive",
      _.created-> "Created",
      _.createdBy-> "CreatedBy",
      _.updated-> "Updated",
      _.updatedBy-> "UpdatedBy",
      _.workflowNodeId-> "AD_WF_Node_ID",
      _.workflowNextId-> "AD_WF_Next_ID",
      _.description-> "Description",
      _.seqNo-> "SeqNo",
      _.entityType-> "EntityType",
      _.transitionCode-> "TransitionCode",
      _.isStdUserWorkflow-> "IsStdUserWorkflow",
      _.uuid-> "UUID")
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/WorkflowNodeNextMapping.scala | Scala | gpl-3.0 | 1,891 |
package ru.wordmetrix.nlp
/**
* Text tokenizer
* (acquired from an old HMM NLP project of my own)
*/
object NLP {
  // Type aliases documenting intent: a Phrase is a raw sentence string,
  // a Word is a single token.
  type Phrase = String
  type Word = String
  // NOTE(review): these implicit conversions between String/List[String] and
  // NLP make the methods below terse, but they silently convert at call
  // sites (e.g. rPhrase.findAllIn over a List[String]); keep their import
  // scope narrow.
  implicit def string2NLP(s: String) = new NLP(s)
  implicit def list2NLP(l: List[String]) = new NLP(l)
  implicit def list2String(l: List[String]) = l.rectify()
  implicit def nlp2String(nlp: NLP) = nlp.phrase.rectify()
  // Matches one sentence: begins with an uppercase letter (at text start or
  // shortly after a period), runs through letters/space/punctuation, and
  // ends with ?, ! or . followed by end-of-input or an uppercase letter.
  val rPhrase = """((?<=\.\s{0,4})|^)\p{Lu}+[\p{L}\s,.’'\-]+?\p{Ll}{2}[?!.]($|(?=(\s*\p{Lu})))""".r
}
class NLP(val phrase: List[NLP.Phrase]) {
  // Auxiliary constructor: tokenizes a raw string on word boundaries,
  // strips whitespace, splits pure-punctuation tokens into individual
  // characters, and drops empty tokens.
  def this(phrase: String) = this(phrase.split("\\b").map(
    x => x.replaceAll("\\s+", "")).filter(_ != "").map(
    x => "^\\p{Punct}+$".r.findFirstMatchIn(x) match {
      case None => List(x);
      case _ => x.split("").toList
    }).flatten.filter(_ != "").toList)
  import NLP._
  // Splits the text into sentences. `phrase` (a List) is implicitly
  // re-joined into a String via list2String before matching.
  def phrases() = rPhrase.findAllIn("\\s+".r.replaceAllIn(phrase, " "))
  // Returns the token list as-is.
  def tokenize(): List[NLP.Word] = phrase
  // Token list prefixed with two empty sentinels (for n-gram style callers).
  def tokenizeGap(): List[NLP.Word] = List("", "") ++ phrase
  // Masks the words of `ws` in this phrase; result is rendered back to a
  // String by the implicit list2String conversion.
  def hidewords(ws: List[NLP.Word]): Phrase = hidewords_(ws)
  // Masks, in order, the words of `ws` wherever they next occur in the
  // phrase (a possibly non-contiguous subsequence match, case-insensitive).
  // Each matched word is replaced by a same-length run of '*'. If not every
  // word of `ws` is found, the phrase is returned unmodified.
  def hidewords_(ws: List[NLP.Word]): List[Word] = {
    // Walks both lists: equal heads -> mask; ws exhausted -> done;
    // mismatch -> keep the phrase word and keep scanning for the same target.
    def filter(ws1: List[Word], ws2: List[Word],
               wout: List[Word] = List()): Option[List[Word]] =
      (ws1, ws2) match {
        case (w1 :: ws1, w2 :: ws2) if (w1.equalsIgnoreCase(w2)) =>
          filter(ws1, ws2, "*" * w1.length :: wout)
        case (ws1, List()) => Some(wout.reverse ++ ws1)
        case (w1 :: ws1, ws2) => filter(ws1, ws2, w1 :: wout)
        case (List(), ws2) => None
      }
    filter(phrase.tokenize(), ws) match {
      case Some(ws) => ws
      case None => phrase
    }
  }
  // Masks every given phrase in turn; previously-masked "*" tokens are
  // excluded from each phrase's word list so masks do not match masks.
  def hidephrases(ps: List[NLP.Phrase]): Phrase = ps.
    map(_.tokenize).
    foldLeft(phrase)({
      case (p, ws) => p.hidewords_(ws.filter(_ != "*"))
    })
  // Re-joins tokens with spaces, then removes the space that the join put
  // in front of punctuation (except before '*', which keeps its space) and
  // glues apostrophes back onto the following token.
  def rectify(): String = {
    """\s+(\p{Punct})""".r.replaceAllIn(
      phrase.mkString(" "),
      m => scala.util.matching.Regex.quoteReplacement(m.group(1) match {
        case "*" => " *";
        case x => x
      })
    ).replace("' ", "'")
  }
}
| electricmind/enwiz | src/main/scala/ru/wordmetrix/nlp/NLP.scala | Scala | apache-2.0 | 2,304 |
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
import java.awt
import java.awt.event.{ActionEvent, ActionListener}
import javax.swing.border.{BevelBorder, Border, CompoundBorder, MatteBorder, TitledBorder}
import javax.swing.event.{ChangeEvent, ChangeListener}
import javax.swing.{BorderFactory, Icon, ImageIcon, JComponent, SwingUtilities}
import scala.language.implicitConversions
/**
* Helpers for this package.
*/
object Swing {
  // Minimal structural hook so helpers can read a wrapper's AWT peer.
  protected[swing] trait PeerContainer { def peer: awt.Container }

  // Normalize between Java's null-icon convention and scala-swing's EmptyIcon.
  protected[swing] def toNoIcon (i: Icon): Icon = if (i == null) EmptyIcon else i
  protected[swing] def toNullIcon (i: Icon): Icon = if (i == EmptyIcon) null else i
  // Unwraps the AWT peer, propagating null for a null wrapper.
  protected[swing] def nullPeer(c: PeerContainer): awt.Container = if (c != null) c.peer else null

  // Tuple-to-geometry conversions for concise UI code, e.g. `size = (100, 50)`.
  implicit def pair2Dimension(p: (Int, Int)) : Dimension = new Dimension (p._1, p._2)
  implicit def pair2Point (p: (Int, Int)) : Point = new Point (p._1, p._2)
  // NOTE(review): named pair2Point although it builds a Rectangle; overload
  // resolution is by tuple arity, so behavior is unaffected.
  implicit def pair2Point (p: (Int, Int, Int, Int)): Rectangle = new Rectangle (p._1, p._2, p._3, p._4)

  // SAM-style factories: wrap a by-name block / function as the Java listener type.
  @inline final def Runnable(@inline block: => Unit): Runnable = new Runnable {
    def run(): Unit = block
  }
  final def ChangeListener(f: ChangeEvent => Unit): ChangeListener = new ChangeListener {
    def stateChanged(e: ChangeEvent): Unit = f(e)
  }
  final def ActionListener(f: ActionEvent => Unit): ActionListener = new ActionListener {
    def actionPerformed(e: ActionEvent): Unit = f(e)
  }

  // Layout filler components wrapping javax.swing.Box's factory methods.
  def Box(min: Dimension, pref: Dimension, max: Dimension): Component = new Component {
    override lazy val peer: JComponent = new javax.swing.Box.Filler(min, pref, max)
  }
  def HGlue: Component = new Component {
    override lazy val peer: JComponent = javax.swing.Box.createHorizontalGlue.asInstanceOf[JComponent]
  }
  def VGlue: Component = new Component {
    override lazy val peer: JComponent = javax.swing.Box.createVerticalGlue.asInstanceOf[JComponent]
  }
  def Glue: Component = new Component {
    override lazy val peer: JComponent = javax.swing.Box.createGlue.asInstanceOf[JComponent]
  }
  def RigidBox(dim: Dimension): Component = new Component {
    override lazy val peer: JComponent = javax.swing.Box.createRigidArea(dim).asInstanceOf[JComponent]
  }
  def HStrut(width: Int): Component = new Component {
    override lazy val peer: JComponent = javax.swing.Box.createHorizontalStrut(width).asInstanceOf[JComponent]
  }
  def VStrut(height: Int): Component = new Component {
    override lazy val peer: JComponent = javax.swing.Box.createVerticalStrut(height).asInstanceOf[JComponent]
  }

  // ImageIcon factories.
  def Icon(image: java.awt.Image) : ImageIcon = new ImageIcon(image)
  def Icon(filename: String) : ImageIcon = new ImageIcon(filename)
  def Icon(url: java.net.URL) : ImageIcon = new ImageIcon(url)

  /**
   * The empty icon. Use this icon instead of <code>null</code> to indicate
   * that you don't want an icon.
   */
  case object EmptyIcon extends Icon {
    def getIconHeight : Int = 0
    def getIconWidth : Int = 0
    def paintIcon(c: java.awt.Component, g: java.awt.Graphics, x: Int, y: Int): Unit = ()
  }

  // Public counterparts of toNoIcon/toNullIcon.
  def unwrapIcon(icon: Icon): Icon = if (icon == null) EmptyIcon else icon
  def wrapIcon (icon: Icon): Icon = if (icon == EmptyIcon) null else icon

  // Border factories delegating to javax.swing.BorderFactory.
  def EmptyBorder: Border = BorderFactory.createEmptyBorder()
  def EmptyBorder(weight: Int): Border =
    BorderFactory.createEmptyBorder(weight, weight, weight, weight)
  def EmptyBorder(top: Int, left: Int, bottom: Int, right: Int): Border =
    BorderFactory.createEmptyBorder(top, left, bottom, right)

  def LineBorder(c: Color): Border = BorderFactory.createLineBorder(c)
  def LineBorder(c: Color, weight: Int): Border = BorderFactory.createLineBorder(c, weight)

  def BeveledBorder(kind: Embossing): Border = BorderFactory.createBevelBorder(kind.bevelPeer)
  def BeveledBorder(kind: Embossing, highlight: Color, shadow: Color): Border =
    BorderFactory.createBevelBorder(kind.bevelPeer, highlight, shadow)
  def BeveledBorder(kind: Embossing,
                    highlightOuter: Color, highlightInner: Color,
                    shadowOuter: Color, shadowInner: Color): Border =
    BorderFactory.createBevelBorder(kind.bevelPeer,
      highlightOuter, highlightInner,
      shadowOuter, shadowInner)

  // Maps the Lowered/Raised distinction onto the constants used by both
  // bevel and etched borders.
  sealed abstract class Embossing {
    def bevelPeer : Int
    def etchPeer : Int
  }
  case object Lowered extends Embossing {
    def bevelPeer : Int = BevelBorder.LOWERED
    def etchPeer : Int = javax.swing.border.EtchedBorder.LOWERED
  }
  case object Raised extends Embossing {
    def bevelPeer : Int = BevelBorder.RAISED
    def etchPeer : Int = javax.swing.border.EtchedBorder.RAISED
  }

  def EtchedBorder: Border = BorderFactory.createEtchedBorder()
  def EtchedBorder(kind: Embossing): Border =
    BorderFactory.createEtchedBorder(kind.etchPeer)
  def EtchedBorder(kind: Embossing, highlight: Color, shadow: Color): Border =
    BorderFactory.createEtchedBorder(kind.etchPeer, highlight, shadow)

  def MatteBorder(top: Int, left: Int, bottom: Int, right: Int, color: Color): MatteBorder =
    BorderFactory.createMatteBorder(top, left, bottom, right, color)
  def MatteBorder(top: Int, left: Int, bottom: Int, right: Int, icon: Icon): MatteBorder =
    BorderFactory.createMatteBorder(top, left, bottom, right, icon)

  def CompoundBorder(outside: Border, inside: Border): CompoundBorder =
    BorderFactory.createCompoundBorder(outside, inside)

  def TitledBorder(border: Border, title: String): TitledBorder =
    BorderFactory.createTitledBorder(border, title)

  /**
   * Schedule the given code to be executed on the Swing event dispatching
   * thread (EDT). Returns immediately.
   */
  @inline final def onEDT(op: => Unit): Unit = SwingUtilities invokeLater Runnable(op)

  /**
   * Schedule the given code to be executed on the Swing event dispatching
   * thread (EDT). Blocks until after the code has been run.
   */
  @inline final def onEDTWait(op: => Unit): Unit = SwingUtilities invokeAndWait Runnable(op)
}
| scala/scala-swing | src/main/scala/scala/swing/Swing.scala | Scala | apache-2.0 | 6,311 |
package scala.generator
import java.util.UUID
import org.joda.time.format.ISODateTimeFormat.dateTimeParser
import lib.Datatype
import lib.generator.GeneratorUtil
import lib.Text.initLowerCase
import play.api.libs.json._
import play.api.Logger
sealed trait ScalaDatatype {

  /**
   * Scala source code rendering a variable of this type as a String.
   * Types without a sensible string form keep this default, which fails
   * loudly at generation time.
   */
  def asString(originalVarName: String): String = {
    throw new UnsupportedOperationException(s"unsupported conversion of type ${name} for var $originalVarName")
  }

  /** Fully rendered scala type name, e.g. "Seq[String]". */
  def name: String

  /**
   * Renders a parameter declaration — "name: Type" or "name: Type = default"
   * — quoting the variable name if it collides with a scala keyword.
   */
  def definition(
    originalVarName: String,
    default: Option[String]
  ): String = {
    val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
    default match {
      case Some(d) => s"$varName: $name = $d"
      case None => s"$varName: $name"
    }
  }

  /** Parses a default value supplied as raw JSON text. */
  def default(value: String): String = default(Json.parse(value))

  // Types that support literal defaults override this.
  protected def default(json: JsValue): String =
    throw new UnsupportedOperationException(s"default for type ${name}")

  /** Idiomatic variable name for values of this type. */
  def toVariableName: String
}
sealed trait ScalaPrimitive extends ScalaDatatype {

  /** Name of this type in the apidoc specification. */
  def apidocType: String

  /** Unqualified scala class name. */
  def shortName: String

  override def asString(originalVarName: String): String

  /** Package prefix, when one is needed for qualification. */
  def namespace: Option[String] = None

  /** Qualified name when a namespace is present, bare name otherwise. */
  def fullName: String = namespace.fold(shortName)(ns => s"$ns.$shortName")

  def name = fullName

  override def toVariableName = "value"
}
object ScalaPrimitive {
  // Each primitive declares: its apidoc type name, its scala short name, an
  // optional namespace, how to render a value as a String, and (where
  // supported) how to turn a JSON default into scala source code.

  case object Boolean extends ScalaPrimitive {
    def apidocType = "boolean"
    def shortName = "Boolean"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override protected def default(json: JsValue) = json.as[scala.Boolean].toString
  }

  case object Double extends ScalaPrimitive {
    def apidocType = "double"
    def shortName = "Double"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override protected def default(json: JsValue) = json.as[scala.Double].toString
  }

  case object Integer extends ScalaPrimitive {
    def apidocType = "integer"
    def shortName = "Int"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override protected def default(json: JsValue) = json.as[scala.Int].toString
  }

  case object Long extends ScalaPrimitive {
    def apidocType = "long"
    def shortName = "Long"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override protected def default(json: JsValue) = json.as[scala.Long].toString
  }

  // joda LocalDate-backed date; the default is rendered via the explicit
  // year/month/day constructor.
  case object DateIso8601 extends ScalaPrimitive {
    override def namespace = Some("_root_.org.joda.time")
    def apidocType = "date-iso8601"
    def shortName = "LocalDate"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override def default(value: String) = default(JsString(value))
    override protected def default(json: JsValue) = {
      val dt = dateTimeParser.parseLocalDate(json.as[String])
      s"new ${fullName}(${dt.getYear}, ${dt.getMonthOfYear}, ${dt.getDayOfMonth})"
    }
  }

  case object DateTimeIso8601 extends ScalaPrimitive {
    override def namespace = Some("_root_.org.joda.time")
    def apidocType = "date-time-iso8601"
    def shortName = "DateTime"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"_root_.org.joda.time.format.ISODateTimeFormat.dateTime.print($varName)"
    }
    override def default(value: String) = {
      "_root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseDateTime(" + ScalaUtil.wrapInQuotes(value) + ")"
    }
    override protected def default(json: JsValue) = {
      // TODO would like to use the constructor for DateTime, since that would
      // be faster code, but things get quite tricky because of time zones :(
      // NOTE(review): interpolating the JsValue relies on JsString.toString
      // emitting surrounding double quotes — confirm if play-json is upgraded.
      s"""_root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseDateTime(${json})"""
    }
  }

  case object Decimal extends ScalaPrimitive {
    def apidocType = "decimal"
    def shortName = "BigDecimal"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override protected def default(json: JsValue) = json.as[scala.BigDecimal].toString
  }

  // Raw JSON object, passed through as play-json's JsObject.
  case object Object extends ScalaPrimitive {
    override def namespace = Some("_root_.play.api.libs.json")
    def apidocType = "object"
    def shortName = "JsObject"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
  }

  case object String extends ScalaPrimitive {
    def apidocType = "string"
    def shortName = "String"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName"
    }
    override def default(value: String) = ScalaUtil.wrapInQuotes(value)
    override protected def default(json: JsValue) = default(json.as[String])
  }

  case object Unit extends ScalaPrimitive {
    def apidocType = "unit"
    def shortName = "Unit"
    override def asString(originalVarName: String): String = {
      throw new UnsupportedOperationException(s"unsupported conversion of type object for $originalVarName")
    }
  }

  case object Uuid extends ScalaPrimitive {
    override def namespace = Some("_root_.java.util")
    def apidocType = "uuid"
    def shortName = "UUID"
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override def default(value: String) = "_root_.java.util.UUID.fromString(" + ScalaUtil.wrapInQuotes(value) + ")"
    override protected def default(json: JsValue) = default(json.as[UUID].toString)
  }

  // User-defined types: the namespace is taken from the service's
  // models/enums/unions namespace, and the variable name from the class name.
  case class Model(namespaces: Namespaces, shortName: String) extends ScalaPrimitive {
    override def namespace = Some(namespaces.models)
    def apidocType = shortName
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override def toVariableName = initLowerCase(shortName)
  }

  case class Enum(namespaces: Namespaces, shortName: String) extends ScalaPrimitive {
    override def namespace = Some(namespaces.enums)
    def apidocType = shortName
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override def default(value: String): String = default(JsString(value))
    // NOTE(review): as for DateTimeIso8601, this depends on the JsValue's
    // toString rendering (quotes included for JsString).
    override protected def default(json: JsValue) = s"${fullName}(${json})"
    override def toVariableName = initLowerCase(shortName)
  }

  case class Union(namespaces: Namespaces, shortName: String) extends ScalaPrimitive {
    override def namespace = Some(namespaces.unions)
    def apidocType = shortName
    override def asString(originalVarName: String): String = {
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName.toString"
    }
    override def toVariableName = initLowerCase(shortName)
  }
}
object ScalaDatatype {

  /**
   * Shared behaviour for container types (List, Map, Option): the variable
   * name is derived from the element type and pluralized once, at the
   * outermost non-container level.
   */
  sealed abstract class Container(inner: ScalaDatatype) extends ScalaDatatype {
    override def toVariableName = inner match {
      case c: Container => c.toVariableName
      case other => lib.Text.pluralize(other.toVariableName)
    }
  }

  case class List(inner: ScalaDatatype) extends Container(inner) {
    override def name = s"Seq[${inner.name}]"

    // Renders a JSON array default as scala source: "Nil" or "Seq(...)".
    override protected def default(json: JsValue) = {
      val elements = json.as[JsArray].value.map(v => inner.default(v))
      if (elements.isEmpty) "Nil" else elements.mkString("Seq(", ",", ")")
    }
  }

  case class Map(inner: ScalaDatatype) extends Container(inner) {
    override def name = s"Map[String, ${inner.name}]"

    // Renders a JSON object default as scala source: "Map.empty" or "Map(...)".
    override protected def default(json: JsValue) = {
      val entries = json.as[scala.collection.immutable.Map[String, JsValue]].map {
        case (key, value) => s""""${key}" -> ${inner.default(value)}"""
      }
      if (entries.isEmpty) "Map.empty" else entries.mkString("Map(", ",", ")")
    }
  }

  case class Option(datatype: ScalaDatatype) extends Container(datatype) {
    override def name = s"_root_.scala.Option[${datatype.name}]"

    // Optional parameters always default to None; explicit defaults are rejected.
    override def definition(
      originalVarName: String,
      default: scala.Option[String]
    ): String = {
      require(default.isEmpty, s"no defaults allowed on options: ${default}")
      val varName = ScalaUtil.quoteNameIfKeyword(originalVarName)
      s"$varName: $name = None"
    }

    // override, since options contain at most one element
    override def toVariableName = datatype.toVariableName
  }
}
object ScalaTypeResolver {

  /**
   * If name is a qualified name (as identified by having a dot),
   * parses the name and returns a tuple of (namespace,
   * name). Otherwise, returns (Namespaces object, class name)
   */
  private def parseQualifiedName(defaultNamespaces: Namespaces, name: String): (Namespaces, String) = {
    name.split("\\.").toList match {
      case _ :: Nil =>
        // Unqualified: keep the caller's namespaces, normalize the class name.
        (defaultNamespaces, ScalaUtil.toClassName(name))
      case parts =>
        val className = parts.last
        // Second-to-last segment names the object kind (models/enums/unions).
        val objectTypeName = parts.init.last
        // Evaluated only for its warning side effect; the resolved kind
        // itself is not used further.
        val objectType = GeneratorUtil.ObjectType.fromString(objectTypeName).getOrElse {
          Logger.warn(s"Could not resolve object type[$objectTypeName]. Defaults to models")
          GeneratorUtil.ObjectType.Model
        }
        val baseNamespace = parts.dropRight(2).mkString(".")
        (Namespaces(baseNamespace), ScalaUtil.toClassName(className))
    }
  }
}
case class ScalaTypeResolver(
  namespaces: Namespaces
) {

  /**
   * Resolves an apidoc [[Datatype]] to its [[ScalaDatatype]] representation.
   *
   * Primitives map to their singleton representations, containers recurse on
   * their element type, and user defined types are resolved through
   * [[ScalaTypeResolver.parseQualifiedName]] so that qualified names pick up
   * the right namespace while unqualified names default to `namespaces`.
   */
  def scalaDatatype(datatype: Datatype): ScalaDatatype = {
    datatype match {
      case Datatype.Primitive.Boolean => ScalaPrimitive.Boolean
      case Datatype.Primitive.Decimal => ScalaPrimitive.Decimal
      case Datatype.Primitive.Integer => ScalaPrimitive.Integer
      case Datatype.Primitive.Double => ScalaPrimitive.Double
      case Datatype.Primitive.Long => ScalaPrimitive.Long
      case Datatype.Primitive.Object => ScalaPrimitive.Object
      case Datatype.Primitive.String => ScalaPrimitive.String
      case Datatype.Primitive.DateIso8601 => ScalaPrimitive.DateIso8601
      case Datatype.Primitive.DateTimeIso8601 => ScalaPrimitive.DateTimeIso8601
      case Datatype.Primitive.Uuid => ScalaPrimitive.Uuid
      case Datatype.Primitive.Unit => ScalaPrimitive.Unit

      case Datatype.Container.List(t) => ScalaDatatype.List(scalaDatatype(t))
      case Datatype.Container.Map(t) => ScalaDatatype.Map(scalaDatatype(t))
      case Datatype.Container.Option(inner) => ScalaDatatype.Option(scalaDatatype(inner))

      // parseQualifiedName already handles the unqualified (single segment)
      // case, so all three user defined kinds share one resolution path.
      // (The previous code duplicated that single-segment logic inline for
      // models only, with identical results.)
      case Datatype.UserDefined.Model(name) => {
        val (ns, n) = ScalaTypeResolver.parseQualifiedName(namespaces, name)
        ScalaPrimitive.Model(ns, n)
      }

      case Datatype.UserDefined.Enum(name) => {
        val (ns, n) = ScalaTypeResolver.parseQualifiedName(namespaces, name)
        ScalaPrimitive.Enum(ns, n)
      }

      case Datatype.UserDefined.Union(name) => {
        val (ns, n) = ScalaTypeResolver.parseQualifiedName(namespaces, name)
        ScalaPrimitive.Union(ns, n)
      }
    }
  }
}
| movio/movio-apidoc-generator | scala-generator/src/main/scala/models/generator/ScalaDatatype.scala | Scala | mit | 11,866 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Iat Chong Chan
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package th4j
import java.nio.ByteOrder
import java.nio.file.{Paths, Path, Files}
import com.naef.jnlua.LuaState
import com.sun.jna._
import th4j.Storage._
import th4j.Tensor._
import th4j.func._
import th4j.util._
import scala.sys.SystemProperties
import scala.util.{Random, Try}
// Smoke-test entry point: wires JNA/JNLua native libraries, boots a Lua
// state, runs lua/test.lua and calls a Lua function from the JVM.
// NOTE(review): uses the App trait, so all of this runs in delayed init;
// statement order matters (native paths must be set before loadLibrary).
object Main extends App {
  val prob = new SystemProperties()
  prob("jna.library.path") = "./nativeLib"
  prob("java.library.path") = "./nativeLib:" + prob("java.library.path")
  // Reflection hack: clearing ClassLoader.sys_paths forces the JVM to
  // re-read java.library.path on the next loadLibrary call.
  val fieldSysPath = classOf[ClassLoader].getDeclaredField( "sys_paths" )
  fieldSysPath.setAccessible( true )
  fieldSysPath.set( null, null )
  Native.loadLibrary("libjnlua5.1.so",
    classOf[Library])
  // Leftover experiments kept for reference (direct tensor push/pull via Lua).
//
//  L.openLibs()
////  println(L.getTop)
//  L.call(0,0)
//  L.getGlobal("readFromTensor")
//  val t = new DoubleTensor(4, 5).fill(1.0)
//  L.pushIn
//  L.pushNumber(t.getPeerPtr())
//  L.call(1,0)
//  L.getGlobal("newTensor")
//  L.call(0, 1)
//  val ptr = new Pointer(L.toNumber(1).toLong)
//  val t = new DoubleTensor(ptr)
//  print(t)
  // Boot a Lua state, load the th4j wrapper, then run the test script.
  val L = new LuaState()
  L.openLibs()
  L.load("""require 'th4j' """, "=wrapper")
  L.call(0, 0)
  L.load(Files.newInputStream(Paths.get("lua/test.lua"))
    , "=hello")
  L.call(0, 0)
//
//  L.getGlobal("test")
//  L.pushInteger(1)
//  L.call(1, 2)
//  println(L.toInteger(-1), L.toInteger(-2))
  // Bind the Lua global "test" as a typed function and invoke it.
  val test = LuaFunction.create[(DoubleTensor, DoubleTensor, String)=>(LongStorage, String)]("test", L)
  println(test()(new DoubleTensor(3, 4), new DoubleTensor(), "Hello From Java"))
//  println(Pointer.nativeValue(a.ptr))
//  println(a)
}
| ET-Chan/th4j | src/main/scala/th4j/Main.scala | Scala | mit | 2,708 |
package info.longshore.site
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
import info.longshore.site.libs.scalatagsMarshaller
import java.io.File
import scala.util.Try
object Main {
  // Boots an akka-http server serving the resume page at "/" and a
  // browseable file directory under "/files". Port comes from args(0),
  // defaulting to 8080 on any parse/absence failure.
  def main(args: Array[String]): Unit = {
    implicit val actorSystem = ActorSystem()
    implicit val actorMaterializer = ActorMaterializer()
    implicit val ec = actorSystem.dispatcher
    implicit val marshaller = scalatagsMarshaller()
    val port = Try(args(0).toInt).getOrElse(8080)
    val resume = templates.resume()
    val home = System.getProperty("user.home")
    assert(home != null && home != "", "user.home must be defined")
    // NOTE(review): "longshore" + "site" concatenates with no separator,
    // yielding ~/.info/longshoresite/files — confirm that directory name is
    // intentional and not a missing File.separator.
    val files = new File(
      home + File.separator + ".info" + File.separator + "longshore" + "site" + File.separator + "files"
    )
    val route =
      pathEndOrSingleSlash {
        encodeResponse(
          complete(resume)
        )
      } ~
        pathPrefix("files") {
          getFromBrowseableDirectory(files.getAbsolutePath)
        }
    println("Starting server on port: " + port)
    println("Serving files from: " + files.getAbsolutePath)
    val future = Http().bindAndHandle(route, "0.0.0.0", port)
    // Graceful shutdown: unbind the port, then terminate the actor system.
    Runtime.getRuntime.addShutdownHook(
      new Thread() {
        override def run(): Unit = {
          future
            .flatMap(_.unbind())
            .onComplete(_ => actorSystem.terminate())
        }
      }
    )
  }
}
| longshorej/longshore-info-site | src/main/scala/info/longshore/site/Main.scala | Scala | gpl-3.0 | 1,481 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Appareil entity.
*/
class AppareilGatlingTest extends Simulation {

    val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    // Log all HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
    // Log failed HTTP requests
    //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))

    // Target host; override with -DbaseURL=...
    val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""

    val httpConf = http
        .baseURL(baseURL)
        .inferHtmlResources()
        .acceptHeader("*/*")
        .acceptEncodingHeader("gzip, deflate")
        .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
        .connection("keep-alive")
        .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")

    val headers_http = Map(
        "Accept" -> """application/json"""
    )

    // Authenticated calls echo back the CSRF token captured from Set-Cookie.
    val headers_http_authenticated = Map(
        "Accept" -> """application/json""",
        "X-CSRF-TOKEN" -> "${csrf_token}"
    )

    // Scenario: capture CSRF token, log in, then exercise the Appareil CRUD
    // endpoints (list, create, read, delete).
    val scn = scenario("Test the Appareil entity")
        .exec(http("First unauthenticated request")
        .get("/api/account")
        .headers(headers_http)
        .check(status.is(401))
        .check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
        .pause(10)
        .exec(http("Authentication")
        .post("/api/authentication")
        .headers(headers_http_authenticated)
        .formParam("j_username", "admin")
        .formParam("j_password", "admin")
        .formParam("_spring_security_remember_me", "true")
        .formParam("submit", "Login"))
        .pause(1)
        .exec(http("Authenticated request")
        .get("/api/account")
        .headers(headers_http_authenticated)
        .check(status.is(200))
        .check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
        .pause(10)
        .repeat(2) {
            exec(http("Get all appareils")
            .get("/api/appareils")
            .headers(headers_http_authenticated)
            .check(status.is(200)))
            .pause(10 seconds, 20 seconds)
            .exec(http("Create new appareil")
            .put("/api/appareils")
            .headers(headers_http_authenticated)
            .body(StringBody("""{"id":null, "nomapp":"SAMPLE_TEXT", "conso":"SAMPLE_TEXT"}""")).asJSON
            .check(status.is(201))
            .check(headerRegex("Location", "(.*)").saveAs("new_appareil_url")))
            .pause(10)
            .repeat(5) {
                exec(http("Get created appareil")
                .get("${new_appareil_url}")
                .headers(headers_http_authenticated))
                .pause(10)
            }
            .exec(http("Delete created appareil")
            .delete("${new_appareil_url}")
            .headers(headers_http_authenticated))
            .pause(10)
        }

    val users = scenario("Users").exec(scn)

    // Ramp 100 virtual users over one minute.
    setUp(
        users.inject(rampUsers(100) over (1 minutes))
    ).protocols(httpConf)
}
| CherifAbdoul/TpSirM1MIAGE_Ierlomenko-Kinfack-Haidara | tpJpa/src/test/gatling/simulations/AppareilGatlingTest.scala | Scala | gpl-2.0 | 3,339 |
import sbt._
import sbt.Keys._
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
/**
* Please use plain text editor to edit this file instead of NetBeans (To be supported)
*/
object Build extends sbt.Build {
  // Single root project; library dependencies come from Dependencies.basic.
  lazy val root = Project("SampleProject", file("."))
    .settings(basicSettings: _*)
    .settings(libraryDependencies ++= Dependencies.basic)

  // Settings shared by all modules: coordinates, compiler flags, resolvers.
  lazy val basicSettings = Seq(
    organization := "your.organization",
    version := "0.1.0",
    scalaVersion := "2.11.7",
    scalacOptions ++= Seq("-unchecked", "-deprecation"),
    javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
    resolvers ++= Seq(
      "Sonatype OSS Releases" at "https://oss.sonatype.org/content/repositories/releases",
      "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
      "Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases/"))

  // scalariform code format settings
  // NOTE(review): both statements below produce setting values that are
  // discarded (never added to a project's settings), so they likely have no
  // effect on the build — TODO confirm and wire them into basicSettings.
  SbtScalariform.scalariformSettings // enable scalariformSettings
  import scalariform.formatter.preferences._
  ScalariformKeys.preferences := ScalariformKeys.preferences.value
    .setPreference(RewriteArrowSymbols, false)
    .setPreference(AlignParameters, true)
    .setPreference(AlignSingleLineCaseStatements, true)
    .setPreference(DoubleIndentClassDeclaration, true)
    .setPreference(IndentSpaces, 2)
}
object Dependencies {
  // ---- define dependencies libs
  // `val` rather than `var`: this list is read-only configuration (consumed
  // via `libraryDependencies ++= Dependencies.basic`) and is never
  // reassigned anywhere in the build, so it should not be mutable.
  val basic: Seq[ModuleID] = Seq(
    "io.spray" %% "spray-can" % "1.3.3" withSources() withJavadoc(),
    "io.spray" %% "spray-routing" % "1.3.3" withSources() withJavadoc(),
    "io.spray" %% "spray-json" % "1.3.2",
    "io.spray" %% "spray-client" % "1.3.3",
    "io.spray" %% "spray-testkit" % "1.3.3" % "test",
    "com.typesafe.akka" %% "akka-actor" % "2.3.11",
    "com.typesafe.akka" %% "akka-remote" % "2.3.11",
    "com.typesafe.akka" %% "akka-contrib" % "2.3.11",
    "com.typesafe.akka" %% "akka-testkit" % "2.3.11",
    "org.scalatest" %% "scalatest" % "2.2.4" % "test",
    "commons-io" % "commons-io" % "2.4" % "test",
    "org.specs2" %% "specs2-core" % "2.3.11" % "test",
    "org.scalaz" %% "scalaz-core" % "7.1.0"
  )
} | BabakAp/ProjectFB | project/Build.scala | Scala | apache-2.0 | 2,208 |
package core
import akka.actor.{Actor, ActorRef}
import akka.io.IO
import domain.{Place, Tweet, User}
import spray.can.Http
import spray.client.pipelining._
import spray.http.{HttpRequest, _}
import spray.httpx.unmarshalling.{Deserialized, MalformedContent, Unmarshaller}
import spray.json._
import scala.util.Try
trait TweetMarshaller {

  /**
   * spray [[Unmarshaller]] decoding a raw Twitter streaming API chunk into a
   * [[Tweet]]. Every failure mode (bad JSON, missing fields, malformed
   * sub-objects) is reported as a Left(MalformedContent) rather than thrown.
   */
  implicit object TweetUnmarshaller extends Unmarshaller[Tweet] {

    /** Builds a [[User]]; tolerates missing lang/followers by defaulting them. */
    def mkUser(user: JsObject): Deserialized[User] = {
      (user.fields("id_str"), user.fields("lang"), user.fields("followers_count")) match {
        case (JsString(id), JsString(lang), JsNumber(followers)) => Right(User(id, lang, followers.toInt))
        case (JsString(id), _, _) => Right(User(id, "", 0))
        case _ => Left(MalformedContent("bad user"))
      }
    }

    /** Extracts an optional [[Place]]; JsNull is a valid "no place". */
    def mkPlace(place: JsValue): Deserialized[Option[Place]] = place match {
      case JsObject(fields) =>
        (fields.get("country"), fields.get("name")) match {
          case (Some(JsString(country)), Some(JsString(name))) => Right(Some(Place(country, name)))
          case _ => Left(MalformedContent("bad place"))
        }
      case JsNull => Right(None)
      case _ => Left(MalformedContent("bad tweet"))
    }

    override def apply(entity: HttpEntity): Deserialized[Tweet] = {
      Try {
        val json = JsonParser(entity.asString).asJsObject
        (json.fields.get("id_str"), json.fields.get("text"), json.fields.get("place"), json.fields.get("user")) match {
          case (Some(JsString(id)), Some(JsString(text)), Some(place), Some(user: JsObject)) =>
            // Chain the two partial decodes; the first failure wins.
            // (Previously bound to a redundant `val x` and shadowed the
            // outer `user`/`place` names inside the lambdas.)
            mkUser(user).fold(e => Left(e), { u =>
              mkPlace(place).fold(e => Left(e), { p =>
                Right(Tweet(id, u, text, p))
              })
            })
          case _ => Left(MalformedContent("bad tweet"))
        }
      }.getOrElse(Left(MalformedContent("bad json")))
    }
  }
}
object TweetStreamerActor {
  // Twitter streaming API endpoint for filtered statuses.
  val twitterUri = Uri("https://stream.twitter.com/1.1/statuses/filter.json")

  /** Message registering an additional recipient for decoded tweets. */
  case class AddProcessor(actor: ActorRef)
}
// Opens a chunked HTTP connection to the Twitter streaming API and fans out
// each decoded Tweet to the registered processors. Requires a
// TwitterAuthorization self-type to sign requests (the `authorize` transform).
// `processors` is a constructor var, but it is only mutated from within
// receive, so access stays confined to the actor's message thread.
class TweetStreamerActor(uri: Uri, var processors: Set[ActorRef] = Set.empty) extends Actor with TweetMarshaller {
  this: TwitterAuthorization =>
  import TweetStreamerActor._
  val io = IO(Http)(context.system)

  override def receive: Receive = {
    case query: String =>
      // Start (or restart) streaming, filtered on the given track query.
      val body = HttpEntity(ContentType(MediaTypes.`application/x-www-form-urlencoded`), s"track=$query")
      val rq = HttpRequest(HttpMethods.POST, uri = uri, entity = body) ~> authorize
      println(s"Sent: $rq")
      sendTo(io).withResponsesReceivedBy(self)(rq)
    // Stream protocol: ignore the start marker, decode each chunk; chunks
    // that fail to unmarshal are silently dropped (best-effort stream).
    case ChunkedResponseStart(_) =>
    case MessageChunk(entity, _) => TweetUnmarshaller(entity).right.foreach(tweet => processors.foreach(_ ! tweet))
    case AddProcessor(processor) => processors += processor
    case _ =>
  }
}
| jeffmay/twitter-voice | src/main/scala/core/tweetstream.scala | Scala | apache-2.0 | 2,981 |
package lila.i18n
import play.api.i18n.Lang
/** Registry of every language supported by the UI: maps a [[Lang]] (language +
  * region) to its native display name, and provides helpers for default
  * regions, popularity ordering and name lookups.
  */
object LangList {

  // Lang -> native display name for every supported locale.
  val all = Map(
    Lang("en", "GB") -> "English",
    Lang("af", "ZA") -> "Afrikaans",
    Lang("an", "ES") -> "aragonés",
    Lang("ar", "SA") -> "العربية",
    Lang("as", "IN") -> "অসমীয়া",
    Lang("az", "AZ") -> "Azərbaycanca",
    Lang("be", "BY") -> "Беларуская",
    Lang("bg", "BG") -> "български език",
    Lang("bn", "BD") -> "বাংলা",
    Lang("br", "FR") -> "brezhoneg",
    Lang("bs", "BA") -> "bosanski",
    Lang("ca", "ES") -> "Català, valencià",
    Lang("cs", "CZ") -> "čeština",
    Lang("cv", "CU") -> "чӑваш чӗлхи",
    Lang("cy", "GB") -> "Cymraeg",
    Lang("da", "DK") -> "Dansk",
    Lang("de", "CH") -> "Schwiizerdüütsch",
    Lang("de", "DE") -> "Deutsch",
    Lang("el", "GR") -> "Ελληνικά",
    Lang("en", "US") -> "English (US)",
    Lang("eo", "UY") -> "Esperanto",
    Lang("es", "ES") -> "español",
    Lang("et", "EE") -> "eesti keel",
    Lang("eu", "ES") -> "Euskara",
    Lang("fa", "IR") -> "فارسی",
    Lang("fi", "FI") -> "suomen kieli",
    Lang("fo", "FO") -> "føroyskt",
    Lang("fr", "FR") -> "français",
    Lang("frp", "IT") -> "arpitan",
    Lang("fy", "NL") -> "Frysk",
    Lang("ga", "IE") -> "Gaeilge",
    Lang("gd", "GB") -> "Gàidhlig",
    Lang("gl", "ES") -> "Galego",
    Lang("gu", "IN") -> "ગુજરાતી",
    Lang("he", "IL") -> "עִבְרִית",
    Lang("hi", "IN") -> "हिन्दी, हिंदी",
    Lang("hr", "HR") -> "hrvatski",
    Lang("hu", "HU") -> "Magyar",
    Lang("hy", "AM") -> "Հայերեն",
    Lang("ia", "IA") -> "Interlingua",
    Lang("id", "ID") -> "Bahasa Indonesia",
    Lang("io", "EN") -> "Ido",
    Lang("is", "IS") -> "Íslenska",
    Lang("it", "IT") -> "Italiano",
    Lang("ja", "JP") -> "日本語",
    Lang("jbo", "EN") -> "lojban",
    Lang("jv", "ID") -> "basa Jawa",
    Lang("ka", "GE") -> "ქართული",
    Lang("kab", "DZ") -> "Taqvaylit",
    Lang("kk", "KZ") -> "қазақша",
    Lang("kmr", "TR") -> "Kurdî (Kurmancî)",
    Lang("kn", "IN") -> "ಕನ್ನಡ",
    Lang("ko", "KR") -> "한국어",
    Lang("ky", "KG") -> "кыргызча",
    Lang("la", "LA") -> "lingua Latina",
    Lang("lb", "LU") -> "Lëtzebuergesch",
    Lang("lt", "LT") -> "lietuvių kalba",
    Lang("lv", "LV") -> "latviešu valoda",
    Lang("mg", "MG") -> "fiteny malagasy",
    Lang("mk", "MK") -> "македонски јази",
    Lang("ml", "IN") -> "മലയാളം",
    Lang("mn", "MN") -> "монгол",
    Lang("mr", "IN") -> "मराठी",
    Lang("nb", "NO") -> "Norsk bokmål",
    Lang("ne", "NP") -> "नेपाली",
    Lang("nl", "NL") -> "Nederlands",
    Lang("nn", "NO") -> "Norsk nynorsk",
    Lang("pi", "IN") -> "पालि",
    Lang("pl", "PL") -> "polski",
    Lang("ps", "AF") -> "پښتو",
    Lang("pt", "PT") -> "Português",
    Lang("pt", "BR") -> "Português (BR)",
    Lang("ro", "RO") -> "Română",
    Lang("ru", "RU") -> "русский язык",
    Lang("sa", "IN") -> "संस्कृत",
    Lang("sk", "SK") -> "slovenčina",
    Lang("sl", "SI") -> "slovenščina",
    Lang("sq", "AL") -> "Shqip",
    Lang("sr", "SP") -> "Српски језик",
    Lang("sv", "SE") -> "svenska",
    Lang("sw", "KE") -> "Kiswahili",
    Lang("ta", "IN") -> "தமிழ்",
    Lang("tg", "TJ") -> "тоҷикӣ",
    Lang("th", "TH") -> "ไทย",
    Lang("tk", "TM") -> "Türkmençe",
    Lang("tl", "PH") -> "Tagalog",
    Lang("tp", "TP") -> "toki pona",
    Lang("tr", "TR") -> "Türkçe",
    Lang("uk", "UA") -> "українська",
    Lang("ur", "PK") -> "اُردُو",
    Lang("uz", "UZ") -> "oʻzbekcha",
    Lang("vi", "VN") -> "Tiếng Việt",
    Lang("yo", "NG") -> "Yorùbá",
    Lang("zh", "CN") -> "中文",
    Lang("zh", "TW") -> "繁體中文",
    Lang("zu", "ZA") -> "isiZulu"
  )

  // For languages with several supported regions, the canonical Lang to use
  // when the region should be collapsed.
  val defaultRegions = Map[String, Lang](
    "de" -> Lang("de", "DE"),
    "en" -> Lang("en", "US"),
    "pt" -> Lang("pt", "PT"),
    "zh" -> Lang("zh", "CN")
  )

  // Maps a Lang to its language's default-region Lang when one is defined,
  // otherwise returns the Lang unchanged (`|` is Option#getOrElse in lila's syntax).
  def removeRegion(lang: Lang): Lang =
    defaultRegions.get(lang.language) | lang

  // All supported Langs ordered by user count; Langs absent from the snapshot
  // below sort last.
  private lazy val popular: List[Lang] = {
    // 26/04/2020 based on db.user4.aggregate({$sortByCount:'$lang'}).toArray()
    val langs =
      "en-US en-GB ru-RU es-ES tr-TR fr-FR de-DE pt-BR it-IT pl-PL ar-SA fa-IR nl-NL id-ID nb-NO el-GR sv-SE uk-UA cs-CZ vi-VN sr-SP hr-HR hu-HU pt-PT he-IL fi-FI ca-ES da-DK ro-RO zh-CN bg-BG sk-SK ko-KR az-AZ ja-JP sl-SI lt-LT ka-GE mn-MN bs-BA hy-AM zh-TW lv-LV et-EE th-TH gl-ES sq-AL eu-ES hi-IN mk-MK uz-UZ be-BY ms-MY bn-BD is-IS af-ZA nn-NO ta-IN as-IN la-LA kk-KZ tl-PH mr-IN eo-UY gu-IN ky-KG kn-IN ml-IN cy-GB no-NO fo-FO zu-ZA jv-ID ga-IE ur-PK ur-IN te-IN sw-KE am-ET ia-IA sa-IN si-LK ps-AF mg-MG kmr-TR ne-NP tk-TM fy-NL pa-PK br-FR tt-RU cv-CU tg-TJ tp-TP yo-NG frp-IT pi-IN my-MM pa-IN kab-DZ io-EN gd-GB jbo-EN io-IO ckb-IR ceb-PH an-ES"
        .split(' ')
        .flatMap(Lang.get)
        .zipWithIndex
        .toMap
    all.keys.toList.sortBy(l => langs.getOrElse(l, Int.MaxValue))
  }

  // Popular Langs with regional duplicates removed: a Lang is kept when its
  // language has no default region, or when it IS that default region.
  lazy val popularNoRegion: List[Lang] = popular.collect {
    case l if defaultRegions.get(l.language).fold(true)(_ == l) => l
  }

  // lazy val popularNoRegionByLanguage: Map[String, Lang] =
  //   popularNoRegion.view.map { l =>
  //     l.language -> l
  //   }.toMap

  // Display-name lookups; unknown codes fall back to the raw code/string.
  def name(lang: Lang): String = all.getOrElse(lang, lang.code)
  def name(code: String): String = Lang.get(code).fold(code)(name)
  def nameByStr(str: String): String = I18nLangPicker.byStr(str).fold(str)(name)

  // (code, display name) pairs sorted by language code.
  lazy val allChoices: List[(String, String)] = all.view
    .map { case (l, name) =>
      l.code -> name
    }
    .toList
    .sortBy(_._1)
}
| luanlv/lila | modules/i18n/src/main/LangList.scala | Scala | mit | 5,885 |
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.runtime.directio
import com.asakusafw.runtime.directio.{ DataDefinition, DataFilter, DataFormat }
/** Immutable [[DataDefinition]] pairing a data format with an optional filter.
  * The constructor is private; instances are built through the companion's
  * factory methods.
  */
case class BasicDataDefinition[T] private (
  dataFormat: DataFormat[T],
  dataFilter: Option[DataFilter[_ >: T]])
  extends DataDefinition[T] {

  // Concrete data model type handled by the wrapped format.
  override def getDataClass(): Class[_ <: T] = dataFormat.getSupportedType

  override def getDataFormat(): DataFormat[T] = dataFormat

  // The Java-facing API expects null when no filter is configured.
  override def getDataFilter: DataFilter[_ >: T] = dataFilter.orNull
}
/** Factory methods for [[BasicDataDefinition]], with and without a filter,
  * from instances or from classes instantiated through an [[ObjectFactory]].
  */
object BasicDataDefinition {

  // Widens an untyped filter to one applicable to T (the untyped Java-side
  // filter classes are assumed to accept the format's data type).
  private def widen[T](filter: DataFilter[_]): DataFilter[_ >: T] =
    filter.asInstanceOf[DataFilter[_ >: T]]

  /** Definition with no filter. */
  def apply[T](dataFormat: DataFormat[T]): DataDefinition[T] =
    new BasicDataDefinition[T](dataFormat, None)

  /** Definition with the given filter. */
  def apply[T](dataFormat: DataFormat[T], dataFilter: DataFilter[_]): DataDefinition[T] =
    new BasicDataDefinition[T](dataFormat, Some(widen[T](dataFilter)))

  /** Definition whose format is instantiated by the factory; no filter. */
  def apply[T](
    factory: ObjectFactory,
    dataFormatClass: Class[_ <: DataFormat[T]]): DataDefinition[T] =
    new BasicDataDefinition[T](factory.newInstance(dataFormatClass), None)

  /** Definition whose format (and optional filter) are instantiated by the factory. */
  def apply[T](
    factory: ObjectFactory,
    dataFormatClass: Class[_ <: DataFormat[T]],
    dataFilterClass: Option[Class[_ <: DataFilter[_]]]): DataDefinition[T] =
    new BasicDataDefinition[T](
      factory.newInstance(dataFormatClass),
      dataFilterClass.map(cls => widen[T](factory.newInstance(cls))))
}
}
| asakusafw/asakusafw-spark | runtime/src/main/scala/com/asakusafw/spark/runtime/directio/BasicDataDefinition.scala | Scala | apache-2.0 | 1,947 |
package akka.persistence.pg.journal
import akka.NotUsed
import akka.persistence.PersistentRepr
import akka.persistence.pg.{EventTag, PgConfig}
import akka.stream.scaladsl.Source
import slick.jdbc.{ResultSetConcurrency, ResultSetType}
/** Read-side queries over the journal table, exposed as reactive streams.
  * Each query uses forward-only, read-only statement parameters with a fetch
  * size of 1000 so the JDBC driver streams rows instead of materializing them.
  */
trait ReadJournalStore extends JournalStore { self: PgConfig =>
  import driver.api._

  /** Streams the distinct persistence ids currently present in the journal. */
  def currentPersistenceIds(): Source[String, NotUsed] = {
    val query = journals
      .map(_.persistenceId)
      .distinct
      .result
      .withStatementParameters(
        rsType = ResultSetType.ForwardOnly,
        rsConcurrency = ResultSetConcurrency.ReadOnly,
        fetchSize = 1000
      )
      .transactionally
    val publisher = database.stream(query)
    Source.fromPublisher(publisher)
  }

  /** Streams the events of one persistence id whose sequence numbers fall in
    * [fromSequenceNr, toSequenceNr], ordered by sequence number. */
  def currentEvents(
      persistenceId: String,
      fromSequenceNr: Long,
      toSequenceNr: Long
  ): Source[PersistentRepr, NotUsed] = {
    val query = journals
      .filter(_.persistenceId === persistenceId)
      .filter(_.sequenceNr >= fromSequenceNr)
      .filter(_.sequenceNr <= toSequenceNr)
      .sortBy(_.sequenceNr)
      .result
      .withStatementParameters(
        rsType = ResultSetType.ForwardOnly,
        rsConcurrency = ResultSetConcurrency.ReadOnly,
        fetchSize = 1000
      )
      .transactionally
    val publisher = database.stream(query)
    Source.fromPublisher(publisher).map(toPersistentRepr)
  }

  /** Streams all events whose global ordering id falls in
    * [fromSequenceNr, toSequenceNr], optionally restricted to the given tags;
    * when no tags are supplied every row matches. */
  def currentEvents(
      fromSequenceNr: Long,
      toSequenceNr: Long,
      maybeTags: Option[Set[EventTag]]
  ): Source[PersistentRepr, NotUsed] = {
    val tagFilter = maybeTags match {
      case Some(tags) => tagsFilter(tags)
      case None =>
        // No tags requested: a predicate that accepts every row.
        (_: JournalTable) => true: Rep[Boolean]
    }
    val query = journals
      .filter(_.idForQuery >= fromSequenceNr)
      .filter(_.idForQuery <= toSequenceNr)
      .filter(tagFilter)
      .sortBy(_.idForQuery)
      .result
      .withStatementParameters(
        rsType = ResultSetType.ForwardOnly,
        rsConcurrency = ResultSetConcurrency.ReadOnly,
        fetchSize = 1000
      )
      .transactionally
    val publisher = database.stream(query)
    Source.fromPublisher(publisher).map(toPersistentRepr)
  }
}
| WegenenVerkeer/akka-persistence-postgresql | modules/akka-persistence-pg/src/main/scala/akka/persistence/pg/journal/ReadJournalStore.scala | Scala | mit | 2,169 |
package com.electronwill.macros
import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
/**
* Ensures that the method is executed in the right ExecutionGroup. This is useful for
* non-thread-safe Entity methods (among others) since it will ensure that the method is always
* executed from the same thread as the Entity's updates.
* <p>
* <p1>How it works</p1>
* <ol>
* <li>If not already present, an implicit parameter `callerGroup` of type `ExecutionGroup` is
* added to the declaration of the method.</li>
* <li>The content of the method is modified such that:
* <ul>
* <li>If `(callerGroup eq execGroup)` then the method is executed normally.</li>
* <li>Otherwise the method is submitted to `execGroup` and will be executed as soon as possible.</li>
* </ul>
* </li>
* </ol>
* <p>
* <p1>Requirements to make it work</p1>
* <ul>
* <li>The method must explicitely return Unit.</li>
* <li>A variable or parameterless method `execGroup` of type `ExecutionGroup` must be in scope.
* This is always the case if the method belongs to an instance of Updatable.
* </li>
* </ul>
*
* @author TheElectronWill
*/
@compileTimeOnly("This macro annotation must be expanded by the paradise compiler plugin")
class EnsureInGroup extends StaticAnnotation {
  // Macro-paradise entry point: expansion is delegated to EnsureInGroup.impl.
  def macroTransform(annottees: Any*): Any = macro EnsureInGroup.impl
}
/** Implementation of the @EnsureInGroup macro expansion (see the class scaladoc). */
object EnsureInGroup {
  def impl(c: whitebox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = {
    import c.universe._

    // Rewrites the annotated method so its body runs in `execGroup`: executed
    // inline when the implicit caller group IS execGroup, otherwise re-submitted
    // to execGroup with the same arguments.
    def wrapMethodContent(method: DefDef) = {
      val q"$mods def $name[..$t](...$paramss)(implicit ..$iParams): $result = $expr" =
        method
      val implicitParams = iParams.asInstanceOf[List[ValDef]]
      val normalParams = paramss.asInstanceOf[List[List[ValDef]]]
      val safeResult = ensureSafeResult(result.asInstanceOf[Tree])
      val (newImplicitParams, iParam) = addImplicitParam(implicitParams)
      q"""$mods def $name[..$t](...$paramss)(implicit ..$newImplicitParams): $safeResult = {
        if($iParam eq execGroup) {
          $expr
        } else {
          execGroup.execute(() => $name(...${normalParams.map(_.map(_.name))})(..${newImplicitParams
        .map(
          v =>
            if (iParam.decodedName.toString == v.name.decodedName.toString) iParam
            else v.name) // Pass execGroup as the caller group
      }))
        }
      }
      """
    }

    // Reuses an existing implicit ExecutionGroup parameter, or appends a new
    // `implicit val callerGroup: ExecutionGroup`. Returns the (possibly
    // extended) implicit list plus the name of the caller-group parameter.
    def addImplicitParam(implicitParams: Seq[ValDef]): (Seq[ValDef], TermName) = {
      // NOTE(review): this builds a TypeName from a fully-qualified dotted
      // string, but Ident.name normally carries a simple (undotted) name —
      // confirm this `find` ever matches an existing implicit parameter.
      val execGroupType = TypeName("org.mcphoton.server.ExecutionGroup")
      val intParam =
        implicitParams.find(_.tpt.asInstanceOf[Ident].name == execGroupType)
      intParam match {
        case Some(valDef) => (implicitParams, valDef.name)
        case None =>
          val newParams = implicitParams :+ q"implicit val callerGroup: $execGroupType"
            .asInstanceOf[ValDef]
          (newParams, TermName("callerGroup"))
      }
    }

    // Aborts compilation unless the declared result type is exactly Unit
    // (a deferred re-submission cannot produce a value for the caller).
    def ensureSafeResult(result: Tree): Ident = {
      val isUnit = result match {
        case (id: Ident) => id.name == TypeName("Unit")
        case _ => false
      }
      if (!isUnit) {
        c.abort(result.pos, "Methods annotated with @EnsureInGroup must return Unit")
      }
      Ident(TypeName("Unit"))
    }

    // The annotation is only valid on a single method definition.
    annottees.map(_.tree) match {
      case (m: DefDef) :: Nil => c.Expr[Any](wrapMethodContent(m))
      case _ =>
        c.abort(c.enclosingPosition, "@EnsureInGroup can only be used with methods")
    }
  }
}
| mcphoton/Photon-Server | metaprog/src/main/scala/com/electronwill/macros/EnsureInGroup.scala | Scala | lgpl-3.0 | 3,506 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.cache.redis
import org.beangle.cache.{AbstractCacheManager, Cache}
import org.beangle.commons.io.BinarySerializer
import redis.clients.jedis.JedisPool
/**
* @author chaostone
*/
/**
 * Cache manager backed by a shared Jedis connection pool. Each named cache is a
 * thin [[RedisCache]] view over the same pool, so creating and finding a cache
 * are the same operation; key/value classes are registered with the serializer
 * before use.
 *
 * @author chaostone
 */
class RedisCacheManager(pool: JedisPool, serializer: BinarySerializer, autoCreate: Boolean = true)
  extends AbstractCacheManager(autoCreate) {

  // TTL forwarded to every cache built by this manager; -1 by default
  // (presumably "no expiry" — confirm semantics in RedisCache).
  var ttl: Int = -1

  protected override def newCache[K, V](name: String, keyType: Class[K], valueType: Class[V]): Cache[K, V] =
    buildCache(name, keyType, valueType)

  protected override def findCache[K, V](name: String, keyType: Class[K], valueType: Class[V]): Cache[K, V] =
    buildCache(name, keyType, valueType)

  /** Releases the underlying connection pool. */
  override def destroy(): Unit = {
    pool.destroy()
  }

  // Shared implementation of newCache/findCache: RedisCache instances are
  // stateless views over the pool, so both operations just build a fresh view.
  private def buildCache[K, V](name: String, keyType: Class[K], valueType: Class[V]): Cache[K, V] = {
    registerClass(keyType, valueType)
    new RedisCache(name, pool, serializer, keyType, valueType, ttl)
  }

  // Makes both classes known to the serializer so entries can be (de)serialized.
  private def registerClass(keyType: Class[_], valueType: Class[_]): Unit = {
    serializer.registerClass(keyType)
    serializer.registerClass(valueType)
  }
}
| beangle/cache | redis/src/main/scala/org/beangle/cache/redis/RedisCacheManager.scala | Scala | lgpl-3.0 | 1,757 |
package microtools.metrics
import com.codahale.metrics.{MetricRegistry, Timer}
import microtools.decorators.{FutureDecorator, TryDecorator}
import microtools.logging.{ContextAwareLogger, LoggingContext}
import microtools.{BusinessFailure, BusinessSuccess, BusinessTry}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/** Mixin that wraps Future- and BusinessTry-producing calls with a Dropwizard
  * timer plus structured logging of the elapsed time and outcome. Timer names
  * are derived as "<logger name>.<callId>" unless a timer is supplied.
  */
trait TimedCalls {
  def log: ContextAwareLogger

  def metricRegistry: MetricRegistry

  /** Decorator timing a Future-returning call; the timer is looked up/created
    * in the registry under "<logger name>.<callId>". */
  def timeFuture[T](
      callId: String
  )(implicit ec: ExecutionContext, ctx: LoggingContext): FutureDecorator[T] = {
    val timer = metricRegistry.timer(s"${log.name}.$callId")
    timeFuture(callId, timer)
  }

  /** Decorator timing a Future-returning call with an explicit timer.
    * Logs success at info level and failure (with the exception) at error level. */
  def timeFuture[T](
      callId: String,
      timer: Timer
  )(implicit ec: ExecutionContext, ctx: LoggingContext): FutureDecorator[T] =
    new FutureDecorator[T] {
      override def apply(block: => Future[T]): Future[T] = {
        val timeCtx = timer.time()
        val result = block
        result.onComplete {
          case Success(_) =>
            // Timer context reports elapsed nanoseconds; log as milliseconds.
            val nanos = timeCtx.stop()
            log.info(s"$callId: Success", "millis" -> (nanos / 1000000.0).toString)
          case Failure(e) =>
            val nanos = timeCtx.stop()
            log.error(s"$callId: Internal error", e, "millis" -> (nanos / 1000000.0).toString)
        }
        result
      }
    }

  /** Decorator timing a BusinessTry-returning call; timer named as in timeFuture. */
  def timeTry[T](
      callId: String
  )(implicit ec: ExecutionContext, ctx: LoggingContext): TryDecorator[T] = {
    val timer = metricRegistry.timer(s"${log.name}.$callId")
    timeTry(callId, timer)
  }

  /** Decorator timing a BusinessTry-returning call with an explicit timer.
    * Distinguishes success, business failure (problem) and internal error. */
  def timeTry[T](
      callId: String,
      timer: Timer
  )(implicit ec: ExecutionContext, ctx: LoggingContext): TryDecorator[T] =
    new TryDecorator[T] {
      override def apply(block: => BusinessTry[T]): BusinessTry[T] = {
        val timeCtx = timer.time()
        val result = block
        // NOTE(review): unlike timeFuture, this returns the value of
        // result.onComplete — relies on BusinessTry#onComplete returning a
        // BusinessTry[T] (fluent style); confirm against BusinessTry's API.
        result.onComplete {
          case Success(BusinessSuccess(_)) =>
            val nanos = timeCtx.stop()
            log.info(s"$callId: Success", "millis" -> (nanos / 1000000.0).toString)
          case Success(BusinessFailure(problem)) =>
            val nanos = timeCtx.stop()
            log.info(s"$callId: Problem: $problem", "millis" -> (nanos / 1000000.0).toString)
          case Failure(e) =>
            val nanos = timeCtx.stop()
            log.error(s"$callId: Internal error", e, "millis" -> (nanos / 1000000.0).toString)
        }
      }
    }
}
| 21re/play-error-handling | src/main/scala/microtools/metrics/TimedCalls.scala | Scala | mit | 2,421 |
package com.themillhousegroup.gatsby.test
import com.typesafe.scalalogging.slf4j.{ StrictLogging, Logging, Logger }
/** Test mixin replacing the lazy Logging logger with a Mockito mock, so log
  * calls can be verified (or silenced) in specs. */
trait MockedLogging extends Logging {
  override protected lazy val logger = Logger(org.mockito.Mockito.mock(classOf[org.slf4j.Logger]))
}
/** Same as [[MockedLogging]] but for StrictLogging (strict val, not lazy). */
trait MockedStrictLogging extends StrictLogging {
  override protected val logger = Logger(org.mockito.Mockito.mock(classOf[org.slf4j.Logger]))
}
| themillhousegroup/gatsby | src/test/scala/com/themillhousegroup/gatsby/test/MockedLogging.scala | Scala | mit | 404 |
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package json
import org.specs2.mutable.Specification
/** Specification exercising lift-json's XML <-> JSON conversion (Xml.toJson /
  * Xml.toXml), including transformField-based post-processing of the default
  * conversion rules. */
object XmlExamples extends Specification {
  "XML Examples".title
  import JsonDSL._
  import Xml._
  import scala.xml.{Group, Text}

  "Basic conversion example" in {
    val json = toJson(users1)
    compactRender(json) mustEqual """{"users":{"count":"2","user":[{"disabled":"true","id":"1","name":"Harry"},{"id":"2","name":"David","nickname":"Dave"}]}}"""
  }

  // Default conversion renders everything as strings; fix up "id" to an int.
  "Conversion transformation example 1" in {
    val json = toJson(users1).transformField {
      case JField("id", JString(s)) => JField("id", JInt(s.toInt))
    }
    compactRender(json) mustEqual """{"users":{"count":"2","user":[{"disabled":"true","id":1,"name":"Harry"},{"id":2,"name":"David","nickname":"Dave"}]}}"""
  }

  // A single <user> converts to an object, not a one-element array; force the array.
  "Conversion transformation example 2" in {
    val json = toJson(users2).transformField {
      case JField("id", JString(s)) => JField("id", JInt(s.toInt))
      case JField("user", x: JObject) => JField("user", JArray(x :: Nil))
    }
    compactRender(json) mustEqual """{"users":{"user":[{"id":1,"name":"Harry"}]}}"""
  }

  "Primitive array example" in {
    val xml = <chars><char>a</char><char>b</char><char>c</char></chars>
    compactRender(toJson(xml)) mustEqual """{"chars":{"char":["a","b","c"]}}"""
  }

  "Lotto example which flattens number arrays into encoded string arrays" in {
    // XML has no array notation, so encode number arrays as comma-joined strings.
    def flattenArray(nums: List[JValue]) = JString(nums.map(_.values).mkString(","))

    val printer = new scala.xml.PrettyPrinter(100,2)
    val lotto: JObject = LottoExample.json
    val xml = toXml(lotto.transformField {
      case JField("winning-numbers", JArray(nums)) => JField("winning-numbers", flattenArray(nums))
      case JField("numbers", JArray(nums)) => JField("numbers", flattenArray(nums))
    })

    printer.format(xml(0)) mustEqual printer.format(
      <lotto>
        <id>5</id>
        <winning-numbers>2,45,34,23,7,5,3</winning-numbers>
        <winners>
          <winner-id>23</winner-id>
          <numbers>2,45,34,23,3,5</numbers>
        </winners>
        <winners>
          <winner-id>54</winner-id>
          <numbers>52,3,12,11,18,22</numbers>
        </winners>
      </lotto>)
  }

  // Namespace prefixes (b:band) are preserved as part of the field name.
  "Band example with namespaces" in {
    val json = toJson(band)
    json mustEqual parse("""{
      "b:band":{
        "name":"The Fall",
        "genre":"rock",
        "influence":"",
        "playlists":{
          "playlist":[{
            "name":"hits",
            "song":["Hit the north","Victoria"]
          },{
            "name":"mid 80s",
            "song":["Eat your self fitter","My new house"]
          }]
        }
      }
    }""")
  }

  val band =
    <b:band>
      <name>The Fall</name>
      <genre>rock</genre>
      <influence/>
      <playlists>
        <playlist name="hits">
          <song>Hit the north</song>
          <song>Victoria</song>
        </playlist>
        <playlist name="mid 80s">
          <song>Eat your self fitter</song>
          <song>My new house</song>
        </playlist>
      </playlists>
    </b:band>

  "Grouped text example" in {
    val json = toJson(groupedText)
    compactRender(json) mustEqual """{"g":{"group":"foobar","url":"http://example.com/test"}}"""
  }

  val users1 =
    <users count="2">
      <user disabled="true">
        <id>1</id>
        <name>Harry</name>
      </user>
      <user>
        <id>2</id>
        <name nickname="Dave">David</name>
      </user>
    </users>

  val users2 =
    <users>
      <user>
        <id>1</id>
        <name>Harry</name>
      </user>
    </users>

  val url = "test"

  val groupedText =
    <g>
      <group>{ Group(List(Text("foo"), Text("bar"))) }</group>
      <url>http://example.com/{ url }</url>
    </g>

  // Examples by Jonathan Ferguson. See http://groups.google.com/group/liftweb/browse_thread/thread/f3bdfcaf1c21c615/c311a91e44f9c178?show_docid=c311a91e44f9c178
  // This example shows how to use a transformation function to correct JSON generated by
  // default conversion rules. The transformation function 'attrToObject' makes following conversion:
  // { ..., "fieldName": "", "attrName":"someValue", ...} ->
  // { ..., "fieldName": { "attrName": f("someValue") }, ... }
  def attrToObject(fieldName: String, attrName: String, f: JString => JValue)(json: JValue) = json.transformField {
    case JField(n, v: JString) if n == attrName => JField(fieldName, JObject(JField(n, f(v)) :: Nil))
    case JField(n, JString("")) if n == fieldName => JField(n, JNothing)
  } transformField {
    case JField(n, x: JObject) if n == attrName => JField(fieldName, x)
  }

  "Example with multiple attributes, multiple nested elements " in {
    val a1 = attrToObject("stats", "count", s => JInt(s.s.toInt)) _
    val a2 = attrToObject("messages", "href", identity) _
    val json = a1(a2(toJson(messageXml1)))
    (json diff parse(expected1)) mustEqual Diff(JNothing, JNothing, JNothing)
  }

  "Example with one attribute, one nested element " in {
    val a = attrToObject("stats", "count", s => JInt(s.s.toInt)) _
    compactRender(a(toJson(messageXml2))) mustEqual expected2
    compactRender(a(toJson(messageXml3))) mustEqual expected2
  }

  val messageXml1 =
    <message expiry_date="20091126" text="text" word="ant" self="me">
      <stats count="0"></stats>
      <messages href="https://domain.com/message/ant"></messages>
    </message>

  val expected1 = """{"message":{"expiry_date":"20091126","word":"ant","text":"text","self":"me","stats":{"count":0},"messages":{"href":"https://domain.com/message/ant"}}}"""

  val messageXml2 =
    <message expiry_date="20091126">
      <stats count="0"></stats>
    </message>

  val messageXml3 = <message expiry_date="20091126"><stats count="0"></stats></message>

  val expected2 = """{"message":{"expiry_date":"20091126","stats":{"count":0}}}"""
}
| lift/framework | core/json/src/test/scala/net/liftweb/json/XmlExamples.scala | Scala | apache-2.0 | 6,394 |
import sbt._
/** Shared library dependencies for the build; versions are pinned in one place. */
object Dependencies {
  private val scalaTestVersion         = "3.0.1"
  private val specs2Version            = "3.8.9"
  private val parserCombinatorsVersion = "1.0.5"
  private val scalazVersion            = "7.2.10"

  lazy val scalaTest              = "org.scalatest"          %% "scalatest"                % scalaTestVersion
  lazy val specs2                 = "org.specs2"             %% "specs2-core"              % specs2Version
  lazy val scalaParserCombinators = "org.scala-lang.modules" %% "scala-parser-combinators" % parserCombinatorsVersion
  lazy val scalaz                 = "org.scalaz"             %% "scalaz-core"              % scalazVersion
}
| MelvinWM/Various | Calculator/project/Dependencies.scala | Scala | mit | 324 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.checkpoint.kafka
import java.util.Collections
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicReference
import com.google.common.annotations.VisibleForTesting
import com.google.common.base.Preconditions
import org.apache.samza.checkpoint.{Checkpoint, CheckpointManager}
import org.apache.samza.config.{Config, JobConfig}
import org.apache.samza.container.TaskName
import org.apache.samza.serializers.Serde
import org.apache.samza.metrics.MetricsRegistry
import org.apache.samza.serializers.CheckpointSerde
import org.apache.samza.system._
import org.apache.samza.system.kafka.KafkaStreamSpec
import org.apache.samza.util.{ExponentialSleepStrategy, Logging}
import org.apache.samza.{Partition, SamzaException}
import scala.collection.mutable
/**
* A [[CheckpointManager]] that uses a compacted Kafka topic-partition to store the [[Checkpoint]] corresponding to
* a task.
*
* <p> The Kafka partition provides an abstraction of a log to which all [[Checkpoint]]s are appended to. The
* checkpoints written to the log are keyed by their corresponding taskName.
*
* <p> This class is thread safe for writing but not for reading checkpoints. This is currently OK since checkpoints
* are only read on the main thread.
*/
class KafkaCheckpointManager(checkpointSpec: KafkaStreamSpec,
systemFactory: SystemFactory,
validateCheckpoint: Boolean,
config: Config,
metricsRegistry: MetricsRegistry,
checkpointMsgSerde: Serde[Checkpoint] = new CheckpointSerde,
checkpointKeySerde: Serde[KafkaCheckpointLogKey] = new KafkaCheckpointLogKeySerde) extends CheckpointManager with Logging {
var MaxRetryDurationInMillis: Long = TimeUnit.MINUTES.toMillis(15)
info(s"Creating KafkaCheckpointManager for checkpointTopic:$checkpointTopic, systemName:$checkpointSystem " +
s"validateCheckpoints:$validateCheckpoint")
val checkpointSystem: String = checkpointSpec.getSystemName
val checkpointTopic: String = checkpointSpec.getPhysicalName
val checkpointSsp: SystemStreamPartition = new SystemStreamPartition(checkpointSystem, checkpointTopic, new Partition(0))
val expectedGrouperFactory: String = new JobConfig(config).getSystemStreamPartitionGrouperFactory
val systemConsumer = systemFactory.getConsumer(checkpointSystem, config, metricsRegistry)
val systemAdmin = systemFactory.getAdmin(checkpointSystem, config)
var taskNames: Set[TaskName] = Set[TaskName]()
var taskNamesToCheckpoints: Map[TaskName, Checkpoint] = _
val producerRef: AtomicReference[SystemProducer] = new AtomicReference[SystemProducer](getSystemProducer())
val producerCreationLock: Object = new Object
  /**
   * Creates (and optionally validates) the checkpoint stream prior to start.
   * Must be called before start(); starts the system admin as a side effect.
   */
  override def createResources(): Unit = {
    Preconditions.checkNotNull(systemAdmin)
    systemAdmin.start()
    info(s"Creating checkpoint stream: ${checkpointSpec.getPhysicalName} with " +
      s"partition count: ${checkpointSpec.getPartitionCount}")
    systemAdmin.createStream(checkpointSpec)
    if (validateCheckpoint) {
      info(s"Validating checkpoint stream")
      systemAdmin.validateStream(checkpointSpec)
    }
  }
  /**
   * @inheritdoc
   *
   * Starts the producer, then registers and starts a consumer positioned at the
   * oldest offset of the checkpoint partition so all checkpoints can be replayed.
   */
  override def start(): Unit = {
    // register and start a producer for the checkpoint topic
    info("Starting the checkpoint SystemProducer")
    producerRef.get().start()

    // register and start a consumer for the checkpoint topic
    val oldestOffset = getOldestOffset(checkpointSsp)
    info(s"Starting the checkpoint SystemConsumer from oldest offset $oldestOffset")
    systemConsumer.register(checkpointSsp, oldestOffset)
    systemConsumer.start()
    // the consumer will be closed after first time reading the checkpoint
  }
  /**
   * @inheritdoc
   *
   * Registers the task with the producer and remembers it so that
   * readLastCheckpoint/writeCheckpoint accept it later.
   */
  override def register(taskName: TaskName) {
    debug(s"Registering taskName: $taskName")
    producerRef.get().register(taskName.getTaskName)
    taskNames += taskName
  }
  /**
   * @inheritdoc
   *
   * Returns the last checkpoint written for the task, or null if none exists.
   * All checkpoints are read from the topic once, on the first call; the
   * consumer is then stopped and subsequent calls hit the cached map.
   * Not thread-safe: expected to be called from the main thread only.
   */
  override def readLastCheckpoint(taskName: TaskName): Checkpoint = {
    if (!taskNames.contains(taskName)) {
      throw new SamzaException(s"Task: $taskName is not registered with this CheckpointManager")
    }

    info(s"Reading checkpoint for taskName $taskName")

    if (taskNamesToCheckpoints == null) {
      info("Reading checkpoints for the first time")
      taskNamesToCheckpoints = readCheckpoints()
      // Stop the system consumer since we only need to read checkpoints once
      info("Stopping system consumer.")
      systemConsumer.stop()
    }

    val checkpoint: Checkpoint = taskNamesToCheckpoints.getOrElse(taskName, null)

    info(s"Got checkpoint state for taskName - $taskName: $checkpoint")
    checkpoint
  }
  /**
   * @inheritdoc
   *
   * Serializes the checkpoint (keyed by task name and grouper factory) and
   * writes it to the checkpoint topic, flushing to confirm delivery. Failed
   * sends are retried with exponential backoff (capped at 10s per sleep) for
   * up to MaxRetryDurationInMillis; on each failure the SystemProducer is
   * recreated (workaround for SAMZA-1393). Throws if all retries fail or if
   * key/checkpoint serialization fails.
   */
  override def writeCheckpoint(taskName: TaskName, checkpoint: Checkpoint) {
    val key = new KafkaCheckpointLogKey(KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE, taskName, expectedGrouperFactory)
    val keyBytes = try {
      checkpointKeySerde.toBytes(key)
    } catch {
      case e: Exception => throw new SamzaException(s"Exception when writing checkpoint-key for $taskName: $checkpoint", e)
    }
    val msgBytes = try {
      checkpointMsgSerde.toBytes(checkpoint)
    } catch {
      case e: Exception => throw new SamzaException(s"Exception when writing checkpoint for $taskName: $checkpoint", e)
    }

    val envelope = new OutgoingMessageEnvelope(checkpointSsp, keyBytes, msgBytes)

    // Used for exponential backoff retries on failure in sending messages through producer.
    val startTimeInMillis: Long = System.currentTimeMillis()
    var sleepTimeInMillis: Long = 1000
    val maxSleepTimeInMillis: Long = 10000
    var producerException: Exception = null
    while ((System.currentTimeMillis() - startTimeInMillis) <= MaxRetryDurationInMillis) {
      // Re-read the reference each attempt: another thread may have swapped the producer.
      val currentProducer = producerRef.get()
      try {
        currentProducer.send(taskName.getTaskName, envelope)
        currentProducer.flush(taskName.getTaskName) // make sure it is written
        debug(s"Wrote checkpoint: $checkpoint for task: $taskName")
        return
      } catch {
        case exception: Exception => {
          producerException = exception
          warn(s"Retrying failed checkpoint write to key: $key, checkpoint: $checkpoint for task: $taskName", exception)
          // TODO: Remove this producer recreation logic after SAMZA-1393.
          val newProducer: SystemProducer = getSystemProducer()
          producerCreationLock.synchronized {
            // CAS + lock ensures only one thread replaces the failed producer;
            // the loser discards its freshly created instance.
            if (producerRef.compareAndSet(currentProducer, newProducer)) {
              info(s"Stopping the checkpoint SystemProducer")
              currentProducer.stop()
              info(s"Recreating the checkpoint SystemProducer")
              // SystemProducer contract is that clients call register(taskName) followed by start
              // before invoking writeCheckpoint, readCheckpoint API. Hence list of taskName are not
              // expected to change during the producer recreation.
              for (taskName <- taskNames) {
                debug(s"Registering the taskName: $taskName with SystemProducer")
                newProducer.register(taskName.getTaskName)
              }
              newProducer.start()
            } else {
              info("Producer instance was recreated by other thread. Retrying with it.")
              newProducer.stop()
            }
          }
        }
      }
      sleepTimeInMillis = Math.min(sleepTimeInMillis * 2, maxSleepTimeInMillis)
      Thread.sleep(sleepTimeInMillis)
    }
    throw new SamzaException(s"Exception when writing checkpoint: $checkpoint for task: $taskName.", producerException)
  }
  /**
   * @inheritdoc
   *
   * Deletes the checkpoint stream contents via the system admin.
   */
  override def clearCheckpoints(): Unit = {
    info("Clear checkpoint stream %s in system %s" format(checkpointTopic, checkpointSystem))
    systemAdmin.clearStream(checkpointSpec)
  }
  /** Stops the system admin and the current producer. The consumer is not
    * stopped here: it is already stopped after the first checkpoint read. */
  override def stop(): Unit = {
    info ("Stopping system admin.")
    systemAdmin.stop()

    info ("Stopping system producer.")
    producerRef.get().stop()

    info("CheckpointManager stopped.")
  }
  // Builds a fresh producer for the checkpoint system; overridable in tests.
  @VisibleForTesting
  def getSystemProducer(): SystemProducer = {
    systemFactory.getProducer(checkpointSystem, config, metricsRegistry)
  }
/**
* Returns the checkpoints from the log.
*
* <p> The underlying [[SystemConsumer]] is stateful and tracks its offsets. Hence, each invocation of this method
* will read the log from where it left off previously. This allows for multiple efficient calls to [[readLastCheckpoint()]]
*/
private def readCheckpoints(): Map[TaskName, Checkpoint] = {
val checkpoints = mutable.Map[TaskName, Checkpoint]()
val iterator = new SystemStreamPartitionIterator(systemConsumer, checkpointSsp)
var numMessagesRead = 0
while (iterator.hasNext) {
val checkpointEnvelope: IncomingMessageEnvelope = iterator.next
val offset = checkpointEnvelope.getOffset
numMessagesRead += 1
if (numMessagesRead % 1000 == 0) {
info(s"Read $numMessagesRead from topic: $checkpointTopic. Current offset: $offset")
}
val keyBytes = checkpointEnvelope.getKey.asInstanceOf[Array[Byte]]
if (keyBytes == null) {
throw new SamzaException("Encountered a checkpoint message with null key. Topic:$checkpointTopic " +
s"Offset:$offset")
}
val checkpointKey = try {
checkpointKeySerde.fromBytes(keyBytes)
} catch {
case e: Exception => if (validateCheckpoint) {
throw new SamzaException(s"Exception while serializing checkpoint-key. " +
s"Topic: $checkpointTopic Offset: $offset", e)
} else {
warn(s"Ignoring exception while serializing checkpoint-key. Topic: $checkpointTopic Offset: $offset", e)
null
}
}
if (checkpointKey != null) {
// If the grouper in the key is not equal to the configured grouper, error out.
val actualGrouperFactory = checkpointKey.getGrouperFactoryClassName
if (!expectedGrouperFactory.equals(actualGrouperFactory)) {
warn(s"Grouper mismatch. Configured: $expectedGrouperFactory Actual: $actualGrouperFactory ")
if (validateCheckpoint) {
throw new SamzaException("SSPGrouperFactory in the checkpoint topic does not match the configured value" +
s"Configured value: $expectedGrouperFactory; Actual value: $actualGrouperFactory Offset: $offset")
}
}
// If the type of the key is not KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE, it can safely be ignored.
if (KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE.equals(checkpointKey.getType)) {
val checkpointBytes = checkpointEnvelope.getMessage.asInstanceOf[Array[Byte]]
val checkpoint = try {
checkpointMsgSerde.fromBytes(checkpointBytes)
} catch {
case e: Exception => throw new SamzaException(s"Exception while serializing checkpoint-message. " +
s"Topic: $checkpointTopic Offset: $offset", e)
}
checkpoints.put(checkpointKey.getTaskName, checkpoint)
}
}
}
info(s"Read $numMessagesRead messages from system:$checkpointSystem topic:$checkpointTopic")
checkpoints.toMap
}
/**
* Returns the oldest available offset for the provided [[SystemStreamPartition]].
*/
private def getOldestOffset(ssp: SystemStreamPartition): String = {
val topic = ssp.getSystemStream.getStream
val partition = ssp.getPartition
val metaDataMap = systemAdmin.getSystemStreamMetadata(Collections.singleton(topic))
val checkpointMetadata: SystemStreamMetadata = metaDataMap.get(topic)
if (checkpointMetadata == null) {
throw new SamzaException(s"Got null metadata for system:$checkpointSystem, topic:$topic")
}
val partitionMetaData = checkpointMetadata.getSystemStreamPartitionMetadata.get(partition)
if (partitionMetaData == null) {
throw new SamzaException(s"Got a null partition metadata for system:$checkpointSystem, topic:$topic")
}
partitionMetaData.getOldestOffset
}
}
| Swrrt/Samza | samza-kafka/src/main/scala/org/apache/samza/checkpoint/kafka/KafkaCheckpointManager.scala | Scala | apache-2.0 | 13,097 |
package tests.rescala
/**
 * Tests for higher-order reactives: signals that contain other signals or events,
 * dereferencing ("flattening") them, and switching dependencies dynamically.
 * Each test runs against every available propagation engine via `allEngines`.
 */
class HigherOrderTestSuite extends RETests {
  // A Signal[Signal[Int]] can be read by applying .now twice.
  allEngines("basic Higher Order Signal can Be Accessed"){ engine => import engine._
    val v = Var(42)
    val s1: Signal[Int] = v.map(identity)
    val s2: Signal[Signal[Int]] = dynamic() { t => s1 }
    assert(s2.now.now == 42)
    v.set(0)
    assert(s2.now.now == 0)
  }
  // flatten collapses Signal[Signal[Int]] into a plain Signal[Int].
  allEngines("basic Higher Order Signal can Be Defereferenced"){ engine => import engine._
    val v = Var(42)
    val s1: Signal[Int] = v.map(identity)
    val s2: Signal[Signal[Int]] = dynamic() { t => s1 }
    val sDeref = s2.flatten
    assert(sDeref.now == 42)
    v.set(0)
    assert(sDeref.now == 0)
  }
  // Changing the inner value fires the flattened signal but not the outer one.
  allEngines("basic Higher Order Signal deref Fires Change"){ engine => import engine._
    val v = Var(42)
    val sValue: Signal[Int] = v.map(identity)
    val sHigher: Signal[Signal[Int]] = dynamic() { t => sValue }
    val sDeref = sHigher.flatten
    var sDerefChanged = false
    var sHigherChanged = false
    sDeref.change += { _ => sDerefChanged = true }
    sHigher.change += { _ => sHigherChanged = true }
    assert(!sHigherChanged && !sDerefChanged)
    v.set(0) // update
    assert(!sHigherChanged) // higher does not change
    assert(sDerefChanged) // deref DOES change
  }
  // Swapping which inner signal the selector points at fires both outer and flattened signals.
  allEngines("basic Higher Order Signal higher Order Fires Change"){ engine => import engine._
    val v1 = Var(42)
    val v2 = Var(123)
    val s1: Signal[Int] = v1.map(identity)
    val s2: Signal[Int] = v2.map(identity)
    val selector: Var[Signal[Int]] = Var(s1)
    val sHigher = selector.map(identity)
    val sDeref = sHigher.flatten
    var sDerefChanged = false
    var sHigherChanged = false
    sDeref.change += { _ => sDerefChanged = true }
    sHigher.change += { _ => sHigherChanged = true }
    // 1. Unrelated value changes, no updates
    v2.set(1234)
    assert(!sDerefChanged)
    assert(!sHigherChanged)
    // 2. Related value changes, only the deref signal changes
    v1.set(321)
    assert(sDerefChanged)
    assert(!sHigherChanged)
    sDerefChanged = false
    // 3. Selector changes, both signals fire changes
    selector() = s2
    assert(sDerefChanged)
    assert(sHigherChanged)
    assert(sDeref.now == 1234)
  }
  // Triple nesting: flattening must be applied once per nesting level to observe changes.
  allEngines("order3 Signal"){ engine => import engine._
    val v = Var(42)
    val s0: Signal[Int] = v.map(identity)
    val s1: Signal[Signal[Int]] = Signals.static() { t => s0 }
    val s2: Signal[Signal[Signal[Int]]] = Signals.static() { t => s1 }
    val sDeref1 = s1.flatten
    val sDeref2 = s2.flatten.flatten
    val sDeref2_a = s2.flatten
    val sDeref2_b = sDeref2_a.flatten
    var sDeref1Changed = false
    var sDeref2Changed = false
    var sDeref2_aChanged = false
    var sDeref2_bChanged = false
    sDeref1.change += { _ => sDeref1Changed = true }
    sDeref2.change += { _ => sDeref2Changed = true }
    sDeref2_a.change += { _ => sDeref2_aChanged = true }
    sDeref2_b.change += { _ => sDeref2_bChanged = true }
    v.set(0)
    assert(sDeref1Changed)
    assert(sDeref2Changed)
    assert(!sDeref2_aChanged) // 2_a is not completely dereferenced, and thus did not change
    assert(sDeref2_bChanged)
    assert(s2.now.now.now == 0)
  }
  // Selecting a signal out of a list by a computed index, then flattening the selection.
  allEngines("list Of Signals Section"){ engine => import engine._
    val tick = Evt[Unit]
    val count = tick.iterate(0)(_ + 1)
    val doubled = count.map(_ * 2)
    val mod2 = count.map(_ % 2)
    val listOfSignals: Signal[List[Signal[Int]]] = Signals.static() { t => List(doubled, count) }
    val selected: Signal[Signal[Int]] = Signal { listOfSignals()(mod2()) }
    val dereferenced = selected.flatten
    var dereferencedChanged = false
    dereferenced.changed += { _ => dereferencedChanged = true }
    tick(())
    assert(count.now == 1)
    assert(doubled.now ==2)
    assert(mod2.now == 1)
    assert(selected.now == count)
    assert(dereferencedChanged)
    dereferencedChanged = false
    assert(dereferenced.now == 1)
    tick(())
    assert(count.now == 2)
    assert(doubled.now ==4)
    assert(mod2.now == 0)
    assert(selected.now == doubled)
    assert(dereferencedChanged)
    dereferencedChanged = false
    assert(dereferenced.now == 4)
  }
  // Flattening a Signal[Evt[Int]]: only the currently-selected event source is observed.
  allEngines("unwrap Event"){ engine => import engine._
    val e1 = Evt[Int]
    val e2 = Evt[Int]
    val eventSelector = Var(e1)
    val selected = eventSelector.map(identity)
    val unwrapped = selected.flatten
    var lastEvent = -1
    unwrapped += { lastEvent = _ }
    e1(1)
    assert(lastEvent == 1)
    e2(2)
    assert(lastEvent == 1)
    eventSelector() = e2 //select new event source
    e2(3)
    assert(lastEvent == 3)
    e1(4)
    assert(lastEvent == 3)
    e2(5)
    assert(lastEvent == 5)
  }
  // A dynamic signal may switch between dependencies at different topological levels;
  // glitch freedom must hold in both the direct and the flattened (higher-order) case.
  allEngines("dynamic Level"){ engine => import engine._
    val v1 = Var(1)
    val derived = v1.map(identity)
    val level1 = v1.map(_ + 1)
    val level2 = level1.map(_ + 1)
    val level3 = level2.map(_ + 1)
    val combined = Signal { if (v1() == 10) level3() else derived() }
    var log = List[Int]()
    combined.changed += (log ::= _)
    v1.set(10)
    assert(log == List(13))
    v1.set(1)
    assert(log == List(1, 13))
    val higherOrder = Signal { if (v1() == 10) level3 else derived }
    val flattened = higherOrder.flatten
    var higherOrderLog = List[Int]()
    flattened.changed += (higherOrderLog ::= _)
    v1.set(10)
    assert(higherOrderLog == List(13))
    v1.set(1)
    assert(higherOrderLog == List(1, 13))
    assert(log == List(1, 13, 1, 13))
  }
  // The selected event source changes within the same propagation turn as the event
  // that triggers it; the newly selected source must not fire retroactively.
  allEngines("wrapped Event"){ engine => import engine._
    val e1 = Evt[Int]
    val condition = e1.latest(-1)
    val level1Event = e1.map(_ => "level 1")
    val level2Event = level1Event.map(_ => "level 2")
    val dynamicSignal = Signal { if (condition() == 1) level1Event else level2Event }
    val unwrapped = dynamicSignal.flatten
    var log = List[String]()
    unwrapped += (log ::= _)
    e1.apply(0)
    assert(log == List("level 2"))
    e1.apply(1)
    assert(log == List("level 1", "level 2"))
  }
  // Same as above but both candidate event sources sit on the same topological level.
  allEngines("wrapped Event Same Level"){ engine => import engine._
    val e1 = Evt[Int]
    val level2Condition = e1.latest(-1).map(identity)
    val level1EventA = e1.map(_ => "A")
    val level1EventB = e1.map(_ => "B")
    val dynamicSignal = Signal { if (level2Condition() == 1) level1EventA else level1EventB }
    val unwrapped = dynamicSignal.flatten
    var log = List[String]()
    unwrapped += (log ::= _)
    e1.apply(0)
    assert(log == List("B"))
    e1.apply(1)
    assert(log == List("A", "B"))
  }
  //  allEngines("flatten Events"){ engine => import engine._
  //    val e1 = Evt[Event[Int]]
  //    val f1 = e1.flatten
  //    val res = f1.list()
  //    val e2 = Evt[Int]
  //    val e3 = Evt[Int]
  //    e2(10)
  //    e3(10)
  //
  //    assert(res.now === Nil)
  //    e1(e2)
  //    assert(res.now === Nil)
  //    e3(10)
  //    assert(res.now === Nil)
  //    e2(10)
  //    assert(res.now === List(10))
  //    e1(e3)
  //    assert(res.now === List(10))
  //    e2(20)
  //    assert(res.now === List(10))
  //    e3(30)
  //    assert(res.now === List(30, 10))
  //
  //  }
  // flatten over standard containers of signals: Seq, Set, Array, Option.
  allEngines("flatten Signal Seq"){ engine => import engine._
    val v = Var.empty[Seq[Signal[Int]]]
    var count = 0
    val v1, v2, v3 = {count += 1 ; Var(count) }
    v.set(List(v1, v2, v3))
    val flat = v.flatten
    assert(flat.now === Seq(1,2,3), "flatten fails")
    v2.set(100)
    assert(flat.now === Seq(1,100,3), "flatten fails 2")
    v.set(List(v3, v2))
    assert(flat.now === Seq(3,100), "flatten fails 3")
  }
  allEngines("flatten Signal Set"){ engine => import engine._
    val v = Var.empty[Set[Var[Int]]]
    var count = 0
    val v1, v2, v3 = {count += 1 ; Var(count) }
    v.set(Set(v1, v2, v3))
    val flat = v.flatten
    assert(flat.now === Set(1,2,3), "flatten fails")
    v2.set(100)
    assert(flat.now === Set(1,100,3), "flatten fails 2")
    v.set(Set(v3, v2))
    assert(flat.now === Set(3,100), "flatten fails 3")
  }
  allEngines("flatten Signal Array"){ engine => import engine._
    val v = Var.empty[Array[Var[Int]]]
    var count = 0
    val v1, v2, v3 = {count += 1 ; Var(count) }
    v.set(Array(v1, v2, v3))
    val flat = v.flatten
    assert(flat.now === Array(1,2,3), "flatten fails")
    v2.set(100)
    assert(flat.now === Array(1,100,3), "flatten fails 2")
    v.set(Array(v3, v2))
    assert(flat.now === Array(3,100), "flatten fails 3")
  }
  allEngines("flatten Signal Option"){ engine => import engine._
    val v = Var(Option.empty[Var[Int]])
    val w = Var(1)
    val flat: Signal[Option[Int]] = v.flatten
    assert(flat.now === None, "flatten fails")
    v.set(Some(w))
    assert(flat.now === Some(1), "flatten fails 2")
    w.set(100)
    assert(flat.now === Some(100), "flatten fails 3")
  }
  // .change signals created inside another signal's reevaluation start empty and
  // then report (old, new) pairs; static (sm) and dynamic (sd) variants must agree.
  allEngines("create changes during reevaluation"){ engine => import engine._
    val v = Var(1)
    val mapped = v.map(_ + 0)
    val sm = Signal { mapped.change.apply() }
    val sd = dynamic() {t => t.depend(mapped.change(t)) }
    //intercept[NoSuchElementException](sm.now)
    assert(sm.now.isEmpty)
    assert(sd.now.isEmpty)
    v.set(2)
    assert(sm.now.get.pair == 1 -> 2)
    assert(sd.now.get.pair == 1 -> 2)
    v.set(3)
    assert(sm.now.get.pair == 2 -> 3)
    assert(sd.now.get.pair == 2 -> 3)
  }
  // A fold (list) created inside an ongoing transaction must observe the value
  // admitted in that same transaction.
  allEngines("create folds during tx"){ engine => import engine._
    val e = Evt[String]
    val listed = transaction(e) { implicit t =>
      e.admit("hello")
      e.list()
    }
    assert(listed.now == List("hello"))
  }
}
| volkc/REScala | Tests/shared/src/test/scala/tests/rescala/HigherOrderTestSuite.scala | Scala | apache-2.0 | 9,507 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.filter
import org.geotools.filter.visitor.DefaultFilterVisitor
import org.opengis.filter.{And, Filter}

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
// This class helps us split a Filter into pieces if there are ANDs at the top.
class AndSplittingFilter extends DefaultFilterVisitor {
  /**
   * Flattens a top-level `And` into the (recursively split) sequence of its children.
   *
   * Uses the explicit `.asScala` decorator instead of the deprecated implicit
   * `JavaConversions`, making the Java-to-Scala collection conversion visible.
   * The result is a Seq[Filter], returned as AnyRef to satisfy the visitor contract.
   */
  override def visit(filter: And, data: scala.Any): AnyRef = {
    filter.getChildren.asScala.flatMap { subfilter =>
      this.visit(subfilter, data)
    }
  }

  /**
   * Splits an arbitrary filter: AND nodes are decomposed recursively,
   * any other filter is returned as a single-element sequence.
   */
  def visit(filter: Filter, data: scala.Any): Seq[Filter] = {
    filter match {
      case a: And => visit(a, data).asInstanceOf[Seq[Filter]]
      case _      => Seq(filter)
    }
  }
}
| locationtech/geomesa | geomesa-filter/src/main/scala/org/locationtech/geomesa/filter/AndSplittingFilter.scala | Scala | apache-2.0 | 1,173 |
// https://leetcode.com/problems/powx-n
object Solution {
  /**
   * Computes x^n in O(log |n|) via fast exponentiation.
   *
   * The exponent is widened to Long before negation: the original relied on
   * `-Int.MinValue` overflowing back to Int.MinValue and the helper's halving
   * of a negative exponent happening to produce the right answer. Widening
   * makes the negative case explicit and overflow-free.
   */
  def myPow(x: Double, n: Int): Double = {
    val m = n.toLong
    if (m < 0) 1 / loop(x, -m) else loop(x, m)
  }

  // Original helper retained unchanged for source compatibility with existing callers.
  def h(x: Double, n: Int, z: Double = 1): Double =
    if (n == 0) z else h(x * x, n / 2, if (n % 2 == 0) z else z * x)

  /** Tail-recursive square-and-multiply: folds the exponent's binary digits into z. */
  @annotation.tailrec
  private def loop(x: Double, n: Long, z: Double = 1): Double =
    if (n == 0) z else loop(x * x, n / 2, if (n % 2 == 0) z else z * x)
}
| airtial/Codegames | leetcode/050-powx-n.scala | Scala | gpl-2.0 | 267 |
package heartbeat_multijvm_tests
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2015
* See LICENSE.txt for details.
*/
import akka.actor.ActorRef
import scala.concurrent.duration._
import org.scalatest.FlatSpecLike
import com.typesafe.config.ConfigFactory
import akka.remote.testkit.MultiNodeConfig
import ru.maizy.dev.heartbeat.actor.{ Statistics, GetStatistics, StatActors, GetStatActors }
import ru.maizy.dev.heartbeat.role.Stat
/**
 * Multi-node test topology: two "stat" role nodes, one JVM each.
 * The first stat node also serves as the cluster seed node.
 */
object StatBeatsSpecMultiNodeConfig extends BaseConfig {
  val statNode0 = role("stat0")
  val statNode1 = role("stat1")
  val allNodes = Seq(statNode0, statNode1)
  val seedNode = statNode0
  // Every node in this spec joins the cluster with the "stat" role.
  nodeConfig(allNodes: _*)(ConfigFactory.parseString(
    """
      akka.cluster.roles = ["stat"]
    """
  ))
}
// name convention {Spec}MultiJvm{NodeName}
// Concrete per-JVM entry points picked up by the sbt-multi-jvm plugin; one class per node.
class StatActorManagedBySupervisorSpecMultiJvmStat0 extends StatActorManagedBySupervisorSpec
class StatActorManagedBySupervisorSpecMultiJvmStat1 extends StatActorManagedBySupervisorSpec
//TODO: decrease beats delay to speed up tests
/**
 * Multi-JVM spec verifying that a supervisor-managed StatActor starts with the
 * supervisor, stays idle without siblings, and receives heartbeats once a second
 * stat node joins the cluster. Tests are order-dependent: later tests rely on
 * cluster state established by earlier ones.
 */
abstract class StatActorManagedBySupervisorSpec(config: MultiNodeConfig)
  extends MultiNodeBaseSpec(config)
  with FlatSpecLike
  with SupervisorTestUtils
{
  import StatBeatsSpecMultiNodeConfig._
  def this() = {
    this(StatBeatsSpecMultiNodeConfig)
  }
  implicit val defaultTimeout = 5.seconds
  implicit val dispatcher = system.dispatcher
  val expectedBeatDelay = 2.seconds // TODO: set for tested actors, when supported
  override def initialParticipants: Int = roles.size
  // Suppress dead-letter noise from actors shut down during cluster churn.
  muteDeadLetters(classOf[Any])(system)
  // Captured in the first test, reused by the second (hence the order dependency).
  var maybeFirstStatActor: Option[ActorRef] = None
  "StatActor managed by supervisor" should "launch on supervisor startup" in within(defaultTimeout) {
    runOn(statNode0) {
      new Stat(1) startUp(system, cluster)
    }
    joinToCluster(Seq(statNode0), seedNode)
    awaitClusterReady(Seq("stat"))
    runOnJoinedNodes {
      // The supervisor should report exactly one managed StatActor.
      val supervisor = getSupervisorActorForNode(statNode0).await
      supervisor ! GetStatActors
      val statActors = expectMsgType[StatActors]
      statActors.me shouldBe supervisor
      statActors.statActorsRefs should have size 1
      maybeFirstStatActor = Some(statActors.statActorsRefs.head)
    }
  }
  it should "do nothing if there are no siblings" in within(defaultTimeout) {
    runOnJoinedNodes {
      // With no sibling stat nodes in the cluster, no beats should have arrived.
      val statActor = maybeFirstStatActor.get
      statActor ! GetStatistics
      expectMsg(Statistics(totalBeatsReceived = 0))
    }
  }
  it should "recive beats from siblings of other nodes" in within(defaultTimeout + expectedBeatDelay * 3) {
    runOn(statNode1) {
      new Stat(1) startUp(system, cluster)
    }
    joinToCluster(Seq(statNode1), seedNode)
    awaitClusterReady(Seq("stat", "stat"))
    runOnJoinedNodes {
      val secondSupervisor = getSupervisorActorForNode(statNode1).await
      secondSupervisor ! GetStatActors
      val statActors = expectMsgType[StatActors]
      statActors.me shouldBe secondSupervisor
      statActors.statActorsRefs should have size 1
      val secondStatActor = statActors.statActorsRefs.head
      // Poll until enough heartbeats from the sibling node have been counted.
      awaitAssert(
        {
          secondStatActor ! GetStatistics
          val statistics = expectMsgType[Statistics]
          statistics.totalBeatsReceived should be > BigInt(2)
        },
        expectedBeatDelay * 3
      )
    }
  }
}
| maizy/akka-cluster-heartbeat | src/multi-jvm/scala/heartbeat_multijvm_tests/StatActorManagedBySupervisorSpec.scala | Scala | mit | 3,317 |
package spark.streaming.examples
import spark.RDD
import scala.collection.mutable.ArrayBuffer
/**
* Created with IntelliJ IDEA.
* User: peter
* Date: 10/16/13
* Time: 9:15 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * Mutable DAG of query operators for a streaming SQL plan. Tracks operators by
 * category and performs two rewrites: grouping consecutive WHERE / inner-join
 * operators into sets, and pushing predicates down toward the data sources.
 * Traversals start from the output operators and walk parent links upward.
 */
class OperatorGraph(_parentCtx : SqlSparkStreamingContext) {
  val parentCtx = _parentCtx
  // Category indexes; every operator is also recorded in allOperators.
  val outputOperators = scala.collection.mutable.ArrayBuffer[OutputOperator]()
  val whereOperators = scala.collection.mutable.ArrayBuffer[WhereOperator]()
  val allOperators = scala.collection.mutable.ArrayBuffer[Operator]()
  val whereOperatorSets = ArrayBuffer[WhereOperatorSet]()
  val innerJoinOperators = ArrayBuffer[InnerJoinOperator]()
  val innerJoinOperatorSets = ArrayBuffer[InnerJoinOperatorSet]()
  // Registers an operator in allOperators and any matching category index.
  def addOperator(operator : Operator){
    allOperators += operator
    if(operator.isInstanceOf[OutputOperator])
      outputOperators += operator.asInstanceOf[OutputOperator]
    if(operator.isInstanceOf[WhereOperator])
      whereOperators += operator.asInstanceOf[WhereOperator]
    if(operator.isInstanceOf[InnerJoinOperator])
      innerJoinOperators += operator.asInstanceOf[InnerJoinOperator]
  }
  // Collapses chains of single-child WHERE operators into WhereOperatorSet nodes,
  // rewiring child links so the set replaces the chain in the graph.
  def groupPredicate(){
    val visited = scala.collection.mutable.HashSet[Operator]()
    // Depth-first walk up the parent links; `set` is the WhereOperatorSet currently
    // being accumulated (None when not inside a WHERE chain).
    def visit(op : Operator, set : Option[WhereOperatorSet]){
      println("calling visit" + op)
      if(visited(op))
        return
      visited += op
      if(op.isInstanceOf[WhereOperator] && op.childOperators.size < 2) {
        if(set.isEmpty){
          // Start a new set at the first WHERE of a chain and splice it in.
          val newSet = new WhereOperatorSet(parentCtx)
          newSet.addWhereOperator(op.asInstanceOf[WhereOperator])
          whereOperatorSets += newSet
          op.childOperators.foreach(c => c.replaceParent(op,newSet))
          op.parentOperators.foreach(p => visit(p,Some(newSet)))
        }else{
          // Continue the current chain: absorb this WHERE into the open set.
          set.get.addWhereOperator(op.asInstanceOf[WhereOperator])
          op.parentOperators.foreach(p => visit(p,set))
        }
      }else{
        // Non-WHERE (or multi-child) operator terminates the chain; it becomes
        // the parent of the just-built set.
        if(!set.isEmpty){
          set.get.setParent(op)
        }
        op.parentOperators.foreach(p => visit(p, None))
      }
    }
    outputOperators.foreach(op => visit(op, None))
    println("finish")
  }
  // Same chain-collapsing rewrite as groupPredicate, but for inner-join operators.
  def groupInnerJoin(){
    val visited = scala.collection.mutable.HashSet[Operator]()
    def visit(op : Operator, set : Option[InnerJoinOperatorSet]){
      if(visited(op))
        return
      visited += op
      if(op.isInstanceOf[InnerJoinOperator] && op.childOperators.size < 2){
        if(set.isEmpty){
          val newSet = new InnerJoinOperatorSet(parentCtx)
          newSet.addInnerJoinOperator(op.asInstanceOf[InnerJoinOperator])
          innerJoinOperatorSets += newSet
          op.childOperators.foreach(c => c.replaceParent(op, newSet))
          op.parentOperators.foreach(p => visit(p, Some(newSet)))
        }else{
          set.get.addInnerJoinOperator(op.asInstanceOf[InnerJoinOperator])
          op.parentOperators.foreach(p => visit(p,set))
        }
      }else{
        if(!set.isEmpty){
          set.get.addParent(op)
        }
        op.parentOperators.foreach(p => visit(p, None))
      }
    }
    outputOperators.foreach(op => visit(op, None))
  }
  // Runs predicate pushdown for every registered WHERE operator.
  def pushAllPredicates = whereOperators.foreach(w => pushPredicate(w))
  // Pushes a single WHERE operator as far up (toward the sources) as its referenced
  // columns allow, then re-links it at the found position.
  def pushPredicate(operator : WhereOperator){
    // When the push stops at a binary operator (join), remembers which join input
    // the predicate belongs to so re-linking picks the right parent edge.
    var parentToReplaceWhenBinary : Operator = null
    // Walks upward from `pushTo`, returning the highest operator the predicate may
    // sit below. Stops at multi-child parents, parse sources, group-bys/joins whose
    // key/input columns do not cover the predicate's columns.
    def findPushTo(pushTo : Operator) : Operator = {
      val p =
        if(pushTo.isInstanceOf[UnaryOperator])
          pushTo.parentOperators.head
        else if(pushTo.isInstanceOf[BinaryOperator])
          if(pushTo.isInstanceOf[InnerJoinOperator]){
            // Pick the join input whose schema contains all predicate columns.
            parentToReplaceWhenBinary =
              if(operator.getWhereColumnId.subsetOf(pushTo.asInstanceOf[InnerJoinOperator].parentOperators(0).outputSchema.getLocalIdFromGlobalId.keySet))
                pushTo.parentOperators(0)
              else if(operator.getWhereColumnId.subsetOf(pushTo.asInstanceOf[InnerJoinOperator].parentOperators(1).outputSchema.getLocalIdFromGlobalId.keySet))
                pushTo.parentOperators(1)
              else
                throw new Exception()
            parentToReplaceWhenBinary
          }
      // A shared (multi-child) parent blocks further pushing.
      if(p.asInstanceOf[Operator].getChildOperators.size > 1)
        return pushTo
      p match{
        case p:WhereOperator => findPushTo(p)
        case p:SelectOperator => findPushTo(p)
        case p:GroupByOperator => {
          // May only push past a group-by if the predicate uses key columns only.
          val gb = p.asInstanceOf[GroupByOperator]
          if(operator.getWhereColumnId.subsetOf(gb.getKeyColumnsArr.toSet)){
            findPushTo(p)
          }else{
            return pushTo
          }
        }
        case p:ParseOperator => return pushTo
        case p:InnerJoinOperator => {
          // May only push past a join if one input's schema covers the predicate.
          if(operator.getWhereColumnId.subsetOf(p.asInstanceOf[InnerJoinOperator].parentOperators(0).outputSchema.getLocalIdFromGlobalId.keySet) ||
            operator.getWhereColumnId.subsetOf(p.asInstanceOf[InnerJoinOperator].parentOperators(1).outputSchema.getLocalIdFromGlobalId.keySet) ){
            findPushTo(p)
          }else{
            return pushTo
          }
        }
      }
    }
    var pushTo : Operator = findPushTo(operator)
    if(pushTo == operator)
      return
    // Unlink the predicate from its current position, then splice it in between
    // `pushTo` and the appropriate old parent.
    operator.childOperators.foreach(op => op.replaceParent(operator, operator.parentOperators.head))
    val oldParent = pushTo match{
      case pushTo : UnaryOperator => pushTo.parentOperators.head
      case pushTo : BinaryOperator => parentToReplaceWhenBinary
    }
    pushTo.replaceParent(oldParent,operator)
    operator.setParent(oldParent)
  }
  // Renders the graph as the family trees of all output operators.
  override def toString() : String = {
    val buffer = new StringBuilder
    outputOperators.foreach(op => buffer.append(op.getFamilyTree(0)))
    buffer.toString()
  }
  // Runs each output operator's pipeline and hands the resulting RDD to `func`.
  def execute(func : RDD[IndexedSeq[Any]] => Unit, exec : Execution){
    outputOperators.foreach(op => func(op.execute(exec).head))
  }
}
| pxgao/spark-0.7.3 | examples/src/main/scala/spark/streaming/examples/OperatorGraph.scala | Scala | bsd-3-clause | 5,836 |
package com.asto.dmp.ycd.service.impl
import com.asto.dmp.ycd.base.Constants
import com.asto.dmp.ycd.dao.impl.BizDao
import com.asto.dmp.ycd.mq.{MQAgent, MsgWrapper, Msg}
import com.asto.dmp.ycd.service.Service
import com.asto.dmp.ycd.util._
/**
* 授信规则
* 授信金额=min(30万,近12月月均提货额*评分对应系数)
* 评分对应系数
* (600 650] 1.5
* (550 600] 1.2
* (500 550] 1
*/
object CreditService extends org.apache.spark.Logging {
  // Upper bound on the credit amount (300,000).
  private val maxAmountOfCredit = 300000
  /** Maps a credit score to its multiplier coefficient (see the rule table above). */
  def getScoreCoefficient(score: Int) = {
    if (score <= 550) 1.0
    else if (score > 550 && score <= 600) 1.2
    else 1.5
  }
  /**
   * Credit-limit result rows:
   * store id, average monthly purchase amount over the last 12 months,
   * score, score coefficient, credit amount (capped at maxAmountOfCredit).
   */
  def getAmountOfCredit = {
    // NOTE(review): t._2._2 comes from leftOuterJoin, so it is an Option; .get will
    // throw for stores without a score — confirm every store is scored upstream.
    BizDao.moneyAmountAnnAvg
      .leftOuterJoin(ScoreService.getAllScore.map(t => (t._1, t._7)))
      .map(t => (t._1, t._2._1, t._2._2.get, getScoreCoefficient(t._2._2.get.toString.toInt), Math.min(maxAmountOfCredit, Utils.retainDecimal(getScoreCoefficient(t._2._2.get) * t._2._1, 0).toLong))).cache()
  }
  /**
   * Publishes the computed results through MQ and also writes them to a text file.
   */
  def sendCreditAmount() {
    val strMsgsOfAllStores = new StringBuffer()
    getAmountOfCredit.collect().foreach {
      eachStore =>
        // One message pair per store: the score and the granted credit limit.
        val msgs = List(
          Msg("M_PROP_CREDIT_SCORE", eachStore._3, "1"),
          Msg("M_PROP_CREDIT_LIMIT_AMOUNT", eachStore._5, "1")
        )
        MQAgent.send(MsgWrapper.getJson("总得分和授信额度", msgs, eachStore._1))
        strMsgsOfAllStores.append(Msg.strMsgsOfAStore("总得分和授信额度", eachStore._1, msgs))
    }
    FileUtils.saveAsTextFile(strMsgsOfAllStores.toString, Constants.OutputPath.CREDIT_AMOUNT_PATH)
  }
}
}
class CreditService extends Service {
  // Service-framework entry point: computes and publishes credit amounts.
  override protected def runServices(): Unit = {
    CreditService.sendCreditAmount()
  }
}
| zj-lingxin/Dmp_ycd | src/main/scala/com/asto/dmp/ycd/service/impl/CreditService.scala | Scala | mit | 1,950 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package io.swagger.client.model
import play.api.libs.json._
// Generated model: navigation links attached to a GitHub SCM API response.
case class GithubScmlinks (
  self: Option[Link],      // link to this resource itself
  `class`: Option[String]  // Jenkins class name of the payload (backticked: `class` is a keyword)
)
object GithubScmlinks {
  // Play JSON (de)serializer derived from the case class shape.
  implicit val format: Format[GithubScmlinks] = Json.format
}
| cliffano/swaggy-jenkins | clients/scala-lagom-server/generated/src/main/scala/io/swagger/client/model/GithubScmlinks.scala | Scala | mit | 605 |
package org.bitcoins.node
import akka.actor.Cancellable
import org.bitcoins.core.currency._
import org.bitcoins.core.protocol.BitcoinAddress
import org.bitcoins.core.protocol.blockchain.MerkleBlock
import org.bitcoins.core.protocol.transaction.Transaction
import org.bitcoins.core.wallet.fee.SatoshisPerByte
import org.bitcoins.node.networking.peer.DataMessageHandler
import org.bitcoins.server.BitcoinSAppConfig
import org.bitcoins.testkit.BitcoinSTestAppConfig
import org.bitcoins.testkit.node.NodeUnitTest.SpvNodeFundedWalletBitcoind
import org.bitcoins.testkit.node.{NodeTestUtil, NodeUnitTest}
import org.scalatest.exceptions.TestFailedException
import org.scalatest.{BeforeAndAfter, FutureOutcome}
import scala.concurrent._
import scala.concurrent.duration._
/**
 * Verifies that the SPV node's bloom filter can be updated with a new address or
 * transaction and that the node is subsequently notified (via tx / merkle-block
 * callbacks) about matching activity. Success is signalled through a mutable
 * Promise completed from the callbacks; a scheduled runnable fails the test on timeout.
 */
class UpdateBloomFilterTest extends NodeUnitTest with BeforeAndAfter {
  /** Wallet config with data directory set to user temp directory */
  implicit override protected def config: BitcoinSAppConfig =
    BitcoinSTestAppConfig.getSpvTestConfig()
  override type FixtureParam = SpvNodeFundedWalletBitcoind
  def withFixture(test: OneArgAsyncTest): FutureOutcome = {
    withSpvNodeFundedWalletBitcoind(test, callbacks)
  }
  val testTimeout = 30.seconds
  // Completed (true) by a callback on success, or failed by the timeout runnable.
  private var assertionP: Promise[Boolean] = Promise()
  after {
    //reset assertion after a test runs, because we
    //are doing mutation to work around our callback
    //limitations, we can't currently modify callbacks
    //after a SpvNode is constructed :-(
    assertionP = Promise()
  }
  /** The address we expect to receive funds at */
  private val addressFromWalletP: Promise[BitcoinAddress] = Promise()
  // the TX we sent from our wallet to bitcoind,
  // we expect to get notified once this is
  // confirmed
  private val txFromWalletP: Promise[Transaction] = Promise()
  // Fires assertionP when any received tx pays to the wallet address under test.
  def addressCallback: DataMessageHandler.OnTxReceived = { tx: Transaction =>
    // we check if any of the addresses in the TX
    // pays to our wallet address
    val _ = for {
      addressFromWallet <- addressFromWalletP.future
      result = tx.outputs.exists(
        _.scriptPubKey == addressFromWallet.scriptPubKey)
    } yield {
      if (result) {
        assertionP.success(true)
      }
    }
  }
  // Fires assertionP when a merkle block contains the tx we broadcast from the wallet.
  def txCallback: DataMessageHandler.OnMerkleBlockReceived = {
    (_: MerkleBlock, txs: Vector[Transaction]) =>
      {
        txFromWalletP.future
          .map { tx =>
            if (txs.contains(tx)) {
              assertionP.success(true)
            }
          }
      }
  }
  // Both callbacks are installed at node construction time (see `after` note above).
  def callbacks: SpvNodeCallbacks = {
    SpvNodeCallbacks(onTxReceived = Vector(addressCallback),
                     onMerkleBlockReceived = Vector(txCallback))
  }
  it must "update the bloom filter with an address" in { param =>
    val SpvNodeFundedWalletBitcoind(spv, wallet, rpc) = param
    // we want to schedule a runnable that aborts
    // the test after a timeout, but then
    // we need to cancel that runnable once
    // we get a result
    var cancelable: Option[Cancellable] = None
    for {
      firstBloom <- wallet.getBloomFilter()
      // this has to be generated after our bloom filter
      // is calculated
      addressFromWallet <- wallet.getNewAddress()
      _ = addressFromWalletP.success(addressFromWallet)
      _ <- spv.updateBloomFilter(addressFromWallet)
      _ <- spv.sync()
      _ <- rpc.sendToAddress(addressFromWallet, 1.bitcoin)
      _ <- NodeTestUtil.awaitSync(spv, rpc)
      _ = {
        // Fail the assertion promise if no notification arrives within testTimeout.
        cancelable = Some {
          system.scheduler.scheduleOnce(
            testTimeout,
            new Runnable {
              override def run: Unit = {
                if (!assertionP.isCompleted)
                  assertionP.failure(new TestFailedException(
                    s"Did not receive a merkle block message after $testTimeout!",
                    failedCodeStackDepth = 0))
              }
            }
          )
        }
      }
      result <- assertionP.future
    } yield assert(result)
  }
  it must "update the bloom filter with a TX" in { param =>
    val SpvNodeFundedWalletBitcoind(spv, wallet, rpc) = param
    // we want to schedule a runnable that aborts
    // the test after a timeout, but then
    // we need to cancel that runnable once
    // we get a result
    var cancelable: Option[Cancellable] = None
    for {
      firstBloom <- wallet.getBloomFilter()
      addressFromBitcoind <- rpc.getNewAddress
      tx <- wallet
        .sendToAddress(addressFromBitcoind,
                       5.bitcoin,
                       SatoshisPerByte(100.sats))
      _ = txFromWalletP.success(tx)
      updatedBloom <- spv.updateBloomFilter(tx).map(_.bloomFilter)
      _ = spv.broadcastTransaction(tx)
      _ <- spv.sync()
      _ <- NodeTestUtil.awaitSync(spv, rpc)
      _ = assert(updatedBloom.contains(tx.txId))
      _ = {
        // Fail the assertion promise if no merkle-block notification arrives in time.
        cancelable = Some {
          system.scheduler.scheduleOnce(
            testTimeout,
            new Runnable {
              override def run: Unit = {
                if (!assertionP.isCompleted)
                  assertionP.failure(new TestFailedException(
                    s"Did not receive a merkle block message after $testTimeout!",
                    failedCodeStackDepth = 0))
              }
            }
          )
        }
      }
      // this should confirm our TX
      // since we updated the bloom filter
      // we should get notified about the block
      _ <- rpc.getNewAddress.flatMap(rpc.generateToAddress(1, _))
      result <- assertionP.future
    } yield assert(result)
  }
}
| bitcoin-s/bitcoin-s-core | node-test/src/test/scala/org/bitcoins/node/UpdateBloomFilterTest.scala | Scala | mit | 5,546 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util
import java.util.{Collections, Properties}
import java.util.concurrent.locks.ReentrantReadWriteLock
import kafka.cluster.EndPoint
import kafka.log.{LogCleaner, LogConfig, LogManager}
import kafka.network.SocketServer
import kafka.server.DynamicBrokerConfig._
import kafka.utils.{CoreUtils, Logging, PasswordEncoder}
import kafka.utils.Implicits._
import kafka.zk.{AdminZkClient, KafkaZkClient}
import org.apache.kafka.common.Reconfigurable
import org.apache.kafka.common.config.{AbstractConfig, ConfigDef, ConfigException, SslConfigs}
import org.apache.kafka.common.metrics.MetricsReporter
import org.apache.kafka.common.config.types.Password
import org.apache.kafka.common.network.{ListenerName, ListenerReconfigurable}
import org.apache.kafka.common.security.authenticator.LoginManager
import org.apache.kafka.common.utils.Utils
import scala.collection._
import scala.jdk.CollectionConverters._
/**
* Dynamic broker configurations are stored in ZooKeeper and may be defined at two levels:
* <ul>
* <li>Per-broker configs persisted at <tt>/configs/brokers/{brokerId}</tt>: These can be described/altered
* using AdminClient using the resource name brokerId.</li>
* <li>Cluster-wide defaults persisted at <tt>/configs/brokers/<default></tt>: These can be described/altered
* using AdminClient using an empty resource name.</li>
* </ul>
* The order of precedence for broker configs is:
* <ol>
* <li>DYNAMIC_BROKER_CONFIG: stored in ZK at /configs/brokers/{brokerId}</li>
* <li>DYNAMIC_DEFAULT_BROKER_CONFIG: stored in ZK at /configs/brokers/<default></li>
* <li>STATIC_BROKER_CONFIG: properties that broker is started up with, typically from server.properties file</li>
* <li>DEFAULT_CONFIG: Default configs defined in KafkaConfig</li>
* </ol>
* Log configs use topic config overrides if defined and fallback to broker defaults using the order of precedence above.
* Topic config overrides may use a different config name from the default broker config.
* See [[kafka.log.LogConfig#TopicConfigSynonyms]] for the mapping.
* <p>
* AdminClient returns all config synonyms in the order of precedence when configs are described with
* <code>includeSynonyms</code>. In addition to configs that may be defined with the same name at different levels,
* some configs have additional synonyms.
* </p>
* <ul>
* <li>Listener configs may be defined using the prefix <tt>listener.name.{listenerName}.{configName}</tt>. These may be
* configured as dynamic or static broker configs. Listener configs have higher precedence than the base configs
* that don't specify the listener name. Listeners without a listener config use the base config. Base configs
* may be defined only as STATIC_BROKER_CONFIG or DEFAULT_CONFIG and cannot be updated dynamically.<li>
* <li>Some configs may be defined using multiple properties. For example, <tt>log.roll.ms</tt> and
* <tt>log.roll.hours</tt> refer to the same config that may be defined in milliseconds or hours. The order of
* precedence of these synonyms is described in the docs of these configs in [[kafka.server.KafkaConfig]].</li>
* </ul>
*
*/
object DynamicBrokerConfig {
  // SSL settings that may be updated dynamically, sourced from the common reconfigurable set.
  private[server] val DynamicSecurityConfigs = SslConfigs.RECONFIGURABLE_CONFIGS.asScala
  // Union of every broker config that supports dynamic updates.
  val AllDynamicConfigs = DynamicSecurityConfigs ++
    LogCleaner.ReconfigurableConfigs ++
    DynamicLogConfig.ReconfigurableConfigs ++
    DynamicThreadPool.ReconfigurableConfigs ++
    Set(KafkaConfig.MetricReporterClassesProp) ++
    DynamicListenerConfig.ReconfigurableConfigs ++
    SocketServer.ReconfigurableConfigs
  // Listener-related configs that may only be set cluster-wide, never per broker.
  private val ClusterLevelListenerConfigs = Set(KafkaConfig.MaxConnectionsProp, KafkaConfig.MaxConnectionCreationRateProp)
  // Configs allowed to differ between brokers: security + listener configs minus cluster-level ones.
  private val PerBrokerConfigs = (DynamicSecurityConfigs ++ DynamicListenerConfig.ReconfigurableConfigs).diff(
    ClusterLevelListenerConfigs)
  // SASL mechanism-level configs that may be overridden with a listener prefix.
  private val ListenerMechanismConfigs = Set(KafkaConfig.SaslJaasConfigProp,
    KafkaConfig.SaslLoginCallbackHandlerClassProp,
    KafkaConfig.SaslLoginClassProp,
    KafkaConfig.SaslServerCallbackHandlerClassProp,
    KafkaConfig.ConnectionsMaxReauthMsProp)
  // Path-valued configs whose file contents may be reloaded without changing the path itself.
  private val ReloadableFileConfigs = Set(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG)
  // Captures the base config name from a listener-prefixed config: listener.name.<listener>.<configName>
  val ListenerConfigRegex = """listener\\.name\\.[^.]*\\.(.*)""".r
  // Dynamic configs declared with type PASSWORD in the config schema.
  private val DynamicPasswordConfigs = {
    val passwordConfigs = KafkaConfig.configKeys.filter(_._2.`type` == ConfigDef.Type.PASSWORD).keySet
    AllDynamicConfigs.intersect(passwordConfigs)
  }
  // True if `name` (possibly listener-prefixed) ends with a dynamic password config name.
  def isPasswordConfig(name: String): Boolean = DynamicBrokerConfig.DynamicPasswordConfigs.exists(name.endsWith)
def brokerConfigSynonyms(name: String, matchListenerOverride: Boolean): List[String] = {
name match {
case KafkaConfig.LogRollTimeMillisProp | KafkaConfig.LogRollTimeHoursProp =>
List(KafkaConfig.LogRollTimeMillisProp, KafkaConfig.LogRollTimeHoursProp)
case KafkaConfig.LogRollTimeJitterMillisProp | KafkaConfig.LogRollTimeJitterHoursProp =>
List(KafkaConfig.LogRollTimeJitterMillisProp, KafkaConfig.LogRollTimeJitterHoursProp)
case KafkaConfig.LogFlushIntervalMsProp => // LogFlushSchedulerIntervalMsProp is used as default
List(KafkaConfig.LogFlushIntervalMsProp, KafkaConfig.LogFlushSchedulerIntervalMsProp)
case KafkaConfig.LogRetentionTimeMillisProp | KafkaConfig.LogRetentionTimeMinutesProp | KafkaConfig.LogRetentionTimeHoursProp =>
List(KafkaConfig.LogRetentionTimeMillisProp, KafkaConfig.LogRetentionTimeMinutesProp, KafkaConfig.LogRetentionTimeHoursProp)
case ListenerConfigRegex(baseName) if matchListenerOverride =>
// `ListenerMechanismConfigs` are specified as listenerPrefix.mechanism.<configName>
// and other listener configs are specified as listenerPrefix.<configName>
// Add <configName> as a synonym in both cases.
val mechanismConfig = ListenerMechanismConfigs.find(baseName.endsWith)
List(name, mechanismConfig.getOrElse(baseName))
case _ => List(name)
}
}
def validateConfigs(props: Properties, perBrokerConfig: Boolean): Unit = {
def checkInvalidProps(invalidPropNames: Set[String], errorMessage: String): Unit = {
if (invalidPropNames.nonEmpty)
throw new ConfigException(s"$errorMessage: $invalidPropNames")
}
checkInvalidProps(nonDynamicConfigs(props), "Cannot update these configs dynamically")
checkInvalidProps(securityConfigsWithoutListenerPrefix(props),
"These security configs can be dynamically updated only per-listener using the listener prefix")
validateConfigTypes(props)
if (!perBrokerConfig) {
checkInvalidProps(perBrokerConfigs(props),
"Cannot update these configs at default cluster level, broker id must be specified")
}
}
private def perBrokerConfigs(props: Properties): Set[String] = {
val configNames = props.asScala.keySet
def perBrokerListenerConfig(name: String): Boolean = {
name match {
case ListenerConfigRegex(baseName) => !ClusterLevelListenerConfigs.contains(baseName)
case _ => false
}
}
configNames.intersect(PerBrokerConfigs) ++ configNames.filter(perBrokerListenerConfig)
}
private def nonDynamicConfigs(props: Properties): Set[String] = {
props.asScala.keySet.intersect(DynamicConfig.Broker.nonDynamicProps)
}
private def securityConfigsWithoutListenerPrefix(props: Properties): Set[String] = {
DynamicSecurityConfigs.filter(props.containsKey)
}
private def validateConfigTypes(props: Properties): Unit = {
val baseProps = new Properties
props.asScala.foreach {
case (ListenerConfigRegex(baseName), v) => baseProps.put(baseName, v)
case (k, v) => baseProps.put(k, v)
}
DynamicConfig.Broker.validate(baseProps)
}
private[server] def addDynamicConfigs(configDef: ConfigDef): Unit = {
KafkaConfig.configKeys.forKeyValue { (configName, config) =>
if (AllDynamicConfigs.contains(configName)) {
configDef.define(config.name, config.`type`, config.defaultValue, config.validator,
config.importance, config.documentation, config.group, config.orderInGroup, config.width,
config.displayName, config.dependents, config.recommender)
}
}
}
private[server] def dynamicConfigUpdateModes: util.Map[String, String] = {
AllDynamicConfigs.map { name =>
val mode = if (PerBrokerConfigs.contains(name)) "per-broker" else "cluster-wide"
(name -> mode)
}.toMap.asJava
}
private[server] def resolveVariableConfigs(propsOriginal: Properties): Properties = {
val props = new Properties
val config = new AbstractConfig(new ConfigDef(), propsOriginal, false)
config.originals.asScala.filter(!_._1.startsWith(AbstractConfig.CONFIG_PROVIDERS_CONFIG)).foreach {case (key: String, value: Object) => {
props.put(key, value)
}}
props
}
}
/**
 * Tracks the current effective broker configuration, combining static configs with
 * dynamic per-broker and cluster-default overrides stored in ZooKeeper, and notifies
 * registered reconfigurables when the effective configuration changes.
 *
 * All mutable state (`dynamicBrokerConfigs`, `dynamicDefaultConfigs`, the reconfigurable
 * lists and `currentConfig`) is guarded by `lock`.
 */
class DynamicBrokerConfig(private val kafkaConfig: KafkaConfig) extends Logging {

  // Snapshot of configs the broker was started with (server.properties), and of
  // KafkaConfig's compiled-in defaults; password values are rendered as strings.
  private[server] val staticBrokerConfigs = ConfigDef.convertToStringMapWithPasswordValues(kafkaConfig.originalsFromThisConfig).asScala
  private[server] val staticDefaultConfigs = ConfigDef.convertToStringMapWithPasswordValues(KafkaConfig.defaultValues.asJava).asScala

  // Dynamic overrides: per-broker (highest precedence) and cluster default.
  private val dynamicBrokerConfigs = mutable.Map[String, String]()
  private val dynamicDefaultConfigs = mutable.Map[String, String]()

  // Plugin reconfigurables (notified before KafkaConfig is updated) and internal
  // broker reconfigurables (notified after) — see addReconfigurables scaladoc.
  private val reconfigurables = mutable.Buffer[Reconfigurable]()
  private val brokerReconfigurables = mutable.Buffer[BrokerReconfigurable]()
  private val lock = new ReentrantReadWriteLock
  private var currentConfig = kafkaConfig
  // Encoder for password configs persisted in ZK; None if no secret is configured.
  private val dynamicConfigPasswordEncoder = maybeCreatePasswordEncoder(kafkaConfig.passwordEncoderSecret)

  // Loads cluster-default and per-broker dynamic configs from ZK, re-encoding
  // passwords with the current secret if the secret was rotated.
  private[server] def initialize(zkClient: KafkaZkClient): Unit = {
    currentConfig = new KafkaConfig(kafkaConfig.props, false, None)
    val adminZkClient = new AdminZkClient(zkClient)
    updateDefaultConfig(adminZkClient.fetchEntityConfig(ConfigType.Broker, ConfigEntityName.Default))
    val props = adminZkClient.fetchEntityConfig(ConfigType.Broker, kafkaConfig.brokerId.toString)
    val brokerConfig = maybeReEncodePasswords(props, adminZkClient)
    updateBrokerConfig(kafkaConfig.brokerId, brokerConfig)
  }

  /**
   * Clear all cached values. This is used to clear state on broker shutdown to avoid
   * exceptions in tests when broker is restarted. These fields are re-initialized when
   * broker starts up.
   */
  private[server] def clear(): Unit = {
    dynamicBrokerConfigs.clear()
    dynamicDefaultConfigs.clear()
    reconfigurables.clear()
    brokerReconfigurables.clear()
  }

  /**
   * Add reconfigurables to be notified when a dynamic broker config is updated.
   *
   * `Reconfigurable` is the public API used by configurable plugins like metrics reporter
   * and quota callbacks. These are reconfigured before `KafkaConfig` is updated so that
   * the update can be aborted if `reconfigure()` fails with an exception.
   *
   * `BrokerReconfigurable` is used for internal reconfigurable classes. These are
   * reconfigured after `KafkaConfig` is updated so that they can access `KafkaConfig`
   * directly. They are provided both old and new configs.
   */
  def addReconfigurables(kafkaServer: KafkaBroker): Unit = {
    kafkaServer.authorizer match {
      case Some(authz: Reconfigurable) => addReconfigurable(authz)
      case _ =>
    }
    addReconfigurable(kafkaServer.kafkaYammerMetrics)
    addReconfigurable(new DynamicMetricsReporters(kafkaConfig.brokerId, kafkaServer))
    addReconfigurable(new DynamicClientQuotaCallback(kafkaConfig.brokerId, kafkaServer))
    addBrokerReconfigurable(new DynamicThreadPool(kafkaServer))
    if (kafkaServer.logManager.cleaner != null)
      addBrokerReconfigurable(kafkaServer.logManager.cleaner)
    addBrokerReconfigurable(new DynamicLogConfig(kafkaServer.logManager, kafkaServer))
    addBrokerReconfigurable(new DynamicListenerConfig(kafkaServer))
    addBrokerReconfigurable(kafkaServer.socketServer)
  }

  // Registers a plugin reconfigurable after verifying all its configs are dynamic.
  def addReconfigurable(reconfigurable: Reconfigurable): Unit = CoreUtils.inWriteLock(lock) {
    verifyReconfigurableConfigs(reconfigurable.reconfigurableConfigs.asScala)
    reconfigurables += reconfigurable
  }

  // Registers an internal broker reconfigurable after the same verification.
  def addBrokerReconfigurable(reconfigurable: BrokerReconfigurable): Unit = CoreUtils.inWriteLock(lock) {
    verifyReconfigurableConfigs(reconfigurable.reconfigurableConfigs)
    brokerReconfigurables += reconfigurable
  }

  def removeReconfigurable(reconfigurable: Reconfigurable): Unit = CoreUtils.inWriteLock(lock) {
    reconfigurables -= reconfigurable
  }

  // Rejects reconfigurables that declare configs which can never be updated dynamically.
  private def verifyReconfigurableConfigs(configNames: Set[String]): Unit = CoreUtils.inWriteLock(lock) {
    val nonDynamic = configNames.filter(DynamicConfig.Broker.nonDynamicProps.contains)
    require(nonDynamic.isEmpty, s"Reconfigurable contains non-dynamic configs $nonDynamic")
  }

  // Visibility for testing
  private[server] def currentKafkaConfig: KafkaConfig = CoreUtils.inReadLock(lock) {
    currentConfig
  }

  // Defensive copies of the dynamic override maps, taken under the read lock.
  private[server] def currentDynamicBrokerConfigs: Map[String, String] = CoreUtils.inReadLock(lock) {
    dynamicBrokerConfigs.clone()
  }

  private[server] def currentDynamicDefaultConfigs: Map[String, String] = CoreUtils.inReadLock(lock) {
    dynamicDefaultConfigs.clone()
  }

  // Replaces the per-broker overrides with `persistentProps` (after sanitizing and
  // decoding passwords) and recomputes the effective config. Errors are logged,
  // not propagated, so a bad ZK update cannot crash the broker.
  private[server] def updateBrokerConfig(brokerId: Int, persistentProps: Properties): Unit = CoreUtils.inWriteLock(lock) {
    try {
      val props = fromPersistentProps(persistentProps, perBrokerConfig = true)
      dynamicBrokerConfigs.clear()
      dynamicBrokerConfigs ++= props.asScala
      updateCurrentConfig()
    } catch {
      case e: Exception => error(s"Per-broker configs of $brokerId could not be applied: $persistentProps", e)
    }
  }

  // Same as updateBrokerConfig, but for the cluster-wide default overrides.
  private[server] def updateDefaultConfig(persistentProps: Properties): Unit = CoreUtils.inWriteLock(lock) {
    try {
      val props = fromPersistentProps(persistentProps, perBrokerConfig = false)
      dynamicDefaultConfigs.clear()
      dynamicDefaultConfigs ++= props.asScala
      updateCurrentConfig()
    } catch {
      case e: Exception => error(s"Cluster default configs could not be applied: $persistentProps", e)
    }
  }

  /**
   * All config updates through ZooKeeper are triggered through actual changes in values stored in ZooKeeper.
   * For some configs like SSL keystores and truststores, we also want to reload the store if it was modified
   * in-place, even though the actual value of the file path and password haven't changed. This scenario alone
   * is handled here when a config update request using admin client is processed by ZkAdminManager. If any of
   * the SSL configs have changed, then the update will not be done here, but will be handled later when ZK
   * changes are processed. At the moment, only listener configs are considered for reloading.
   */
  private[server] def reloadUpdatedFilesWithoutConfigChange(newProps: Properties): Unit = CoreUtils.inWriteLock(lock) {
    reconfigurables
      .filter(reconfigurable => ReloadableFileConfigs.exists(reconfigurable.reconfigurableConfigs.contains))
      .foreach {
        case reconfigurable: ListenerReconfigurable =>
          val kafkaProps = validatedKafkaProps(newProps, perBrokerConfig = true)
          val newConfig = new KafkaConfig(kafkaProps.asJava, false, None)
          processListenerReconfigurable(reconfigurable, newConfig, Collections.emptyMap(), validateOnly = false, reloadOnly = true)
        case reconfigurable =>
          trace(s"Files will not be reloaded without config change for $reconfigurable")
      }
  }

  // Builds a PasswordEncoder from `secret` with the broker's configured encoder settings.
  private def maybeCreatePasswordEncoder(secret: Option[Password]): Option[PasswordEncoder] = {
    secret.map { secret =>
      new PasswordEncoder(secret,
        kafkaConfig.passwordEncoderKeyFactoryAlgorithm,
        kafkaConfig.passwordEncoderCipherAlgorithm,
        kafkaConfig.passwordEncoderKeyLength,
        kafkaConfig.passwordEncoderIterations)
    }
  }

  // Fails fast when a password config is used without password.encoder.secret.
  private def passwordEncoder: PasswordEncoder = {
    dynamicConfigPasswordEncoder.getOrElse(throw new ConfigException("Password encoder secret not configured"))
  }

  // Encrypts password config values before they are persisted (password configs are
  // only allowed at the per-broker level).
  private[server] def toPersistentProps(configProps: Properties, perBrokerConfig: Boolean): Properties = {
    val props = configProps.clone().asInstanceOf[Properties]

    def encodePassword(configName: String, value: String): Unit = {
      if (value != null) {
        if (!perBrokerConfig)
          throw new ConfigException("Password config can be defined only at broker level")
        props.setProperty(configName, passwordEncoder.encode(new Password(value)))
      }
    }
    configProps.asScala.forKeyValue { (name, value) =>
      if (isPasswordConfig(name))
        encodePassword(name, value)
    }
    props
  }

  // Inverse of toPersistentProps: removes configs that are invalid at this level
  // (logging rather than failing, since values already persisted in ZK cannot be
  // rejected) and decrypts password configs.
  private[server] def fromPersistentProps(persistentProps: Properties,
                                          perBrokerConfig: Boolean): Properties = {
    val props = persistentProps.clone().asInstanceOf[Properties]

    // Remove all invalid configs from `props`
    removeInvalidConfigs(props, perBrokerConfig)
    def removeInvalidProps(invalidPropNames: Set[String], errorMessage: String): Unit = {
      if (invalidPropNames.nonEmpty) {
        invalidPropNames.foreach(props.remove)
        error(s"$errorMessage: $invalidPropNames")
      }
    }
    removeInvalidProps(nonDynamicConfigs(props), "Non-dynamic configs configured in ZooKeeper will be ignored")
    removeInvalidProps(securityConfigsWithoutListenerPrefix(props),
      "Security configs can be dynamically updated only using listener prefix, base configs will be ignored")
    if (!perBrokerConfig)
      removeInvalidProps(perBrokerConfigs(props), "Per-broker configs defined at default cluster level will be ignored")

    def decodePassword(configName: String, value: String): Unit = {
      if (value != null) {
        try {
          props.setProperty(configName, passwordEncoder.decode(value).value)
        } catch {
          case e: Exception =>
            error(s"Dynamic password config $configName could not be decoded, ignoring.", e)
            props.remove(configName)
        }
      }
    }

    props.asScala.forKeyValue { (name, value) =>
      if (isPasswordConfig(name))
        decodePassword(name, value)
    }
    props
  }

  // If the secret has changed, password.encoder.old.secret contains the old secret that was used
  // to encode the configs in ZK. Decode passwords using the old secret and update ZK with values
  // encoded using the current secret. Ignore any errors during decoding since old secret may not
  // have been removed during broker restart.
  private def maybeReEncodePasswords(persistentProps: Properties, adminZkClient: AdminZkClient): Properties = {
    val props = persistentProps.clone().asInstanceOf[Properties]
    if (props.asScala.keySet.exists(isPasswordConfig)) {
      maybeCreatePasswordEncoder(kafkaConfig.passwordEncoderOldSecret).foreach { passwordDecoder =>
        persistentProps.asScala.forKeyValue { (configName, value) =>
          if (isPasswordConfig(configName) && value != null) {
            val decoded = try {
              Some(passwordDecoder.decode(value).value)
            } catch {
              case _: Exception =>
                debug(s"Dynamic password config $configName could not be decoded using old secret, new secret will be used.")
                None
            }
            decoded.foreach { value => props.put(configName, passwordEncoder.encode(new Password(value))) }
          }
        }
        // Write the re-encoded values back to ZK so the old secret can be retired.
        adminZkClient.changeBrokerConfig(Some(kafkaConfig.brokerId), props)
      }
    }
    props
  }

  /**
   * Validate the provided configs `propsOverride` and return the full Kafka configs with
   * the configured defaults and these overrides.
   *
   * Note: The caller must acquire the read or write lock before invoking this method.
   */
  private def validatedKafkaProps(propsOverride: Properties, perBrokerConfig: Boolean): Map[String, String] = {
    val propsResolved = DynamicBrokerConfig.resolveVariableConfigs(propsOverride)
    validateConfigs(propsResolved, perBrokerConfig)
    val newProps = mutable.Map[String, String]()
    newProps ++= staticBrokerConfigs
    if (perBrokerConfig) {
      // Per-broker override: apply on top of cluster defaults.
      overrideProps(newProps, dynamicDefaultConfigs)
      overrideProps(newProps, propsResolved.asScala)
    } else {
      // Cluster-default override: existing per-broker configs retain precedence.
      overrideProps(newProps, propsResolved.asScala)
      overrideProps(newProps, dynamicBrokerConfigs)
    }
    newProps
  }

  // Dry-run validation of an update request: builds the candidate config and runs
  // all reconfigurables in validate-only mode without applying anything.
  private[server] def validate(props: Properties, perBrokerConfig: Boolean): Unit = CoreUtils.inReadLock(lock) {
    val newProps = validatedKafkaProps(props, perBrokerConfig)
    processReconfiguration(newProps, validateOnly = true)
  }

  // Removes entries from `props` whose values fail type validation. On failure the
  // full set is re-validated one entry at a time to isolate the offending keys.
  private def removeInvalidConfigs(props: Properties, perBrokerConfig: Boolean): Unit = {
    try {
      validateConfigTypes(props)
      // NOTE(review): this `props.asScala` is a discarded pure expression (no-op);
      // it has no effect and is presumably a leftover — verify against upstream.
      props.asScala
    } catch {
      case e: Exception =>
        val invalidProps = props.asScala.filter { case (k, v) =>
          val props1 = new Properties
          props1.put(k, v)
          try {
            validateConfigTypes(props1)
            false
          } catch {
            case _: Exception => true
          }
        }
        invalidProps.keys.foreach(props.remove)
        val configSource = if (perBrokerConfig) "broker" else "default cluster"
        error(s"Dynamic $configSource config contains invalid values: $invalidProps, these configs will be ignored", e)
    }
  }

  // Reconfigures `reconfigurable` only if at least one of its declared configs
  // actually differs between the old and new configurations.
  private[server] def maybeReconfigure(reconfigurable: Reconfigurable, oldConfig: KafkaConfig, newConfig: util.Map[String, _]): Unit = {
    if (reconfigurable.reconfigurableConfigs.asScala.exists(key => oldConfig.originals.get(key) != newConfig.get(key)))
      reconfigurable.reconfigure(newConfig)
  }

  /**
   * Returns the change in configurations between the new props and current props by returning a
   * map of the changed configs, as well as the set of deleted keys
   */
  private def updatedConfigs(newProps: java.util.Map[String, _], currentProps: java.util.Map[String, _]):
  (mutable.Map[String, _], Set[String]) = {
    val changeMap = newProps.asScala.filter {
      case (k, v) => v != currentProps.get(k)
    }
    val deletedKeySet = currentProps.asScala.filter {
      case (k, _) => !newProps.containsKey(k)
    }.keySet
    (changeMap, deletedKeySet)
  }

  /**
   * Updates values in `props` with the new values from `propsOverride`. Synonyms of updated configs
   * are removed from `props` to ensure that the config with the higher precedence is applied. For example,
   * if `log.roll.ms` was defined in server.properties and `log.roll.hours` is configured dynamically,
   * `log.roll.hours` from the dynamic configuration will be used and `log.roll.ms` will be removed from
   * `props` (even though `log.roll.hours` is secondary to `log.roll.ms`).
   */
  private def overrideProps(props: mutable.Map[String, String], propsOverride: mutable.Map[String, String]): Unit = {
    propsOverride.forKeyValue { (k, v) =>
      // Remove synonyms of `k` to ensure the right precedence is applied. But disable `matchListenerOverride`
      // so that base configs corresponding to listener configs are not removed. Base configs should not be removed
      // since they may be used by other listeners. It is ok to retain them in `props` since base configs cannot be
      // dynamically updated and listener-specific configs have the higher precedence.
      brokerConfigSynonyms(k, matchListenerOverride = false).foreach(props.remove)
      props.put(k, v)
    }
  }

  // Recomputes the effective config (static + default overrides + broker overrides),
  // notifies reconfigurables, and, if anything changed, swaps in the new KafkaConfig
  // before invoking the BrokerReconfigurable updates.
  private def updateCurrentConfig(): Unit = {
    val newProps = mutable.Map[String, String]()
    newProps ++= staticBrokerConfigs
    overrideProps(newProps, dynamicDefaultConfigs)
    overrideProps(newProps, dynamicBrokerConfigs)

    val oldConfig = currentConfig
    val (newConfig, brokerReconfigurablesToUpdate) = processReconfiguration(newProps, validateOnly = false)
    if (newConfig ne currentConfig) {
      currentConfig = newConfig
      kafkaConfig.updateCurrentConfig(newConfig)

      // Process BrokerReconfigurable updates after current config is updated
      brokerReconfigurablesToUpdate.foreach(_.reconfigure(oldConfig, newConfig))
    }
  }

  // Core of the update path: diffs the candidate config against the current one,
  // validates/reconfigures plugin Reconfigurables immediately, and returns the new
  // config plus the BrokerReconfigurables to update afterwards. Any failure aborts
  // the whole update with a ConfigException.
  private def processReconfiguration(newProps: Map[String, String], validateOnly: Boolean): (KafkaConfig, List[BrokerReconfigurable]) = {
    val newConfig = new KafkaConfig(newProps.asJava, !validateOnly, None)
    val (changeMap, deletedKeySet) = updatedConfigs(newConfig.originalsFromThisConfig, currentConfig.originals)
    if (changeMap.nonEmpty || deletedKeySet.nonEmpty) {
      try {
        val customConfigs = new util.HashMap[String, Object](newConfig.originalsFromThisConfig) // non-Kafka configs
        newConfig.valuesFromThisConfig.keySet.forEach(customConfigs.remove(_))
        reconfigurables.foreach {
          case listenerReconfigurable: ListenerReconfigurable =>
            processListenerReconfigurable(listenerReconfigurable, newConfig, customConfigs, validateOnly, reloadOnly = false)
          case reconfigurable =>
            if (needsReconfiguration(reconfigurable.reconfigurableConfigs, changeMap.keySet, deletedKeySet))
              processReconfigurable(reconfigurable, changeMap.keySet, newConfig.valuesFromThisConfig, customConfigs, validateOnly)
        }

        // BrokerReconfigurable updates are processed after config is updated. Only do the validation here.
        val brokerReconfigurablesToUpdate = mutable.Buffer[BrokerReconfigurable]()
        brokerReconfigurables.foreach { reconfigurable =>
          if (needsReconfiguration(reconfigurable.reconfigurableConfigs.asJava, changeMap.keySet, deletedKeySet)) {
            reconfigurable.validateReconfiguration(newConfig)
            if (!validateOnly)
              brokerReconfigurablesToUpdate += reconfigurable
          }
        }
        (newConfig, brokerReconfigurablesToUpdate.toList)
      } catch {
        case e: Exception =>
          if (!validateOnly)
            error(s"Failed to update broker configuration with configs : ${newConfig.originalsFromThisConfig}", e)
          throw new ConfigException("Invalid dynamic configuration", e)
      }
    }
    else
      (currentConfig, List.empty)
  }

  // True if any of `reconfigurableConfigs` was updated or deleted.
  private def needsReconfiguration(reconfigurableConfigs: util.Set[String], updatedKeys: Set[String], deletedKeys: Set[String]): Boolean = {
    reconfigurableConfigs.asScala.intersect(updatedKeys).nonEmpty ||
      reconfigurableConfigs.asScala.intersect(deletedKeys).nonEmpty
  }

  // Handles listener-scoped reconfigurables: diffs with the listener prefix applied.
  // With `reloadOnly`, reconfigures only when nothing changed (in-place file reload);
  // otherwise only when something changed.
  private def processListenerReconfigurable(listenerReconfigurable: ListenerReconfigurable,
                                            newConfig: KafkaConfig,
                                            customConfigs: util.Map[String, Object],
                                            validateOnly: Boolean,
                                            reloadOnly:  Boolean): Unit = {
    val listenerName = listenerReconfigurable.listenerName
    val oldValues = currentConfig.valuesWithPrefixOverride(listenerName.configPrefix)
    val newValues = newConfig.valuesFromThisConfigWithPrefixOverride(listenerName.configPrefix)
    val (changeMap, deletedKeys) = updatedConfigs(newValues, oldValues)
    val updatedKeys = changeMap.keySet
    val configsChanged = needsReconfiguration(listenerReconfigurable.reconfigurableConfigs, updatedKeys, deletedKeys)
    // if `reloadOnly`, reconfigure if configs haven't changed. Otherwise reconfigure if configs have changed
    if (reloadOnly != configsChanged)
      processReconfigurable(listenerReconfigurable, updatedKeys, newValues, customConfigs, validateOnly)
  }

  // Validates (always) and applies (unless validateOnly) an update on a single
  // plugin Reconfigurable. Non-ConfigException validation failures are wrapped.
  private def processReconfigurable(reconfigurable: Reconfigurable,
                                    updatedConfigNames: Set[String],
                                    allNewConfigs: util.Map[String, _],
                                    newCustomConfigs: util.Map[String, Object],
                                    validateOnly: Boolean): Unit = {
    val newConfigs = new util.HashMap[String, Object]
    allNewConfigs.forEach { (k, v) => newConfigs.put(k, v.asInstanceOf[AnyRef]) }
    newConfigs.putAll(newCustomConfigs)
    try {
      reconfigurable.validateReconfiguration(newConfigs)
    } catch {
      case e: ConfigException => throw e
      case _: Exception =>
        throw new ConfigException(s"Validation of dynamic config update of $updatedConfigNames failed with class ${reconfigurable.getClass}")
    }
    if (!validateOnly) {
      info(s"Reconfiguring $reconfigurable, updated configs: $updatedConfigNames custom configs: $newCustomConfigs")
      reconfigurable.reconfigure(newConfigs)
    }
  }
}
/**
 * Internal counterpart of [[org.apache.kafka.common.Reconfigurable]] for broker
 * components. Implementations declare which configs they react to, validate a
 * candidate config, and are reconfigured with both old and new `KafkaConfig`
 * after the broker's current config has been updated (see
 * `DynamicBrokerConfig.addReconfigurables`).
 */
trait BrokerReconfigurable {
  // Config names this component reacts to.
  def reconfigurableConfigs: Set[String]
  // Validate `newConfig` before it is applied; throw to abort the update.
  def validateReconfiguration(newConfig: KafkaConfig): Unit
  // Apply the update; invoked after the broker's current config has been swapped.
  def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit
}
object DynamicLogConfig {
  // Exclude message.format.version for now since we need to check that the version
  // is supported on all brokers in the cluster.
  val ExcludedConfigs = Set(KafkaConfig.LogMessageFormatVersionProp)

  // Broker-level names of dynamically updatable log configs: every broker synonym
  // of a topic config, minus the exclusions above.
  val ReconfigurableConfigs = LogConfig.TopicConfigSynonyms.values.toSet.diff(ExcludedConfigs)

  // Inverse of TopicConfigSynonyms: broker config name -> topic-level log config name.
  val KafkaConfigToLogConfigName = LogConfig.TopicConfigSynonyms.map(_.swap)
}
/**
 * Applies dynamic updates of default log configs: updates the LogManager's default
 * config and propagates new broker defaults to every existing log that has not
 * overridden the corresponding topic config.
 */
class DynamicLogConfig(logManager: LogManager, server: KafkaBroker) extends BrokerReconfigurable with Logging {

  override def reconfigurableConfigs: Set[String] = {
    DynamicLogConfig.ReconfigurableConfigs
  }

  override def validateReconfiguration(newConfig: KafkaConfig): Unit = {
    // For update of topic config overrides, only config names and types are validated
    // Names and types have already been validated. For consistency with topic config
    // validation, no additional validation is performed.
  }

  // Rebuilds each log's config from the new broker defaults, re-applying any
  // topic-level overrides so they keep precedence over the defaults.
  private def updateLogsConfig(newBrokerDefaults: Map[String, Object]): Unit = {
    logManager.brokerConfigUpdated()
    logManager.allLogs.foreach { log =>
      val props = mutable.Map.empty[Any, Any]
      props ++= newBrokerDefaults
      props ++= log.config.originals.asScala.filter { case (k, _) => log.config.overriddenConfigs.contains(k) }
      val logConfig = LogConfig(props.asJava, log.config.overriddenConfigs)
      log.updateConfig(logConfig)
    }
  }

  override def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit = {
    val currentLogConfig = logManager.currentDefaultConfig
    // Captured before reconfiguring so the enable-transition below can be detected.
    val origUncleanLeaderElectionEnable = logManager.currentDefaultConfig.uncleanLeaderElectionEnable
    val newBrokerDefaults = new util.HashMap[String, Object](currentLogConfig.originals)
    newConfig.valuesFromThisConfig.forEach { (k, v) =>
      if (DynamicLogConfig.ReconfigurableConfigs.contains(k)) {
        // Translate the broker config name to its topic-level log config name.
        DynamicLogConfig.KafkaConfigToLogConfigName.get(k).foreach { configName =>
          if (v == null)
            newBrokerDefaults.remove(configName)
          else
            newBrokerDefaults.put(configName, v.asInstanceOf[AnyRef])
        }
      }
    }

    logManager.reconfigureDefaultLogConfig(LogConfig(newBrokerDefaults))

    updateLogsConfig(newBrokerDefaults.asScala)

    // If unclean leader election was just enabled by default, trigger the controller
    // to act on it (ZK mode only).
    if (logManager.currentDefaultConfig.uncleanLeaderElectionEnable && !origUncleanLeaderElectionEnable) {
      server match {
        case kafkaServer: KafkaServer => kafkaServer.kafkaController.enableDefaultUncleanLeaderElection()
        case _ =>
      }
    }
  }
}
object DynamicThreadPool {
  // Thread-pool size configs that may be resized while the broker is running.
  val ReconfigurableConfigs: Set[String] = Set(
    KafkaConfig.BackgroundThreadsProp,
    KafkaConfig.NumIoThreadsProp,
    KafkaConfig.NumNetworkThreadsProp,
    KafkaConfig.NumRecoveryThreadsPerDataDirProp,
    KafkaConfig.NumReplicaFetchersProp)
}
/**
 * Dynamically resizes the broker's thread pools (request handlers, network threads,
 * replica fetchers, log-recovery threads, background scheduler threads).
 * New sizes must be positive and within [current/2, current*2].
 */
class DynamicThreadPool(server: KafkaBroker) extends BrokerReconfigurable {

  override def reconfigurableConfigs: Set[String] = DynamicThreadPool.ReconfigurableConfigs

  override def validateReconfiguration(newConfig: KafkaConfig): Unit = {
    newConfig.values.forEach { (name, value) =>
      if (DynamicThreadPool.ReconfigurableConfigs.contains(name)) {
        val proposed = value.asInstanceOf[Int]
        val current = currentValue(name)
        if (proposed != current) {
          val errorMsg = s"Dynamic thread count update validation failed for $name=$value"
          // Reject non-positive sizes and changes larger than a factor of two in
          // either direction, to avoid drastic pool resizes in one step.
          if (proposed <= 0)
            throw new ConfigException(s"$errorMsg, value should be at least 1")
          if (proposed < current / 2)
            throw new ConfigException(s"$errorMsg, value should be at least half the current value $current")
          if (proposed > current * 2)
            throw new ConfigException(s"$errorMsg, value should not be greater than double the current value $current")
        }
      }
    }
  }

  override def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit = {
    // Apply each resize only when the corresponding config value actually changed.
    def resizeIfChanged(value: KafkaConfig => Int)(resize: Int => Unit): Unit = {
      val updated = value(newConfig)
      if (updated != value(oldConfig))
        resize(updated)
    }
    resizeIfChanged(_.numIoThreads)(server.dataPlaneRequestHandlerPool.resizeThreadPool)
    resizeIfChanged(_.numNetworkThreads)(n => server.socketServer.resizeThreadPool(oldConfig.numNetworkThreads, n))
    resizeIfChanged(_.numReplicaFetchers)(server.replicaManager.replicaFetcherManager.resizeThreadPool)
    resizeIfChanged(_.numRecoveryThreadsPerDataDir)(server.logManager.resizeRecoveryThreadPool)
    resizeIfChanged(_.backgroundThreads)(server.kafkaScheduler.resizeThreadPool)
  }

  // Currently-effective thread count for the given config name.
  private def currentValue(name: String): Int = name match {
    case KafkaConfig.NumIoThreadsProp => server.config.numIoThreads
    case KafkaConfig.NumNetworkThreadsProp => server.config.numNetworkThreads
    case KafkaConfig.NumReplicaFetchersProp => server.config.numReplicaFetchers
    case KafkaConfig.NumRecoveryThreadsPerDataDirProp => server.config.numRecoveryThreadsPerDataDir
    case KafkaConfig.BackgroundThreadsProp => server.config.backgroundThreads
    case n => throw new IllegalStateException(s"Unexpected config $n")
  }
}
/**
 * Manages the set of metrics reporters, allowing reporters to be added and removed
 * dynamically via `metric.reporters`, and forwarding reconfiguration to reporters
 * that are themselves `Reconfigurable`.
 */
class DynamicMetricsReporters(brokerId: Int, server: KafkaBroker) extends Reconfigurable {

  private val dynamicConfig = server.config.dynamicConfig
  private val metrics = server.metrics
  // broker.id is always passed to newly created reporters.
  private val propsOverride = Map[String, AnyRef](KafkaConfig.BrokerIdProp -> brokerId.toString)
  // Reporter class name -> live reporter instance.
  private val currentReporters = mutable.Map[String, MetricsReporter]()

  // Instantiate the reporters configured at startup (constructor-time side effect).
  createReporters(dynamicConfig.currentKafkaConfig.getList(KafkaConfig.MetricReporterClassesProp),
    Collections.emptyMap[String, Object])

  private[server] def currentMetricsReporters: List[MetricsReporter] = currentReporters.values.toList

  override def configure(configs: util.Map[String, _]): Unit = {}

  // The reporter list config itself plus whatever each live reconfigurable reporter declares.
  override def reconfigurableConfigs(): util.Set[String] = {
    val configs = new util.HashSet[String]()
    configs.add(KafkaConfig.MetricReporterClassesProp)
    currentReporters.values.foreach {
      case reporter: Reconfigurable => configs.addAll(reporter.reconfigurableConfigs)
      case _ =>
    }
    configs
  }

  override def validateReconfiguration(configs: util.Map[String, _]): Unit = {
    val updatedMetricsReporters = metricsReporterClasses(configs)

    // Ensure all the reporter classes can be loaded and have a default constructor
    updatedMetricsReporters.foreach { className =>
      val clazz = Utils.loadClass(className, classOf[MetricsReporter])
      clazz.getConstructor()
    }

    // Validate the new configuration using every reconfigurable reporter instance that is not being deleted
    currentReporters.values.foreach {
      case reporter: Reconfigurable =>
        if (updatedMetricsReporters.contains(reporter.getClass.getName))
          reporter.validateReconfiguration(configs)
      case _ =>
    }
  }

  // Order matters: remove deleted reporters, reconfigure surviving ones, then create
  // newly added ones.
  override def reconfigure(configs: util.Map[String, _]): Unit = {
    val updatedMetricsReporters = metricsReporterClasses(configs)
    val deleted = currentReporters.keySet.toSet -- updatedMetricsReporters
    deleted.foreach(removeReporter)
    currentReporters.values.foreach {
      case reporter: Reconfigurable => dynamicConfig.maybeReconfigure(reporter, dynamicConfig.currentKafkaConfig, configs)
      case _ =>
    }
    val added = updatedMetricsReporters.filterNot(currentReporters.keySet)
    createReporters(added.asJava, configs)
  }

  // Instantiates and registers reporters, then notifies cluster/metrics listeners.
  private def createReporters(reporterClasses: util.List[String],
                              updatedConfigs: util.Map[String, _]): Unit = {
    val props = new util.HashMap[String, AnyRef]
    updatedConfigs.forEach { (k, v) => props.put(k, v.asInstanceOf[AnyRef]) }
    propsOverride.forKeyValue { (k, v) => props.put(k, v) }
    val reporters = dynamicConfig.currentKafkaConfig.getConfiguredInstances(reporterClasses, classOf[MetricsReporter], props)
    reporters.forEach { reporter =>
      metrics.addReporter(reporter)
      currentReporters += reporter.getClass.getName -> reporter
    }
    KafkaBroker.notifyClusterListeners(server.clusterId, reporters.asScala)
    KafkaBroker.notifyMetricsReporters(server.clusterId, server.config, reporters.asScala)
  }

  private def removeReporter(className: String): Unit = {
    currentReporters.remove(className).foreach(metrics.removeReporter)
  }

  // Extracts the configured reporter class names from `metric.reporters`.
  private def metricsReporterClasses(configs: util.Map[String, _]): mutable.Buffer[String] = {
    configs.get(KafkaConfig.MetricReporterClassesProp).asInstanceOf[util.List[String]].asScala
  }
}
object DynamicListenerConfig {

  // Listener definitions themselves.
  private val listenerConfigs = Set(
    KafkaConfig.AdvertisedListenersProp,
    KafkaConfig.ListenersProp,
    KafkaConfig.ListenerSecurityProtocolMapProp)

  // SSL configs
  private val sslConfigs = Set(
    KafkaConfig.PrincipalBuilderClassProp,
    KafkaConfig.SslProtocolProp,
    KafkaConfig.SslProviderProp,
    KafkaConfig.SslCipherSuitesProp,
    KafkaConfig.SslEnabledProtocolsProp,
    KafkaConfig.SslKeystoreTypeProp,
    KafkaConfig.SslKeystoreLocationProp,
    KafkaConfig.SslKeystorePasswordProp,
    KafkaConfig.SslKeyPasswordProp,
    KafkaConfig.SslTruststoreTypeProp,
    KafkaConfig.SslTruststoreLocationProp,
    KafkaConfig.SslTruststorePasswordProp,
    KafkaConfig.SslKeyManagerAlgorithmProp,
    KafkaConfig.SslTrustManagerAlgorithmProp,
    KafkaConfig.SslEndpointIdentificationAlgorithmProp,
    KafkaConfig.SslSecureRandomImplementationProp,
    KafkaConfig.SslClientAuthProp,
    KafkaConfig.SslEngineFactoryClassProp)

  // SASL configs
  private val saslConfigs = Set(
    KafkaConfig.SaslMechanismInterBrokerProtocolProp,
    KafkaConfig.SaslJaasConfigProp,
    KafkaConfig.SaslEnabledMechanismsProp,
    KafkaConfig.SaslKerberosServiceNameProp,
    KafkaConfig.SaslKerberosKinitCmdProp,
    KafkaConfig.SaslKerberosTicketRenewWindowFactorProp,
    KafkaConfig.SaslKerberosTicketRenewJitterProp,
    KafkaConfig.SaslKerberosMinTimeBeforeReloginProp,
    KafkaConfig.SaslKerberosPrincipalToLocalRulesProp,
    KafkaConfig.SaslLoginRefreshWindowFactorProp,
    KafkaConfig.SaslLoginRefreshWindowJitterProp,
    KafkaConfig.SaslLoginRefreshMinPeriodSecondsProp,
    KafkaConfig.SaslLoginRefreshBufferSecondsProp)

  // Connection limit configs
  private val connectionConfigs = Set(
    KafkaConfig.MaxConnectionsProp,
    KafkaConfig.MaxConnectionCreationRateProp)

  // All listener-related configs that may be updated dynamically.
  val ReconfigurableConfigs = listenerConfigs ++ sslConfigs ++ saslConfigs ++ connectionConfigs
}
/**
 * Reconfigurable adapter that forwards dynamic broker config changes to a
 * custom client quota callback, when one is installed and is itself
 * [[Reconfigurable]]. When no callback is configured (or it is not
 * reconfigurable), every operation is a no-op.
 */
class DynamicClientQuotaCallback(brokerId: Int, server: KafkaBroker) extends Reconfigurable {

  // Initial configuration is handled by the callback itself; nothing to do here.
  override def configure(configs: util.Map[String, _]): Unit = {}

  /** Returns the configs the installed quota callback supports reconfiguring, or an empty set. */
  override def reconfigurableConfigs(): util.Set[String] = {
    val configs = new util.HashSet[String]()
    server.quotaManagers.clientQuotaCallback.foreach {
      case callback: Reconfigurable => configs.addAll(callback.reconfigurableConfigs)
      case _ =>
    }
    configs
  }

  /** Delegates validation of the proposed configs to the quota callback, if reconfigurable. */
  override def validateReconfiguration(configs: util.Map[String, _]): Unit = {
    server.quotaManagers.clientQuotaCallback.foreach {
      case callback: Reconfigurable => callback.validateReconfiguration(configs)
      case _ =>
    }
  }

  /** Applies updated configs to the quota callback via the dynamic config machinery. */
  override def reconfigure(configs: util.Map[String, _]): Unit = {
    val config = server.config
    server.quotaManagers.clientQuotaCallback.foreach {
      case callback: Reconfigurable =>
        // maybeReconfigure applies only changed values that the callback declares
        // reconfigurable. The stray `true`/`false` literals previously returned from
        // these cases were dead code (discarded by foreach) and have been removed.
        config.dynamicConfig.maybeReconfigure(callback, config.dynamicConfig.currentKafkaConfig, configs)
      case _ =>
    }
  }
}
/**
 * Handles dynamic reconfiguration of broker listeners: validates proposed listener
 * changes and applies them by adding/removing endpoints on the SocketServer.
 */
class DynamicListenerConfig(server: KafkaBroker) extends BrokerReconfigurable with Logging {

  // The listener/security/connection configs that may be updated dynamically.
  override def reconfigurableConfigs: Set[String] = {
    DynamicListenerConfig.ReconfigurableConfigs
  }

  /**
   * Validates a proposed listener reconfiguration before it is applied.
   * Throws ConfigException when the new config is inconsistent or would require
   * a broker restart.
   */
  def validateReconfiguration(newConfig: KafkaConfig): Unit = {
    val oldConfig = server.config
    // Per the message below, dynamic listener updates are only supported on
    // ZooKeeper-based clusters, not with a Raft-based metadata quorum.
    if (!oldConfig.requiresZookeeper) {
      throw new ConfigException("Dynamic reconfiguration of listeners is not yet supported when using a Raft-based metadata quorum")
    }
    val newListeners = listenersToMap(newConfig.listeners)
    val newAdvertisedListeners = listenersToMap(newConfig.advertisedListeners)
    val oldListeners = listenersToMap(oldConfig.listeners)
    // Every advertised listener needs a backing listener, and every listener needs
    // a security protocol mapping.
    if (!newAdvertisedListeners.keySet.subsetOf(newListeners.keySet))
      throw new ConfigException(s"Advertised listeners '$newAdvertisedListeners' must be a subset of listeners '$newListeners'")
    if (!newListeners.keySet.subsetOf(newConfig.listenerSecurityProtocolMap.keySet))
      throw new ConfigException(s"Listeners '$newListeners' must be subset of listener map '${newConfig.listenerSecurityProtocolMap}'")
    // Listeners present in both old and new configs may only change their
    // dynamically reconfigurable settings; everything else must stay identical.
    newListeners.keySet.intersect(oldListeners.keySet).foreach { listenerName =>
      // Configs under this listener's prefix, excluding the reconfigurable ones.
      def immutableListenerConfigs(kafkaConfig: KafkaConfig, prefix: String): Map[String, AnyRef] = {
        kafkaConfig.originalsWithPrefix(prefix, true).asScala.filter { case (key, _) =>
          // skip the reconfigurable configs
          !DynamicSecurityConfigs.contains(key) && !SocketServer.ListenerReconfigurableConfigs.contains(key)
        }
      }
      if (immutableListenerConfigs(newConfig, listenerName.configPrefix) != immutableListenerConfigs(oldConfig, listenerName.configPrefix))
        throw new ConfigException(s"Configs cannot be updated dynamically for existing listener $listenerName, " +
          "restart broker or create a new listener for update")
      if (oldConfig.listenerSecurityProtocolMap(listenerName) != newConfig.listenerSecurityProtocolMap(listenerName))
        throw new ConfigException(s"Security protocol cannot be updated for existing listener $listenerName")
    }
    // The inter-broker listener must always remain advertised.
    if (!newAdvertisedListeners.contains(newConfig.interBrokerListenerName))
      throw new ConfigException(s"Advertised listener must be specified for inter-broker listener ${newConfig.interBrokerListenerName}")
  }

  /**
   * Applies a validated listener change: removes listeners absent from the new
   * config, adds newly introduced ones, and re-publishes broker info on ZK clusters.
   */
  def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit = {
    val newListeners = newConfig.listeners
    val newListenerMap = listenersToMap(newListeners)
    val oldListeners = oldConfig.listeners
    val oldListenerMap = listenersToMap(oldListeners)
    val listenersRemoved = oldListeners.filterNot(e => newListenerMap.contains(e.listenerName))
    val listenersAdded = newListeners.filterNot(e => oldListenerMap.contains(e.listenerName))

    // Clear SASL login cache to force re-login
    if (listenersAdded.nonEmpty || listenersRemoved.nonEmpty)
      LoginManager.closeAll()

    server.socketServer.removeListeners(listenersRemoved)
    if (listenersAdded.nonEmpty)
      server.socketServer.addListeners(listenersAdded)

    // ZK-based brokers publish the updated endpoint set via the controller.
    server match {
      case kafkaServer: KafkaServer => kafkaServer.kafkaController.updateBrokerInfo(kafkaServer.createBrokerInfo)
      case _ =>
    }
  }

  // Index endpoints by listener name for fast membership/lookup checks.
  private def listenersToMap(listeners: Seq[EndPoint]): Map[ListenerName, EndPoint] =
    listeners.map(e => (e.listenerName, e)).toMap

}
| Chasego/kafka | core/src/main/scala/kafka/server/DynamicBrokerConfig.scala | Scala | apache-2.0 | 44,582 |
Extracting, transforming and selecting features - spark.ml
This section covers algorithms for working with features, roughly divided into these groups:
Extraction: Extracting features from “raw” data
Transformation: Scaling, converting, or modifying features
Selection: Selecting a subset from a larger set of features
Table of Contents
Feature Extractors
TF-IDF (HashingTF and IDF)
Word2Vec
CountVectorizer
Feature Transformers
Tokenizer
StopWordsRemover
n-gram
Binarizer
PCA
PolynomialExpansion
Discrete Cosine Transform (DCT)
StringIndexer
IndexToString
OneHotEncoder
VectorIndexer
Normalizer
StandardScaler
MinMaxScaler
Bucketizer
ElementwiseProduct
SQLTransformer
VectorAssembler
QuantileDiscretizer
Feature Selectors
VectorSlicer
RFormula
ChiSqSelector
Feature Extractors
TF-IDF (HashingTF and IDF)
Term Frequency-Inverse Document Frequency (TF-IDF) is a common text pre-processing step. In Spark ML, TF-IDF is separated into two parts: TF (+hashing) and IDF.
TF: HashingTF is a Transformer which takes sets of terms and converts those sets into fixed-length feature vectors. In text processing, a “set of terms” might be a bag of words. The algorithm combines Term Frequency (TF) counts with the hashing trick for dimensionality reduction.
IDF: IDF is an Estimator which fits on a dataset and produces an IDFModel. The IDFModel takes feature vectors (generally created from HashingTF) and scales each column. Intuitively, it down-weights columns which appear frequently in a corpus.
Please refer to the MLlib user guide on TF-IDF for more details on Term Frequency and Inverse Document Frequency.
In the following code segment, we start with a set of sentences. We split each sentence into words using Tokenizer. For each sentence (bag of words), we use HashingTF to hash the sentence into a feature vector. We use IDF to rescale the feature vectors; this generally improves performance when using text as features. Our feature vectors could then be passed to a learning algorithm.
Scala
Java
Python
Refer to the HashingTF Scala docs and the IDF Scala docs for more details on the API.
import org.apache.spark.ml.feature.{HashingTF, IDF, Tokenizer}
val sentenceData = sqlContext.createDataFrame(Seq(
(0, "Hi I heard about Spark"),
(0, "I wish Java could use case classes"),
(1, "Logistic regression models are neat")
)).toDF("label", "sentence")
val tokenizer = new Tokenizer().setInputCol("sentence").setOutputCol("words")
val wordsData = tokenizer.transform(sentenceData)
val hashingTF = new HashingTF()
.setInputCol("words").setOutputCol("rawFeatures").setNumFeatures(20)
val featurizedData = hashingTF.transform(wordsData)
val idf = new IDF().setInputCol("rawFeatures").setOutputCol("features")
val idfModel = idf.fit(featurizedData)
val rescaledData = idfModel.transform(featurizedData)
rescaledData.select("features", "label").take(3).foreach(println)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/TfIdfExample.scala" in the Spark repo.
Word2Vec
Word2Vec is an Estimator which takes sequences of words representing documents and trains a Word2VecModel. The model maps each word to a unique fixed-size vector. The Word2VecModel transforms each document into a vector using the average of all words in the document; this vector can then be used for as features for prediction, document similarity calculations, etc. Please refer to the MLlib user guide on Word2Vec for more details.
In the following code segment, we start with a set of documents, each of which is represented as a sequence of words. For each document, we transform it into a feature vector. This feature vector could then be passed to a learning algorithm.
Scala
Java
Python
Refer to the Word2Vec Scala docs for more details on the API.
import org.apache.spark.ml.feature.Word2Vec
// Input data: Each row is a bag of words from a sentence or document.
val documentDF = sqlContext.createDataFrame(Seq(
"Hi I heard about Spark".split(" "),
"I wish Java could use case classes".split(" "),
"Logistic regression models are neat".split(" ")
).map(Tuple1.apply)).toDF("text")
// Learn a mapping from words to Vectors.
val word2Vec = new Word2Vec()
.setInputCol("text")
.setOutputCol("result")
.setVectorSize(3)
.setMinCount(0)
val model = word2Vec.fit(documentDF)
val result = model.transform(documentDF)
result.select("result").take(3).foreach(println)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/Word2VecExample.scala" in the Spark repo.
CountVectorizer
CountVectorizer and CountVectorizerModel aim to help convert a collection of text documents to vectors of token counts. When an a-priori dictionary is not available, CountVectorizer can be used as an Estimator to extract the vocabulary and generate a CountVectorizerModel. The model produces sparse representations for the documents over the vocabulary, which can then be passed to other algorithms like LDA.
During the fitting process, CountVectorizer will select the top vocabSize words ordered by term frequency across the corpus. An optional parameter “minDF” also affects the fitting process by specifying the minimum number (or fraction if < 1.0) of documents a term must appear in to be included in the vocabulary.
Examples
Assume that we have the following DataFrame with columns id and texts:
id | texts
----|----------
0 | Array("a", "b", "c")
1 | Array("a", "b", "b", "c", "a")
each row in texts is a document of type Array[String]. Invoking fit of CountVectorizer produces a CountVectorizerModel with vocabulary (a, b, c), then the output column “vector” after transformation contains:
id | texts | vector
----|---------------------------------|---------------
0 | Array("a", "b", "c") | (3,[0,1,2],[1.0,1.0,1.0])
1 | Array("a", "b", "b", "c", "a") | (3,[0,1,2],[2.0,2.0,1.0])
each vector represents the token counts of the document over the vocabulary.
Scala
Java
Refer to the CountVectorizer Scala docs and the CountVectorizerModel Scala docs for more details on the API.
import org.apache.spark.ml.feature.{CountVectorizer, CountVectorizerModel}
val df = sqlContext.createDataFrame(Seq(
(0, Array("a", "b", "c")),
(1, Array("a", "b", "b", "c", "a"))
)).toDF("id", "words")
// fit a CountVectorizerModel from the corpus
val cvModel: CountVectorizerModel = new CountVectorizer()
.setInputCol("words")
.setOutputCol("features")
.setVocabSize(3)
.setMinDF(2)
.fit(df)
// alternatively, define CountVectorizerModel with a-priori vocabulary
val cvm = new CountVectorizerModel(Array("a", "b", "c"))
.setInputCol("words")
.setOutputCol("features")
cvModel.transform(df).select("features").show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/CountVectorizerExample.scala" in the Spark repo.
Feature Transformers
Tokenizer
Tokenization is the process of taking text (such as a sentence) and breaking it into individual terms (usually words). A simple Tokenizer class provides this functionality. The example below shows how to split sentences into sequences of words.
RegexTokenizer allows more advanced tokenization based on regular expression (regex) matching. By default, the parameter “pattern” (regex, default: \\s+) is used as delimiters to split the input text. Alternatively, users can set parameter “gaps” to false indicating the regex “pattern” denotes “tokens” rather than splitting gaps, and find all matching occurrences as the tokenization result.
Scala
Java
Python
Refer to the Tokenizer Scala docs and the RegexTokenizer Scala docs for more details on the API.
import org.apache.spark.ml.feature.{RegexTokenizer, Tokenizer}
val sentenceDataFrame = sqlContext.createDataFrame(Seq(
(0, "Hi I heard about Spark"),
(1, "I wish Java could use case classes"),
(2, "Logistic,regression,models,are,neat")
)).toDF("label", "sentence")
val tokenizer = new Tokenizer().setInputCol("sentence").setOutputCol("words")
val regexTokenizer = new RegexTokenizer()
.setInputCol("sentence")
.setOutputCol("words")
.setPattern("\\\\W") // alternatively .setPattern("\\\\w+").setGaps(false)
val tokenized = tokenizer.transform(sentenceDataFrame)
tokenized.select("words", "label").take(3).foreach(println)
val regexTokenized = regexTokenizer.transform(sentenceDataFrame)
regexTokenized.select("words", "label").take(3).foreach(println)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/TokenizerExample.scala" in the Spark repo.
StopWordsRemover
Stop words are words which should be excluded from the input, typically because the words appear frequently and don’t carry as much meaning.
StopWordsRemover takes as input a sequence of strings (e.g. the output of a Tokenizer) and drops all the stop words from the input sequences. The list of stopwords is specified by the stopWords parameter. We provide a list of stop words by default, accessible by calling getStopWords on a newly instantiated StopWordsRemover instance. A boolean parameter caseSensitive indicates if the matches should be case sensitive (false by default).
Examples
Assume that we have the following DataFrame with columns id and raw:
id | raw
----|----------
0 | [I, saw, the, red, baloon]
1 | [Mary, had, a, little, lamb]
Applying StopWordsRemover with raw as the input column and filtered as the output column, we should get the following:
id | raw | filtered
----|-----------------------------|--------------------
0 | [I, saw, the, red, baloon] | [saw, red, baloon]
1 | [Mary, had, a, little, lamb]|[Mary, little, lamb]
In filtered, the stop words “I”, “the”, “had”, and “a” have been filtered out.
Scala
Java
Python
Refer to the StopWordsRemover Scala docs for more details on the API.
import org.apache.spark.ml.feature.StopWordsRemover
val remover = new StopWordsRemover()
.setInputCol("raw")
.setOutputCol("filtered")
val dataSet = sqlContext.createDataFrame(Seq(
(0, Seq("I", "saw", "the", "red", "baloon")),
(1, Seq("Mary", "had", "a", "little", "lamb"))
)).toDF("id", "raw")
remover.transform(dataSet).show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/StopWordsRemoverExample.scala" in the Spark repo.
n-gram
An $n$-gram is a sequence of $n$ tokens (typically words) for some integer $n$. The NGram class can be used to transform input features into $n$-grams.
NGram takes as input a sequence of strings (e.g. the output of a Tokenizer). The parameter n is used to determine the number of terms in each $n$-gram. The output will consist of a sequence of $n$-grams where each $n$-gram is represented by a space-delimited string of $n$ consecutive words. If the input sequence contains fewer than n strings, no output is produced.
Scala
Java
Python
Refer to the NGram Scala docs for more details on the API.
import org.apache.spark.ml.feature.NGram
val wordDataFrame = sqlContext.createDataFrame(Seq(
(0, Array("Hi", "I", "heard", "about", "Spark")),
(1, Array("I", "wish", "Java", "could", "use", "case", "classes")),
(2, Array("Logistic", "regression", "models", "are", "neat"))
)).toDF("label", "words")
val ngram = new NGram().setInputCol("words").setOutputCol("ngrams")
val ngramDataFrame = ngram.transform(wordDataFrame)
ngramDataFrame.take(3).map(_.getAs[Stream[String]]("ngrams").toList).foreach(println)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/NGramExample.scala" in the Spark repo.
Binarizer
Binarization is the process of thresholding numerical features to binary (0/1) features.
Binarizer takes the common parameters inputCol and outputCol, as well as the threshold for binarization. Feature values greater than the threshold are binarized to 1.0; values equal to or less than the threshold are binarized to 0.0.
Scala
Java
Python
Refer to the Binarizer Scala docs for more details on the API.
import org.apache.spark.ml.feature.Binarizer
val data = Array((0, 0.1), (1, 0.8), (2, 0.2))
val dataFrame: DataFrame = sqlContext.createDataFrame(data).toDF("label", "feature")
val binarizer: Binarizer = new Binarizer()
.setInputCol("feature")
.setOutputCol("binarized_feature")
.setThreshold(0.5)
val binarizedDataFrame = binarizer.transform(dataFrame)
val binarizedFeatures = binarizedDataFrame.select("binarized_feature")
binarizedFeatures.collect().foreach(println)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/BinarizerExample.scala" in the Spark repo.
PCA
PCA is a statistical procedure that uses an orthogonal transformation to convert a set of observations of possibly correlated variables into a set of values of linearly uncorrelated variables called principal components. A PCA class trains a model to project vectors to a low-dimensional space using PCA. The example below shows how to project 5-dimensional feature vectors into 3-dimensional principal components.
Scala
Java
Python
Refer to the PCA Scala docs for more details on the API.
import org.apache.spark.ml.feature.PCA
import org.apache.spark.mllib.linalg.Vectors
val data = Array(
Vectors.sparse(5, Seq((1, 1.0), (3, 7.0))),
Vectors.dense(2.0, 0.0, 3.0, 4.0, 5.0),
Vectors.dense(4.0, 0.0, 0.0, 6.0, 7.0)
)
val df = sqlContext.createDataFrame(data.map(Tuple1.apply)).toDF("features")
val pca = new PCA()
.setInputCol("features")
.setOutputCol("pcaFeatures")
.setK(3)
.fit(df)
val pcaDF = pca.transform(df)
val result = pcaDF.select("pcaFeatures")
result.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/PCAExample.scala" in the Spark repo.
PolynomialExpansion
Polynomial expansion is the process of expanding your features into a polynomial space, which is formulated by an n-degree combination of original dimensions. A PolynomialExpansion class provides this functionality. The example below shows how to expand your features into a 3-degree polynomial space.
Scala
Java
Python
Refer to the PolynomialExpansion Scala docs for more details on the API.
import org.apache.spark.ml.feature.PolynomialExpansion
import org.apache.spark.mllib.linalg.Vectors
val data = Array(
Vectors.dense(-2.0, 2.3),
Vectors.dense(0.0, 0.0),
Vectors.dense(0.6, -1.1)
)
val df = sqlContext.createDataFrame(data.map(Tuple1.apply)).toDF("features")
val polynomialExpansion = new PolynomialExpansion()
.setInputCol("features")
.setOutputCol("polyFeatures")
.setDegree(3)
val polyDF = polynomialExpansion.transform(df)
polyDF.select("polyFeatures").take(3).foreach(println)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/PolynomialExpansionExample.scala" in the Spark repo.
Discrete Cosine Transform (DCT)
The Discrete Cosine Transform transforms a length $N$ real-valued sequence in the time domain into another length $N$ real-valued sequence in the frequency domain. A DCT class provides this functionality, implementing the DCT-II and scaling the result by $1/\sqrt{2}$ such that the representing matrix for the transform is unitary. No shift is applied to the transformed sequence (e.g. the $0$th element of the transformed sequence is the $0$th DCT coefficient and not the $N/2$th).
Scala
Java
Refer to the DCT Scala docs for more details on the API.
import org.apache.spark.ml.feature.DCT
import org.apache.spark.mllib.linalg.Vectors
val data = Seq(
Vectors.dense(0.0, 1.0, -2.0, 3.0),
Vectors.dense(-1.0, 2.0, 4.0, -7.0),
Vectors.dense(14.0, -2.0, -5.0, 1.0))
val df = sqlContext.createDataFrame(data.map(Tuple1.apply)).toDF("features")
val dct = new DCT()
.setInputCol("features")
.setOutputCol("featuresDCT")
.setInverse(false)
val dctDf = dct.transform(df)
dctDf.select("featuresDCT").show(3)
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/DCTExample.scala" in the Spark repo.
StringIndexer
StringIndexer encodes a string column of labels to a column of label indices. The indices are in [0, numLabels), ordered by label frequencies. So the most frequent label gets index 0. If the input column is numeric, we cast it to string and index the string values. When downstream pipeline components such as Estimator or Transformer make use of this string-indexed label, you must set the input column of the component to this string-indexed column name. In many cases, you can set the input column with setInputCol.
Examples
Assume that we have the following DataFrame with columns id and category:
id | category
----|----------
0 | a
1 | b
2 | c
3 | a
4 | a
5 | c
category is a string column with three labels: “a”, “b”, and “c”. Applying StringIndexer with category as the input column and categoryIndex as the output column, we should get the following:
id | category | categoryIndex
----|----------|---------------
0 | a | 0.0
1 | b | 2.0
2 | c | 1.0
3 | a | 0.0
4 | a | 0.0
5 | c | 1.0
“a” gets index 0 because it is the most frequent, followed by “c” with index 1 and “b” with index 2.
Additionally, there are two strategies regarding how StringIndexer will handle unseen labels when you have fit a StringIndexer on one dataset and then use it to transform another:
throw an exception (which is the default)
skip the row containing the unseen label entirely
Examples
Let’s go back to our previous example but this time reuse our previously defined StringIndexer on the following dataset:
id | category
----|----------
0 | a
1 | b
2 | c
3 | d
If you’ve not set how StringIndexer handles unseen labels or set it to “error”, an exception will be thrown. However, if you had called setHandleInvalid("skip"), the following dataset will be generated:
id | category | categoryIndex
----|----------|---------------
0 | a | 0.0
1 | b | 2.0
2 | c | 1.0
Notice that the row containing “d” does not appear.
Scala
Java
Python
Refer to the StringIndexer Scala docs for more details on the API.
import org.apache.spark.ml.feature.StringIndexer
val df = sqlContext.createDataFrame(
Seq((0, "a"), (1, "b"), (2, "c"), (3, "a"), (4, "a"), (5, "c"))
).toDF("id", "category")
val indexer = new StringIndexer()
.setInputCol("category")
.setOutputCol("categoryIndex")
val indexed = indexer.fit(df).transform(df)
indexed.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/StringIndexerExample.scala" in the Spark repo.
IndexToString
Symmetrically to StringIndexer, IndexToString maps a column of label indices back to a column containing the original labels as strings. The common use case is to produce indices from labels with StringIndexer, train a model with those indices and retrieve the original labels from the column of predicted indices with IndexToString. However, you are free to supply your own labels.
Examples
Building on the StringIndexer example, let’s assume we have the following DataFrame with columns id and categoryIndex:
id | categoryIndex
----|---------------
0 | 0.0
1 | 2.0
2 | 1.0
3 | 0.0
4 | 0.0
5 | 1.0
Applying IndexToString with categoryIndex as the input column, originalCategory as the output column, we are able to retrieve our original labels (they will be inferred from the columns’ metadata):
id | categoryIndex | originalCategory
----|---------------|-----------------
0 | 0.0 | a
1 | 2.0 | b
2 | 1.0 | c
3 | 0.0 | a
4 | 0.0 | a
5 | 1.0 | c
Scala
Java
Python
Refer to the IndexToString Scala docs for more details on the API.
import org.apache.spark.ml.feature.{StringIndexer, IndexToString}
val df = sqlContext.createDataFrame(Seq(
(0, "a"),
(1, "b"),
(2, "c"),
(3, "a"),
(4, "a"),
(5, "c")
)).toDF("id", "category")
val indexer = new StringIndexer()
.setInputCol("category")
.setOutputCol("categoryIndex")
.fit(df)
val indexed = indexer.transform(df)
val converter = new IndexToString()
.setInputCol("categoryIndex")
.setOutputCol("originalCategory")
val converted = converter.transform(indexed)
converted.select("id", "originalCategory").show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/IndexToStringExample.scala" in the Spark repo.
OneHotEncoder
One-hot encoding maps a column of label indices to a column of binary vectors, with at most a single one-value. This encoding allows algorithms which expect continuous features, such as Logistic Regression, to use categorical features
Scala
Java
Python
Refer to the OneHotEncoder Scala docs for more details on the API.
import org.apache.spark.ml.feature.{OneHotEncoder, StringIndexer}
val df = sqlContext.createDataFrame(Seq(
(0, "a"),
(1, "b"),
(2, "c"),
(3, "a"),
(4, "a"),
(5, "c")
)).toDF("id", "category")
val indexer = new StringIndexer()
.setInputCol("category")
.setOutputCol("categoryIndex")
.fit(df)
val indexed = indexer.transform(df)
val encoder = new OneHotEncoder()
.setInputCol("categoryIndex")
.setOutputCol("categoryVec")
val encoded = encoder.transform(indexed)
encoded.select("id", "categoryVec").show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/OneHotEncoderExample.scala" in the Spark repo.
VectorIndexer
VectorIndexer helps index categorical features in datasets of Vectors. It can both automatically decide which features are categorical and convert original values to category indices. Specifically, it does the following:
Take an input column of type Vector and a parameter maxCategories.
Decide which features should be categorical based on the number of distinct values, where features with at most maxCategories are declared categorical.
Compute 0-based category indices for each categorical feature.
Index categorical features and transform original feature values to indices.
Indexing categorical features allows algorithms such as Decision Trees and Tree Ensembles to treat categorical features appropriately, improving performance.
In the example below, we read in a dataset of labeled points and then use VectorIndexer to decide which features should be treated as categorical. We transform the categorical feature values to their indices. This transformed data could then be passed to algorithms such as DecisionTreeRegressor that handle categorical features.
Scala
Java
Python
Refer to the VectorIndexer Scala docs for more details on the API.
import org.apache.spark.ml.feature.VectorIndexer
val data = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
val indexer = new VectorIndexer()
.setInputCol("features")
.setOutputCol("indexed")
.setMaxCategories(10)
val indexerModel = indexer.fit(data)
val categoricalFeatures: Set[Int] = indexerModel.categoryMaps.keys.toSet
println(s"Chose ${categoricalFeatures.size} categorical features: " +
categoricalFeatures.mkString(", "))
// Create new column "indexed" with categorical values transformed to indices
val indexedData = indexerModel.transform(data)
indexedData.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/VectorIndexerExample.scala" in the Spark repo.
Normalizer
Normalizer is a Transformer which transforms a dataset of Vector rows, normalizing each Vector to have unit norm. It takes parameter p, which specifies the p-norm used for normalization ($p = 2$ by default). This normalization can help standardize your input data and improve the behavior of learning algorithms.
The following example demonstrates how to load a dataset in libsvm format and then normalize each row to have unit $L^1$ norm and unit $L^\infty$ norm.
Scala
Java
Python
Refer to the Normalizer Scala docs for more details on the API.
import org.apache.spark.ml.feature.Normalizer
val dataFrame = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
// Normalize each Vector using $L^1$ norm.
val normalizer = new Normalizer()
.setInputCol("features")
.setOutputCol("normFeatures")
.setP(1.0)
val l1NormData = normalizer.transform(dataFrame)
l1NormData.show()
// Normalize each Vector using $L^\\infty$ norm.
val lInfNormData = normalizer.transform(dataFrame, normalizer.p -> Double.PositiveInfinity)
lInfNormData.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/NormalizerExample.scala" in the Spark repo.
StandardScaler
StandardScaler transforms a dataset of Vector rows, normalizing each feature to have unit standard deviation and/or zero mean. It takes parameters:
withStd: True by default. Scales the data to unit standard deviation.
withMean: False by default. Centers the data with mean before scaling. It will build a dense output, so this does not work on sparse input and will raise an exception.
StandardScaler is an Estimator which can be fit on a dataset to produce a StandardScalerModel; this amounts to computing summary statistics. The model can then transform a Vector column in a dataset to have unit standard deviation and/or zero mean features.
Note that if the standard deviation of a feature is zero, it will return default 0.0 value in the Vector for that feature.
The following example demonstrates how to load a dataset in libsvm format and then normalize each feature to have unit standard deviation.
Scala
Java
Python
Refer to the StandardScaler Scala docs for more details on the API.
import org.apache.spark.ml.feature.StandardScaler
val dataFrame = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
val scaler = new StandardScaler()
.setInputCol("features")
.setOutputCol("scaledFeatures")
.setWithStd(true)
.setWithMean(false)
// Compute summary statistics by fitting the StandardScaler.
val scalerModel = scaler.fit(dataFrame)
// Normalize each feature to have unit standard deviation.
val scaledData = scalerModel.transform(dataFrame)
scaledData.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/StandardScalerExample.scala" in the Spark repo.
MinMaxScaler
MinMaxScaler transforms a dataset of Vector rows, rescaling each feature to a specific range (often [0, 1]). It takes parameters:
min: 0.0 by default. Lower bound after transformation, shared by all features.
max: 1.0 by default. Upper bound after transformation, shared by all features.
MinMaxScaler computes summary statistics on a data set and produces a MinMaxScalerModel. The model can then transform each feature individually such that it is in the given range.
The rescaled value for a feature E is calculated as,
$$\mathrm{Rescaled}(e_i) = \frac{e_i - E_{min}}{E_{max} - E_{min}} \cdot (max - min) + min \tag{1}$$
For the case $E_{max} == E_{min}$, $\mathrm{Rescaled}(e_i) = 0.5 \cdot (max + min)$
Note that since zero values will probably be transformed to non-zero values, output of the transformer will be DenseVector even for sparse input.
The following example demonstrates how to load a dataset in libsvm format and then rescale each feature to [0, 1].
Scala
Java
Refer to the MinMaxScaler Scala docs and the MinMaxScalerModel Scala docs for more details on the API.
import org.apache.spark.ml.feature.MinMaxScaler
val dataFrame = sqlContext.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
val scaler = new MinMaxScaler()
.setInputCol("features")
.setOutputCol("scaledFeatures")
// Compute summary statistics and generate MinMaxScalerModel
val scalerModel = scaler.fit(dataFrame)
// rescale each feature to range [min, max].
val scaledData = scalerModel.transform(dataFrame)
scaledData.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/MinMaxScalerExample.scala" in the Spark repo.
Bucketizer
Bucketizer transforms a column of continuous features to a column of feature buckets, where the buckets are specified by users. It takes a parameter:
splits: Parameter for mapping continuous features into buckets. With n+1 splits, there are n buckets. A bucket defined by splits x,y holds values in the range [x,y) except the last bucket, which also includes y. Splits should be strictly increasing. Values at -inf, inf must be explicitly provided to cover all Double values; Otherwise, values outside the splits specified will be treated as errors. Two examples of splits are Array(Double.NegativeInfinity, 0.0, 1.0, Double.PositiveInfinity) and Array(0.0, 1.0, 2.0).
Note that if you have no idea of the upper bound and lower bound of the targeted column, you should add Double.NegativeInfinity and Double.PositiveInfinity as the bounds of your splits to prevent a potential out of Bucketizer bounds exception.
Note also that the splits that you provided have to be in strictly increasing order, i.e. s0 < s1 < s2 < ... < sn.
More details can be found in the API docs for Bucketizer.
The following example demonstrates how to bucketize a column of Doubles into another index-wised column.
Scala
Java
Python
Refer to the Bucketizer Scala docs for more details on the API.
import org.apache.spark.ml.feature.Bucketizer
val splits = Array(Double.NegativeInfinity, -0.5, 0.0, 0.5, Double.PositiveInfinity)
val data = Array(-0.5, -0.3, 0.0, 0.2)
val dataFrame = sqlContext.createDataFrame(data.map(Tuple1.apply)).toDF("features")
val bucketizer = new Bucketizer()
.setInputCol("features")
.setOutputCol("bucketedFeatures")
.setSplits(splits)
// Transform original data into its bucket index.
val bucketedData = bucketizer.transform(dataFrame)
bucketedData.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/BucketizerExample.scala" in the Spark repo.
ElementwiseProduct
ElementwiseProduct multiplies each input vector by a provided “weight” vector, using element-wise multiplication. In other words, it scales each column of the dataset by a scalar multiplier. This represents the Hadamard product between the input vector, v and transforming vector, w, to yield a result vector.
\[ \begin{pmatrix} v_1 \\ \vdots \\ v_N \end{pmatrix} \circ \begin{pmatrix} w_1 \\ \vdots \\ w_N \end{pmatrix} = \begin{pmatrix} v_1 w_1 \\ \vdots \\ v_N w_N \end{pmatrix} \]
This example below demonstrates how to transform vectors using a transforming vector value.
Scala
Java
Python
Refer to the ElementwiseProduct Scala docs for more details on the API.
import org.apache.spark.ml.feature.ElementwiseProduct
import org.apache.spark.mllib.linalg.Vectors
// Create some vector data; also works for sparse vectors
val dataFrame = sqlContext.createDataFrame(Seq(
("a", Vectors.dense(1.0, 2.0, 3.0)),
("b", Vectors.dense(4.0, 5.0, 6.0)))).toDF("id", "vector")
val transformingVector = Vectors.dense(0.0, 1.0, 2.0)
val transformer = new ElementwiseProduct()
.setScalingVec(transformingVector)
.setInputCol("vector")
.setOutputCol("transformedVector")
// Batch transform the vectors to create new column:
transformer.transform(dataFrame).show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/ElementwiseProductExample.scala" in the Spark repo.
SQLTransformer
SQLTransformer implements the transformations which are defined by SQL statement. Currently we only support SQL syntax like "SELECT ... FROM __THIS__ ..." where "__THIS__" represents the underlying table of the input dataset. The select clause specifies the fields, constants, and expressions to display in the output, it can be any select clause that Spark SQL supports. Users can also use Spark SQL built-in function and UDFs to operate on these selected columns. For example, SQLTransformer supports statements like:
SELECT a, a + b AS a_b FROM __THIS__
SELECT a, SQRT(b) AS b_sqrt FROM __THIS__ where a > 5
SELECT a, b, SUM(c) AS c_sum FROM __THIS__ GROUP BY a, b
Examples
Assume that we have the following DataFrame with columns id, v1 and v2:
id | v1 | v2
----|-----|-----
0 | 1.0 | 3.0
2 | 2.0 | 5.0
This is the output of the SQLTransformer with statement "SELECT *, (v1 + v2) AS v3, (v1 * v2) AS v4 FROM __THIS__":
id | v1 | v2 | v3 | v4
----|-----|-----|-----|-----
0 | 1.0 | 3.0 | 4.0 | 3.0
2 | 2.0 | 5.0 | 7.0 |10.0
Scala
Java
Python
Refer to the SQLTransformer Scala docs for more details on the API.
import org.apache.spark.ml.feature.SQLTransformer
val df = sqlContext.createDataFrame(
Seq((0, 1.0, 3.0), (2, 2.0, 5.0))).toDF("id", "v1", "v2")
val sqlTrans = new SQLTransformer().setStatement(
"SELECT *, (v1 + v2) AS v3, (v1 * v2) AS v4 FROM __THIS__")
sqlTrans.transform(df).show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/SQLTransformerExample.scala" in the Spark repo.
VectorAssembler
VectorAssembler is a transformer that combines a given list of columns into a single vector column. It is useful for combining raw features and features generated by different feature transformers into a single feature vector, in order to train ML models like logistic regression and decision trees. VectorAssembler accepts the following input column types: all numeric types, boolean type, and vector type. In each row, the values of the input columns will be concatenated into a vector in the specified order.
Examples
Assume that we have a DataFrame with the columns id, hour, mobile, userFeatures, and clicked:
id | hour | mobile | userFeatures | clicked
----|------|--------|------------------|---------
0 | 18 | 1.0 | [0.0, 10.0, 0.5] | 1.0
userFeatures is a vector column that contains three user features. We want to combine hour, mobile, and userFeatures into a single feature vector called features and use it to predict clicked or not. If we set VectorAssembler’s input columns to hour, mobile, and userFeatures and output column to features, after transformation we should get the following DataFrame:
id | hour | mobile | userFeatures | clicked | features
----|------|--------|------------------|---------|-----------------------------
0 | 18 | 1.0 | [0.0, 10.0, 0.5] | 1.0 | [18.0, 1.0, 0.0, 10.0, 0.5]
Scala
Java
Python
Refer to the VectorAssembler Scala docs for more details on the API.
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.mllib.linalg.Vectors
val dataset = sqlContext.createDataFrame(
Seq((0, 18, 1.0, Vectors.dense(0.0, 10.0, 0.5), 1.0))
).toDF("id", "hour", "mobile", "userFeatures", "clicked")
val assembler = new VectorAssembler()
.setInputCols(Array("hour", "mobile", "userFeatures"))
.setOutputCol("features")
val output = assembler.transform(dataset)
println(output.select("features", "clicked").first())
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/VectorAssemblerExample.scala" in the Spark repo.
QuantileDiscretizer
QuantileDiscretizer takes a column with continuous features and outputs a column with binned categorical features. The bin ranges are chosen by taking a sample of the data and dividing it into roughly equal parts. The lower and upper bin bounds will be -Infinity and +Infinity, covering all real values. This attempts to find numBuckets partitions based on a sample of the given input data, but it may find fewer depending on the data sample values.
Note that the result may be different every time you run it, since the sample strategy behind it is non-deterministic.
Examples
Assume that we have a DataFrame with the columns id, hour:
id | hour
----|------
0 | 18.0
----|------
1 | 19.0
----|------
2 | 8.0
----|------
3 | 5.0
----|------
4 | 2.2
hour is a continuous feature with Double type. We want to turn the continuous feature into categorical one. Given numBuckets = 3, we should get the following DataFrame:
id | hour | result
----|------|------
0 | 18.0 | 2.0
----|------|------
1 | 19.0 | 2.0
----|------|------
2 | 8.0 | 1.0
----|------|------
3 | 5.0 | 1.0
----|------|------
4 | 2.2 | 0.0
Scala
Java
Refer to the QuantileDiscretizer Scala docs for more details on the API.
import org.apache.spark.ml.feature.QuantileDiscretizer
val data = Array((0, 18.0), (1, 19.0), (2, 8.0), (3, 5.0), (4, 2.2))
val df = sc.parallelize(data).toDF("id", "hour")
val discretizer = new QuantileDiscretizer()
.setInputCol("hour")
.setOutputCol("result")
.setNumBuckets(3)
val result = discretizer.fit(df).transform(df)
result.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/QuantileDiscretizerExample.scala" in the Spark repo.
Feature Selectors
VectorSlicer
VectorSlicer is a transformer that takes a feature vector and outputs a new feature vector with a sub-array of the original features. It is useful for extracting features from a vector column.
VectorSlicer accepts a vector column with a specified indices, then outputs a new vector column whose values are selected via those indices. There are two types of indices,
Integer indices that represents the indices into the vector, setIndices();
String indices that represents the names of features into the vector, setNames(). This requires the vector column to have an AttributeGroup since the implementation matches on the name field of an Attribute.
Specification by integer and by string are both acceptable; moreover, you can use integer indices and string names simultaneously. At least one feature must be selected. Duplicate features are not allowed, so there can be no overlap between selected indices and names. Note that if names of features are selected, an exception will be thrown when empty input attributes are encountered.
The output vector will order features with the selected indices first (in the order given), followed by the selected names (in the order given).
Examples
Suppose that we have a DataFrame with the column userFeatures:
userFeatures
------------------
[0.0, 10.0, 0.5]
userFeatures is a vector column that contains three user features. Assuming that the first value of userFeatures is always zero, we want to remove it and select only the last two columns. The VectorSlicer selects the last two elements with setIndices(1, 2), producing a new vector column named features:
userFeatures | features
------------------|-----------------------------
[0.0, 10.0, 0.5] | [10.0, 0.5]
Suppose also that we have potential input attributes for userFeatures, i.e. ["f1", "f2", "f3"]; then we can use setNames("f2", "f3") to select them.
userFeatures | features
------------------|-----------------------------
[0.0, 10.0, 0.5] | [10.0, 0.5]
["f1", "f2", "f3"] | ["f2", "f3"]
Scala
Java
Refer to the VectorSlicer Scala docs for more details on the API.
import org.apache.spark.ml.attribute.{Attribute, AttributeGroup, NumericAttribute}
import org.apache.spark.ml.feature.VectorSlicer
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
val data = Array(Row(Vectors.dense(-2.0, 2.3, 0.0)))
val defaultAttr = NumericAttribute.defaultAttr
val attrs = Array("f1", "f2", "f3").map(defaultAttr.withName)
val attrGroup = new AttributeGroup("userFeatures", attrs.asInstanceOf[Array[Attribute]])
val dataRDD = sc.parallelize(data)
val dataset = sqlContext.createDataFrame(dataRDD, StructType(Array(attrGroup.toStructField())))
val slicer = new VectorSlicer().setInputCol("userFeatures").setOutputCol("features")
slicer.setIndices(Array(1)).setNames(Array("f3"))
// or slicer.setIndices(Array(1, 2)), or slicer.setNames(Array("f2", "f3"))
val output = slicer.transform(dataset)
println(output.select("userFeatures", "features").first())
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala" in the Spark repo.
RFormula
RFormula selects columns specified by an R model formula. It produces a vector column of features and a double column of labels. Like when formulas are used in R for linear regression, string input columns will be one-hot encoded, and numeric columns will be cast to doubles. If not already present in the DataFrame, the output label column will be created from the specified response variable in the formula.
Examples
Assume that we have a DataFrame with the columns id, country, hour, and clicked:
id | country | hour | clicked
---|---------|------|---------
7 | "US" | 18 | 1.0
8 | "CA" | 12 | 0.0
9 | "NZ" | 15 | 0.0
If we use RFormula with a formula string of clicked ~ country + hour, which indicates that we want to predict clicked based on country and hour, after transformation we should get the following DataFrame:
id | country | hour | clicked | features | label
---|---------|------|---------|------------------|-------
7 | "US" | 18 | 1.0 | [0.0, 0.0, 18.0] | 1.0
8 | "CA" | 12 | 0.0 | [0.0, 1.0, 12.0] | 0.0
9 | "NZ" | 15 | 0.0 | [1.0, 0.0, 15.0] | 0.0
Scala
Java
Python
Refer to the RFormula Scala docs for more details on the API.
import org.apache.spark.ml.feature.RFormula
val dataset = sqlContext.createDataFrame(Seq(
(7, "US", 18, 1.0),
(8, "CA", 12, 0.0),
(9, "NZ", 15, 0.0)
)).toDF("id", "country", "hour", "clicked")
val formula = new RFormula()
.setFormula("clicked ~ country + hour")
.setFeaturesCol("features")
.setLabelCol("label")
val output = formula.fit(dataset).transform(dataset)
output.select("features", "label").show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/RFormulaExample.scala" in the Spark repo.
ChiSqSelector
ChiSqSelector stands for Chi-Squared feature selection. It operates on labeled data with categorical features. ChiSqSelector orders features based on a Chi-Squared test of independence from the class, and then filters (selects) the top features which the class label depends on the most. This is akin to yielding the features with the most predictive power.
Examples
Assume that we have a DataFrame with the columns id, features, and clicked, which is used as our target to be predicted:
id | features | clicked
---|-----------------------|---------
7 | [0.0, 0.0, 18.0, 1.0] | 1.0
8 | [0.0, 1.0, 12.0, 0.0] | 0.0
9 | [1.0, 0.0, 15.0, 0.1] | 0.0
If we use ChiSqSelector with numTopFeatures = 1, then according to our label clicked the last column of features is chosen as the most useful feature:
id | features | clicked | selectedFeatures
---|-----------------------|---------|------------------
7 | [0.0, 0.0, 18.0, 1.0] | 1.0 | [1.0]
8 | [0.0, 1.0, 12.0, 0.0] | 0.0 | [0.0]
9 | [1.0, 0.0, 15.0, 0.1] | 0.0 | [0.1]
Scala
Java
Refer to the ChiSqSelector Scala docs for more details on the API.
import org.apache.spark.ml.feature.ChiSqSelector
import org.apache.spark.mllib.linalg.Vectors
val data = Seq(
(7, Vectors.dense(0.0, 0.0, 18.0, 1.0), 1.0),
(8, Vectors.dense(0.0, 1.0, 12.0, 0.0), 0.0),
(9, Vectors.dense(1.0, 0.0, 15.0, 0.1), 0.0)
)
val df = sc.parallelize(data).toDF("id", "features", "clicked")
val selector = new ChiSqSelector()
.setNumTopFeatures(1)
.setFeaturesCol("features")
.setLabelCol("clicked")
.setOutputCol("selectedFeatures")
val result = selector.fit(df).transform(df)
result.show()
Find full example code at "examples/src/main/scala/org/apache/spark/examples/ml/ChiSqSelectorExample.scala" in the Spark repo.
| xieguobin/Spark_2.0.0_cn1 | ml_analysis/ETS.scala | Scala | apache-2.0 | 43,199 |
// Copyright (C) 2014 Fehmi Can Saglam (@fehmicans) and contributors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package reactivemongo.extensions.json.dao
import scala.util.Random
import scala.concurrent.{ Future, Await, ExecutionContext }
import scala.concurrent.duration._
import play.api.libs.json.{ Json, JsObject, Format, Writes }
import reactivemongo.bson.BSONDocument
import reactivemongo.api.{ bulk, DB, QueryOpts }
import reactivemongo.api.indexes.Index
import reactivemongo.core.commands.{ LastError, GetLastError, Count, FindAndModify, Update, Remove }
import play.modules.reactivemongo.json.collection.JSONCollection
import play.modules.reactivemongo.json.ImplicitBSONHandlers.JsObjectWriter
import reactivemongo.extensions.BSONFormats
import reactivemongo.extensions.dao.{ Dao, LifeCycle, ReflexiveLifeCycle }
import reactivemongo.extensions.json.dsl.JsonDsl._
import play.api.libs.iteratee.{ Iteratee, Enumerator }
/**
* A DAO implementation that operates on JSONCollection using JsObject.
*
* To create a DAO for a concrete model extend this class.
*
* Below is a sample model.
* {{{
* import reactivemongo.bson.BSONObjectID
* import play.api.libs.json.Json
* import play.modules.reactivemongo.json.BSONFormats._
*
* case class Person(
* _id: BSONObjectID = BSONObjectID.generate,
* name: String,
* surname: String,
* age: Int)
*
* object Person {
* implicit val personFormat = Json.format[Person]
* }
*
* }}}
*
* To define a JsonDao for the Person model you just need to extend JsonDao.
*
* {{{
* import reactivemongo.api.{ MongoDriver, DB }
* import reactivemongo.bson.BSONObjectID
* import play.modules.reactivemongo.json.BSONFormats._
* import reactivemongo.extensions.json.dao.JsonDao
* import scala.concurrent.ExecutionContext.Implicits.global
*
*
* object MongoContext {
* val driver = new MongoDriver
* val connection = driver.connection(List("localhost"))
* def db(): DB = connection("reactivemongo-extensions")
* }
*
* object PersonDao extends JsonDao[Person, BSONObjectID](MongoContext.db, "persons")
* }}}
*
* @param db A parameterless function returning a [[reactivemongo.api.DB]] instance.
* @param collectionName Name of the collection this DAO is going to operate on.
* @param lifeCycle [[reactivemongo.extensions.dao.LifeCycle]] for the Model type.
* @tparam Model Type of the model that this DAO uses.
* @tparam ID Type of the ID field of the model.
*/
abstract class JsonDao[Model: Format, ID: Writes](db: () => DB, collectionName: String)(implicit lifeCycle: LifeCycle[Model, ID] = new ReflexiveLifeCycle[Model, ID], ec: ExecutionContext)
    extends Dao[JSONCollection, JsObject, Model, ID, Writes](db, collectionName) {

  /** Converts a Play JSON object into a BSON document.
    * Throws if `document` has no BSON representation (`.get` on the JsResult). */
  private def toBSONDocument(document: JsObject): BSONDocument = {
    BSONFormats.BSONDocumentFormat.reads(document).get
  }

  /** Converts a BSON document back into a Play JSON object. */
  private def toJsObject(document: BSONDocument): JsObject = {
    BSONFormats.BSONDocumentFormat.writes(document).as[JsObject]
  }

  /** Ensures all `autoIndexes` exist on the underlying collection and then
    * notifies the life cycle. Invoked automatically at construction time
    * (see the `ensureIndexes()` call at the bottom of this class). */
  def ensureIndexes()(implicit ec: ExecutionContext): Future[Traversable[Boolean]] = Future sequence {
    autoIndexes map { index =>
      collection.indexesManager.ensure(index)
    }
  }.map { results =>
    lifeCycle.ensuredIndexes()
    results
  }

  /** Lists the indexes currently defined on the collection. */
  def listIndexes()(implicit ec: ExecutionContext): Future[List[Index]] = {
    collection.indexesManager.list()
  }

  /** Finds at most one model matching `selector`. */
  def findOne(selector: JsObject = Json.obj())(implicit ec: ExecutionContext): Future[Option[Model]] = {
    collection.find(selector).one[Model]
  }

  /** Finds the model whose `_id` equals `id`. */
  def findById(id: ID)(implicit ec: ExecutionContext): Future[Option[Model]] = {
    findOne($id(id))
  }

  /** Finds every model whose `_id` is one of `ids`. */
  def findByIds(ids: ID*)(implicit ec: ExecutionContext): Future[List[Model]] = {
    findAll("_id" $in (ids: _*))
  }

  /** Returns one page of models matching `selector`.
    *
    * @param sort sort document applied before paging (defaults to ascending `_id`)
    * @param page 1-based page number
    * @param pageSize number of documents per page
    */
  def find(
    selector: JsObject = Json.obj(),
    sort: JsObject = Json.obj("_id" -> 1),
    page: Int,
    pageSize: Int)(implicit ec: ExecutionContext): Future[List[Model]] = {
    val from = (page - 1) * pageSize
    collection
      .find(selector)
      .sort(sort)
      .options(QueryOpts(skipN = from, batchSizeN = pageSize))
      .cursor[Model]
      .collect[List](pageSize)
  }

  /** Returns every model matching `selector`, sorted by `sort`. */
  def findAll(
    selector: JsObject = Json.obj(),
    sort: JsObject = Json.obj("_id" -> 1))(implicit ec: ExecutionContext): Future[List[Model]] = {
    collection.find(selector).sort(sort).cursor[Model].collect[List]()
  }

  /** Atomically finds one document matching `query` and applies `update` to it.
    *
    * @param fetchNewObject when true the updated document is returned,
    *                       otherwise the document as it was before the update
    * @param upsert when true the document is inserted if none matches
    * @return the (old or new, see `fetchNewObject`) document, if any matched
    */
  def findAndUpdate(
    query: JsObject,
    update: JsObject,
    sort: JsObject = Json.obj(),
    fetchNewObject: Boolean = false,
    upsert: Boolean = false)(implicit ec: ExecutionContext): Future[Option[Model]] = {
    val command = FindAndModify(
      collection = collectionName,
      query = toBSONDocument(query),
      modify = Update(toBSONDocument(update), fetchNewObject),
      upsert = upsert,
      sort = if (sort == Json.obj()) None else Some(toBSONDocument(sort)))
    collection.db.command(command).map(_.map(bson => implicitly[Format[Model]].reads(toJsObject(bson)).get))
  }

  /** Atomically finds one document matching `query`, removes it, and returns it. */
  def findAndRemove(query: JsObject, sort: JsObject = Json.obj())(implicit ec: ExecutionContext): Future[Option[Model]] = {
    val command = FindAndModify(
      collection = collectionName,
      query = toBSONDocument(query),
      modify = Remove,
      sort = if (sort == Json.obj()) None else Some(toBSONDocument(sort)))
    collection.db.command(command).map(_.map(bson => implicitly[Format[Model]].reads(toJsObject(bson)).get))
  }

  /** Returns a uniformly random model among those matching `selector`,
    * or None when nothing matches.
    *
    * Fix: previously an empty match set led to `Random.nextInt(0)`, which
    * throws IllegalArgumentException; now it yields None. */
  def findRandom(selector: JsObject = Json.obj())(implicit ec: ExecutionContext): Future[Option[Model]] = {
    count(selector) flatMap {
      case 0 => Future.successful(None)
      case n =>
        val index = Random.nextInt(n)
        collection.find(selector).options(QueryOpts(skipN = index, batchSizeN = 1)).one[Model]
    }
  }

  /** Inserts `model`, running the life cycle's prePersist/postPersist hooks. */
  def insert(model: Model, writeConcern: GetLastError = defaultWriteConcern)(implicit ec: ExecutionContext): Future[LastError] = {
    val mappedModel = lifeCycle.prePersist(model)
    collection.insert(mappedModel, writeConcern) map { lastError =>
      lifeCycle.postPersist(mappedModel)
      lastError
    }
  }

  /** Inserts `documents` in bulk, running life cycle hooks on each.
    *
    * @return the number of documents inserted
    */
  def bulkInsert(
    documents: TraversableOnce[Model],
    bulkSize: Int = bulk.MaxDocs,
    bulkByteSize: Int = bulk.MaxBulkSize)(implicit ec: ExecutionContext): Future[Int] = {
    val mappedDocuments = documents.map(lifeCycle.prePersist)
    val enumerator = Enumerator.enumerate(mappedDocuments)
    collection.bulkInsert(enumerator, bulkSize, bulkByteSize) map { result =>
      mappedDocuments.map(lifeCycle.postPersist)
      result
    }
  }

  /** Updates documents matching `selector` with the given update document. */
  def update[U: Writes](
    selector: JsObject,
    update: U,
    writeConcern: GetLastError = defaultWriteConcern,
    upsert: Boolean = false,
    multi: Boolean = false)(implicit ec: ExecutionContext): Future[LastError] = {
    collection.update(selector, update, writeConcern, upsert, multi)
  }

  /** Updates the single document whose `_id` equals `id`. */
  def updateById[U: Writes](
    id: ID,
    update: U,
    writeConcern: GetLastError = defaultWriteConcern)(implicit ec: ExecutionContext): Future[LastError] = {
    collection.update($id(id), update, writeConcern)
  }

  /** Inserts or replaces `model`, running the life cycle hooks.
    *
    * NOTE(review): the default write concern here is `GetLastError()` while
    * every other write operation defaults to `defaultWriteConcern` — confirm
    * whether this asymmetry is intentional. */
  def save(model: Model, writeConcern: GetLastError = GetLastError())(implicit ec: ExecutionContext): Future[LastError] = {
    val mappedModel = lifeCycle.prePersist(model)
    collection.save(mappedModel, writeConcern) map { lastError =>
      lifeCycle.postPersist(mappedModel)
      lastError
    }
  }

  /** Counts the documents matching `selector`. */
  def count(selector: JsObject = Json.obj())(implicit ec: ExecutionContext): Future[Int] = {
    collection.db.command(Count(collectionName, Some(JsObjectWriter.write(selector))))
  }

  /** Drops the underlying collection. */
  def drop()(implicit ec: ExecutionContext): Future[Boolean] = {
    collection.drop()
  }

  /** Drops the underlying collection, blocking up to `timeout`. Intended for
    * tests and program edges; avoid in production request paths. */
  def dropSync(timeout: Duration = 10 seconds)(implicit ec: ExecutionContext): Boolean = {
    Await.result(drop(), timeout)
  }

  /** Removes the document whose `_id` equals `id`, running preRemove/postRemove.
    *
    * Fix: the caller-supplied `writeConcern` was previously ignored in favour
    * of `defaultWriteConcern`; it is now honoured. */
  def removeById(id: ID, writeConcern: GetLastError = defaultWriteConcern)(implicit ec: ExecutionContext): Future[LastError] = {
    lifeCycle.preRemove(id)
    collection.remove($id(id), writeConcern = writeConcern) map { lastError =>
      lifeCycle.postRemove(id)
      lastError
    }
  }

  /** Removes documents matching `query`.
    *
    * @param firstMatchOnly when true at most one document is removed
    */
  def remove(
    query: JsObject,
    writeConcern: GetLastError = defaultWriteConcern,
    firstMatchOnly: Boolean = false)(implicit ec: ExecutionContext): Future[LastError] = {
    collection.remove(query, writeConcern, firstMatchOnly)
  }

  /** Removes every document in the collection. */
  def removeAll(writeConcern: GetLastError = defaultWriteConcern)(implicit ec: ExecutionContext): Future[LastError] = {
    collection.remove(query = Json.obj(), writeConcern = writeConcern, firstMatchOnly = false)
  }

  /** Applies the side-effecting function `f` to every matching model, streaming
    * through the cursor rather than materializing the whole result set. */
  def foreach(
    selector: JsObject = Json.obj(),
    sort: JsObject = Json.obj("_id" -> 1))(f: (Model) => Unit)(implicit ec: ExecutionContext): Future[Unit] = {
    collection.find(selector).sort(sort).cursor[Model]
      .enumerate()
      .apply(Iteratee.foreach(f))
      .flatMap(i => i.run)
  }

  /** Folds `f` over every matching model, starting from `state`, streaming
    * through the cursor. */
  def fold[A](
    selector: JsObject = Json.obj(),
    sort: JsObject = Json.obj("_id" -> 1),
    state: A)(f: (A, Model) => A)(implicit ec: ExecutionContext): Future[A] = {
    collection.find(selector).sort(sort).cursor[Model]
      .enumerate()
      .apply(Iteratee.fold(state)(f))
      .flatMap(i => i.run)
  }

  // Kick off index creation as soon as the DAO is constructed.
  ensureIndexes()
}
object JsonDao {
  /** Creates a concrete (anonymous) JsonDao for `collectionName`.
    * The implicit life cycle defaults to a ReflexiveLifeCycle for the model. */
  def apply[Model: Format, ID: Writes](db: () => DB, collectionName: String)(
    implicit lifeCycle: LifeCycle[Model, ID] = new ReflexiveLifeCycle[Model, ID],
    ec: ExecutionContext): JsonDao[Model, ID] = {
    new JsonDao[Model, ID](db, collectionName) {}
  }
}
/** Partially-applied DAO factory: fixes the database once so call sites only
  * need to supply a collection name. */
class JsonDaoBuilder[Model: Format, ID: Writes](db: () => DB) {
  /** Builds a JsonDao for `collectionName`, forwarding to [[JsonDao.apply]]. */
  def apply(collectionName: String)(
    implicit lifeCycle: LifeCycle[Model, ID] = new ReflexiveLifeCycle[Model, ID],
    ec: ExecutionContext): JsonDao[Model, ID] = {
    JsonDao(db, collectionName)
  }
}
object JsonDaoBuilder {
  /** Creates a builder bound to the given database provider. */
  def apply[Model: Format, ID: Writes](db: () => DB): JsonDaoBuilder[Model, ID] = {
    new JsonDaoBuilder[Model, ID](db)
  }
}
| fehmicansaglam/reactivemongo-extensions | json/src/main/scala/dao/JsonDao.scala | Scala | apache-2.0 | 10,464 |
package me.iamzsx.scala.svm
import scala.math._
import org.scalatest._
import org.scalatest.matchers._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.junit.Assert._
import scala.io.Source
import java.io.IOException
import AssertUtil._
@RunWith(classOf[JUnitRunner])
class SVMProblemSuite extends FunSuite with BeforeAndAfter {

  // Tolerance for floating-point assertions.
  val DELTA = 10E-6

  var param: SVMParameter = null

  before {
    // Fresh parameter object for every test so gamma mutations do not leak
    // between tests.
    param = new SVMParameter(LinearKernel, 0, 0)
  }

  test("get") {
    val source = Source.fromString("-1\\t1:1.000000\\t2:22.080000\\t3:11.460000")
    val x = Array(List(new SVMNode(1, 1.0), new SVMNode(2, 22.08), new SVMNode(3, 11.46)))
    val y = Array(-1.0)
    val problem = SVMProblem.get(param, source)
    assertEquals(1, problem.size)
    svmAssertEquals(y, problem.ys)
    svmAssertEquals(x, problem.xs)
    // gamma was 0 in param, so it defaults to 1 / numFeatures.
    assertEquals(param.gamma, 1.0 / 3, DELTA)
  }

  test("get with multiple lines") {
    val source = Source.fromString("-1\\t1:1.000000\\t2:22.080000\\t3:11.460000\\n+1\\t1:19")
    val x = Array(
      List(new SVMNode(1, 1.0), new SVMNode(2, 22.08), new SVMNode(3, 11.46)),
      List(new SVMNode(1, 19.0)))
    val y = Array(-1.0, 1.0)
    val problem = SVMProblem.get(param, source)
    assertEquals(2, problem.size)
    svmAssertEquals(y, problem.ys)
    svmAssertEquals(x, problem.xs)
    assertEquals(param.gamma, 1.0 / 3, DELTA)
  }

  test("get with param.gamma 0.1") {
    // An explicitly set gamma must be preserved rather than recomputed.
    param.gamma = 0.1
    val source = Source.fromString("-1\\t1:1.000000\\t2:22.080000\\t3:11.460000")
    SVMProblem.get(param, source)
    assertEquals(param.gamma, 0.1, DELTA)
  }

  // Fix: the following intercepts previously sat directly in the class body,
  // so they ran once during suite construction instead of as test cases (and
  // any failure would have aborted construction of the whole suite). Each is
  // now registered as a proper test.

  test("get with a label but no features should fail") {
    intercept[IOException] {
      val source = Source.fromString("-1\\t1:1.000000\\t2:22.080000\\t3:11.460000\\n+1")
      SVMProblem.get(param, source)
    }
  }

  test("get with a feature missing its index separator should fail") {
    intercept[IOException] {
      val source = Source.fromString("-1\\t11.000000\\t2:22.080000\\t3:11.460000")
      SVMProblem.get(param, source)
    }
  }

  test("get with decreasing feature indices should fail") {
    intercept[IOException] {
      val source = Source.fromString("-1\\t4:1.000000\\t2:22.080000\\t3:11.460000")
      SVMProblem.get(param, source)
    }
  }

  test("get with a duplicate feature index should fail") {
    intercept[IOException] {
      val source = Source.fromString("-1\\t2:1.000000\\t2:22.080000\\t3:11.460000")
      SVMProblem.get(param, source)
    }
  }
}
package hackerRankFunctionalProgramming.arrayOfNElements
object ArrayOfNElements {

  /** Returns the first `num` non-negative integers: List(0, 1, ..., num - 1).
    *
    * For `num <= 0` the result is the empty list (`List.range` produces no
    * elements when the upper bound does not exceed the lower bound).
    *
    * @param num number of elements to generate
    * @return the integers 0 until num, in order
    */
  def f(num: Int): List[Int] = List.range(0, num)
}
| sagasu/scalaPlayground | playground/src/main/scala/hackerRankFunctionalProgramming/arrayOfNElements/ArrayOfNElements.scala | Scala | apache-2.0 | 199 |
package org.http4s
package server
package middleware
import cats.data.{Kleisli, OptionT}
import cats.effect.Sync
import org.http4s.headers._
package object authentication {

  /** Lifts an authenticated service into plain HttpRoutes.
    *
    * For every request, `challenge` either produces a [[Challenge]] (the
    * request could not be authenticated, so a 401 with a WWW-Authenticate
    * header is returned) or an [[AuthedRequest]] that is forwarded to
    * `routes`.
    */
  def challenged[F[_], A](
      challenge: Kleisli[F, Request[F], Either[Challenge, AuthedRequest[F, A]]])(
      routes: AuthedRoutes[A, F])(implicit F: Sync[F]): HttpRoutes[F] =
    Kleisli { req =>
      val outcome: F[Option[Response[F]]] = F.flatMap(challenge(req)) { result =>
        result.fold(
          ch => F.pure(Option(unauthorized[F](ch))),
          authed => routes(authed).value
        )
      }
      OptionT(outcome)
    }

  /** Builds the 401 response advertising the given challenge. */
  private[this] def unauthorized[F[_]](challenge: Challenge): Response[F] =
    Response[F](Status.Unauthorized).putHeaders(`WWW-Authenticate`(challenge))
}
| ChristopherDavenport/http4s | server/src/main/scala/org/http4s/server/middleware/authentication/package.scala | Scala | apache-2.0 | 763 |
package com.github.agourlay.cornichon
import cats.data.StateT
import cats.effect.IO
package object core {
  // One step's state transition: threads a RunState through an IO effect and
  // yields either the FailedStep or Done.
  type StepState = StateT[IO, RunState, FailedStep Either Done]
  // The materialized outcome of running a step: the resulting RunState paired
  // with either the FailedStep or Done.
  type StepResult = IO[(RunState, FailedStep Either Done)]
}
| agourlay/cornichon | cornichon-core/src/main/scala/com/github/agourlay/cornichon/core/package.scala | Scala | apache-2.0 | 233 |
package org.kleemann.autoscore
import org.scaloid.common._
import android.graphics.{Bitmap, BitmapFactory, Canvas, Color, Matrix, Paint, PointF}
object Transform {

  implicit val tag = LoggerTag("MyAppTag")

  // Below this magnitude a coordinate is treated as (near) zero, to avoid
  // dividing by tiny values when computing the rotation / skew factors.
  val DivideThreshold = 0.1f

  /** Warps `src` so that the three selected points form an axis-aligned
    * corner, then crops to the implied rectangle.
    *
    * `b` is mapped to the origin corner, `a` is rotated onto the y axis and
    * `c` is skewed onto the x axis. The selection (circles and connecting
    * lines) is drawn in green onto a copy of the source before transforming.
    */
  def all(src: Bitmap, a: PointF, b: PointF, c: PointF): Bitmap = {
    // padding around the source so the transformed image is not clipped
    val dx = 500
    val dy = 500

    // reuse paint object
    val p: Paint = new Paint()
    p.setStrokeWidth(5.0f)
    p.setStyle(Paint.Style.FILL_AND_STROKE)

    // draw green selections directly on a copy of the image
    val green = newBitmap(src.getWidth, src.getHeight)
    val cg = new Canvas(green)
    cg.drawBitmap(src, 0f, 0f, p)
    p.setColor(Color.GREEN)
    var last: PointF = null
    for (pt <- List(a, b, c)) {
      cg.drawCircle(pt.x, pt.y, 20f, p)
      if (last != null) cg.drawLine(last.x, last.y, pt.x, pt.y, p)
      last = pt
    }

    // start matrix for transformation: move b to the origin
    val m = new Matrix()
    m.postTranslate(-b.x, -b.y)

    // rotate A so that it is aligned with the y axis
    var ta = tp(a, m)
    if (ta.y > DivideThreshold)
      // Fix: the angle of (ta.x, ta.y) measured from the y axis is
      // atan(x / y); the original applied tan to the ratio instead of the
      // arctangent (nearly identical for small angles, wrong for large ones).
      m.postRotate(math.toDegrees(math.atan(ta.x / ta.y)).toFloat)

    // skew C onto the x axis
    var tc = tp(c, m)
    if (tc.x > DivideThreshold)
      // Fix: Matrix.postSkew takes the skew *factor* (the tangent of the skew
      // angle). Mapping (tc.x, tc.y) to y = 0 via y' = y + ky * x needs
      // ky = -tc.y / tc.x directly; the original wrapped the factor in tan
      // once more. (Sign/axis convention kept from the original —
      // TODO confirm on device.)
      m.postSkew(0f, (-tc.y / tc.x).toFloat)

    // TODO: no scaling
    m.postTranslate(dx, dy)
    ta = tp(a, m)
    val tb = tp(b, m)
    tc = tp(c, m)

    // draw transformed bitmap onto a large working bitmap
    val working = newBitmap(green.getWidth + dx * 2, green.getHeight + dy * 2)
    val cw = new Canvas(working)
    // red edges for now
    p.setColor(Color.RED)
    cw.drawRect(0f, 0f, working.getWidth.toFloat, working.getHeight.toFloat, p)
    cw.drawBitmap(green, m, p)

    // TODO: may want to create a new bitmap subset so the larger one can be GCed
    val cropped = Bitmap.createBitmap(
      working,
      tb.x.toInt, tb.y.toInt,
      (tc.x - tb.x).toInt, (ta.y - tb.y).toInt)

    // apparently ImageView cannot display a texture larger than 4096x4096
    if (cropped.getWidth > 4096 || cropped.getHeight > 4096) {
      val resize = newBitmap(math.min(cropped.getWidth, 4096), math.min(cropped.getHeight, 4096))
      val cr = new Canvas(resize)
      cr.drawBitmap(cropped, 0f, 0f, p)
      resize
    } else {
      cropped
    }
  }

  /** Creates a mutable ARGB_8888 bitmap of the given dimensions. */
  def newBitmap(w: Int, h: Int): Bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888)

  /** Applies matrix `m` to point `src`, returning the transformed point. */
  def tp(src: PointF, m: Matrix): PointF = {
    val a = Array(src.x, src.y)
    m.mapPoints(a)
    new PointF(a(0), a(1))
  }
}
| sizezero/auto-score | src/main/scala/org/kleemann/autoscore/Transform.scala | Scala | mit | 2,667 |
package x7c1.linen.modern.init.settings.preset
import android.app.Activity
import android.support.v4.app.{Fragment, FragmentActivity, FragmentManager, FragmentPagerAdapter}
import x7c1.linen.glue.activity.ActivityControl
import x7c1.linen.glue.activity.ActivityLabel.SettingPresetChannelSources
import x7c1.linen.glue.res.layout.{SettingPresetChannelRow, SettingPresetChannelsLayout, SettingPresetTabAll, SettingPresetTabSelected}
import x7c1.linen.glue.service.ServiceControl
import x7c1.linen.modern.display.settings.ChannelSourcesSelected
import x7c1.wheat.ancient.resource.ViewHolderProviderFactory
import x7c1.wheat.macros.fragment.FragmentFactory.create
import x7c1.wheat.macros.intent.{IntentExpander, IntentFactory, LocalBroadcastListener}
import x7c1.wheat.macros.logger.Log
import x7c1.wheat.modern.decorator.Imports._
/** Drives the preset-channels settings screen: hosts the two-tab pager
  * (selected / all channels) and keeps the tabs in sync when subscriptions
  * change. */
class PresetChannelsDelegatee(
  activity: FragmentActivity with ActivityControl with ServiceControl,
  layout: SettingPresetChannelsLayout,
  factories: ProviderFactories ){

  // When a subscription changes on one tab, reload the *other* tab so both
  // stay consistent (event.from identifies the tab that originated the change).
  lazy val onSubscribe = LocalBroadcastListener[PresetChannelSubscriptionChanged]{ event =>
    val reloadable: PartialFunction[Fragment, PresetFragment] = event.from match {
      case PresetTabSelected => { case f: PresetsAllFragment => f }
      case PresetTabAll => { case f: PresetsSelectedFragment => f }
    }
    allFragments collect reloadable foreach (_ reload event.channelId)
  }

  // All fragments currently instantiated by the pager's adapter, lazily viewed.
  def allFragments = {
    (0 until layout.pager.getAdapter.getCount).view map { n =>
      layout.pager.getAdapter.instantiateItem(layout.pager, n).asInstanceOf[Fragment]
    }
  }

  // Registers the broadcast listener, wires the toolbar back-navigation, and
  // replays any pending intent (IntentExpander is macro-generated — it routes
  // the intent to a method such as showPresetChannels below).
  def onCreate(): Unit = {
    Log info s"[start]"
    onSubscribe registerTo activity
    layout.toolbar onClickNavigation { _ =>
      activity.finish()
    }
    IntentExpander executeBy activity.getIntent
  }

  // Entry point invoked via the intent: builds the pager adapter for the
  // given account and attaches the tab strip to it.
  def showPresetChannels(accountId: Long) = {
    layout.pager setAdapter new PresetPagerAdapter(
      accountId = accountId,
      manager = activity.getSupportFragmentManager,
      factories = factories
    )
    layout.tabs.setupWithViewPager(layout.pager)
  }

  // Mirror of onCreate: unregister the broadcast listener to avoid leaks.
  def onDestroy(): Unit = {
    Log info s"[start]"
    onSubscribe unregisterFrom activity
  }
}
/**
 * Bundle of view-holder provider factories used by the preset-channels screen:
 * one per tab layout ("selected" / "all") and one for the channel row items.
 */
class ProviderFactories(
  val forSelected: ViewHolderProviderFactory[SettingPresetTabSelected],
  val forAll: ViewHolderProviderFactory[SettingPresetTabAll],
  val forRow: ViewHolderProviderFactory[SettingPresetChannelRow]
)
/**
 * Pager adapter exposing two fixed pages: "SELECTED" preset channels and
 * "ALL" preset channels, each backed by a fragment created with the given
 * account id and provider factories.
 */
class PresetPagerAdapter(
  accountId: Long,
  manager: FragmentManager,
  factories: ProviderFactories) extends FragmentPagerAdapter(manager) {

  // (title, fragment) pairs; order defines page position.
  lazy val fragments = Seq(
    "SELECTED" -> {
      create[PresetsSelectedFragment] by new ArgumentsForSelected(
        accountId,
        factories.forSelected,
        factories.forRow )
    },
    "ALL" -> {
      create[PresetsAllFragment] by new ArgumentsForAll(
        accountId,
        factories.forAll,
        factories.forRow )
    }
  )
  override def getItem(position: Int): Fragment = {
    fragments(position)._2
  }
  override def getPageTitle(position: Int): CharSequence = {
    fragments(position)._1
  }
  override def getCount: Int = fragments.length
}
/**
 * Handles selection of a channel's sources by launching the
 * "preset channel sources" activity with the event encoded in the intent.
 */
class OnSourcesSelected(activity: Activity with ActivityControl){
  def transitToSources(event: ChannelSourcesSelected): Unit = {
    Log info s"[init] $event"

    // Build an intent that, when expanded on the target side, invokes
    // PresetChannelSourcesDelegatee.showSources with this event.
    val intent = IntentFactory.using[PresetChannelSourcesDelegatee].
      create(activity, activity getClassOf SettingPresetChannelSources){
        _.showSources(event)
      }

    activity startActivity intent
  }
}
| x7c1/Linen | linen-modern/src/main/scala/x7c1/linen/modern/init/settings/preset/PresetChannelsDelegatee.scala | Scala | mit | 3,494 |
package com.example.http4s.blaze.demo.server
/** Constants shared by the HTTP endpoint definitions in this package. */
package object endpoints {
  // API version identifier used by the service's endpoints.
  val ApiVersion = "v1"
}
| aeons/http4s | examples/blaze/src/main/scala/com/example/http4s/blaze/demo/server/endpoints/package.scala | Scala | apache-2.0 | 99 |
package scala_school
/**
* Created by tilmannbruckhaus on 12/26/14.
*
*/
/**
 * A simple calculator identified by its brand and model.
 *
 * The case colour is derived from the brand: "TI" -> blue, "HP" -> black,
 * anything else -> white.
 */
class Calculator(my_brand: String, my_model: String) {

  val brand = my_brand
  val model = my_model

  // Colour determined solely by the brand.
  val color: String = brand match {
    case "TI" => "blue"
    case "HP" => "black"
    case _    => "white"
  }

  /** Sum of the two given integers. */
  def add(m: Int, n: Int): Int = m + n
}
/** A [[Calculator]] extended with scientific operations. */
class ScientificCalculator(brand: String, model: String) extends Calculator(brand, model) {
  /** Logarithm of `m` in the given `base`, via the change-of-base formula. */
  def log(m: Double, base: Double) = math.log(m) / math.log(base)
}
| bruckhaus/challenges | scala_challenges/src/main/scala/scala_school/Calculator.scala | Scala | mit | 562 |
package one.lockstep.util.crypto
import java.security.Security
import javax.crypto.spec.SecretKeySpec
import one.lockstep.util._
/**
* =Implementation Note [nw]=
*
* HMAC is considered secure in single-user setting if `keylenth = maclength/2`.
* In multi-user setting, bitstrength is `keylength - log`,,2,,`(users)` for some attacks.
* See [[https://eprint.iacr.org/2012/074.pdf Another Look at HMAC (Koblitz, Menenzes)]].
*
* My understanding is that without oracle access to the key the attacker gains no advantage.
* For now we stick with `keylength = machlength/2`.
*
*/
/**
 * HMAC implementation backed by a JCA `javax.crypto.Mac` service.
 *
 * `serviceName` is the JCA algorithm name (e.g. "HmacSHA256"). Instances are
 * created only through the companion object's predefined members.
 */
class Hmac private (serviceName: String) extends Mac with Cryptosystem {

  // Picks the first installed provider that offers this Mac algorithm and is
  // in the preferred list (Android's OpenSSL-backed providers, then SunJCE).
  // NOTE(review): `.head` throws if none of the preferred providers is
  // installed — presumably at least one is guaranteed on supported platforms;
  // verify for the target runtimes.
  private lazy val provider = {
    val preferredProviders = Seq("GmsCore_OpenSSL", "AndroidOpenSSL", "SunJCE")
    val providers = Security.getProviders(s"Mac.$serviceName")
    val filteredProviders = providers.filter(p => preferredProviders.contains(p.getName)).distinct
    filteredProviders.head
  }

  // Mac instances are not thread-safe, so a fresh one is created per use.
  private def newInstance() = javax.crypto.Mac.getInstance(serviceName, provider)

  // Output (tag) length in bits.
  override lazy val blockLength: Int = newInstance().getMacLength*8
  // keylength = maclength/2; see the implementation note above the class.
  override lazy val bitStrength: Int = blockLength/2
  override lazy val keyLength: Int = blockLength/2

  /** Returns a function computing the MAC tag of a byte sequence under `secretKey`. */
  override def apply(secretKey: SecretKey): Bytes ⇒ MacTag = {
    bytes ⇒
      val hmac = newInstance()
      val key = new SecretKeySpec(secretKey.raw, serviceName)
      hmac.init(key)
      MacTag(hmac.doFinal(bytes))
  }

  /** Generates a fresh random key of `keyLength` bits. */
  override def keygen(): SecretKey =
    SecretKey(random.bits(keyLength).bytes)

  /** A key is valid iff its bit length matches this algorithm's key length. */
  override def validate(secretKey: SecretKey): Boolean =
    secretKey.bitLength == keyLength
}
object Hmac {
  // Predefined HMAC variants, keyed by standard JCA Mac algorithm names.
  // bitStrength is overridden to reflect the (weaker) effective strength of
  // the legacy hashes rather than the maclength/2 default.
  val md5 = new Hmac("HmacMD5") { override lazy val bitStrength = 64 }
  val sha1 = new Hmac("HmacSHA1") { override lazy val bitStrength = 80 }
  val sha256 = new Hmac("HmacSHA256") { override lazy val bitStrength = 128 }
  // BUG FIX: the JCA algorithm is "HmacSHA384" — "HmacSHA386" does not exist
  // and would fail with NoSuchAlgorithmException on first use. The val keeps
  // its historical (misspelled) name `sha386` for source compatibility.
  val sha386 = new Hmac("HmacSHA384") { override lazy val bitStrength = 192 }
  val sha512 = new Hmac("HmacSHA512") { override lazy val bitStrength = 256 }

  private val members: Seq[Hmac] = Seq(md5, sha1, sha256, sha386, sha512)

  /**
   * Returns the weakest predefined variant providing at least `l` bits of
   * strength. Note: throws (from `minBy` on an empty collection) if `l`
   * exceeds the strongest available variant (256 bits).
   */
  def apply(l: Int): Hmac =
    members.filter(_.bitStrength >= l).minBy(_.bitStrength)
}
| lockstep-one/vault | vault-common/src/main/scala/one/lockstep/util/crypto/Hmac.scala | Scala | agpl-3.0 | 2,200 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.bg.test
import java.util.Properties
import akka.actor.{ActorRef, ActorSystem}
import cmwell.bg.{CMWellBGActor, ShutDown}
import cmwell.common.{CommandSerializer, OffsetsService, WriteCommand, ZStoreOffsetsService}
import cmwell.domain.{FieldValue, ObjectInfoton}
import cmwell.driver.Dao
import cmwell.fts._
import cmwell.irw.IRWService
import cmwell.zstore.ZStore
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest
import org.elasticsearch.common.unit.TimeValue
import org.scalatest.{BeforeAndAfterAll, DoNotDiscover, FlatSpec, Matchers}
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.io.Source
/**
* Created by israel on 13/09/2016.
*/
@DoNotDiscover
class BGResilienceSpec extends FlatSpec with BeforeAndAfterAll with Matchers with LazyLogging {
var kafkaProducer:KafkaProducer[Array[Byte], Array[Byte]] = _
var cmwellBGActor:ActorRef = _
var dao:Dao = _
var testIRWMockupService:IRWService = _
var irwService:IRWService = _
var zStore:ZStore = _
var offsetsService:OffsetsService = _
var ftsServiceES:FTSServiceNew = _
var bgConfig:Config = _
var actorSystem:ActorSystem = _
import concurrent.ExecutionContext.Implicits.global
override def beforeAll = {
val producerProperties = new Properties
producerProperties.put("bootstrap.servers", "localhost:9092")
producerProperties.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
producerProperties.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")
kafkaProducer = new KafkaProducer[Array[Byte], Array[Byte]](producerProperties)
dao = Dao("Test","data2")
testIRWMockupService = FailingIRWServiceMockup(dao, 13)
zStore = ZStore(dao)
irwService = IRWService.newIRW(dao, 25 , true, 0.seconds)
offsetsService = new ZStoreOffsetsService(zStore)
ftsServiceES = FailingFTSServiceMockup("es.test.yml", 5)
// wait for green status
ftsServiceES.client.admin().cluster()
.prepareHealth()
.setWaitForGreenStatus()
.setTimeout(TimeValue.timeValueMinutes(5))
.execute()
.actionGet()
// delete all existing indices
ftsServiceES.client.admin().indices().delete(new DeleteIndexRequest("_all"))
// load indices template
val indicesTemplate = Source.fromURL(this.getClass.getResource("/indices_template_new.json")).getLines.reduceLeft(_ + _)
ftsServiceES.client.admin().indices().preparePutTemplate("indices_template").setSource(indicesTemplate).execute().actionGet()
// create current index
ftsServiceES.client.admin().indices().prepareCreate("cm_well_0").execute().actionGet()
ftsServiceES.client.admin().indices().prepareAliases()
.addAlias("cm_well_0", "cm_well_all")
.addAlias("cm_well_0", "cm_well_latest")
.execute().actionGet()
bgConfig = ConfigFactory.load
bgConfig.withValue("cmwell.bg.esActionsBulkSize", ConfigValueFactory.fromAnyRef(100))
actorSystem = ActorSystem("cmwell-bg-test-system")
cmwellBGActor = actorSystem.actorOf(CMWellBGActor.props(0, bgConfig, testIRWMockupService, ftsServiceES, zStore, offsetsService))
println("waiting 10 seconds for all components to load")
Thread.sleep(10000)
}
"Resilient BG" should "process commands as usual on circumvented BGActor (periodically failing IRWService) after suspending and resuming" in {
logger info "waiting 10 seconds for circumvented BGActor to start"
Thread.sleep(10000)
val numOfCommands = 1500
// prepare sequence of writeCommands
val writeCommands = Seq.tabulate(numOfCommands){ n =>
val infoton = ObjectInfoton(
path = s"/cmt/cm/bg-test/circumvented_bg/info$n",
dc = "dc",
indexTime = None,
fields = Some(Map("games" -> Set(FieldValue("Taki"), FieldValue("Race")))))
WriteCommand(infoton)
}
// make kafka records out of the commands
val pRecords = writeCommands.map{ writeCommand =>
val commandBytes = CommandSerializer.encode(writeCommand)
new ProducerRecord[Array[Byte], Array[Byte]]("persist_topic", commandBytes)
}
// send them all
pRecords.foreach { kafkaProducer.send(_)}
println("waiting for 10 seconds")
Thread.sleep(10000)
for( i <- 0 to numOfCommands-1) {
val nextResult = Await.result(irwService.readPathAsync(s"/cmt/cm/bg-test/circumvented_bg/info$i"), 5.seconds)
withClue(nextResult, s"/cmt/cm/bg-test/circumvented_bg/info$i"){
nextResult should not be empty
}
}
for( i <- 0 to numOfCommands-1) {
val searchResponse = Await.result(
ftsServiceES.search(
pathFilter = None,
fieldsFilter = Some(SingleFieldFilter(Must, Equals, "system.path", Some(s"/cmt/cm/bg-test/circumvented_bg/info$i"))),
datesFilter = None,
paginationParams = PaginationParams(0, 200)
),
10.seconds
)
withClue(s"/cmt/cm/bg-test/circumvented_bg/info$i"){
searchResponse.infotons.size should equal(1)
}
}
}
override def afterAll() = {
logger debug "afterAll: sending Shutdown"
cmwellBGActor ! ShutDown
Thread.sleep(10000)
ftsServiceES.shutdown()
testIRWMockupService = null
irwService = null
}
}
| nruppin/CM-Well | server/cmwell-bg/src/test/scala/cmwell/bg/test/BGResilienceSpec.scala | Scala | apache-2.0 | 6,120 |
package net.zzorn.utils
import com.jme3.math.Vector3f
/**
* Calculates normal vectors for a shape.
*
* Borrowed from my open source project skycastle:
* https://github.com/zzorn/skycastle/blob/master/src/main/scala/org/skycastle/util/mesh/NormalCalculator.scala
*/
object NormalCalculator {

  /**
   * Computes smoothed per-vertex normals for an indexed triangle mesh.
   *
   * For each triangle the face normal (cross product of two edges, hence
   * area-weighted) is accumulated onto its three vertices; the accumulated
   * vectors are then normalized. Degenerate (zero-area) triangles are skipped,
   * and triangles referencing null vertices are reported and ignored.
   *
   * @param vertexes        mesh vertex positions
   * @param triangleIndexes vertex indices, three consecutive entries per triangle
   * @return one normal per vertex (zero vector for vertices used by no triangle)
   */
  def calculateNormals(vertexes: Array[Vector3f], triangleIndexes: Array[Int] ): Array[Vector3f] = {
    val normals = new Array[Vector3f](vertexes.size)

    // Initialize all normals to the zero vector so face normals can be accumulated.
    var ni = 0
    while (ni < normals.size) {
      normals(ni) = new Vector3f()
      ni += 1
    }

    // Accumulate a face normal onto one vertex.
    def setNormal(vertexIndex: Int, normal: Vector3f) {
      normals(vertexIndex).addLocal(normal)
    }

    // Calculate normal for each triangle and add it to the triangle's vertices.
    var i = 0
    val sideAB = new Vector3f()
    val sideAC = new Vector3f()
    while (i < triangleIndexes.size) {
      val ai = triangleIndexes(i)
      val bi = triangleIndexes(i + 1)
      val ci = triangleIndexes(i + 2)
      val a = vertexes(ai)
      val b = vertexes(bi)
      val c = vertexes(ci)
      if (a != null && b != null && c != null) {
        b.subtract(a, sideAB)
        c.subtract(a, sideAC)
        val normal = sideAB.cross(sideAC)
        if (normal.lengthSquared > 0) {
          setNormal(ai, normal)
          setNormal(bi, normal)
          setNormal(ci, normal)
        }
      }
      else {
        println("WARNING: Missing vertex in mesh for index "+ ai +", " + bi + ", or " + ci)
      }

      i += 3
    }

    // Normalize the accumulated normals.
    // BUG FIX: this loop previously tested `i < normals.size`, but `i` had
    // already advanced to triangleIndexes.size in the loop above, so the
    // normalization pass (usually) never executed.
    var normalIndex = 0
    while (normalIndex < normals.size) {
      normals(normalIndex).normalizeLocal
      normalIndex += 1
    }

    normals
  }
}
package musicsvc.json
import play.api.libs.json.{Reads, JsPath, Writes}
import play.api.libs.functional.syntax._
import musicsvc.models._
/**
 * Play-JSON Reads/Writes instances for the domain model.
 *
 * Performer and Recording reference each other, so the nested sequences are
 * wired with `lazyWrite`/`lazyRead` to break the initialization cycle.
 */
object Implicits {

  // ===== Json reads/Writes for Performer ========================================

  implicit val performerWrites: Writes[Performer] = (
      (JsPath \\ "id").write[Option[Long]] and
      (JsPath \\ "name").write[String] and
      (JsPath \\ "performerType").write[String] and
      (JsPath \\ "recordings").lazyWrite[Seq[Recording]](Writes.seq[Recording](recordingWrites))
    )(unlift(fromPerformer))

  // Deconstructor for performerWrites; performerType is serialized as its string form.
  def fromPerformer: (Performer) => Option[(Option[Long], String, String, Seq[Recording])] = {
    // Performer.unapply
    (p: Performer) => Option(p.id, p.name, p.performerType.toString, p.recordings)
  }

  implicit val performerReads: Reads[Performer] = (
      (JsPath \\ "id").readNullable[Long] and
      (JsPath \\ "name").read[String] and
      (JsPath \\ "performerType").read[String] and
      (JsPath \\ "recordings").lazyRead[Seq[Recording]](Reads.seq[Recording](recordingReads))
    )(toPerformer)

  // Constructor for performerReads.
  def toPerformer: (Option[Long], String, String, Seq[Recording]) => Performer = {
    // Performer.apply _
    (id, name, performerType, recordings) => new Performer(id, name, performerType, recordings)
  }

  // ===== Json reads/Writes for PerformerWithRecordingIds ========================================

  implicit val performerWithRecordingIdsWrites: Writes[PerformerWithRecordingIds] = (
      (JsPath \\ "performer").write[Performer] and
      (JsPath \\ "recordingIds").write[Seq[Long]]
    )(unlift(PerformerWithRecordingIds.unapply))

  implicit val performerWithRecordingIdsReads: Reads[PerformerWithRecordingIds] = (
      (JsPath \\ "performer").read[Performer] and
      (JsPath \\ "recordingIds").read[Seq[Long]]
    )(PerformerWithRecordingIds.apply _)

  // ===== Json reads/Writes for Recording ========================================

  implicit val recordingWrites: Writes[Recording] = (
      (JsPath \\ "id").write[Option[Long]] and
      (JsPath \\ "title").write[String] and
      (JsPath \\ "composer").write[String] and
      (JsPath \\ "year").write[Int] and
      (JsPath \\ "performers").lazyWrite[Seq[Performer]](Writes.seq[Performer](performerWrites))
    )(unlift(fromRecording))

  def fromRecording: (Recording) => Option[(Option[Long], String, String, Int, Seq[Performer])] = {
    Recording.unapply
  }

  // A missing "year" field defaults to 1900 rather than failing the read.
  implicit val recordingReads: Reads[Recording] = (
      (JsPath \\ "id").readNullable[Long] and
      (JsPath \\ "title").read[String] and
      (JsPath \\ "composer").read[String] and
      ((JsPath \\ "year").read[Int] orElse Reads.pure(1900)) and
      (JsPath \\ "performers").lazyRead[Seq[Performer]](Reads.seq[Performer](performerReads))
    )(toRecording)

  def toRecording: (Option[Long], String, String, Int, Seq[Performer]) => Recording = {
    Recording.apply _
  }

  // ===== Json reads/Writes for PerformerWithRecordingIds ========================================

  implicit val recordingWithPerformerIdsWrites: Writes[RecordingWithPerformerIds] = (
      (JsPath \\ "recording").write[Recording] and
      (JsPath \\ "performerIds").write[Seq[Long]]
    )(unlift(RecordingWithPerformerIds.unapply))

  implicit val recordingWithPerformerIdsReads: Reads[RecordingWithPerformerIds] = (
      (JsPath \\ "recording").read[Recording] and
      (JsPath \\ "performerIds").read[Seq[Long]]
    )(RecordingWithPerformerIds.apply _)
}
| hermannhueck/MusicService | Services/MusicService-Play-Scala-Slick-NoAuth/app/musicsvc/json/Implicits.scala | Scala | apache-2.0 | 3,448 |
import java.io.File
import java.util.concurrent.atomic.AtomicInteger
import org.junit.Test
import scala.annotation.tailrec
/**
 * Helper for measuring how many tests the build tool runs concurrently.
 *
 * Each test body is wrapped in [[execute]], which tracks the current number of
 * concurrently running tests and records every new maximum by creating a
 * marker file named "max-concurrent-tests_N" in the working directory.
 */
object ParallelTest {

  // Number of tests currently inside `execute`.
  val nbConcurrentTests = new AtomicInteger(0)
  // Highest concurrency level observed so far.
  val maxConcurrentTests = new AtomicInteger(0)

  // Atomically raises the recorded maximum via compareAndSet and drops a
  // marker file for the new value; returns false if another thread won the race.
  private def updateMaxConcurrentTests(currentMax: Int, newMax: Int) : Boolean = {
    if( maxConcurrentTests.compareAndSet(currentMax, newMax) ) {
      val f = new File("max-concurrent-tests_" + newMax)
      f.createNewFile
      true
    } else {
      false
    }
  }

  // Runs `f` once the concurrency bookkeeping is consistent. If this thread
  // pushed the count above the recorded maximum but lost the CAS race, it
  // backs out (decrement) and retries tail-recursively.
  @tailrec
  def execute(f : => Unit): Unit = {
    val nb = nbConcurrentTests.incrementAndGet()
    val max = maxConcurrentTests.get()
    if( nb <= max || updateMaxConcurrentTests(max, nb)) {
      f
      nbConcurrentTests.getAndDecrement
    } else {
      nbConcurrentTests.getAndDecrement
      execute(f)
    }
  }
}
/** Sleeps ~1s inside [[ParallelTest.execute]] to exercise test concurrency tracking. */
class Test1 {
  @Test
  def slow(): Unit = ParallelTest.execute { Thread.sleep(1000) }
}
/** Sleeps ~1s inside [[ParallelTest.execute]] to exercise test concurrency tracking. */
class Test2 {
  @Test
  def slow(): Unit = ParallelTest.execute { Thread.sleep(1000) }
}
/** Sleeps ~1s inside [[ParallelTest.execute]] to exercise test concurrency tracking. */
class Test3 {
  @Test
  def slow(): Unit = ParallelTest.execute { Thread.sleep(1000) }
}
/** Sleeps ~1s inside [[ParallelTest.execute]] to exercise test concurrency tracking. */
class Test4 {
  @Test
  def slow(): Unit = ParallelTest.execute { Thread.sleep(1000) }
}
| mdedetrich/sbt | sbt/src/sbt-test/tests/fork-parallel/src/test/scala/tests.scala | Scala | bsd-3-clause | 1,146 |
package io.vamp.model
import java.time.format.DateTimeFormatter._
import java.time.{ ZoneOffset, ZonedDateTime }
import java.util.UUID
import scala.sys.process._
import scala.util.Try
/**
 * Process-level identity and build metadata, computed once at class load.
 */
object Model {

  /** Random identifier unique to this process instance. */
  val uuid: String = UUID.randomUUID.toString

  /**
   * Build version: the jar manifest's Implementation-Version when present,
   * otherwise the output of `git describe --tags`, otherwise the empty string.
   * The defs keep the git fallback lazy, so the external command only runs
   * when the manifest carries no version.
   */
  val version: String = {
    def manifestVersion = Option(getClass.getPackage.getImplementationVersion)
    def gitVersion = Try("git describe --tags".!!.stripLineEnd).toOption
    manifestVersion.orElse(gitVersion).getOrElse("")
  }

  /** Startup instant in UTC, formatted as ISO-8601 with offset. */
  val runningSince: String = {
    val startedAt = ZonedDateTime.now(ZoneOffset.UTC)
    startedAt.format(ISO_OFFSET_DATE_TIME)
  }
}
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
// COMMAND ----------
// MAGIC %md
// MAGIC # Graph Spectral Analysis
// MAGIC ##### Project by Ciwan Ceylan and Hanna Hultin
// MAGIC Link to project video: "https://drive.google.com/file/d/1ctILEsMskFgpsVnu-6ucCMZqM1TLXfEB/view?usp=sharing"
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC ## Background on graphs
// MAGIC
// MAGIC A graph can be represented by its incidence matrix **B_0**. Each row of **B_0** corresponds to an edge in the graph and each column to a node. Say that row *k* corresponds to edge *i* -> *j*. Then element *i* of row *k* is *-1* and element *j* is *1*. All other elements are zero. See the figure below for an example of the indicence matrix with the corresponding graph.
// MAGIC
// MAGIC <img src ='/files/shared_uploads/[email protected]/incidence_matrix.png'>
// MAGIC <img src ='/files/shared_uploads/[email protected]/simple_graph.png' width="350px" height="350px">
// COMMAND ----------
// MAGIC %md
// MAGIC ### Graph Laplacian
// MAGIC The Laplacian lies at the center of *specral graph theory*. Its spectrum (its eigenvalues) encodes the geometry of the graph and can be used in various applications ranging from computer graphics to machine learning.
// MAGIC Therefore, one approximative approach for comparing graphs (a problem which is NP-hard) is to compare their spectra.
// MAGIC Graphs with similar geometry are expected to have similar spectrum and vice-versa.
// MAGIC Below is an example of the Laplacian for the graph seen in the cell above. The diagonal elements contain the degree of the corresponding node, while all other elements at index (*i*,*j*) are -1 if there is an edge between the nodes *i* and *j* and zero otherwise.
// MAGIC
// MAGIC <img src ='/files/shared_uploads/[email protected]/simple_laplacian.png'>
// MAGIC
// MAGIC The Laplacian can be constructed from the indicence matrix as
// MAGIC $$ \\mathbf{L} = \\mathbf{B}_0^T \\mathbf{B}_0 $$
// MAGIC Thus, we can compute the top eigenvalues of **L** by instead computing the top singular values of **B_0**. This follows from the following:
// MAGIC $$ \\mathbf{B}_0 = \\mathbf{U} \\mathbf{D}^{1/2} \\mathbf{V}^T $$
// MAGIC $$ \\mathbf{L}= \\mathbf{V} \\mathbf{D}^{1/2} \\mathbf{U}^T \\mathbf{U} \\mathbf{D}^{1/2} \\mathbf{V}^T = \\mathbf{V} \\mathbf{D} \\mathbf{V}^T $$
// MAGIC
// MAGIC #### Scaling to large graphs using randomized SVD
// MAGIC In the new age of big data, it is often interesting to analyze very large graphs of for example financial transactions. Doing the spectral graph analysis for these large graphs is challenging, since the full singular value decomposition of an *m x n* matrix scales as *O(m n min(m,n))*. To handle this, we turn to low rank approximations and specifically we use Randomized SVD.
// MAGIC
// MAGIC Randomized SVD was introduced in 2011 in the article "Finding structure with randomness: Probabilistic algorithms for constructing approximate matrix decompositions" (https://arxiv.org/abs/0909.4061), and is a smart way of finding a low-rank approximation for the singular value decomposition using Gaussian vectors.
// MAGIC
// MAGIC The basic idea is that given the *m x n* matrix *A*, we can create a sampling matrix *Y = AG* where *G* is a *n x k* Gaussian random matrix and it turns out that *Y* is then a quite good approximate basis for the column space of A.
// MAGIC
// MAGIC A nice summary of the methods and some variations written by one of the authors of the original article can be found in the following link: https://sinews.siam.org/Details-Page/randomized-projection-methods-in-linear-algebra-and-data-analysis
// COMMAND ----------
// MAGIC %md
// MAGIC ### Methods for generating random graphs
// MAGIC
// MAGIC #### Erdős–Rényi model
// MAGIC In "On the Evoluation of Random Graphs" (https://users.renyi.hu/~p_erdos/1960-10.pdf), Erdős and Rényi describes the random graph with *n* vertices and *N* edges where the *N* edges are chosen at random among all the undirected possible edges.
// MAGIC
// MAGIC #### R-MAT model
// MAGIC The Recursive Matrix (R-MAT) model introduced in the article "R-MAT: A Recursive Model for Graph Mining" (https://kilthub.cmu.edu/articles/R-MAT_A_Recursive_Model_for_Graph_Mining/6609113/files/12101195.pdf) is described as follows by the authors:
// MAGIC > "The basic idea behind R-MAT is to recursively subdivide the adjacency matrix into four equal-sized partitions, and distribute edges within these partitions with unequal probabilities: starting off with an empty adjacency matrix, we "drop" edges into the matrix one at a time. Each edge chooses one of the four partitions with probabilities a; b; c; d respectively (see Figure 1). Of course, a+b+c+d=1. The chosen partition is again subdivided into four smaller partitions, and the procedure is repeated until we reach a simple cell (=1 x 1 partition). This is the cell of the adjacency matrix occupied by the edge."
// MAGIC
// MAGIC This is visualized in the following image.
// MAGIC
// MAGIC <img src ='/files/shared_uploads/[email protected]/rmat_picture.png'>
// COMMAND ----------
// MAGIC %md
// MAGIC ## Project specifications
// MAGIC
// MAGIC The goal of the project is to compare spectra of the Laplacian for different graphs.
// MAGIC
// MAGIC ### Data
// MAGIC - Ethereum transactions:
// MAGIC - Original data from google cloud (https://cloud.google.com/blog/products/data-analytics/ethereum-bigquery-public-dataset-smart-contract-analytics)
// MAGIC - The dataset contains transactions from March 2018 to March 2020, aggregating per edge (same sender and receiver) and only keeping edges with at least 10 transactions with positive value
// MAGIC - Randomly generated graphs using the two different methods explained above
// MAGIC
// MAGIC ### Notebooks
// MAGIC - **01_preprocess_data**: preprocesses the Ethereum data using Python and PySpark and saves the graph information as parquet file
// MAGIC - **02_generate_graphs**: generates random graphs in Scala using Spark (SQL and GraphX) and saves the graph information as parquet files
// MAGIC - **03_compute_rsvd**: computes RSVD for the different graphs in Scala using Spark and the library Spark-RSVD and saves the singular values as parquet files
// MAGIC - **04_analyse_eigenvalues**: computes the eigenvalues from the singular values and plots these for different graphs
// COMMAND ----------
| lamastex/scalable-data-science | dbcArchives/2021/000_0-sds-3-x-projects/student-project-21_group-GraphSpectralAnalysis/00_introduction.scala | Scala | unlicense | 6,566 |
/*
* Copyright 2015 TouchType Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.databricks.spark.redshift
import java.sql.Timestamp
import org.scalatest.FunSuite
import org.apache.spark.sql.Row
/**
* Unit test for data type conversions
*/
/**
 * Unit test for data type conversions: verifies that a row of Redshift string
 * values is converted to the typed Spark Row described by TestUtils.testSchema,
 * including boolean/date/timestamp parsing, numeric extremes, Unicode text,
 * and null pass-through.
 */
class ConversionsSuite extends FunSuite {

  // Converter under test, built from the shared test schema.
  val convertRow = Conversions.rowConverter(TestUtils.testSchema)

  test("Data should be correctly converted") {
    val doubleMin = Double.MinValue.toString
    val longMax = Long.MaxValue.toString
    val unicodeString = "Unicode是樂趣"
    // Timestamp input carrying a millisecond component.
    val timestampWithMillis = "2014-03-01 00:00:01.123"

    val expectedDateMillis = TestUtils.toMillis(2015, 6, 1, 0, 0, 0)
    val expectedTimestampMillis = TestUtils.toMillis(2014, 2, 1, 0, 0, 1, 123)

    val convertedRow = convertRow(
      Array("1", "t", "2015-07-01", doubleMin, "1.0", "42",
        longMax, "23", unicodeString, timestampWithMillis))

    val expectedRow = Row(1.asInstanceOf[Byte], true, new Timestamp(expectedDateMillis),
      Double.MinValue, 1.0f, 42, Long.MaxValue, 23.toShort, unicodeString,
      new Timestamp(expectedTimestampMillis))

    assert(convertedRow == expectedRow)
  }

  test("Row conversion handles null values") {
    // Every field null: conversion should preserve nulls positionally.
    val emptyRow = List.fill(TestUtils.testSchema.length)(null).toArray[String]
    assert(convertRow(emptyRow) == Row(emptyRow: _*))
  }
}
| methodmill/spark-redshift | src/test/scala/com/databricks/spark/redshift/ConversionsSuite.scala | Scala | apache-2.0 | 1,867 |
package scala.slick.jdbc.meta
import scala.slick.session.PositionedResult
/**
* A qualified name with an optional catalog and schema.
*/
/**
 * A qualified database object name with an optional catalog and schema.
 */
case class MQName(catalog: Option[String], schema: Option[String], name: String) {
  // Renders as MQName(catalog.schema.name), omitting absent qualifiers.
  override def toString = {
    val qualifiers = (catalog.toSeq ++ schema.toSeq).map(_ + ".").mkString
    "MQName(" + qualifiers + name + ")"
  }
  /** Catalog, or null when absent (JDBC-style accessor). */
  def catalog_? = catalog.orNull
  /** Schema, or null when absent (JDBC-style accessor). */
  def schema_? = schema.orNull
}
object MQName {
  // Reads catalog, schema and name from the next three result columns, in order.
  private[meta] def from(r: PositionedResult) = MQName(r<<, r<<, r<<)

  // Like `from`, but yields None when the name column is NULL. Note that the
  // catalog and schema columns are always consumed, keeping the cursor aligned.
  private[meta] def optionalFrom(r: PositionedResult) = {
    val cat = r.nextStringOption
    val schema = r.nextStringOption
    r.nextStringOption map (MQName(cat, schema, _))
  }

  /** A name qualified with empty-string ("local") catalog and schema. */
  def local(name: String) = MQName(Some(""), Some(""), name)
}
| szeiger/scala-query | src/main/scala/scala/slick/jdbc/meta/MQName.scala | Scala | bsd-2-clause | 748 |
package com.github.ldaniels528.trifecta.sjs.controllers
import io.scalajs.npm.angularjs.Scope
import io.scalajs.util.ScalaJsHelper._
import scala.scalajs.js
/**
* Created by ldaniels on 12/14/15.
*/
@js.native
trait GlobalErrorHandling extends js.Object {
self: Scope =>
// properties
var globalMessages: js.Array[GlobalMessage] = js.native
// functions
var addErrorMessage: js.Function1[js.UndefOr[String], Unit] = js.native
var addInfoMessage: js.Function1[js.UndefOr[String], Unit] = js.native
var addWarningMessage: js.Function1[js.UndefOr[String], Unit] = js.native
var removeAllMessages: js.Function0[Unit] = js.native
var removeMessage: js.Function1[js.UndefOr[Int], Unit] = js.native
}
/** Scala.js facade for a single global message: its text and a type tag. */
@js.native
trait GlobalMessage extends js.Object {
  var text: String = js.native
  var `type`: String = js.native
}
object GlobalMessage {

  // Constructs a native JS object via the `New` macro and populates its fields.
  def apply(text: String, `type`: String) = {
    val message = New[GlobalMessage]
    message.text = text
    message.`type` = `type`
    message
  }

}
package im.actor.server.push
import java.nio.ByteBuffer
import java.util.concurrent.TimeUnit
import scala.annotation.meta.field
import scala.annotation.tailrec
import scala.concurrent._
import scala.concurrent.duration._
import scala.util.{ Failure, Success }
import akka.actor._
import akka.contrib.pattern.{ ClusterSharding, ShardRegion }
import akka.pattern.{ ask, pipe }
import akka.persistence._
import akka.util.Timeout
import com.esotericsoftware.kryo.serializers.TaggedFieldSerializer.{ Tag ⇒ KryoTag }
import com.google.android.gcm.server.{ Sender ⇒ GCMSender }
import slick.dbio.DBIO
import slick.driver.PostgresDriver.api._
import im.actor.api.rpc.UpdateBox
import im.actor.api.rpc.messaging.{ UpdateMessage, UpdateMessageSent }
import im.actor.api.rpc.peers.Peer
import im.actor.api.rpc.sequence.{ DifferenceUpdate, FatSeqUpdate, SeqUpdate }
import im.actor.api.{ rpc ⇒ api }
import im.actor.server.commons.serialization.KryoSerializable
import im.actor.server.models.sequence
import im.actor.server.util.{ GroupUtils, UserUtils }
import im.actor.server.{ models, persist ⇒ p }
case class SeqUpdatesManagerRegion(ref: ActorRef)
object SeqUpdatesManager {
@SerialVersionUID(1L)
private[push] case class Envelope(authId: Long, payload: Message)
private[push] sealed trait Message
@SerialVersionUID(1L)
private[push] case object GetSequenceState extends Message
@SerialVersionUID(1L)
private[push] case class PushUpdate(
header: Int,
serializedData: Array[Byte],
userIds: Set[Int],
groupIds: Set[Int],
pushText: Option[String],
originPeer: Option[Peer],
isFat: Boolean
) extends Message
@SerialVersionUID(1L)
private[push] case class PushUpdateGetSequenceState(
header: Int,
serializedData: Array[Byte],
userIds: Set[Int],
groupIds: Set[Int],
pushText: Option[String],
originPeer: Option[Peer],
isFat: Boolean
) extends Message
@SerialVersionUID(1L)
private[push] case class Subscribe(consumer: ActorRef) extends Message
@SerialVersionUID(1L)
private[push] case class SubscribeAck(consumer: ActorRef) extends Message
@SerialVersionUID(1L)
private[push] case class PushCredentialsUpdated(credsOpt: Option[models.push.PushCredentials]) extends Message
@SerialVersionUID(1L)
private case class Initialized(
timestamp: Long,
googleCredsOpt: Option[models.push.GooglePushCredentials],
appleCredsOpt: Option[models.push.ApplePushCredentials]
)
@SerialVersionUID(1L)
case class UpdateReceived(update: UpdateBox)
type Sequence = Int
type SequenceState = (Int, Array[Byte])
type SequenceStateDate = (SequenceState, Long)
sealed trait PersistentEvent
@SerialVersionUID(1L)
final case class SeqChanged(@(KryoTag @field)(0) sequence:Int) extends PersistentEvent
final case class SeqChangedKryo(
@(KryoTag @field)(0) sequence:Int
) extends PersistentEvent with KryoSerializable
private val noop1: Any ⇒ Unit = _ ⇒ ()
private val idExtractor: ShardRegion.IdExtractor = {
case env @ Envelope(authId, payload) ⇒ (authId.toString, payload)
}
private val shardResolver: ShardRegion.ShardResolver = msg ⇒ msg match {
case Envelope(authId, _) ⇒ (authId % 32).toString // TODO: configurable
}
// TODO: configurable
private val OperationTimeout = Timeout(5.seconds)
private val MaxDifferenceUpdates = 100
private def startRegion(props: Option[Props])(implicit system: ActorSystem): SeqUpdatesManagerRegion =
SeqUpdatesManagerRegion(ClusterSharding(system).start(
typeName = "SeqUpdatesManager",
entryProps = props,
idExtractor = idExtractor,
shardResolver = shardResolver
))
def startRegion()(
implicit
system: ActorSystem,
googlePushManager: GooglePushManager,
applePushManager: ApplePushManager,
db: Database
): SeqUpdatesManagerRegion =
startRegion(Some(Props(classOf[SeqUpdatesManager], googlePushManager, applePushManager, db)))
def startRegionProxy()(implicit system: ActorSystem): SeqUpdatesManagerRegion = startRegion(None)
def getSeqState(authId: Long)(implicit region: SeqUpdatesManagerRegion, ec: ExecutionContext): DBIO[(Sequence, Array[Byte])] = {
for {
seqstate ← DBIO.from(region.ref.ask(Envelope(authId, GetSequenceState))(OperationTimeout).mapTo[SequenceState])
} yield seqstate
}
def persistAndPushUpdate(
authId: Long,
header: Int,
serializedData: Array[Byte],
userIds: Set[Int],
groupIds: Set[Int],
pushText: Option[String],
originPeer: Option[Peer],
isFat: Boolean
)(implicit region: SeqUpdatesManagerRegion, ec: ExecutionContext): DBIO[SequenceState] = {
DBIO.from(pushUpdateGetSeqState(authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat))
}
def persistAndPushUpdate(authId: Long, update: api.Update, pushText: Option[String], isFat: Boolean = false)(implicit region: SeqUpdatesManagerRegion, ec: ExecutionContext): DBIO[SequenceState] = {
val header = update.header
val serializedData = update.toByteArray
val (userIds, groupIds) = updateRefs(update)
persistAndPushUpdate(authId, header, serializedData, userIds, groupIds, pushText, getOriginPeer(update), isFat)
}
/** Persists and pushes `update` to every session in `authIds`. */
def persistAndPushUpdates(authIds: Set[Long], update: api.Update, pushText: Option[String], isFat: Boolean = false)(implicit region: SeqUpdatesManagerRegion, ec: ExecutionContext): DBIO[Seq[SequenceState]] = {
  // Serialize once, reuse for every target session.
  val header = update.header
  val body = update.toByteArray
  val (userIds, groupIds) = updateRefs(update)
  val originPeer = getOriginPeer(update)
  DBIO.sequence(authIds.toSeq map { authId ⇒
    persistAndPushUpdate(authId, header, body, userIds, groupIds, pushText, originPeer, isFat)
  })
}
/** Pushes `update` to all sessions of `userIds` plus all sessions of the current
  * client's user. The client's own session is pushed last and its state returned
  * separately so the request handler can reply with it.
  *
  * @return (client's own seqstate, seqstates of all other sessions)
  */
def broadcastClientAndUsersUpdate(
  userIds: Set[Int],
  update: api.Update,
  pushText: Option[String],
  isFat: Boolean = false
)(implicit
  region: SeqUpdatesManagerRegion,
  ec: ExecutionContext,
  client: api.AuthorizedClientData): DBIO[(SequenceState, Seq[SequenceState])] = {
  val header = update.header
  val serializedData = update.toByteArray
  val (refUserIds, refGroupIds) = updateRefs(update)
  val originPeer = getOriginPeer(update)
  for {
    authIds ← p.AuthId.findIdByUserIds(userIds + client.userId)
    seqstates ← DBIO.sequence(
      authIds.view
        .filterNot(_ == client.authId)
        .map(persistAndPushUpdate(_, header, serializedData, refUserIds, refGroupIds, pushText, originPeer, isFat))
    )
    seqstate ← persistAndPushUpdate(client.authId, header, serializedData, refUserIds, refGroupIds, pushText, originPeer, isFat)
  } yield (seqstate, seqstates)
}
/** Pushes `update` to every session of every user in `userIds`. */
def broadcastUsersUpdate(
  userIds: Set[Int],
  update: api.Update,
  pushText: Option[String],
  isFat: Boolean = false
)(implicit
  region: SeqUpdatesManagerRegion,
  ec: ExecutionContext): DBIO[Seq[SequenceState]] = {
  val header = update.header
  val serializedData = update.toByteArray
  val (refUserIds, refGroupIds) = updateRefs(update)
  val originPeer = getOriginPeer(update)
  for {
    authIds ← p.AuthId.findIdByUserIds(userIds)
    seqstates ← DBIO.sequence(
      authIds.map(persistAndPushUpdate(_, header, serializedData, refUserIds, refGroupIds, pushText, originPeer, isFat))
    )
  } yield seqstates
}
/** Pushes `update` to every session (device) of `userId`. */
def broadcastUserUpdate(
  userId: Int,
  update: api.Update,
  pushText: Option[String],
  isFat: Boolean = false
)(implicit
  region: SeqUpdatesManagerRegion,
  ec: ExecutionContext): DBIO[Seq[SequenceState]] = {
  val (refUserIds, refGroupIds) = updateRefs(update)
  broadcastUserUpdate(userId, update.header, update.toByteArray, refUserIds, refGroupIds, pushText, getOriginPeer(update), isFat)
}
/** Pushes a pre-serialized update to every session (device) of `userId`. */
def broadcastUserUpdate(
  userId: Int,
  header: Int,
  serializedData: Array[Byte],
  userIds: Set[Int],
  groupIds: Set[Int],
  pushText: Option[String],
  originPeer: Option[Peer],
  isFat: Boolean
)(implicit
  region: SeqUpdatesManagerRegion,
  ec: ExecutionContext): DBIO[Seq[SequenceState]] = {
  for {
    authIds ← p.AuthId.findIdByUserId(userId)
    seqstates ← DBIO.sequence(authIds map (persistAndPushUpdate(_, header, serializedData, userIds, groupIds, pushText, originPeer, isFat)))
  } yield seqstates
}
/** Pushes `update` to every session of the current client's user.
  * Other devices are pushed first; the client's own session is pushed last
  * and its resulting state is returned.
  */
def broadcastClientUpdate(update: api.Update, pushText: Option[String], isFat: Boolean = false)(
  implicit
  region: SeqUpdatesManagerRegion,
  client: api.AuthorizedClientData,
  ec: ExecutionContext
): DBIO[SequenceState] = {
  val header = update.header
  val serializedData = update.toByteArray
  val (userIds, groupIds) = updateRefs(update)
  val originPeer = getOriginPeer(update)
  for {
    otherAuthIds ← p.AuthId.findIdByUserId(client.userId).map(_.view.filter(_ != client.authId))
    _ ← DBIO.sequence(otherAuthIds map (authId ⇒ persistAndPushUpdate(authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat)))
    ownseqstate ← persistAndPushUpdate(client.authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat)
  } yield ownseqstate
}
/** Pushes `update` to every session of `userId` except `exceptAuthId`
  * (typically the session that originated the change).
  */
def notifyUserUpdate(userId: Int, exceptAuthId: Long, update: api.Update, pushText: Option[String], isFat: Boolean = false)(
  implicit
  region: SeqUpdatesManagerRegion,
  ec: ExecutionContext
): DBIO[Seq[SequenceState]] = {
  val (refUserIds, refGroupIds) = updateRefs(update)
  notifyUserUpdate(userId, exceptAuthId, update.header, update.toByteArray, refUserIds, refGroupIds, pushText, getOriginPeer(update), isFat)
}
/** Pushes a pre-serialized update to every session of `userId` except `exceptAuthId`.
  *
  * Fix: the public method previously relied on type inference for its result;
  * the explicit `DBIO[Seq[SequenceState]]` matches what the typed overload above
  * already declares when delegating here.
  */
def notifyUserUpdate(
  userId: Int,
  exceptAuthId: Long,
  header: Int,
  serializedData: Array[Byte],
  userIds: Set[Int],
  groupIds: Set[Int],
  pushText: Option[String],
  originPeer: Option[Peer],
  isFat: Boolean
)(implicit
  region: SeqUpdatesManagerRegion,
  ec: ExecutionContext): DBIO[Seq[SequenceState]] = {
  for {
    otherAuthIds ← p.AuthId.findIdByUserId(userId).map(_.view.filter(_ != exceptAuthId))
    seqstates ← DBIO.sequence(otherAuthIds map (authId ⇒ persistAndPushUpdate(authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat)))
  } yield seqstates
}
/** Pushes `update` to every other session of the current client's user
  * (the client's own session is excluded).
  */
def notifyClientUpdate(update: api.Update, pushText: Option[String], isFat: Boolean = false)(
  implicit
  region: SeqUpdatesManagerRegion,
  client: api.AuthorizedClientData,
  ec: ExecutionContext
): DBIO[Seq[SequenceState]] = {
  val (refUserIds, refGroupIds) = updateRefs(update)
  notifyClientUpdate(update.header, update.toByteArray, refUserIds, refGroupIds, pushText, getOriginPeer(update), isFat)
}
/** Pushes a pre-serialized update to every session of the client's user except
  * the client's own session.
  *
  * Fix: added the explicit `DBIO[Seq[SequenceState]]` result type (public API
  * member; siblings in this object declare it explicitly).
  */
def notifyClientUpdate(
  header: Int,
  serializedData: Array[Byte],
  userIds: Set[Int],
  groupIds: Set[Int],
  pushText: Option[String],
  originPeer: Option[Peer],
  isFat: Boolean
)(implicit
  region: SeqUpdatesManagerRegion,
  client: api.AuthorizedClientData,
  ec: ExecutionContext): DBIO[Seq[SequenceState]] = {
  notifyUserUpdate(client.userId, client.authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat)
}
/** Registers (or replaces) mobile push credentials for `authId` (fire-and-forget). */
def setPushCredentials(authId: Long, creds: models.push.PushCredentials)(implicit region: SeqUpdatesManagerRegion): Unit = {
  region.ref ! Envelope(authId, PushCredentialsUpdated(Some(creds)))
}
/** Removes all mobile push credentials for `authId` (fire-and-forget). */
def deletePushCredentials(authId: Long)(implicit region: SeqUpdatesManagerRegion): Unit = {
  region.ref ! Envelope(authId, PushCredentialsUpdated(None))
}
/** Collects persisted updates for `authId` strictly after `timestamp`, batching
  * until the accumulated serialized size would exceed `maxSizeInBytes`.
  *
  * @return the collected updates plus a flag: true when more updates remain
  *         (i.e. the size limit was hit before the sequence was exhausted)
  */
def getDifference(authId: Long, timestamp: Long, maxSizeInBytes: Long)(implicit ec: ExecutionContext): DBIO[(Vector[models.sequence.SeqUpdate], Boolean)] = {
  // Pages through the store recursively, advancing `state` to the last accepted
  // update's timestamp. NOTE(review): assumes `findAfter` returns a bounded page —
  // TODO confirm against the persistence layer.
  def run(state: Long, acc: Vector[models.sequence.SeqUpdate], currentSize: Long): DBIO[(Vector[models.sequence.SeqUpdate], Boolean)] = {
    p.sequence.SeqUpdate.findAfter(authId, state).flatMap { updates ⇒
      if (updates.isEmpty) {
        // Store exhausted: everything collected so far fit, no remainder.
        DBIO.successful(acc → false)
      } else {
        val (newAcc, newSize, allFit) = append(updates.toVector, currentSize, maxSizeInBytes, acc)
        if (allFit) {
          newAcc.lastOption match {
            case Some(u) ⇒ run(u.timestamp, newAcc, newSize)
            case None    ⇒ DBIO.successful(acc → false)
          }
        } else {
          // Size budget exceeded mid-page: stop and signal that more remain.
          DBIO.successful(newAcc → true)
        }
      }
    }
  }
  run(timestamp, Vector.empty[sequence.SeqUpdate], 0L)
}
/** Appends updates to `updateAcc` while the running serialized size stays within
  * `maxSizeInBytes`.
  *
  * @return (accumulated updates, their total serialized size,
  *          whether ALL of `updates` fit within the budget)
  */
private def append(updates: Vector[sequence.SeqUpdate], currentSize: Long, maxSizeInBytes: Long, updateAcc: Vector[sequence.SeqUpdate]): (Vector[sequence.SeqUpdate], Long, Boolean) = {
  @tailrec
  def run(updLeft: Vector[sequence.SeqUpdate], acc: Vector[sequence.SeqUpdate], currSize: Long): (Vector[sequence.SeqUpdate], Long, Boolean) = {
    updLeft match {
      case h +: t ⇒
        val newSize = currSize + h.serializedData.length
        if (newSize > maxSizeInBytes) {
          // The update that would overflow the budget is excluded; report truncation.
          (acc, currSize, false)
        } else {
          run(t, acc :+ h, newSize)
        }
      case Vector() ⇒ (acc, currSize, true)
    }
  }
  run(updates, updateAcc, currentSize)
}
/** Extracts the user ids and group ids an update refers to. These are stored with
  * the update and used to assemble "fat" updates (updates bundled with the
  * referenced user/group structs).
  *
  * NOTE(review): every new api.Update subtype must be added here — presumably
  * api.Update is not sealed, so the compiler will not flag omissions; verify.
  *
  * @return (referenced user ids, referenced group ids)
  */
def updateRefs(update: api.Update): (Set[Int], Set[Int]) = {
  // For a private peer the id is a user id; otherwise it is a group id.
  def peerRefs(peer: api.peers.Peer): (Set[Int], Set[Int]) = {
    if (peer.`type` == api.peers.PeerType.Private) {
      (Set(peer.id), Set.empty)
    } else {
      (Set.empty, Set(peer.id))
    }
  }
  val empty = (Set.empty[Int], Set.empty[Int])
  def singleUser(userId: Int): (Set[Int], Set[Int]) = (Set(userId), Set.empty)
  def singleGroup(groupId: Int): (Set[Int], Set[Int]) = (Set.empty, Set(groupId))
  def users(userIds: Seq[Int]): (Set[Int], Set[Int]) = (userIds.toSet, Set.empty)
  update match {
    case _: api.misc.UpdateConfig ⇒ empty
    case _: api.configs.UpdateParameterChanged ⇒ empty
    case api.messaging.UpdateChatClear(peer) ⇒ (Set.empty, Set(peer.id))
    case api.messaging.UpdateChatDelete(peer) ⇒ (Set.empty, Set(peer.id))
    case api.messaging.UpdateMessage(peer, senderUserId, _, _, _) ⇒
      val refs = peerRefs(peer)
      refs.copy(_1 = refs._1 + senderUserId)
    case api.messaging.UpdateMessageDelete(peer, _) ⇒ peerRefs(peer)
    case api.messaging.UpdateMessageRead(peer, _, _) ⇒ peerRefs(peer)
    case api.messaging.UpdateMessageReadByMe(peer, _) ⇒ peerRefs(peer)
    case api.messaging.UpdateMessageReceived(peer, _, _) ⇒ peerRefs(peer)
    case api.messaging.UpdateMessageSent(peer, _, _) ⇒ peerRefs(peer)
    case api.messaging.UpdateMessageContentChanged(peer, _, _) ⇒ peerRefs(peer)
    case api.messaging.UpdateMessageDateChanged(peer, _, _) ⇒ peerRefs(peer)
    case api.groups.UpdateGroupAvatarChanged(groupId, userId, _, _, _) ⇒ (Set(userId), Set(groupId))
    case api.groups.UpdateGroupInvite(groupId, inviteUserId, _, _) ⇒ (Set(inviteUserId), Set(groupId))
    case api.groups.UpdateGroupMembersUpdate(groupId, members) ⇒ (members.map(_.userId).toSet ++ members.map(_.inviterUserId).toSet, Set(groupId)) // TODO: #perf use foldLeft
    case api.groups.UpdateGroupTitleChanged(groupId, userId, _, _, _) ⇒ (Set(userId), Set(groupId))
    case api.groups.UpdateGroupUserInvited(groupId, userId, inviterUserId, _, _) ⇒ (Set(userId, inviterUserId), Set(groupId))
    case api.groups.UpdateGroupUserKick(groupId, userId, kickerUserId, _, _) ⇒ (Set(userId, kickerUserId), Set(groupId))
    case api.groups.UpdateGroupUserLeave(groupId, userId, _, _) ⇒ (Set(userId), Set(groupId))
    case api.contacts.UpdateContactRegistered(userId, _, _, _) ⇒ singleUser(userId)
    case api.contacts.UpdateContactsAdded(userIds) ⇒ users(userIds)
    case api.contacts.UpdateContactsRemoved(userIds) ⇒ users(userIds)
    case api.users.UpdateUserAvatarChanged(userId, _) ⇒ singleUser(userId)
    case api.users.UpdateUserContactsChanged(userId, _) ⇒ singleUser(userId)
    case api.users.UpdateUserLocalNameChanged(userId, _) ⇒ singleUser(userId)
    case api.users.UpdateUserNameChanged(userId, _) ⇒ singleUser(userId)
    case api.weak.UpdateGroupOnline(groupId, _) ⇒ singleGroup(groupId)
    case api.weak.UpdateTyping(peer, userId, _) ⇒
      val refs = peerRefs(peer)
      refs.copy(_1 = refs._1 + userId)
    case api.weak.UpdateUserLastSeen(userId, _) ⇒ singleUser(userId)
    case api.weak.UpdateUserOffline(userId) ⇒ singleUser(userId)
    case api.weak.UpdateUserOnline(userId) ⇒ singleUser(userId)
    case api.calls.UpdateCallRing(user, _) ⇒ singleUser(user.id)
    case api.calls.UpdateCallEnd(_) ⇒ empty
  }
}
/** Decodes an 8-byte big-endian state blob into a timestamp.
  * An empty array means "no state" and decodes to 0.
  */
def bytesToTimestamp(bytes: Array[Byte]): Long =
  if (bytes.isEmpty) 0L
  else ByteBuffer.wrap(bytes).getLong
/** Encodes a timestamp as an 8-byte big-endian blob (inverse of `bytesToTimestamp`). */
def timestampToBytes(timestamp: Long): Array[Byte] = {
  val buf = ByteBuffer.allocate(java.lang.Long.BYTES)
  buf.putLong(timestamp)
  buf.array()
}
/** Subscribes `consumer` to live updates for `authId`; completes once the entry acks. */
private[push] def subscribe(authId: Long, consumer: ActorRef)(implicit region: SeqUpdatesManagerRegion, ec: ExecutionContext, timeout: Timeout): Future[Unit] = {
  region.ref.ask(Envelope(authId, Subscribe(consumer))).mapTo[SubscribeAck].map(_ ⇒ ())
}
/** Asks the entry to persist and deliver the update, waiting for the resulting
  * sequence state (bounded by OperationTimeout).
  */
private def pushUpdateGetSeqState(
  authId: Long,
  header: Int,
  serializedData: Array[Byte],
  userIds: Set[Int],
  groupIds: Set[Int],
  pushText: Option[String],
  originPeer: Option[Peer],
  isFat: Boolean
)(implicit region: SeqUpdatesManagerRegion): Future[SequenceState] = {
  region.ref.ask(Envelope(authId, PushUpdateGetSequenceState(header, serializedData, userIds, groupIds, pushText, originPeer, isFat)))(OperationTimeout).mapTo[SequenceState]
}
/** Fire-and-forget variant: delivers the update to the entry without waiting for a reply.
  * NOTE(review): not referenced in this file's visible code — presumably kept for
  * callers within the package; verify before removing.
  */
private def pushUpdate(
  authId: Long,
  header: Int,
  serializedData: Array[Byte],
  userIds: Set[Int],
  groupIds: Set[Int],
  pushText: Option[String],
  originPeer: Option[Peer],
  isFat: Boolean
)(implicit region: SeqUpdatesManagerRegion): Unit = {
  region.ref ! Envelope(authId, PushUpdate(header, serializedData, userIds, groupIds, pushText, originPeer, isFat))
}
/** The peer a message update originates from; None for all non-message updates. */
private def getOriginPeer(update: api.Update): Option[Peer] =
  Option(update) collect { case m: UpdateMessage ⇒ m.peer }
}
/** Per-auth-id sharded persistent actor that owns the update sequence for one session.
  *
  * Responsibilities:
  *  - assigns sequence numbers and strictly increasing timestamps to updates,
  *  - persists updates via Slick and fans them out to subscribed consumers,
  *  - triggers Google/Apple mobile pushes when credentials are registered,
  *  - snapshots its `seq` counter through akka-persistence so it survives restarts.
  */
class SeqUpdatesManager(
  googlePushManager: GooglePushManager,
  applePushManager: ApplePushManager,
  db: Database
) extends PersistentActor with Stash with ActorLogging with VendorPush {
  import ShardRegion.Passivate
  import SeqUpdatesManager._

  // Unique per entry: shard name + entry name (the authId).
  override def persistenceId: String = self.path.parent.name + "-" + self.path.name

  implicit private val system: ActorSystem = context.system
  implicit private val ec: ExecutionContext = context.dispatcher

  // The shard region names each entry actor after its authId.
  private val authId: Long = self.path.name.toLong

  // FIXME: move to props
  private val receiveTimeout = context.system.settings.config.getDuration("push.seq-updates-manager.receive-timeout", TimeUnit.SECONDS).seconds
  context.setReceiveTimeout(receiveTimeout)

  // Only every (IncrementOnStart / 2)-th seq change is persisted (see pushUpdate);
  // jumping the counter forward on recovery keeps freshly issued seqs unique even
  // when the most recent increments were never persisted.
  private[this] val IncrementOnStart: Int = 1000
  require(IncrementOnStart > 1)
  // it is needed to prevent division by zero in pushUpdate
  private[this] var seq: Int = 0
  private[this] var lastTimestamp: Long = 0
  // TODO: feed this value from db on actor startup
  private[this] var consumers: Set[ActorRef] = Set.empty
  private[this] var googleCredsOpt: Option[models.push.GooglePushCredentials] = None
  private[this] var appleCredsOpt: Option[models.push.ApplePushCredentials] = None
  private[this] val applePusher = new ApplePusher(applePushManager, db)
  private[this] val googlePusher = new GooglePusher(googlePushManager, db)

  initialize()

  // Normal-operation behavior; entered once Initialized arrives (see `stashing`).
  def receiveInitialized: Receive = {
    case GetSequenceState ⇒
      sender() ! sequenceState(seq, timestampToBytes(lastTimestamp))
    case PushUpdate(header, updBytes, userIds, groupIds, pushText, originPeer, isFat) ⇒
      pushUpdate(authId, header, updBytes, userIds, groupIds, pushText, originPeer, isFat)
    case PushUpdateGetSequenceState(header, serializedData, userIds, groupIds, pushText, originPeer, isFat) ⇒
      // Capture the sender before the async callback runs.
      val replyTo = sender()
      pushUpdate(authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat, { seqstate: SequenceState ⇒
        replyTo ! seqstate
      })
    case Subscribe(consumer: ActorRef) ⇒
      if (!consumers.contains(consumer)) {
        // Watch each consumer so it is dropped again on Terminated.
        context.watch(consumer)
      }
      consumers += consumer
      log.debug("Consumer subscribed {}", consumer)
      sender() ! SubscribeAck(consumer)
    case PushCredentialsUpdated(credsOpt) ⇒
      // Keep the in-memory copy and the database in sync.
      credsOpt match {
        case Some(c: models.push.GooglePushCredentials) ⇒
          googleCredsOpt = Some(c)
          db.run(setPushCredentials(c))
        case Some(c: models.push.ApplePushCredentials) ⇒
          appleCredsOpt = Some(c)
          db.run(setPushCredentials(c))
        case None ⇒
          googleCredsOpt = None
          appleCredsOpt = None
          db.run(deletePushCredentials(authId))
      }
    case ReceiveTimeout ⇒
      // Passivate only when idle (no live subscribers).
      if (consumers.isEmpty) {
        context.parent ! Passivate(stopMessage = PoisonPill)
      }
    case Terminated(consumer) ⇒
      log.debug("Consumer unsubscribed {}", consumer)
      consumers -= consumer
  }

  // Buffers all traffic until asynchronous initialization completes.
  def stashing: Receive = {
    case Initialized(timestamp, googleCredsOpt, appleCredsOpt) ⇒
      this.lastTimestamp = timestamp
      this.googleCredsOpt = googleCredsOpt
      this.appleCredsOpt = appleCredsOpt
      unstashAll()
      context.become(receiveInitialized)
    case msg ⇒ stash()
  }

  override def receiveCommand: Receive = stashing

  override def receiveRecover: Receive = {
    // SeqChangedKryo is the current event format; plain SeqChanged is the legacy one.
    case SeqChangedKryo(value) ⇒
      log.debug("Recovery: SeqChangedKryo {}", value)
      seq = value
    case SnapshotOffer(_, SeqChangedKryo(value)) ⇒
      log.debug("Recovery(snapshot): SeqChangedKryo {}", value)
      seq = value
    case SeqChanged(value) ⇒
      log.debug("Recovery: SeqChanged {}", value)
      seq = value
    case SnapshotOffer(_, SeqChanged(value)) ⇒
      log.debug("Recovery(snapshot): SeqChanged {}", value)
      seq = value
    case RecoveryFailure(cause) ⇒
      log.error(cause, "Failed to recover")
      context.stop(self)
    case RecoveryCompleted ⇒
      log.debug("Recovery: Completed, seq: {}", seq)
      // Jump ahead of any seqs that were issued but never persisted.
      seq += IncrementOnStart - 1
  }

  override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
    super.preRestart(reason, message)
    log.error(reason, "SeqUpdatesManager exception, message option: {}", message)
  }

  // Loads the last known timestamp and push credentials, then pipes Initialized to
  // self; on failure the entry passivates instead of serving stale state.
  private def initialize(): Unit = {
    val initiatedFuture: Future[Initialized] = for {
      seqUpdOpt ← db.run(p.sequence.SeqUpdate.findLast(authId))
      googleCredsOpt ← db.run(p.push.GooglePushCredentials.find(authId))
      appleCredsOpt ← db.run(p.push.ApplePushCredentials.find(authId))
    } yield Initialized(
      seqUpdOpt.map(_.timestamp).getOrElse(0),
      googleCredsOpt,
      appleCredsOpt
    )
    initiatedFuture.onFailure {
      case e ⇒
        log.error(e, "Failed initiating SeqUpdatesManager")
        context.parent ! Passivate(stopMessage = PoisonPill)
    }
    initiatedFuture pipeTo self
  }

  private def pushUpdate(
    authId: Long,
    header: Int,
    serializedData: Array[Byte],
    userIds: Set[Int],
    groupIds: Set[Int],
    pushText: Option[String],
    originPeer: Option[Peer],
    isFat: Boolean
  ): Unit = {
    pushUpdate(authId, header, serializedData, userIds, groupIds, pushText, originPeer, isFat, noop1)
  }

  // Core delivery path: persists the update, notifies live consumers, triggers
  // vendor pushes, and finally invokes `cb` with the resulting sequence state.
  private def pushUpdate(
    authId: Long,
    header: Int,
    serializedData: Array[Byte],
    userIds: Set[Int],
    groupIds: Set[Int],
    pushText: Option[String],
    originPeer: Option[Peer],
    isFat: Boolean,
    cb: SequenceState ⇒ Unit
  ): Unit = {
    // TODO: #perf pinned dispatcher?
    implicit val ec = context.dispatcher

    def push(seq: Int, timestamp: Long): Future[Int] = {
      val seqUpdate = models.sequence.SeqUpdate(authId, timestamp, seq, header, serializedData, userIds, groupIds)
      db.run(p.sequence.SeqUpdate.create(seqUpdate))
        .map(_ ⇒ seq)
        .andThen {
          case Success(_) ⇒
            // UpdateMessageSent is not fanned out to consumers nor pushed to devices.
            if (header != UpdateMessageSent.header) {
              consumers foreach { consumer ⇒
                // Fat updates bundle the referenced user/group structs.
                val updateStructFuture = if (isFat) {
                  db.run(
                    p.AuthId.findUserId(authId) flatMap {
                      case Some(userId) ⇒
                        for {
                          users ← UserUtils.getUserStructs(userIds, userId, authId)
                          groups ← GroupUtils.getGroupStructs(groupIds, userId)
                        } yield {
                          FatSeqUpdate(
                            seqUpdate.seq,
                            timestampToBytes(seqUpdate.timestamp),
                            seqUpdate.header,
                            seqUpdate.serializedData,
                            users.toVector,
                            groups.toVector
                          )
                        }
                      case None ⇒
                        throw new Exception(s"Failed to get userId from authId ${authId}")
                    }
                  )
                } else {
                  Future.successful(SeqUpdate(
                    seqUpdate.seq,
                    timestampToBytes(seqUpdate.timestamp),
                    seqUpdate.header,
                    seqUpdate.serializedData
                  ))
                }
                updateStructFuture foreach (s ⇒ consumer ! UpdateReceived(s))
              }
              // Google pushes are sent only for new messages; Apple for every
              // update type delivered here.
              googleCredsOpt foreach { creds ⇒
                if (header == UpdateMessage.header) {
                  googlePusher.deliverGooglePush(creds, authId, seqUpdate.seq, pushText, originPeer)
                }
              }
              appleCredsOpt foreach { creds ⇒
                applePusher.deliverApplePush(creds, authId, seqUpdate.seq, pushText, originPeer)
              }
            }
            log.debug("Pushed update seq: {}", seq)
          case Failure(err) ⇒
            log.error(err, "Failed to push update") // TODO: throw exception?
        }
    }

    seq += 1
    val timestamp = newTimestamp()
    log.debug("new timestamp {}", timestamp)

    // TODO: DRY this
    // Persist (and snapshot) only every (IncrementOnStart / 2)-th counter value;
    // recovery compensates by jumping the counter forward (see receiveRecover).
    if (seq % (IncrementOnStart / 2) == 0) {
      persist(SeqChangedKryo(seq)) { s ⇒
        push(s.sequence, timestamp) foreach (_ ⇒ cb(sequenceState(s.sequence, timestampToBytes(timestamp))))
        saveSnapshot(SeqChangedKryo(s.sequence))
      }
    } else {
      push(seq, timestamp) foreach (updSeq ⇒ cb(sequenceState(updSeq, timestampToBytes(timestamp))))
    }
  }

  // Strictly increasing wall-clock-based timestamp; bumps by 1 on ties or clock
  // backsteps so two updates never share a timestamp.
  private def newTimestamp(): Long = {
    val timestamp = System.currentTimeMillis()
    if (timestamp > lastTimestamp) {
      lastTimestamp = timestamp
      lastTimestamp
    } else {
      lastTimestamp = lastTimestamp + 1
      lastTimestamp
    }
  }

  private def sequenceState(sequence: Int, timestamp: Long): SequenceState =
    sequenceState(sequence, timestampToBytes(timestamp))

  private def sequenceState(sequence: Int, state: Array[Byte]): SequenceState =
    (sequence, state)
}
| alessandrostone/actor-platform | actor-server/actor-push/src/main/scala/im/actor/server/push/SeqUpdatesManager.scala | Scala | mit | 28,497 |
package com.twitter.finagle.stats
import java.util
import java.util.Collections
import scala.collection.JavaConverters._
import org.scalatest.funsuite.AnyFunSuite
/** Unit tests for [[MetricsView.of]]: merging gauges, counters, histograms and
  * verbosity maps from two views, including duplicate-key precedence and
  * empty-map handling.
  */
class MetricsViewTest extends AnyFunSuite {

  /** Histogram snapshot with every statistic zeroed; used as an inert map value. */
  private val EmptySnapshot: Snapshot = new Snapshot {
    def count: Long = 0L
    def sum: Long = 0L
    def max: Long = 0L
    def min: Long = 0L
    // Fix: use a Double literal for the Double-typed member (was the Long literal `0L`).
    def average: Double = 0.0
    def percentiles: IndexedSeq[Snapshot.Percentile] = IndexedSeq.empty
  }

  /** Minimal [[MetricsView]] backed by the supplied (default: empty) Java maps. */
  private class Impl(
    val gauges: util.Map[String, Number] = Collections.emptyMap[String, Number],
    val counters: util.Map[String, Number] = Collections.emptyMap[String, Number],
    val histograms: util.Map[String, Snapshot] = Collections.emptyMap[String, Snapshot],
    val verbosity: util.Map[String, Verbosity] = Collections.emptyMap[String, Verbosity])
      extends MetricsView

  // Two disjoint single-entry views used by the merge tests.
  private val a1 = new Impl(
    gauges = Collections.singletonMap("a", 1),
    counters = Collections.singletonMap("a", 1),
    histograms = Collections.singletonMap("a", EmptySnapshot),
    verbosity = Collections.singletonMap("a", Verbosity.Debug)
  )
  private val b2 = new Impl(
    gauges = Collections.singletonMap("b", 2),
    counters = Collections.singletonMap("b", 2),
    histograms = Collections.singletonMap("b", EmptySnapshot),
    verbosity = Collections.singletonMap("b", Verbosity.Debug)
  )

  test("of") {
    val aAndB = MetricsView.of(a1, b2)
    assert(Map("a" -> 1, "b" -> 2) == aAndB.gauges.asScala)
    assert(Map("a" -> 1, "b" -> 2) == aAndB.counters.asScala)
    assert(Map("a" -> EmptySnapshot, "b" -> EmptySnapshot) == aAndB.histograms.asScala)
    assert(Map("a" -> Verbosity.Debug, "b" -> Verbosity.Debug) == aAndB.verbosity.asScala)
  }

  test("of handles duplicates") {
    // On key collision, the first argument's entry must win.
    val c = new Impl(
      gauges = Collections.singletonMap("a", 2),
      counters = Collections.singletonMap("a", 2),
      histograms = Collections.singletonMap("a", EmptySnapshot),
      verbosity = Collections.singletonMap("a", Verbosity.Default)
    )
    val aAndC = MetricsView.of(a1, c)
    assert(Map("a" -> 1) == aAndC.gauges.asScala)
    assert(Map("a" -> 1) == aAndC.counters.asScala)
    assert(Map("a" -> EmptySnapshot) == aAndC.histograms.asScala)
    assert(Map("a" -> Verbosity.Debug) == aAndC.verbosity.asScala)
    val cAndA = MetricsView.of(c, a1)
    assert(Map("a" -> 2) == cAndA.gauges.asScala)
    assert(Map("a" -> 2) == cAndA.counters.asScala)
    assert(Map("a" -> EmptySnapshot) == cAndA.histograms.asScala)
    assert(Map("a" -> Verbosity.Default) == cAndA.verbosity.asScala)
  }

  test("of ignores empty maps") {
    // Merging with an all-empty view must be a no-op in either argument order.
    val empty = new Impl()
    val aAndEmpty = MetricsView.of(a1, empty)
    assert(Map("a" -> 1) == aAndEmpty.gauges.asScala)
    assert(Map("a" -> 1) == aAndEmpty.counters.asScala)
    assert(Map("a" -> EmptySnapshot) == aAndEmpty.histograms.asScala)
    assert(Map("a" -> Verbosity.Debug) == aAndEmpty.verbosity.asScala)
    val emptyAndA = MetricsView.of(empty, a1)
    assert(Map("a" -> 1) == emptyAndA.gauges.asScala)
    assert(Map("a" -> 1) == emptyAndA.counters.asScala)
    assert(Map("a" -> EmptySnapshot) == emptyAndA.histograms.asScala)
    assert(Map("a" -> Verbosity.Debug) == emptyAndA.verbosity.asScala)
  }
}
| twitter/finagle | finagle-stats-core/src/test/scala/com/twitter/finagle/stats/MetricsViewTest.scala | Scala | apache-2.0 | 3,240 |
/*
# Copyright 2016 Georges Lipka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
package com.glipka.easyReactJS.reactBootstrap
import scala.scalajs.js
import scala.scalajs.js._
import com.glipka.easyReactJS.react._
import ReactBootstrap._
/** Props accepted by the react-bootstrap `Navbar` component (Scala.js facade).
  * Mirrors the upstream JavaScript API; `Any`-typed members are placeholders
  * pending more precise typings.
  */
@js.native trait NavbarProps extends HTMLProps[Navbar] with js.Any {
  var brand: Any = js.native // TODO: Add more specific type
  var bsSize: Sizes = js.native
  var bsStyle: String = js.native
  var componentClass: ReactType = js.native
  var defaultNavExpanded: Boolean = js.native
  var fixedBottom: Boolean = js.native
  var fixedTop: Boolean = js.native
  var fluid: Boolean = js.native
  var inverse: Boolean = js.native
  var navExpanded: Boolean = js.native
  var onToggle: Function = js.native
  var staticTop: Boolean = js.native
  var toggleButton: Any = js.native // TODO: Add more specific type
  var toggleNavKey: String = js.native
}
/** Component class of `Navbar`, exposing its static sub-components
  * (`Navbar.Brand`, `Navbar.Collapse`, `Navbar.Header`, `Navbar.Toggle`, ...).
  */
@js.native trait NavbarClass extends ClassicComponentClass[NavbarProps] with js.Any {
  var Brand: NavbarBrand = js.native
  var Collapse: NavbarCollapse = js.native
  var Header: NavbarHeader = js.native
  var Toggle: NavbarToggle = js.native
  var Form: Any = js.native
  var Link: Any = js.native
  var Text: Any = js.native
}
| glipka/Easy-React-With-ScalaJS | src/main/scala/com/glipka/easyReactJS/reactBootstrap/NavBar.scala | Scala | apache-2.0 | 1,728 |
package com.komanov.uuid
import java.lang.Long.parseLong
import java.util.UUID
object UuidScala1Utils {

  /** A fast drop-in for `java.util.UUID#fromString`.
    *
    * Avoids the allocations made by the JDK implementation (the array produced by
    * `split` and the five `"0x" + component` string concatenations) by locating
    * the four hyphens manually and parsing each component in place.
    */
  def fromStringFast(s: String): UUID = {
    val d1 = indexOfHyphen(s, 0)
    val d2 = indexOfHyphen(s, d1 + 1)
    val d3 = indexOfHyphen(s, d2 + 1)
    val d4 = indexOfHyphen(s, d3 + 1)
    require(s.indexOf('-', d4 + 1) == -1, s"Too much hyphens in a string: $s")

    // Most significant 64 bits: components 1-3 (32 + 16 + 16 bits).
    val msb =
      (parseLong(s.substring(0, d1), 16) << 16 |
        parseLong(s.substring(d1 + 1, d2), 16)) << 16 |
        parseLong(s.substring(d2 + 1, d3), 16)

    // Least significant 64 bits: components 4-5 (16 + 48 bits).
    val lsb =
      parseLong(s.substring(d3 + 1, d4), 16) << 48 |
        parseLong(s.substring(d4 + 1), 16)

    new UUID(msb, lsb)
  }

  /** Index of the next '-' at or after `from`; fails on malformed input. */
  private def indexOfHyphen(s: String, from: Int): Int = {
    val index = s.indexOf('-', from)
    require(index != -1, s"Expected 4 hyphens (-) in a string: $s")
    index
  }
}
| dkomanov/stuff | src/com/komanov/uuid/UuidScala1Utils.scala | Scala | mit | 1,522 |
package com.lookout.borderpatrol.session
import argonaut._
import Argonaut._
import scala.util.Try
package object id {

  /** Argonaut codec mapping [[SessionId]] to/from the JSON fields
    * expires, entropy, secretId and signature.
    */
  implicit def SessionIdCodecJson: CodecJson[SessionId] =
    casecodec4(SessionId.apply, SessionId.unapply)("expires", "entropy", "secretId", "signature")

  /** Encodes a [[SessionId]] to its string form via the implicit [[Marshaller]]. */
  implicit class SessionIdSerialize(val s: SessionId) extends AnyVal {
    def asString(implicit marshaller: Marshaller): String =
      marshaller.encode(s)
  }

  /** Decodes a string into a [[SessionId]]; malformed input yields a Failure. */
  implicit class SessionIdDeserialize(val s: String) extends AnyVal {
    def asSessionId(implicit marshaller: Marshaller): Try[SessionId] =
      marshaller.decode(s)
  }

  /** Decodes a string into a [[SessionId]] together with the [[Secret]] that signed it. */
  implicit class SessionIdAndSecretDeserialize(val s: String) extends AnyVal {
    def asSessionIdAndSecret(implicit marshaller: Marshaller): Try[(SessionId, Secret)] =
      marshaller.decodeWithSecret(s)
  }

  /** Resolves the [[Secret]] for an already-parsed [[SessionId]] via the marshaller's injector. */
  implicit class SessionIdAndSecret(val s: SessionId) extends AnyVal {
    def asSessionIdAndSecret(implicit marshaller: Marshaller): Try[(SessionId, Secret)] =
      marshaller.injector.idAndSecret2Id.invert(s)
  }
}
| rtyler/borderpatrol | borderpatrol-core/src/main/scala/com/lookout/borderpatrol/session/id/package.scala | Scala | mit | 1,045 |
package com.emstlk.nacl4s.crypto.verify
object Verify {

  /** Constant-time equality of `count` bytes of `x` (starting at `xOffset`) against `y`.
    *
    * Every byte pair is always inspected and the XOR differences are OR-ed together,
    * so running time does not depend on where the first mismatch occurs
    * (avoids timing side channels).
    */
  private def verify(x: Array[Byte], xOffset: Int, y: Array[Byte], count: Int): Boolean = {
    val diff = (0 until count).foldLeft(0) { (acc, i) =>
      acc | (x(xOffset + i) ^ y(i))
    }
    // Branch-free "low byte of diff is zero" check: (diff & 0xff) - 1 has bit 8 set
    // only when diff's low byte was 0.
    ((((diff & 0xff) - 1) >>> 8) & 1) == 1
  }

  /** Constant-time 16-byte comparison. */
  def cryptoVerify16(x: Array[Byte], xOffset: Int, y: Array[Byte]) = verify(x, xOffset, y, 16)

  /** Constant-time 32-byte comparison. */
  def cryptoVerify32(x: Array[Byte], xOffset: Int, y: Array[Byte]) = verify(x, xOffset, y, 32)
}
| emstlk/nacl4s | src/main/scala/com/emstlk/nacl4s/crypto/verify/Verify.scala | Scala | mit | 534 |
package controllers
import javax.inject.Singleton
import play.api.mvc._
/** Entry-point controller serving the application's landing page. */
@Singleton
class Application extends Controller {

  /** GET / — renders the index template. */
  def index = Action {
    Ok(views.html.index())
  }
}
| JustinMullin/mtg-search | app/controllers/Application.scala | Scala | apache-2.0 | 183 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a quick overview of the dataset's contents without deeper analysis.