
WIP

master
Christof Nolle, 2 years ago
commit 3258ddda8b

build.sbt (+3, -2)

@@ -46,7 +46,8 @@ libraryDependencies ++= compileDependencies

lazy val testDependencies = Seq(
"com.typesafe.akka" %% "akka-http-testkit" % "10.0.5",
"org.scalatest" %% "scalatest" % "3.0.1",
"org.scalamock" %% "scalamock-scalatest-support" % "3.5.0"
"org.mockito" % "mockito-inline" % "2.20.1",
"org.scalatest" %% "scalatest" % "3.0.1"
// "org.scalamock" %% "scalamock-scalatest-support" % "3.5.0"
).map(_ % "test")
libraryDependencies ++= testDependencies

src/main/scala/de/codingchallenge/DependencyInjectionModule.scala (+7, -0)

@@ -7,6 +7,9 @@ import fr.davit.akka.http.prometheus.scaladsl.server.HttpMetricsExports
import fr.davit.akka.http.prometheus.scaladsl.server.settings.HttpMetricsSettings
import de.codingchallenge.configuration.{AkkaConfigurator, Environment}
import de.codingchallenge.logging.LoggingModule
import de.codingchallenge.models.ProductExport
import de.codingchallenge.repositories.{ArticleRepository, ProductExportRepository}
import de.codingchallenge.services.ArticleExportService
import io.prometheus.client.CollectorRegistry

trait DependencyInjectionModule extends LoggingModule {
@@ -26,4 +29,8 @@ trait DependencyInjectionModule extends LoggingModule {
}
implicit val httpMetricsSettings = HttpMetricsSettings(
exports = httpMetricsExports)

lazy val articleRepository = wire[ArticleRepository]
lazy val exportService = wire[ArticleExportService]
lazy val productExportRepo = wire[ProductExportRepository]
}
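The three wire[...] calls are presumably MacWire macros: at compile time each expands to a plain constructor call built from the values in scope. A hand-written equivalent as a sketch only; the ManualWiring trait and its abstract members are illustrative, since in this repo actorSystem and actorMaterializer are supplied by Main:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import de.codingchallenge.repositories.{ArticleRepository, ProductExportRepository}
import de.codingchallenge.services.ArticleExportService

// Sketch of what the wire[] calls expand to, given the constructors in this commit.
trait ManualWiring {
  def actorSystem: ActorSystem             // provided by Main in this repo
  def actorMaterializer: ActorMaterializer // provided by Main in this repo

  lazy val articleRepository = new ArticleRepository(actorSystem)
  lazy val productExportRepo = new ProductExportRepository(actorSystem)
  lazy val exportService =
    new ArticleExportService(articleRepository, productExportRepo, actorMaterializer)
}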

src/main/scala/de/codingchallenge/Main.scala (+1, -0)

@@ -18,6 +18,7 @@ import fr.davit.akka.http.prometheus.scaladsl.server.HttpMetricsDirectives._
import io.prometheus.client.hotspot.DefaultExports

object Main extends App with DependencyInjectionModule with LazyLogging {

implicit lazy val actorSystem = ActorSystem("system")
implicit lazy val actorMaterializer = ActorMaterializer()
implicit lazy val executionContext = actorSystem.dispatcher

src/main/scala/de/codingchallenge/Routes.scala (+7, -3)

@@ -8,8 +8,10 @@ import akka.stream.ActorMaterializer
import akka.util.ByteString
import de.codingchallenge.configuration.Environment
import com.typesafe.scalalogging.LazyLogging
import de.codingchallenge.services.ArticleExportService

- class Routes(actorSystem: ActorSystem,
+ class Routes(articleExportService: ArticleExportService,
+ actorSystem: ActorSystem,
actorMaterializer: ActorMaterializer,
environment: Environment)
extends LazyLogging {
@@ -42,14 +44,16 @@ class Routes(actorSystem: ActorSystem,
val exportRoute: Route =
path("articles") {
get {
- complete(StatusCodes.ServiceUnavailable)
+ println("test")
+ complete(articleExportService.exportArticles())
+ complete(StatusCodes.Accepted)
}
}

val routes: Route =
pathPrefix("inoio-coding-challenge") {
serviceRoutes
} ~ pathPrefix("export" / "articles") {
} ~ pathPrefix("inoio-coding-challenge" / "export") {
exportRoute
} ~ serviceRoutes
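exportArticles() (added later in this commit) returns a Future[HttpResponse]. One way to surface that result instead of a fixed Accepted, as a sketch only; the onComplete-based error handling and the ExportRouteSketch name are assumptions, not part of this change:

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import de.codingchallenge.services.ArticleExportService

import scala.util.{Failure, Success}

object ExportRouteSketch {
  // Completes the client request once the upstream export call has finished.
  def apply(articleExportService: ArticleExportService): Route =
    path("articles") {
      get {
        onComplete(articleExportService.exportArticles()) {
          case Success(upstreamResponse) =>
            // relay the upstream response (status and body) to the caller
            complete(upstreamResponse)
          case Failure(ex) =>
            complete(StatusCodes.InternalServerError -> ex.getMessage)
        }
      }
    }
}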


src/main/scala/de/codingchallenge/configuration/Environment.scala (+1, -1)

@@ -4,5 +4,5 @@ class Environment() {
import scala.util.Properties._
val loggingFormat: String = envOrElse("LOGGING_FORMAT", "humanreadable")
val hostIP: String = envOrElse("HOST_IP", "0.0.0.0")
val hostPort: Int = envOrElse("HOST_PORT", "8080").toInt
val hostPort: Int = envOrElse("HOST_PORT", "9080").toInt
}

src/main/scala/de/codingchallenge/models/Article.scala (+3, -2)

@@ -12,7 +12,7 @@ import scala.util.{Success, Try}
* @param price the articles price
* @param stock the current stock
*/
- case class Article(id: String, productId: String, description: String, price: Float, stock: Int)
+ case class Article(id: String, productId: String, name: String, description: String, price: Float, stock: Int)

object Article {

@@ -20,8 +20,9 @@ object Article {
* Reads an article from a sequence of columns. Returns an Option with article in success case
*/
implicit val csvColumnReads: CsvColumnReads[Article] = (s: Seq[String]) =>
- Try{ (s.head, s(1), s(2), s(3).toFloat, s(4).toInt) } match {
+ Try{ (s.head, s(1), s(2), s(3), s(4).toFloat, s(5).toInt) } match {
case Success(t) => Some((Article.apply _).tupled(t))
case _ => None
}

}
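The reader now expects six columns, since name was added. A REPL-style sketch of the same parsing logic applied to the sample row used in ArticleSpec further down; the pipe split is an assumption here, because the CsvOps helpers are not part of this diff:

import de.codingchallenge.models.Article
import scala.util.{Success, Try}

val columns = "A-UhnpVjCE|P-NhImbQSB|CKVTFO LCCOR TFIAZTP|lxqjlivf dppzKc|79.54|0".split('|').toSeq

val parsed: Option[Article] =
  Try((columns.head, columns(1), columns(2), columns(3), columns(4).toFloat, columns(5).toInt)) match {
    case Success(t) => Some((Article.apply _).tupled(t))
    case _          => None
  }
// parsed == Some(Article("A-UhnpVjCE", "P-NhImbQSB", "CKVTFO LCCOR TFIAZTP", "lxqjlivf dppzKc", 79.54f, 0))
// A row with a non-numeric price or stock makes the Try fail, so the reader yields None.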

src/main/scala/de/codingchallenge/models/ProductExport.scala (+19, -0)

@@ -0,0 +1,19 @@
package de.codingchallenge.models

import de.codingchallenge.csv.CsvColumnWrites

case class ProductExport(productId: String, name: String, description: String, price:Float, stockSum: Int)

object ProductExport{

implicit val columnWrites: CsvColumnWrites[ProductExport] = (p: ProductExport) =>
p.productIterator.map(_.toString).toSeq

def apply(a: Article, stockSum: Int): ProductExport = new ProductExport(
productId = a.productId,
name = a.name,
description = a.description,
price = a.price,
stockSum = stockSum
)
}
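columnWrites serialises via productIterator, so the column order is exactly the case-class field order. A REPL-style sketch with hypothetical values; joining the columns into a line is left to CsvOps, which is not part of this diff:

import de.codingchallenge.models.ProductExport

val export = ProductExport(
  productId   = "P-NhImbQSB",
  name        = "some name",
  description = "some description",
  price       = 79.54f,
  stockSum    = 12)

// productIterator walks the constructor fields in declaration order:
val columns: Seq[String] = export.productIterator.map(_.toString).toSeq
// columns == Seq("P-NhImbQSB", "some name", "some description", "79.54", "12")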

src/main/scala/de/codingchallenge/repositories/ArticleRepository.scala (+36, -3)

@@ -3,10 +3,43 @@ package de.codingchallenge.repositories
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
- import akka.stream.scaladsl.Source
+ import akka.http.scaladsl.model.{HttpRequest, Uri}
+ import akka.stream.scaladsl.{Flow, Framing, Source}
+ import akka.util.ByteString
+ import com.typesafe.scalalogging.LazyLogging
+ import de.codingchallenge.csv.CsvOps._
import de.codingchallenge.models.Article

- class ArticleRepository(implicit as: ActorSystem){
-
- def getArticles(limit: Int): Source[Article, NotUsed] = Http().re
+ class ArticleRepository(actorSystem: ActorSystem) extends LazyLogging{

implicit val as = actorSystem
// might be a good idea to make that value configurable
val baseUrl: String = "http://localhost:8080"

lazy val connection = Http().superPool[NotUsed]()

def getArticles(limit: Int): Source[Article, _] = Source.fromFuture(
Http()
.singleRequest(HttpRequest(uri = s"$baseUrl/articles/$limit"))
).flatMapConcat(res =>
res.entity
.dataBytes
.via(lineDelimiter)
.via(articleFlow))

val lineDelimiter: Flow[ByteString, ByteString, NotUsed] =
Framing.delimiter(ByteString("\n"), 128, allowTruncation = true)

val articleFlow: Flow[ByteString, Article, NotUsed] = Flow[ByteString]
.map(_.utf8String)
.drop(1)
.map{e => logger.info(s"CSV string: $e"); e}
.map(_.csvToOptOf[Article])
.map{
case Some(a) => a
case _ => throw new IllegalStateException("Malformed CSV data aborting stream")
}
}
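The repository frames the chunked response body on newlines and drops the CSV header before parsing. A self-contained sketch of the same framing run against an in-memory chunk stream; all names here are local to the example, and the 128-byte maximum frame length mirrors the value above:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Framing, Sink, Source}
import akka.util.ByteString

import scala.concurrent.Await
import scala.concurrent.duration._

object FramingSketch extends App {
  implicit val system = ActorSystem("framing-sketch")
  implicit val mat    = ActorMaterializer()

  // Two CSV rows split across uneven chunks, as an HTTP entity might deliver them.
  val chunks = Source(List(
    ByteString("id|productId|name|desc"),
    ByteString("ription|price|stock\nA-1|P-1|n"),
    ByteString("ame|desc|9.99|3\n")))

  val lines = chunks
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 128, allowTruncation = true))
    .map(_.utf8String)
    .drop(1) // drop the header line, as the repository does

  println(Await.result(lines.runWith(Sink.seq), 3.seconds))
  // Vector("A-1|P-1|name|desc|9.99|3")

  system.terminate()
}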



src/main/scala/de/codingchallenge/repositories/ProductExportRepository.scala (+38, -0)

@@ -0,0 +1,38 @@
package de.codingchallenge.repositories

import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpEntity.Chunked
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.ContentNegotiator.Alternative.ContentType
import akka.stream.scaladsl.{Flow, JavaFlowSupport, Source}
import akka.util.ByteString
import de.codingchallenge.models.ProductExport
import de.codingchallenge.csv.CsvOps._

import scala.concurrent.Future

class ProductExportRepository(actorSystem: ActorSystem) {

// val headerLine = "produktId|name|beschreibung|preis|summeBestand"

val baseUrl = "http://localhost:8080"

def add(p: Source[ProductExport, _], articlesSize: Int): Future[HttpResponse] = {

val entity = Chunked.fromData(
ContentTypes.`text/csv(UTF-8)`,
p.via(csvFlow))

Http()(actorSystem)
.singleRequest(
HttpRequest(
method = HttpMethods.PUT,
uri = s"$baseUrl/products/$articlesSize",
entity = entity))
}

private val csvFlow: Flow[ProductExport, ByteString, Any] =
Flow.fromFunction(p => ByteString(p.toCsvLine + "\n"))
}
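Chunked.fromData streams the export: each ByteString emitted by csvFlow becomes one HTTP chunk, so the whole CSV is never buffered in memory. A REPL-style sketch of the entity that add builds, using hypothetical export values:

import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
import akka.stream.scaladsl.Source
import akka.util.ByteString
import de.codingchallenge.csv.CsvOps._
import de.codingchallenge.models.ProductExport

val exports = Source(List(
  ProductExport("P-1", "name one", "desc one", 9.99f, 3),
  ProductExport("P-2", "name two", "desc two", 1.5f, 7)))

// One chunk per export line, terminated with "\n" as in csvFlow above.
val entity = HttpEntity.Chunked.fromData(
  ContentTypes.`text/csv(UTF-8)`,
  exports.map(p => ByteString(p.toCsvLine + "\n")))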

src/main/scala/de/codingchallenge/services/ArticleExportService.scala (+39, -0)

@@ -0,0 +1,39 @@
package de.codingchallenge.services

import akka.NotUsed
import akka.http.scaladsl.model.HttpResponse
import akka.stream.Materializer
import akka.stream.scaladsl.{Sink, Source}
import com.typesafe.scalalogging.LazyLogging
import de.codingchallenge.models.{Article, ProductExport}
import de.codingchallenge.repositories.{ArticleRepository, ProductExportRepository}

import scala.concurrent.Future

class ArticleExportService(
articleRepository: ArticleRepository,
productExportRepository: ProductExportRepository,
mat: Materializer
) extends LazyLogging {
implicit val m: Materializer = mat

val productsSize: Int = 100

def exportArticles(): Future[HttpResponse] = productExportRepository.add(Source.fromGraph(
articleRepository
.getArticles(productsSize)
.filter(_.stock > 0)
.groupBy(1, _.productId)
.map(a => a -> a.stock)
.reduce[(Article, Int)] {
case ((a1, c1), (a2, c2)) if a1.price < a2.price => (a1, c1 + c2)
case ((a1, c1), (a2, c2)) if a1.price > a2.price => (a2, c1 + c2)
case ((a1, c1), (_, c2)) => (a1, c1 + c2)
}
.mergeSubstreams
.map { case (article, stockSum) =>
logger.debug(s"Reduced to article: $article and stockSum: $stockSum")
ProductExport(article, stockSum) }
), productsSize)

}
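The stream drops articles without stock, groups by productId, keeps the cheapest article of each group and sums the group's stock. The same reduction on plain collections, as a REPL-style sketch using the fixture values introduced further down in this commit:

import de.codingchallenge.models.{Article, ProductExport}

val articles = List(
  Article("id",       "productId", "name",            "desc",                1f, 0), // dropped: no stock
  Article("cheapest", "productId", "cheapestArticle", "cheapestArticleDesc", 0f, 1),
  Article("id",       "productId", "name",            "desc",                1f, 5))

val exports: List[ProductExport] = articles
  .filter(_.stock > 0)
  .groupBy(_.productId)
  .values
  .map(group => ProductExport(group.minBy(_.price), group.map(_.stock).sum))
  .toList
// exports == List(ProductExport("productId", "cheapestArticle", "cheapestArticleDesc", 0.0, 6))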

src/test/scala/de/codingchallenge/AkkaSpec.scala (+19, -0)

@@ -0,0 +1,19 @@
package de.codingchallenge

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.scalatest.BeforeAndAfterAll

trait AkkaSpec extends BeforeAndAfterAll{ self: BaseSpec =>

implicit val system = ActorSystem("test")

implicit val dispatcher = system.dispatcher

implicit val materializer = ActorMaterializer()

override protected def afterAll(): Unit = {
super.afterAll()
system.terminate()
}
}

src/test/scala/de/codingchallenge/BaseSpec.scala (+7, -0)

@@ -0,0 +1,7 @@
package de.codingchallenge

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}

trait BaseSpec extends WordSpec with MustMatchers with MockitoSugar


src/test/scala/de/codingchallenge/fixtures/ArticleFixture.scala (+24, -0)

@@ -0,0 +1,24 @@
package de.codingchallenge.fixtures

import de.codingchallenge.models.Article

trait ArticleFixture {

val articleUnavailable = Article(
id = "id",
productId = "productId",
name = "name",
description = "desc",
price = 1,
stock = 0)

val cheapestArticle = Article(
id = "cheapest",
productId = "productId",
name = "cheapestArticle",
description = "cheapestArticleDesc",
price = 0,
stock = 1)

val anotherArticle = articleUnavailable.copy(price = 1, stock = 5)
}

src/test/scala/de/codingchallenge/fixtures/ProductExportFixture.scala (+14, -0)

@@ -0,0 +1,14 @@
package de.codingchallenge.fixtures

import de.codingchallenge.models.{Article, ProductExport}

trait ProductExportFixture {

val productExport = ProductExport(
productId = "productId",
name = "cheapestArticle",
description = "cheapestArticleDesc",
price = 0,
stockSum = 6
)
}

src/test/scala/de/codingchallenge/models/ArticleSpec.scala (+20, -0)

@@ -0,0 +1,20 @@
package de.codingchallenge.models

import de.codingchallenge.BaseSpec
import de.codingchallenge.csv.CsvOps._

class ArticleSpec extends BaseSpec {

"The Article" must {
"parse from sample" in {
val sampleCsv = "A-UhnpVjCE|P-NhImbQSB|CKVTFO LCCOR TFIAZTP|lxqjlivf dppzKc|79.54|0"
sampleCsv.csvToOptOf[Article].get mustBe Article(
"A-UhnpVjCE",
"P-NhImbQSB",
"CKVTFO LCCOR TFIAZTP",
"lxqjlivf dppzKc",
79.54f,
0)
}
}
}

src/test/scala/de/codingchallenge/repositories/ArticleRepositorySpec.scala (+20, -0)

@@ -0,0 +1,20 @@
package de.codingchallenge.repositories

import akka.stream.scaladsl.Sink
import de.codingchallenge.models.Article
import de.codingchallenge.{AkkaSpec, BaseSpec}
import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpec}

import scala.concurrent.duration._
import scala.concurrent.Await

class ArticleRepositorySpec extends BaseSpec with AkkaSpec {

"The ArticleRepository" must {
"parse articles as expected" in {
val repo = new ArticleRepository(system)
Await.result(repo.getArticles(1).runWith(Sink.head), 1.second) mustBe a[Article]
}
}

}

src/test/scala/de/codingchallenge/services/ArticleExportServiceSpec.scala (+41, -0)

@@ -0,0 +1,41 @@
package de.codingchallenge.services

import akka.http.scaladsl.model.HttpResponse
import akka.stream.scaladsl.{Sink, Source}
import de.codingchallenge.fixtures.{ArticleFixture, ProductExportFixture}
import de.codingchallenge.models.ProductExport
import de.codingchallenge.repositories.{ArticleRepository, ProductExportRepository}
import de.codingchallenge.{AkkaSpec, BaseSpec}
import org.mockito.ArgumentCaptor

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import org.mockito.Mockito._
import org.mockito.ArgumentMatchers._
import org.mockito.internal.verification.argumentmatching.ArgumentMatchingTool

class ArticleExportServiceSpec extends BaseSpec with AkkaSpec{

trait TestSetup extends ProductExportFixture with ArticleFixture{
val articleRepositoryMock = mock[ArticleRepository]
val productExportRepositoryMock = mock[ProductExportRepository]
val service = new ArticleExportService(articleRepositoryMock, productExportRepositoryMock, materializer)
}

"The ArticleExportService" must {
"pass the expected source to the product export repository" in new TestSetup{
doReturn(Source(List(articleUnavailable, cheapestArticle, anotherArticle)), Nil: _*)
.when(articleRepositoryMock)
.getArticles(100)
when(productExportRepositoryMock.add(any(), any()))
.thenReturn(Future.successful(HttpResponse()))

Await.result(service.exportArticles(), 5.second)
val sourceCaptor: ArgumentCaptor[Source[ProductExport, _]] = ArgumentCaptor.forClass(classOf[Source[ProductExport, _]])
verify(productExportRepositoryMock).add(sourceCaptor.capture(), any())
Await.result(sourceCaptor.getValue.runWith(Sink.head), 1.second) mustBe productExport


}
}
}
