init

Christof Nolle committed 2 years ago · branch: master · commit 12294e2bd4

.gitignore (+3 -0)

@@ -0,0 +1,3 @@
project/project
target
/.idea

README.md (+58 -0)

@@ -0,0 +1,58 @@
## Coding Challenge

As a marketing manager, I want to forward the current product list of our
online shop to an external service provider.

The starting point is the current list of articles in CSV format ("article"/"product": a product
contains several articles, also called variants). All articles of a product
appear consecutively.

If no article of a product has stock > 0, the result must not contain an
entry for that product.

For each product, the result must contain its cheapest article with stock > 0
(with that article's properties).

Alongside the cheapest article, the sum of the stocks of all articles of the
product must be reported.

If several articles have the same price, the first one is taken.

The current article list is available via an HTTP interface (details below).
The transformed data is to be uploaded via PUT to an external HTTP
interface.
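
A minimal sketch of the required transformation (illustrative only, not part of the original challenge text), assuming an `Article` record matching the source columns and a hypothetical `Product` result type:

```scala
case class Article(id: String, productId: String, name: String, description: String, price: Float, stock: Int)
case class Product(productId: String, name: String, description: String, price: Float, stockSum: Int)

// Articles of one product are consecutive, so grouping adjacent rows suffices.
def transform(articles: Seq[Article]): Seq[Product] =
  articles
    .foldLeft(Vector.empty[Vector[Article]]) {
      case (init :+ group, a) if group.head.productId == a.productId => init :+ (group :+ a)
      case (groups, a) => groups :+ Vector(a)
    }
    .flatMap { group =>
      // sortBy is stable, so the first article wins a price tie, as required
      group.filter(_.stock > 0).sortBy(_.price).headOption.map { cheapest =>
        Product(cheapest.productId, cheapest.name, cheapest.description,
          cheapest.price, group.map(_.stock).sum)
      }
    }
```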

### Source file format
- Delimiter: pipe (|)
- Line separator: LF (\n)
- Line 1: header / column names
- Columns: id|produktId|name|beschreibung|preis|bestand (String|String|String|String|Float|Int)
- Note: the delimiter never occurs inside column values

### Target file format
- Delimiter: pipe (|)
- Line separator: LF (\n)
- Line 1: header / column names
- Columns: produktId|name|beschreibung|preis|summeBestand (String|String|String|Float|Int)

### Prices
Prices are given with one or two decimal places, with "." separating euros and cents and no currency symbol,
e.g. 12.13, 42.03 or 90.0.
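
In Scala, `Float.toString` already yields this shape for the sample values, so no extra formatting should be needed when writing the target file (worth verifying against the full data set):

```scala
Seq(12.13f, 42.03f, 90.0f).map(_.toString) // List(12.13, 42.03, 90.0)
```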

### Download interface
- HTTP GET /articles/:lines
- lines sets the number of articles returned
- Response body: the CSV file

### Upload interface
- HTTP PUT /products/:lines, Content-Type: text/csv
- lines refers to the number of underlying articles
- Response code: 200 if all entries were processed successfully
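
A hedged sketch of the upload call with akka-http (the localhost base URI and all names here are assumptions for illustration, not part of the interface description):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import scala.concurrent.Future

// assumption: the test service from the README listens on localhost:8080
def uploadProducts(csv: String, lines: Int)(
    implicit system: ActorSystem, mat: ActorMaterializer): Future[HttpResponse] =
  Http().singleRequest(
    HttpRequest(
      method = HttpMethods.PUT,
      uri = s"http://localhost:8080/products/$lines",
      entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, csv)
    )) // a 200 response means every entry was processed
```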

### Test service
- Download link: https://cloud.inoio.de/s/SnLlMwp4XEWlDe9/download
- Start: `java -jar coding-challenge.jar`

### Development environment

Write the code in whatever language you are comfortable with, and in a way you would consider "production ready".

build.sbt (+52 -0)

@@ -0,0 +1,52 @@
enablePlugins(GitVersioning)
enablePlugins(GitBranchPrompt)
enablePlugins(BuildInfoPlugin)

lazy val root = (project in file(".")).
settings(
inThisBuild(List(
organization := "cnolle",
scalaVersion := "2.12.6"
)),
name := "inoio-coding-challenge"
).settings(dependencyCheckSuppressionFiles += file("suppress-checks.xml"))
.settings(dependencyCheckFailBuildOnCVSS := 1)
.settings(
buildInfoOptions += BuildInfoOption.BuildTime,
buildInfoOptions += BuildInfoOption.ToJson,
buildInfoPackage := "de.codingchallenge",
buildInfoOptions += BuildInfoOption.Traits("de.codingchallenge.logging.LoggerContextInfo"),
buildInfoKeys := Seq[BuildInfoKey](name, version, "gitHash" -> git.gitHeadCommit.value.getOrElse("emptyRepository"))
)

lazy val compileDependencies = {
  val macVersion = "2.3.1"
  val prometheusVersion = "0.4.0"
  // Akka requires all of its modules to be on the same version at runtime
  val akkaVersion = "2.5.12"

  Seq(
    "com.typesafe.scala-logging" %% "scala-logging" % "3.7.1",
    "ch.qos.logback" % "logback-classic" % "1.2.3",
    "org.codehaus.janino" % "janino" % "3.0.8",
    "net.logstash.logback" % "logstash-logback-encoder" % "4.11",
    "com.softwaremill.macwire" %% "macros" % macVersion,
    "com.softwaremill.macwire" %% "util" % macVersion,
    "com.softwaremill.macwire" %% "proxy" % macVersion,
    "io.prometheus" % "simpleclient" % prometheusVersion,
    "io.prometheus" % "simpleclient_hotspot" % prometheusVersion,
    "fr.davit" %% "akka-http-prometheus" % "0.1.1",
    "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
    "com.typesafe.akka" %% "akka-http" % "10.1.3",
    "com.typesafe.akka" %% "akka-actor" % akkaVersion,
    "com.typesafe.akka" %% "akka-stream" % akkaVersion
  )
}
}


libraryDependencies ++= compileDependencies

lazy val testDependencies = Seq(
  "com.typesafe.akka" %% "akka-http-testkit" % "10.1.3", // keep in sync with akka-http
  "org.scalatest" %% "scalatest" % "3.0.1",
  "org.scalamock" %% "scalamock-scalatest-support" % "3.5.0"
).map(_ % "test")
libraryDependencies ++= testDependencies

project/assembly.sbt (+1 -0)

@@ -0,0 +1 @@
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.7")

project/build.properties (+1 -0)

@@ -0,0 +1 @@
sbt.version=1.1.6

project/plugins.sbt (+12 -0)

@@ -0,0 +1,12 @@
// building docker containers

addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.4.1")

addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")

addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.7.0")

addSbtPlugin("net.vonbuchholtz" % "sbt-dependency-check" % "0.2.6")

addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0")


src/main/resources/logback-humanreadable.xml (+28 -0)

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="30 seconds">

<jmxConfigurator/>

<property name="PATTERN_DEFAULT" value="%date{ISO8601} %highlight([%-5level]) %-11.-11marker [%thread] %logger{50} - %message%n" />

<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${PATTERN_DEFAULT}</pattern>
</encoder>
</appender>

<appender name="SYSLOG" class="ch.qos.logback.classic.net.SyslogAppender">
<syslogHost>localhost</syslogHost>
<port>10514</port>
<facility>USER</facility>
<suffixPattern>[%thread] %logger %msg</suffixPattern>
</appender>

<root level="DEBUG">
<appender-ref ref="SYSLOG" />
</root>

<root level="INFO">
<appender-ref ref="STDOUT"/>
</root>
</configuration>

src/main/resources/logback-server.xml (+45 -0)

@@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="30 seconds">

<jmxConfigurator/>

<if condition='isNull("gitHash")'>
<then><property name="gitHash" value="undefined" /></then>
</if>

<if condition='isNull("serviceName")'>
<then><property name="serviceName" value="undefined" /></then>
</if>

<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<fieldNames>
<timestamp>ts</timestamp>
</fieldNames>
</encoder>
</appender>

<appender name="SYSLOG" class="ch.qos.logback.classic.net.SyslogAppender">
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<fieldNames>
<timestamp>ts</timestamp>
</fieldNames>
</encoder>
<syslogHost>rsyslog-service</syslogHost>
<port>10514</port>
<facility>USER</facility>
<suffixPattern>[%thread] %logger %msg %marker</suffixPattern>
</appender>

<root level="DEBUG">
<appender-ref ref="SYSLOG" />
</root>

<appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="STDOUT" />
</appender>

<root level="INFO">
<appender-ref ref="ASYNC"/>
</root>
</configuration>

src/main/scala/de/codingchallenge/DependencyInjectionModule.scala (+29 -0)

@@ -0,0 +1,29 @@
package de.codingchallenge

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.softwaremill.macwire._
import fr.davit.akka.http.prometheus.scaladsl.server.HttpMetricsExports
import fr.davit.akka.http.prometheus.scaladsl.server.settings.HttpMetricsSettings
import de.codingchallenge.configuration.{AkkaConfigurator, Environment}
import de.codingchallenge.logging.LoggingModule
import io.prometheus.client.CollectorRegistry

trait DependencyInjectionModule extends LoggingModule {

lazy val environment = wire[Environment]

lazy val routes = wire[Routes]
lazy val akkaConfigurator = wire[AkkaConfigurator]

def actorSystem: ActorSystem
def actorMaterializer: ActorMaterializer

// Prometheus registry backing the HTTP metrics exports
val customCollectorRegistry = CollectorRegistry.defaultRegistry
val httpMetricsExports = new HttpMetricsExports {
override val registry = customCollectorRegistry
}
implicit val httpMetricsSettings = HttpMetricsSettings(
exports = httpMetricsExports)
}

src/main/scala/de/codingchallenge/Main.scala (+71 -0)

@@ -0,0 +1,71 @@
package de.codingchallenge

import akka.Done
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.StatusCodes.InternalServerError
import akka.http.scaladsl.server.Directives.{complete, extractUri}
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import akka.actor.{ActorSystem, CoordinatedShutdown}
import com.typesafe.scalalogging.LazyLogging
import scala.util.{Failure, Success}
import fr.davit.akka.http.prometheus.scaladsl.server.HttpMetricsRoute._
import fr.davit.akka.http.prometheus.scaladsl.server.HttpMetricsDirectives._
import io.prometheus.client.hotspot.DefaultExports

object Main extends App with DependencyInjectionModule with LazyLogging {
implicit lazy val actorSystem = ActorSystem("system")
implicit lazy val actorMaterializer = ActorMaterializer()
implicit lazy val executionContext = actorSystem.dispatcher

logbackReconfigurator.configureLogging()
akkaConfigurator.configure()
DefaultExports.initialize()

val (host, port) = (environment.hostIP, environment.hostPort)
val bindingFuture =
Http().bindAndHandle(handleExceptions()(withMetrics(httpMetricsExports) {
routes.routes
}), host, port)

Http().bindAndHandle(routes.rejectRoute.withMetricsHandler, host, port + 1)

def handleExceptions(): (Route) => Route = (route: Route) => {
val exceptionHandler: ExceptionHandler = ExceptionHandler {
case e: Exception =>
extractUri { uri =>
logger.error("error in route processing for request "+uri.toString , e)
complete(HttpResponse(InternalServerError, entity = "There was an internal server error, please try again later."))
}
}
akka.http.scaladsl.server.Directives.handleExceptions(exceptionHandler) {
route
}
}


bindingFuture.onComplete {
  case Success(_) =>
    logger.info(s"Server started at $host:$port")
  case Failure(cause) =>
    logger.error("Server failed to start", cause)
}

CoordinatedShutdown(actorSystem).addTask(
  CoordinatedShutdown.PhaseBeforeServiceUnbind, "http_shutdown") { () =>
  routes.shutdown()
  logger.info("Server is shutting down (external signal), waiting 4.8 seconds to complete in-flight requests")
  Thread.sleep(4800)
  logger.info("Going down")
  bindingFuture.flatMap(_.unbind()).flatMap { _ =>
    Http().shutdownAllConnectionPools()
  }.map(_ => Done)
}

}

src/main/scala/de/codingchallenge/Routes.scala (+60 -0)

@@ -0,0 +1,60 @@
package de.codingchallenge

import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives.{pathPrefix, _}
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import akka.util.ByteString
import de.codingchallenge.configuration.Environment
import com.typesafe.scalalogging.LazyLogging

class Routes(actorSystem: ActorSystem,
actorMaterializer: ActorMaterializer,
environment: Environment)
extends LazyLogging {

implicit val system: ActorSystem = actorSystem
implicit val materializer: ActorMaterializer = actorMaterializer

// readiness flag, set to false once coordinated shutdown starts
var on: Boolean = true


val serviceRoutes: Route =
pathSuffix("readinessProbe") {
get {
if(on) {
complete {
HttpEntity(ContentTypes.`application/json`, ByteString(de.codingchallenge.BuildInfo.toJson))
}
} else {
complete(StatusCodes.ServiceUnavailable)
}
}
} ~ path("health") {
get {
complete {
HttpEntity(ContentTypes.`application/json`, ByteString(de.codingchallenge.BuildInfo.toJson))
}
}
}

val exportRoute: Route =
path("articles") {
get {
complete(StatusCodes.ServiceUnavailable)
}
}

val routes: Route =
pathPrefix("inoio-coding-challenge") {
serviceRoutes
} ~ pathPrefix("export" / "articles") {
exportRoute
} ~ serviceRoutes

val rejectRoute: Route = reject

def shutdown(): Unit = on = false

}

src/main/scala/de/codingchallenge/configuration/AkkaConfigurator.scala (+23 -0)

@@ -0,0 +1,23 @@
package de.codingchallenge.configuration

import com.typesafe.scalalogging.LazyLogging

class AkkaConfigurator extends LazyLogging {

private val settings = Map(
"akka.http.server.idle-timeout" -> "10 min",
"akka.http.client.idle-timeout" -> "10 sec",
"akka.loggers.0" -> "akka.event.slf4j.Slf4jLogger",
"akka.loglevel" -> "INFO",
"akka.logging-filter" -> "akka.event.slf4j.Slf4jLoggingFilter"
)

def configure(): Unit = {
logger.info("configuring Akka via system props")
settings.foreach { case (key, value) =>
  logger.debug(s"key: $key, value: $value")
  System.setProperty(key, value)
}
}

}

src/main/scala/de/codingchallenge/configuration/Environment.scala (+8 -0)

@@ -0,0 +1,8 @@
package de.codingchallenge.configuration

class Environment() {
import scala.util.Properties._
val loggingFormat: String = envOrElse("LOGGING_FORMAT", "humanreadable")
val hostIP: String = envOrElse("HOST_IP", "0.0.0.0")
val hostPort: Int = envOrElse("HOST_PORT", "8080").toInt
}

src/main/scala/de/codingchallenge/csv/CsvColumnReads.scala (+9 -0)

@@ -0,0 +1,9 @@
package de.codingchallenge.csv

/**
* Typeclass for converting a sequence of columns into an Option of A
* @tparam A the type we try to parse
*/
trait CsvColumnReads[A] {
def read(columns: Seq[String]): Option[A]
}

src/main/scala/de/codingchallenge/csv/CsvColumnWrites.scala (+9 -0)

@@ -0,0 +1,9 @@
package de.codingchallenge.csv

/**
 * Typeclass for converting an A into a sequence of column strings
 * @tparam A the type we serialize
 */
trait CsvColumnWrites[A] {
def write(a: A): Seq[String]
}

src/main/scala/de/codingchallenge/csv/CsvOps.scala (+22 -0)

@@ -0,0 +1,22 @@
package de.codingchallenge.csv

import java.util.regex.Pattern

object CsvOps {

  implicit class CsvReader(row: String) {

    // Pattern.quote treats the delimiter literally (String.split expects a regex);
    // limit -1 preserves trailing empty columns
    def csvToOptOf[A](delimiter: String)(implicit columnReads: CsvColumnReads[A]): Option[A] =
      columnReads.read(row.split(Pattern.quote(delimiter), -1))

    def csvToOptOf[A](implicit columnReads: CsvColumnReads[A]): Option[A] =
      csvToOptOf[A]("|")
  }

implicit class CsvWriter[A](a: A) {
def toCsvLine(delimiter: String)(implicit columnWriter: CsvColumnWrites[A]): String =
columnWriter.write(a).mkString(delimiter)

def toCsvLine(implicit columnWriter: CsvColumnWrites[A]): String =
columnWriter.write(a).mkString("|")
}

}

src/main/scala/de/codingchallenge/logging/ConfigResourcePath.scala (+5 -0)

@@ -0,0 +1,5 @@
package de.codingchallenge.logging

trait ConfigResourcePath {
def path: Option[String]
}

src/main/scala/de/codingchallenge/logging/LogbackReconfigurator.scala (+47 -0)

@@ -0,0 +1,47 @@
package de.codingchallenge.logging

import java.io.InputStream

import ch.qos.logback.classic.LoggerContext
import ch.qos.logback.classic.joran.JoranConfigurator
import ch.qos.logback.core.joran.spi.JoranException
import ch.qos.logback.core.util.StatusPrinter
import com.typesafe.scalalogging.LazyLogging
import org.slf4j.LoggerFactory


class LogbackReconfigurator(configResourcePath: ConfigResourcePath, loggerContextInfo: LoggerContextInfo) extends LazyLogging {

private def enrich(context: LoggerContext): Unit = {
logger.info("Enrich")
context.putProperty("service_version", loggerContextInfo.gitHash)
context.putProperty("service", loggerContextInfo.name)
}

private def reconfigure(context: LoggerContext, configFileStream: InputStream): Unit = {
try {
val configurator = new JoranConfigurator()
configurator.setContext(context)
context.reset()
enrich(context)
configurator.doConfigure(configFileStream)
} catch {
case _: JoranException => // StatusPrinter will handle this
}
StatusPrinter.printInCaseOfErrorsOrWarnings(context)
}

def configureLogging(): Unit = {
LoggerFactory.getILoggerFactory match {
case context: LoggerContext =>
configResourcePath.path.flatMap { path =>
Option(getClass.getResourceAsStream(path))
} match {
case Some(configFileStream) => reconfigure(context, configFileStream)
case None => enrich(context)
}
case _ =>
logger.warn("Current logging framework is not logback, cannot reconfigure.")
}
}
}

src/main/scala/de/codingchallenge/logging/LoggerContextInfo.scala (+6 -0)

@@ -0,0 +1,6 @@
package de.codingchallenge.logging

trait LoggerContextInfo {
val name: String
val gitHash: String
}

src/main/scala/de/codingchallenge/logging/LoggingModule.scala (+14 -0)

@@ -0,0 +1,14 @@
package de.codingchallenge.logging

import de.codingchallenge.BuildInfo
import de.codingchallenge.configuration.Environment
import com.softwaremill.macwire._

trait LoggingModule {
protected def environment: Environment
protected lazy val configResourcePath = new ConfigResourcePath {
override def path: Option[String] = Some(s"/logback-${environment.loggingFormat}.xml")
}
protected lazy val buildInfo = BuildInfo
protected lazy val logbackReconfigurator: LogbackReconfigurator = wire[LogbackReconfigurator]
}

src/main/scala/de/codingchallenge/models/Article.scala (+27 -0)

@@ -0,0 +1,27 @@
package de.codingchallenge.models

import de.codingchallenge.csv.CsvColumnReads

import scala.util.{Success, Try}

/**
 * Represents a record of the article list
 * @param id identifier of the article
 * @param productId identifier of the product the article is a variant of
 * @param name the article's name
 * @param description some descriptive text
 * @param price the article's price
 * @param stock the current stock
 */
case class Article(id: String, productId: String, name: String, description: String, price: Float, stock: Int)

object Article {

  /**
   * Reads an article from the six source columns (id|produktId|name|beschreibung|preis|bestand).
   * Returns Some(article) when every column parses, None otherwise.
   */
  implicit val csvColumnReads: CsvColumnReads[Article] = (s: Seq[String]) =>
    Try((s.head, s(1), s(2), s(3), s(4).toFloat, s(5).toInt)) match {
      case Success(t) => Some((Article.apply _).tupled(t))
      case _ => None
    }
}

src/main/scala/de/codingchallenge/repositories/ArticleRepository.scala (+12 -0)

@@ -0,0 +1,12 @@
package de.codingchallenge.repositories

import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.scaladsl.Source
import de.codingchallenge.models.Article

class ArticleRepository(implicit as: ActorSystem){

def getArticles(limit: Int): Source[Article, NotUsed] = Http().re
}

src/test/scala/de/codingchallenge/csv/CsvOpsSpec.scala (+44 -0)

@@ -0,0 +1,44 @@
package de.codingchallenge.csv

import org.scalatest.{MustMatchers, WordSpec}
import CsvOps._

import scala.util.{Success, Try}

class CsvOpsSpec extends WordSpec with MustMatchers {

  case class CsvTestData(s: String, d: Double)

  implicit val csvReads: CsvColumnReads[CsvTestData] = (columns: Seq[String]) =>
    Try((columns.head, columns(1).toDouble)) match {
      case Success((s: String, d: Double)) => Some(CsvTestData(s, d))
      case _ => None
    }

implicit val csvWrites: CsvColumnWrites[CsvTestData] = (csvTestData: CsvTestData) =>
csvTestData.productIterator.map(_.toString).toSeq

"The CsvOps reading operations" should {
"parse a csv line with the default delimiter as expected" in {
val csvLine = "myStringVal|3.0"
csvLine.csvToOptOf[CsvTestData] mustBe Some(CsvTestData("myStringVal", 3d))
}
"parse a csv line with expected delimiter" in {
val csvLine = "myStringVal,3.0"
csvLine.csvToOptOf[CsvTestData](",") mustBe Some(CsvTestData("myStringVal", 3d))
}
}
"The CsvOps writing operations" should {
"write a csv line with default delimiter" in {
val expectedLine = "myStringVal|3.0"
val testData = CsvTestData("myStringVal", 3)
testData.toCsvLine mustBe expectedLine
}
"write a csv line with expected delimiter" in {
val expectedCsvLine = "myStringVal,3.0"
val testData = CsvTestData("myStringVal", 3)
testData.toCsvLine(",") mustBe expectedCsvLine
}
}
}

suppress-checks.xml (+11 -0)

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.1.xsd">
<suppress>
<notes><![CDATA[
False positive: akka-http-prometheus is not akka:akka
]]>
</notes>
<gav regex="true">^fr\.davit:akka-http-prometheus_2\.12:.*$</gav>
<cpe>cpe:/a:akka:akka</cpe>
</suppress>
</suppressions>
