make logback stfu + minor edits

Rohan Sircar 2021-05-05 22:53:19 +05:30
parent ddefc489e7
commit c25f2e0810
8 changed files with 56 additions and 74 deletions

View File

@@ -4,12 +4,12 @@ import scala.concurrent.Future

 import cats.syntax.all._
 import io.odin.Logger
-import io.odin.consoleLogger
 import io.odin.fileLogger
 import io.odin.syntax._
 import monix.bio.Task
 import monix.execution.Scheduler
 import munit.TestOptions
+import java.time.LocalDateTime

 trait MonixBioSuite extends munit.TaglessFinalSuite[Task] {
   override protected def toFuture[A](f: Task[A]): Future[A] = {
@@ -17,18 +17,10 @@ trait MonixBioSuite extends munit.TaglessFinalSuite[Task] {
     f.runToFuture
   }

-  def loggerFixture(fileName: Option[String] = None)(implicit
-      enc: sourcecode.Enclosing
-  ) =
-    ResourceFixture(
-      consoleLogger[Task]().withAsync() |+| fileLogger[Task](
-        fileName.getOrElse(enc.value.split("#").head + ".log")
-      ),
-      (
-          options: TestOptions,
-          value: Logger[Task]
-      ) => Task.unit,
-      (_: Logger[Task]) => Task.unit
-    )
+  val date = LocalDateTime.now()
+
+  val noopLogger = Logger.noop[Task]
+
+  val consoleLogger = io.odin.consoleLogger[Task]()

 }

View File

@@ -0,0 +1,10 @@
+<configuration>
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <pattern>%msg%n</pattern>
+        </encoder>
+    </appender>
+    <root level="error">
+        <appender-ref ref="CONSOLE" />
+    </root>
+</configuration>

View File

@@ -1,6 +1,5 @@
 package wow.doge.http4sdemo

-import cats.syntax.all._
 import com.dimafeng.testcontainers.PostgreSQLContainer
 import monix.bio.UIO
 import wow.doge.http4sdemo.dto.BookSearchMode
@@ -33,7 +32,6 @@ class LibraryServiceSpec extends DatabaseIntegrationTestBase {
           book <- service.insertBook(NewBook("blah", "Segehwe", id))
           _ <- service
             .getBookById(book.bookId)
-            .flatTap(r => UIO(println(r)))
             .assertEquals(Some(book))
         } yield ()
       )
@@ -109,7 +107,7 @@ class LibraryServiceSpec extends DatabaseIntegrationTestBase {
           book1 <- service.insertBook(NewBook("blah3", "aeaega", id))
           book2 <- service.insertBook(NewBook("blah4", "afgegg", id))
           _ <- service
-            .searchBook(BookSearchMode.AuthorName, id.toString)
+            .searchBook(BookSearchMode.AuthorName, "bar")
             .toListL
             .toIO
             .attempt

View File

@@ -5,13 +5,10 @@ import java.time.LocalDateTime
 import cats.syntax.either._
 import enumeratum.EnumEntry
 import enumeratum._
-import io.circe.Printer
 import io.circe.generic.semiauto._
 import io.scalaland.chimney.dsl._
-import org.http4s.EntityEncoder
 import org.http4s.ParseFailure
 import org.http4s.QueryParamDecoder
-import org.http4s.circe.streamJsonArrayEncoderWithPrinterOf
 import org.http4s.dsl.impl.QueryParamDecoderMatcher
 import slick.jdbc.JdbcProfile
 import wow.doge.http4sdemo.slickcodegen.Tables
@@ -26,9 +23,6 @@ final case class Book(
 object Book {
   def tupled = (apply _).tupled
   implicit val codec = deriveCodec[Book]
-  // implicit def streamEntityEncoder[F[_]]
-  //     : EntityEncoder[F, fs2.Stream[F, Book]] =
-  //   streamJsonArrayEncoderWithPrinterOf(Printer.noSpaces)
   def fromBooksRow(row: Tables.BooksRow) = row.transformInto[Book]
   def fromBooksTableFn(implicit profile: JdbcProfile) = {
     import profile.api._
@@ -68,9 +62,6 @@ final case class Author(authorId: Int, authorName: String)
 object Author {
   def tupled = (apply _).tupled
   implicit val codec = deriveCodec[Author]
-  implicit def streamEntityEncoder[F[_]]
-      : EntityEncoder[F, fs2.Stream[F, Author]] =
-    streamJsonArrayEncoderWithPrinterOf(Printer.noSpaces)
   def fromAuthorsRow(row: Tables.AuthorsRow) = row.transformInto[Author]
   def fromAuthorsTableFn(implicit profile: JdbcProfile) = {
     import profile.api._
@@ -97,9 +88,6 @@ final case class BookWithAuthor(
 object BookWithAuthor {
   def tupled = (apply _).tupled
   implicit val codec = deriveCodec[BookWithAuthor]
-  implicit def streamEntityEncoder[F[_]]
-      : EntityEncoder[F, fs2.Stream[F, BookWithAuthor]] =
-    streamJsonArrayEncoderWithPrinterOf(Printer.noSpaces)
 }

 sealed trait BookSearchMode extends EnumEntry

View File

@@ -66,7 +66,7 @@ class LibraryServiceImpl(

   def getBooks = db.streamO(dbio.getBooks.transactionally)

-  def getBookById(id: Int) = db.runL(dbio.getBook(id))
+  def getBookById(id: Int) = db.runL(dbio.getBookById(id))

   def searchBook(mode: BookSearchMode, value: String): Observable[Book] =
     mode match {
@@ -76,21 +76,20 @@ class LibraryServiceImpl(
       case AuthorName =>
         Observable
           .fromTask((for {
-            _ <- IO.unit
-            id <- IO(value.toInt)
-            author <- db.runL(dbio.getAuthor(id)).flatMap {
+            author <- db.runL(dbio.getAuthorByName(value)).flatMap {
               case None =>
                 IO.raiseError(
-                  new EntityDoesNotExist(s"Author with id=$id does not exist")
+                  new EntityDoesNotExist(
+                    s"Author with name=$value does not exist"
+                  )
                 )
               case Some(value) => IO.pure(value)
             }
             books = db
-              .streamO(dbio.getBooksForAuthor(id))
+              .streamO(dbio.getBooksForAuthor(author.authorId))
               .map(Book.fromBooksRow)
           } yield books).toTask)
           .flatten
     }

   def insertAuthor(a: NewAuthor): Task[Int] = db.runL(dbio.insertAuthor(a))
@@ -99,7 +98,7 @@ class LibraryServiceImpl(
       for {
         action <- UIO.deferAction(implicit s =>
           UIO(for {
-            mbRow <- dbio.selectBook(id).result.headOption
+            mbRow <- Tables.Books.filter(_.bookId === id).result.headOption
             updatedRow <- mbRow match {
               case Some(value) =>
                 println(s"Original value -> $value")
@@ -110,7 +109,7 @@ class LibraryServiceImpl(
                     EntityDoesNotExist(s"Book with id=$id does not exist")
                   )
             }
-            updateAction = dbio.selectBook(id).update(updatedRow)
+            updateAction = Tables.Books.filter(_.bookId === id).update(updatedRow)
             _ = println(s"SQL = ${updateAction.statements}")
             _ <- updateAction
           } yield ())
@@ -128,8 +127,8 @@ class LibraryServiceImpl(
     IO.deferAction { implicit s =>
       for {
         action <- UIO(for {
-          _ <- dbio
-            .selectBookByIsbn(newBook.isbn)
+          _ <- Tables.Books
+            .filter(_.isbn === newBook.isbn)
             .map(Book.fromBooksTableFn)
             .result
             .headOption
@@ -183,24 +182,30 @@ class LibraryDbio(val profile: JdbcProfile) {
     def insertAuthor(newAuthor: NewAuthor): DBIO[Int] =
       Query.insertAuthorGetId += newAuthor

-    def selectBook(id: Int) = Tables.Books.filter(_.bookId === id)
-
-    def getAuthor(id: Int) =
+    def getAuthor(id: Int): DBIO[Option[Author]] =
       Query.selectAuthor(id).map(Author.fromAuthorsTableFn).result.headOption

-    def deleteBook(id: Int) = selectBook(id).delete
+    def getAuthorByName(name: String): DBIO[Option[Author]] =
+      Tables.Authors
+        .filter(_.authorName === name)
+        .map(Author.fromAuthorsTableFn)
+        .result
+        .headOption

-    def getBook(id: Int) = selectBook(id)
+    def deleteBook(id: Int): DBIO[Int] = Query.selectBookById(id).delete
+
+    def getBookById(id: Int): DBIO[Option[Book]] = Query
+      .selectBookById(id)
       .map(Book.fromBooksTableFn)
       .result
       .headOption

-    def selectBookByIsbn(isbn: String) = Tables.Books.filter(_.isbn === isbn)
-
-    def getBooksByTitle(title: String) =
+    def getBooksByTitle(title: String): StreamingDBIO[Seq[Book], Book] =
       Tables.Books.filter(_.bookTitle === title).map(Book.fromBooksTableFn).result

-    def getBooksForAuthor(authorId: Int) =
+    def getBooksForAuthor(
+        authorId: Int
+    ): StreamingDBIO[Seq[Tables.BooksRow], Tables.BooksRow] =
       Query.booksForAuthorInner(authorId).result

     private object Query {
@@ -227,6 +232,10 @@ class LibraryDbio(val profile: JdbcProfile) {
     def selectAuthor(authorId: Int) =
       Tables.Authors.filter(_.authorId === authorId)

+    def selectBookById(id: Int) = Tables.Books.filter(_.bookId === id)
+
+    def selectBookByIsbn(isbn: String) = Tables.Books.filter(_.isbn === isbn)
   }
 }

View File

@@ -0,0 +1,10 @@
+<configuration>
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <pattern>%msg%n</pattern>
+        </encoder>
+    </appender>
+    <root level="OFF">
+        <appender-ref ref="CONSOLE" />
+    </root>
+</configuration>

View File

@@ -1,7 +1,5 @@
 package wow.doge.http4sdemo

-import java.time.LocalDateTime
-
 import cats.syntax.all._
 import monix.bio.IO
 import monix.bio.Task
@@ -20,17 +18,6 @@ import wow.doge.http4sdemo.services.NoopLibraryService

 class LibraryControllerSpec extends MonixBioSuite {
-  // "libraryControllerSpec"
-  // val fixture = loggerFixture()
-  // ResourceFixture
-  // override def munitFixtures = List(myFixture)
-  // override def munitFixtures: Seq[Fixture[_]] = ???
-
-  val date = LocalDateTime.now()
-  // val logger = consoleLogger[Task]()

   val Root = Uri(path = "")

   test("get books success") {

View File

@ -1,12 +0,0 @@
package wow.doge.http4sdemo
// import sourcecode.File
class LoggerFixtureSpec extends MonixBioSuite {
// "LoggerFixtureSpec"
val fixture = loggerFixture()
loggerFixture().test("blah blah") { logger =>
logger.debug("blah blah blah")
}
}