Compare commits
merge into: nova:master
nova:devel
nova:master
pull from: nova:devel
nova:devel
nova:master
28 Commits
50 changed files with 2042 additions and 479 deletions
-
178.github/workflows/ci.yaml
-
1.gitignore
-
1.scalafix.conf
-
1README.MD
-
184build.sbt
-
BINlib/monix-bio_2.13.jar
-
6modules/flyway/src/main/resources/db/migration/default/V1__create_users_table.sql
-
36modules/flyway/src/main/resources/db/migration/default/V1__library_schema.sql
-
6modules/flyway/src/main/resources/db/migration/default/V2__add_user.sql
-
69modules/flyway/src/main/resources/db/migration/default/V2__sample_data.sql
-
6modules/flyway/src/main/resources/db/migration/default/V3__create_cars_table.sql
-
6modules/flyway/src/main/resources/db/migration/default/V4__add_car.sql
-
12modules/flyway/src/main/resources/db/migration/default/V5__authors_books_table.sql
-
14modules/flyway/src/main/resources/db/migration/default/V6__insert_books_and_authors.sql
-
26modules/test-common/src/main/scala/wow/doge/MonixBioSuite.scala
-
13project/plugins.sbt
-
5scripts/.env
-
46scripts/app.Dockerfile
-
7scripts/app.sh
-
24scripts/build.sh
-
7scripts/curl
-
4scripts/db.Dockerfile
-
6scripts/db.sh
-
41scripts/docker-compose.yml
-
11scripts/native
-
0scripts/native-image-readme.md
-
4scripts/test.Dockerfile
-
182scripts/wait-for-it.sh
-
10src/it/resources/logback-test.xml
-
112src/it/scala/wow/doge/http4sdemo/DatabaseIntegrationTestBase.scala
-
121src/it/scala/wow/doge/http4sdemo/LibraryServiceSpec.scala
-
22src/main/resources/META-INF/native-image/wow/doge/http4sdemo/jni-config.json
-
307src/main/resources/META-INF/native-image/wow/doge/http4sdemo/reflect-config.json
-
19src/main/resources/META-INF/native-image/wow/doge/http4sdemo/resource-config.json
-
2src/main/resources/META-INF/native-image/wow/doge/http4sdemo/serialization-config.json
-
56src/main/resources/application.conf
-
144src/main/scala/wow/doge/http4sdemo/Http4sdemoRoutes.scala
-
50src/main/scala/wow/doge/http4sdemo/Http4sdemoServer.scala
-
47src/main/scala/wow/doge/http4sdemo/Jokes.scala
-
31src/main/scala/wow/doge/http4sdemo/Main.scala
-
1src/main/scala/wow/doge/http4sdemo/Migrate.scala
-
43src/main/scala/wow/doge/http4sdemo/Server.scala
-
4src/main/scala/wow/doge/http4sdemo/SlickResource.scala
-
79src/main/scala/wow/doge/http4sdemo/dto/Library.scala
-
23src/main/scala/wow/doge/http4sdemo/implicits/package.scala
-
114src/main/scala/wow/doge/http4sdemo/routes/LibraryRoutes.scala
-
245src/main/scala/wow/doge/http4sdemo/services/LibraryService.scala
-
10src/test/resources/logback-test.xml
-
25src/test/scala/wow/doge/http4sdemo/HelloWorldSpec.scala
-
156src/test/scala/wow/doge/http4sdemo/LibraryControllerSpec.scala
@ -0,0 +1,178 @@ |
|||
--- |
|||
name: Continuous Integration |
|||
on: |
|||
pull_request: |
|||
branches: ["*", series/*] |
|||
paths-ignore: |
|||
- ".dockerignore" |
|||
- ".github/workflow/ci.yml" |
|||
- "Changelog.md" |
|||
- "Dockerfile" |
|||
- "doc/**" |
|||
- "docker/**" |
|||
- "LICENSE" |
|||
- "README.md" |
|||
# - "tests/e2e/**" |
|||
push: |
|||
branches: ["*", series/*] |
|||
tags: [v*] |
|||
paths-ignore: |
|||
- ".dockerignore" |
|||
- ".github/workflow/ci.yml" |
|||
- "Changelog.md" |
|||
- "Dockerfile" |
|||
- "doc/**" |
|||
- "docker/**" |
|||
- "LICENSE" |
|||
- "README.md" |
|||
# - "tests/e2e/**" |
|||
|
|||
jobs: |
|||
build: |
|||
name: Build and Test |
|||
runs-on: ubuntu-latest |
|||
env: |
|||
HTTP4S_DEMO_CODEGEN_DB_HOST: localhost |
|||
HTTP4S_DEMO_CODEGEN_DB_PORT: 5432 |
|||
HTTP4S_DEMO_CODEGEN_DB_USER: codegenuser |
|||
HTTP4S_DEMO_CODEGEN_DB_PASSWORD: postgres |
|||
HTTP4S_DEMO_CODEGEN_DB_NAME: codegendb |
|||
services: |
|||
postgres: |
|||
image: postgres:12-alpine |
|||
env: |
|||
POSTGRES_PASSWORD: postgres |
|||
POSTGRES_USER: codegenuser |
|||
POSTGRES_DB: codegendb |
|||
# Set health checks to wait until postgres has started |
|||
options: >- |
|||
--health-cmd pg_isready |
|||
--health-interval 10s |
|||
--health-timeout 5s |
|||
--health-retries 5 |
|||
ports: |
|||
- 5432:5432 |
|||
|
|||
steps: |
|||
- name: Check out repository code |
|||
uses: actions/checkout@v2 |
|||
- name: Coursier cache |
|||
uses: coursier/cache-action@v6 |
|||
- name: Setup |
|||
uses: olafurpg/setup-scala@v10 |
|||
with: |
|||
java-version: adopt@1.11 |
|||
- name: Migrate |
|||
run: csbt flyway/flywayMigrate |
|||
- name: Lint |
|||
run: csbt lint-check |
|||
- name: Compile |
|||
run: | |
|||
csbt "compile; test:compile" |
|||
- name: Run Unit Tests |
|||
run: | |
|||
csbt test |
|||
- name: Run Integration Tests |
|||
run: | |
|||
csbt it:test |
|||
publish: |
|||
name: Publish Release Docker Image |
|||
needs: [build] |
|||
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')) |
|||
runs-on: ubuntu-latest |
|||
env: |
|||
HTTP4S_DEMO_CODEGEN_DB_HOST: localhost |
|||
HTTP4S_DEMO_CODEGEN_DB_PORT: 5432 |
|||
HTTP4S_DEMO_CODEGEN_DB_USER: codegenuser |
|||
HTTP4S_DEMO_CODEGEN_DB_PASSWORD: postgres |
|||
HTTP4S_DEMO_CODEGEN_DB_NAME: codegendb |
|||
HTTP4S_DEMO_DOCKER_JAVA_IMAGE: azul/zulu-openjdk-alpine:11-jre-headless |
|||
services: |
|||
postgres: |
|||
image: postgres:12-alpine |
|||
env: |
|||
POSTGRES_PASSWORD: postgres |
|||
POSTGRES_USER: codegenuser |
|||
POSTGRES_DB: codegendb |
|||
# Set health checks to wait until postgres has started |
|||
options: >- |
|||
--health-cmd pg_isready |
|||
--health-interval 10s |
|||
--health-timeout 5s |
|||
--health-retries 5 |
|||
ports: |
|||
- 5432:5432 |
|||
steps: |
|||
- name: Check out repository code |
|||
uses: actions/checkout@v2 |
|||
- name: Coursier cache |
|||
uses: coursier/cache-action@v6 |
|||
- name: Setup |
|||
uses: olafurpg/setup-scala@v10 |
|||
with: |
|||
java-version: adopt@1.11 |
|||
- name: Login to Docker Hub |
|||
uses: docker/login-action@v1 |
|||
with: |
|||
username: rohansircar |
|||
password: ${{ secrets.DOCKER_LOGIN_PASSWORD }} |
|||
- name: Migrate |
|||
run: csbt flyway/flywayMigrate |
|||
- name: Publish Tag |
|||
if: startsWith(github.ref, 'refs/tags/v') |
|||
run: | |
|||
csbt docker:publish |
|||
- name: Publish Latest |
|||
if: github.ref == 'refs/heads/main' |
|||
env: |
|||
DOCKER_PUBLISH_TAG: latest |
|||
run: | |
|||
csbt docker:publish |
|||
|
|||
publish-devel: |
|||
name: Publish Devel Docker Image |
|||
needs: [build] |
|||
if: github.event_name != 'pull_request' && github.ref == 'refs/heads/devel' |
|||
runs-on: ubuntu-latest |
|||
env: |
|||
HTTP4S_DEMO_CODEGEN_DB_HOST: localhost |
|||
HTTP4S_DEMO_CODEGEN_DB_PORT: 5432 |
|||
HTTP4S_DEMO_CODEGEN_DB_USER: codegenuser |
|||
HTTP4S_DEMO_CODEGEN_DB_PASSWORD: postgres |
|||
HTTP4S_DEMO_CODEGEN_DB_NAME: codegendb |
|||
HTTP4S_DEMO_DOCKER_JAVA_IMAGE: azul/zulu-openjdk-alpine:11-jre-headless |
|||
HTTP4S_DEMO_DOCKER_PUBLISH_TAG: devel |
|||
services: |
|||
postgres: |
|||
image: postgres:12-alpine |
|||
env: |
|||
POSTGRES_PASSWORD: postgres |
|||
POSTGRES_USER: codegenuser |
|||
POSTGRES_DB: codegendb |
|||
# Set health checks to wait until postgres has started |
|||
options: >- |
|||
--health-cmd pg_isready |
|||
--health-interval 10s |
|||
--health-timeout 5s |
|||
--health-retries 5 |
|||
ports: |
|||
- 5432:5432 |
|||
steps: |
|||
- name: Check out repository code |
|||
uses: actions/checkout@v2 |
|||
- name: Coursier cache |
|||
uses: coursier/cache-action@v6 |
|||
- name: Setup |
|||
uses: olafurpg/setup-scala@v10 |
|||
with: |
|||
java-version: adopt@1.11 |
|||
- name: Login to Docker Hub |
|||
uses: docker/login-action@v1 |
|||
with: |
|||
username: rohansircar |
|||
password: ${{ secrets.DOCKER_LOGIN_PASSWORD }} |
|||
- name: Migrate |
|||
run: csbt flyway/flywayMigrate |
|||
- name: Publish |
|||
run: | |
|||
csbt docker:publish |
@ -0,0 +1 @@ |
|||
rules = [OrganizeImports] |
@ -0,0 +1 @@ |
|||
Just a scala project for me to experiment with CI and (docker) publishing. Nothing much else to see here. |
@ -1,6 +0,0 @@ |
|||
create table "users" ( |
|||
"id" VARCHAR(255) PRIMARY KEY NOT NULL, |
|||
"email" VARCHAR(1024) NOT NULL, |
|||
created_at TIMESTAMP NOT NULL, |
|||
updated_at TIMESTAMP NULL |
|||
); |
@ -0,0 +1,36 @@ |
|||
create table authors ( |
|||
author_id SERIAL PRIMARY KEY, |
|||
author_name VARCHAR(30) NOT NULL |
|||
); |
|||
|
|||
CREATE TABLE books ( |
|||
book_id SERIAL PRIMARY KEY, |
|||
isbn VARCHAR(50) UNIQUE NOT NULL, |
|||
book_title VARCHAR(30) NOT NULL, |
|||
author_id INTEGER REFERENCES authors(author_id) NOT NULL, |
|||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL |
|||
); |
|||
|
|||
create table books_store ( |
|||
books_store_id SERIAL PRIMARY KEY, |
|||
book_id INTEGER REFERENCES books(book_id) NOT NULL, |
|||
quantity INTEGER NOT NULL |
|||
); |
|||
|
|||
create table book_expiry ( |
|||
book_expiry_id SERIAL PRIMARY KEY, |
|||
book_id INTEGER REFERENCES books(book_id) NOT NULL, |
|||
discontinued BOOLEAN NOT NULL |
|||
); |
|||
|
|||
create table users ( |
|||
user_id SERIAL PRIMARY KEY NOT NULL, |
|||
user_name VARCHAR(30) NOT NULL |
|||
); |
|||
|
|||
create table checkouts ( |
|||
checkout_id SERIAL PRIMARY KEY, |
|||
book_id INTEGER REFERENCES books(book_id) NOT NULL, |
|||
taken_by INTEGER REFERENCES users(user_id) NOT NULL, |
|||
return_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP |
|||
); |
@ -1,6 +0,0 @@ |
|||
INSERT INTO "users" VALUES ( |
|||
'd074bce8-a8ca-49ec-9225-a50ffe83dc2f', |
|||
'myuser@example.com', |
|||
(TIMESTAMP '2013-03-26T17:50:06Z'), |
|||
(TIMESTAMP '2013-03-26T17:50:06Z') |
|||
); |
@ -0,0 +1,69 @@ |
|||
insert into |
|||
authors (author_name) |
|||
values |
|||
('Author1'); |
|||
|
|||
insert into |
|||
authors (author_name) |
|||
values |
|||
('Author2'); |
|||
|
|||
insert into |
|||
authors (author_name) |
|||
values |
|||
('Author3'); |
|||
|
|||
insert into |
|||
books (isbn, book_title, author_id) |
|||
values |
|||
('aebwegbwe', 'book1', 3); |
|||
|
|||
insert into |
|||
books (isbn, book_title, author_id) |
|||
values |
|||
('abeqegbqeg', 'book2', 2); |
|||
|
|||
insert into |
|||
books (isbn, book_title, author_id) |
|||
values |
|||
('aebhqeqegq', 'book3', 1); |
|||
|
|||
insert into |
|||
books_store (book_id, quantity) |
|||
values |
|||
(1, 5); |
|||
|
|||
insert into |
|||
books_store (book_id, quantity) |
|||
values |
|||
(2, 3); |
|||
|
|||
insert into |
|||
books_store (book_id, quantity) |
|||
values |
|||
(3, 8); |
|||
|
|||
insert into |
|||
book_expiry (book_id, discontinued) |
|||
values |
|||
(1, false); |
|||
|
|||
insert into |
|||
book_expiry (book_id, discontinued) |
|||
values |
|||
(2, false); |
|||
|
|||
insert into |
|||
book_expiry (book_id, discontinued) |
|||
values |
|||
(3, false); |
|||
|
|||
insert into |
|||
users (user_name) |
|||
values |
|||
('user1'); |
|||
|
|||
insert into |
|||
users (user_name) |
|||
values |
|||
('user2'); |
@ -1,6 +0,0 @@ |
|||
create table "cars" ( |
|||
"id" VARCHAR(255) PRIMARY KEY NOT NULL, |
|||
"model" VARCHAR(1024) NOT NULL, |
|||
created_at TIMESTAMP NOT NULL, |
|||
updated_at TIMESTAMP NULL |
|||
); |
@ -1,6 +0,0 @@ |
|||
INSERT INTO "cars" VALUES ( |
|||
'd074bce8-a8ca-49ec-9225-a50ffe83dc2f', |
|||
'gxxer', |
|||
(TIMESTAMP '2013-03-26T17:50:06Z'), |
|||
(TIMESTAMP '2013-03-26T17:50:06Z') |
|||
); |
@ -1,12 +0,0 @@ |
|||
create table authors ( |
|||
id SERIAL PRIMARY KEY, |
|||
name VARCHAR(15) NOT NULL |
|||
); |
|||
|
|||
create table books ( |
|||
id SERIAL PRIMARY KEY, |
|||
title VARCHAR(50) NOT NULL, |
|||
author_id INTEGER NOT NULL, |
|||
FOREIGN KEY(author_id) REFERENCES authors(id), |
|||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL |
|||
); |
@ -1,14 +0,0 @@ |
|||
-- create table authors ( |
|||
-- id INTEGER PRIMARY KEY NOT NULL, |
|||
-- name VARCHAR(15) |
|||
-- ); |
|||
|
|||
-- create table books ( |
|||
-- id INTEGER PRIMARY KEY NOT NULL, |
|||
-- title VARCHAR(15) NOT NULL, |
|||
-- author_id INTEGER NOT NULL, |
|||
-- FOREIGN KEY(author_id) REFERENCES authors(id) |
|||
-- ); |
|||
|
|||
INSERT INTO authors (name) VALUES ('Jane Austen'); |
|||
INSERT INTO books (title, author_id) VALUES ('Pride and Prejudice', 1); |
@ -0,0 +1,26 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import scala.concurrent.Future |
|||
|
|||
import cats.syntax.all._ |
|||
import io.odin.Logger |
|||
import io.odin.fileLogger |
|||
import io.odin.syntax._ |
|||
import monix.bio.Task |
|||
import monix.execution.Scheduler |
|||
import munit.TestOptions |
|||
import java.time.LocalDateTime |
|||
|
|||
trait MonixBioSuite extends munit.TaglessFinalSuite[Task] { |
|||
override protected def toFuture[A](f: Task[A]): Future[A] = { |
|||
implicit val s = Scheduler.global |
|||
f.runToFuture |
|||
} |
|||
|
|||
val date = LocalDateTime.now() |
|||
|
|||
val noopLogger = Logger.noop[Task] |
|||
|
|||
val consoleLogger = io.odin.consoleLogger[Task]() |
|||
|
|||
} |
@ -1,12 +1,15 @@ |
|||
// addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.14") |
|||
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") |
|||
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10") |
|||
|
|||
// https://github.com/tototoshi/sbt-slick-codegen |
|||
libraryDependencies += "com.h2database" % "h2" % "1.4.196" |
|||
libraryDependencies += "org.postgresql" % "postgresql" % "42.2.18" |
|||
|
|||
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") |
|||
addSbtPlugin("com.github.tototoshi" % "sbt-slick-codegen" % "1.4.0") |
|||
// Database migration |
|||
// https://github.com/flyway/flyway-sbt |
|||
addSbtPlugin("io.github.davidmweber" % "flyway-sbt" % "7.4.0") |
|||
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.23") |
|||
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.8.0") |
|||
addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.1.1") |
|||
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") |
|||
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") |
|||
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") |
|||
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.13") |
@ -0,0 +1,5 @@ |
|||
export POSTGRES_DB=codegen_db |
|||
export CODEGEN_DB_HOST=localhost |
|||
export CODEGEN_DB_NAME=codegen_db |
|||
export CODEGEN_DB_USER=codegen_user |
|||
export CODEGEN_DB_PASSWORD=password |
@ -0,0 +1,46 @@ |
|||
FROM scala/coursier-sbt:0.0.2 |
|||
|
|||
ARG DOCKER_TAG |
|||
|
|||
# RUN apt-get update |
|||
# RUN apt-get -y install git |
|||
# RUN apt-get -y install curl |
|||
# RUN sh -c '(echo "#!/usr/bin/env sh" && curl -fLo cs https://git.io/coursier-cli-"$(uname | tr LD ld)") && chmod +x cs' |
|||
# RUN ./cs install cs |
|||
# ENV PATH=${PATH}:/root/.local/share/coursier/bin |
|||
# RUN export PATH="$PATH:/root/.local/share/coursier/bin" |
|||
# RUN rm ./cs |
|||
|
|||
# ENV PATH=${PATH}:/root/.local/share/coursier/bin |
|||
# RUN export PATH="$PATH:/root/.local/share/coursier/bin" |
|||
# RUN mkdir -p /root/.local/share/coursier |
|||
# COPY coursier/bin /root/.local/share/coursier/bin |
|||
# RUN echo $PATH |
|||
# RUN cs install sbt |
|||
|
|||
RUN mkdir -p /usr/src/app/bin |
|||
WORKDIR /usr/src/app |
|||
COPY ./ /usr/src/app |
|||
|
|||
# RUN cat /etc/hosts |
|||
|
|||
# COPY wait-for-it.sh wait-for-it.sh |
|||
# RUN chmod +x wait-for-it.sh |
|||
# ENTRYPOINT [ "/bin/bash", "-c" ] |
|||
# CMD ["./wait-for-it.sh" , "project_db:5432" , "--strict" , "--timeout=30000" , "--" , "echo 'db has started'"] |
|||
# RUN bash ./wait-for-it.sh project_db:5432 --timeout=3000 --strict -- echo "db is up" |
|||
|
|||
# RUN cat /etc/hosts |
|||
# CMD [ "sbt" , "flyway/flywayMigrate" ] |
|||
# CMD ["sbtn","universal:packageBin"] |
|||
# CMD sh sbtn flyway/flywayMigrate; sbtn universal:packageBin |
|||
# RUN sbt flyway/flywayMigrate |
|||
# RUN sbt docker:stage |
|||
|
|||
CMD sh Docker/app.sh |
|||
|
|||
# CMD ["coursier", "--help"] |
|||
|
|||
# RUN coursier install sbt |
|||
# RUN sbt docker:stage |
|||
# RUN |
@ -0,0 +1,7 @@ |
|||
sbtn flyway/flywayMigrate |
|||
sbtn universal:packageZipTarball |
|||
tar -xf target/universal/http4s-demo-0.0.1-SNAPSHOT.tgz -C bin |
|||
# ./http4s-demo-0.0.1-SNAPSHOT/bin/http4s-demo |
|||
# sbtn docker:stage |
|||
# mv targer/docker/** bin |
|||
rm -r target |
@ -0,0 +1,24 @@ |
|||
# export POSTGRES_DB=codegen_db |
|||
export CODEGEN_DB_HOST=localhost |
|||
export CODEGEN_DB_NAME=codegen_db |
|||
export CODEGEN_DB_USER=codegen_user |
|||
export CODEGEN_DB_PASSWORD=password |
|||
export CODEGEN_DB_PORT=5435 |
|||
|
|||
cid=$(docker run \ |
|||
-e POSTGRES_DB=$CODEGEN_DB_NAME \ |
|||
-e POSTGRES_USER=$CODEGEN_DB_USER \ |
|||
-e POSTGRES_PASSWORD=$CODEGEN_DB_PASSWORD \ |
|||
-p $CODEGEN_DB_PORT:5432 \ |
|||
-d postgres:12) |
|||
|
|||
echo "Container id is $cid" |
|||
sleep 5s |
|||
# ./wait-for-it.sh localhost:5434 -s -t 300 -- echo "db started" |
|||
sbtn flyway/flywayMigrate |
|||
# needs docker login |
|||
sbtn docker:publish |
|||
sbtn shutdown |
|||
|
|||
docker stop $cid |
|||
docker rm $cid |
@ -0,0 +1,7 @@ |
|||
curl -X POST -H "content-type: application/json" http://localhost:8081/api/post/book --data '{"aege":"aaegqE"}' |
|||
curl http://localhost:8081/api/get/books |
|||
curl http://localhost:8081/api/get/book/1 |
|||
curl -X POST -H "content-type: application/json" http://localhost:8081/api/post/book --data '{"title":"aaegqE", "authorId": 1}' |
|||
curl -X PATCH -H "content-type: application/json" http://localhost:8081/api/update/book/2 --data '{"title":"abwbewe"}' |
|||
|
|||
|
@ -0,0 +1,4 @@ |
|||
FROM postgres:12 |
|||
ENV POSTGRES_USER test_user |
|||
ENV POSTGRES_PASSWORD password |
|||
ENV POSTGRES_DB test_db |
@ -0,0 +1,6 @@ |
|||
docker run \ |
|||
-e POSTGRES_DB=test_db \ |
|||
-e POSTGRES_USER=test_user \ |
|||
-e POSTGRES_PASSWORD=password \ |
|||
-p 5433:5432 \ |
|||
-d postgres:12 |
@ -0,0 +1,41 @@ |
|||
version: "3.3" |
|||
services: |
|||
|
|||
|
|||
db: |
|||
container_name: project_db |
|||
image: postgres:12 |
|||
# build: |
|||
# context: ./Docker |
|||
# dockerfile: db.Dockerfile |
|||
environment: |
|||
POSTGRES_DB: 'codegen_db' |
|||
POSTGRES_USER: 'codegen_user' |
|||
POSTGRES_PASSWORD: 'password' |
|||
# volumes: |
|||
# - ./var/pgdata:/var/lib/postgresql/data |
|||
ports: |
|||
- "5432:5433" |
|||
# network_mode: host |
|||
backend: |
|||
container_name: project_backend |
|||
build: |
|||
context: . |
|||
dockerfile: app.Dockerfile |
|||
# ports: |
|||
# - "9000:9001" |
|||
environment: |
|||
POSTGRES_DB: 'codegen_db' |
|||
CODEGEN_DB_HOST: 'project_db' |
|||
CODEGEN_DB_NAME: 'codegen_db' |
|||
CODEGEN_DB_USER: 'codegen_user' |
|||
CODEGEN_DB_PASSWORD: 'password' |
|||
volumes: |
|||
- ./app:/usr/src/app/bin |
|||
# links: |
|||
# - db |
|||
# # command: ["./wait-for-it.sh", "project_db:5432", "--strict" , "--timeout=30000" , "--" , "echo 'db has started'"] |
|||
# depends_on: |
|||
# - db |
|||
# # condition: service_healthy |
|||
|
@ -0,0 +1,11 @@ |
|||
native-image --trace-class-initialization --static -H:+ReportExceptionStackTraces -H:+AddAllCharsets --allow-incomplete-classpath --no-fallback --initialize-at-build-time --enable-http --enable-https --enable-all-security-services --initialize-at-run-time=org.flywaydb.core.internal.scanner.cloud.s3.AwsS3Scanner \ |
|||
--initialize-at-run-time=org.flywaydb.core.internal.scanner.classpath.jboss.JBossVFSv3ClassPathLocationScanner \ |
|||
--initialize-at-run-time=org.postgresql.sspi.SSPIClient \ |
|||
--initialize-at-build-time=scala.runtime.Statics$VM \ |
|||
--initialize-at-run-time=scala.tools.nsc.profile.ExtendedThreadMxBean \ |
|||
--verbose -jar "./target/scala-2.13/http4s-demo-assembly-0.0.1-SNAPSHOT.jar" http4s-demoBinaryImage |
|||
|
|||
|
|||
|
|||
--initialize-at-run-time=scala.tools.nsc.profile.ExtendedThreadMxBean \ |
|||
--initialize-at-build-time=scala.tools.nsc.profile.SunThreadMxBean \ |
@ -0,0 +1,4 @@ |
|||
FROM scala/coursier/sbt:v0.0.1 |
|||
# RUN apt search docker |
|||
RUN apt install -y docker.io |
|||
RUN docker --help |
@ -0,0 +1,182 @@ |
|||
#!/usr/bin/env bash |
|||
# Use this script to test if a given TCP host/port are available |
|||
|
|||
WAITFORIT_cmdname=${0##*/} |
|||
|
|||
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } |
|||
|
|||
usage() |
|||
{ |
|||
cat << USAGE >&2 |
|||
Usage: |
|||
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] |
|||
-h HOST | --host=HOST Host or IP under test |
|||
-p PORT | --port=PORT TCP port under test |
|||
Alternatively, you specify the host and port as host:port |
|||
-s | --strict Only execute subcommand if the test succeeds |
|||
-q | --quiet Don't output any status messages |
|||
-t TIMEOUT | --timeout=TIMEOUT |
|||
Timeout in seconds, zero for no timeout |
|||
-- COMMAND ARGS Execute command with args after the test finishes |
|||
USAGE |
|||
exit 1 |
|||
} |
|||
|
|||
wait_for() |
|||
{ |
|||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then |
|||
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" |
|||
else |
|||
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" |
|||
fi |
|||
WAITFORIT_start_ts=$(date +%s) |
|||
while : |
|||
do |
|||
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then |
|||
nc -z $WAITFORIT_HOST $WAITFORIT_PORT |
|||
WAITFORIT_result=$? |
|||
else |
|||
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 |
|||
WAITFORIT_result=$? |
|||
fi |
|||
if [[ $WAITFORIT_result -eq 0 ]]; then |
|||
WAITFORIT_end_ts=$(date +%s) |
|||
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" |
|||
break |
|||
fi |
|||
sleep 1 |
|||
done |
|||
return $WAITFORIT_result |
|||
} |
|||
|
|||
wait_for_wrapper() |
|||
{ |
|||
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 |
|||
if [[ $WAITFORIT_QUIET -eq 1 ]]; then |
|||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & |
|||
else |
|||
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & |
|||
fi |
|||
WAITFORIT_PID=$! |
|||
trap "kill -INT -$WAITFORIT_PID" INT |
|||
wait $WAITFORIT_PID |
|||
WAITFORIT_RESULT=$? |
|||
if [[ $WAITFORIT_RESULT -ne 0 ]]; then |
|||
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" |
|||
fi |
|||
return $WAITFORIT_RESULT |
|||
} |
|||
|
|||
# process arguments |
|||
while [[ $# -gt 0 ]] |
|||
do |
|||
case "$1" in |
|||
*:* ) |
|||
WAITFORIT_hostport=(${1//:/ }) |
|||
WAITFORIT_HOST=${WAITFORIT_hostport[0]} |
|||
WAITFORIT_PORT=${WAITFORIT_hostport[1]} |
|||
shift 1 |
|||
;; |
|||
--child) |
|||
WAITFORIT_CHILD=1 |
|||
shift 1 |
|||
;; |
|||
-q | --quiet) |
|||
WAITFORIT_QUIET=1 |
|||
shift 1 |
|||
;; |
|||
-s | --strict) |
|||
WAITFORIT_STRICT=1 |
|||
shift 1 |
|||
;; |
|||
-h) |
|||
WAITFORIT_HOST="$2" |
|||
if [[ $WAITFORIT_HOST == "" ]]; then break; fi |
|||
shift 2 |
|||
;; |
|||
--host=*) |
|||
WAITFORIT_HOST="${1#*=}" |
|||
shift 1 |
|||
;; |
|||
-p) |
|||
WAITFORIT_PORT="$2" |
|||
if [[ $WAITFORIT_PORT == "" ]]; then break; fi |
|||
shift 2 |
|||
;; |
|||
--port=*) |
|||
WAITFORIT_PORT="${1#*=}" |
|||
shift 1 |
|||
;; |
|||
-t) |
|||
WAITFORIT_TIMEOUT="$2" |
|||
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi |
|||
shift 2 |
|||
;; |
|||
--timeout=*) |
|||
WAITFORIT_TIMEOUT="${1#*=}" |
|||
shift 1 |
|||
;; |
|||
--) |
|||
shift |
|||
WAITFORIT_CLI=("$@") |
|||
break |
|||
;; |
|||
--help) |
|||
usage |
|||
;; |
|||
*) |
|||
echoerr "Unknown argument: $1" |
|||
usage |
|||
;; |
|||
esac |
|||
done |
|||
|
|||
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then |
|||
echoerr "Error: you need to provide a host and port to test." |
|||
usage |
|||
fi |
|||
|
|||
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} |
|||
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} |
|||
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} |
|||
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} |
|||
|
|||
# Check to see if timeout is from busybox? |
|||
WAITFORIT_TIMEOUT_PATH=$(type -p timeout) |
|||
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) |
|||
|
|||
WAITFORIT_BUSYTIMEFLAG="" |
|||
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then |
|||
WAITFORIT_ISBUSY=1 |
|||
# Check if busybox timeout uses -t flag |
|||
# (recent Alpine versions don't support -t anymore) |
|||
if timeout &>/dev/stdout | grep -q -e '-t '; then |
|||
WAITFORIT_BUSYTIMEFLAG="-t" |
|||
fi |
|||
else |
|||
WAITFORIT_ISBUSY=0 |
|||
fi |
|||
|
|||
if [[ $WAITFORIT_CHILD -gt 0 ]]; then |
|||
wait_for |
|||
WAITFORIT_RESULT=$? |
|||
exit $WAITFORIT_RESULT |
|||
else |
|||
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then |
|||
wait_for_wrapper |
|||
WAITFORIT_RESULT=$? |
|||
else |
|||
wait_for |
|||
WAITFORIT_RESULT=$? |
|||
fi |
|||
fi |
|||
|
|||
if [[ $WAITFORIT_CLI != "" ]]; then |
|||
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then |
|||
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" |
|||
exit $WAITFORIT_RESULT |
|||
fi |
|||
exec "${WAITFORIT_CLI[@]}" |
|||
else |
|||
exit $WAITFORIT_RESULT |
|||
fi |
@ -0,0 +1,10 @@ |
|||
<configuration> |
|||
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender"> |
|||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|||
<pattern>%msg%n</pattern> |
|||
</encoder> |
|||
</appender> |
|||
<root level="error"> |
|||
<appender-ref ref="CONSOLE" /> |
|||
</root> |
|||
</configuration> |
@ -0,0 +1,112 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import com.dimafeng.testcontainers.ContainerDef |
|||
import com.dimafeng.testcontainers.PostgreSQLContainer |
|||
import com.dimafeng.testcontainers.munit.TestContainerForAll |
|||
import com.typesafe.config.ConfigFactory |
|||
import monix.bio.IO |
|||
import monix.bio.Task |
|||
import monix.bio.UIO |
|||
import monix.execution.Scheduler |
|||
import org.testcontainers.utility.DockerImageName |
|||
import slick.jdbc.JdbcBackend |
|||
import slick.jdbc.PostgresProfile |
|||
import wow.doge.http4sdemo.MonixBioSuite |
|||
|
|||
trait DatabaseIntegrationTestBase |
|||
extends MonixBioSuite |
|||
with TestContainerForAll { |
|||
def databaseName = "testcontainer-scala" |
|||
def username = "scala" |
|||
def password = "scala" |
|||
|
|||
override val containerDef: ContainerDef = PostgreSQLContainer.Def( |
|||
dockerImageName = DockerImageName.parse("postgres:12-alpine"), |
|||
databaseName = databaseName, |
|||
username = username, |
|||
password = password |
|||
) |
|||
|
|||
lazy val profile = PostgresProfile |
|||
|
|||
def config(url: String) = ConfigFactory.parseString(s"""| |
|||
|testDatabase = { |
|||
| url = "$url" |
|||
| driver = org.postgresql.Driver |
|||
| user = $username |
|||
| password = $password |
|||
| |
|||
| numThreads = 2 |
|||
| |
|||
| queueSize = 10 |
|||
| |
|||
| maxThreads = 2 |
|||
| |
|||
| maxConnections = 2 |
|||
| |
|||
}""".stripMargin) |
|||
|
|||
def withDb[T](url: String)(f: JdbcBackend.DatabaseDef => Task[T]) = Task( |
|||
// JdbcBackend.Database.forURL( |
|||
// url, |
|||
// // user = username, |
|||
// // password = password, |
|||
// // driver = "org.postgresql.Driver", |
|||
// prop = Map( |
|||
// "driver" -> "org.postgresql.Driver", |
|||
// "user" -> username, |
|||
// "password" -> password, |
|||
// "numThreads" -> "16", |
|||
// "maxThreads" -> "36", |
|||
// "queueSize" -> "10", |
|||
// "maxConnections" -> "36" |
|||
// ) |
|||
// ) |
|||
JdbcBackend.Database.forConfig("testDatabase", config(url)) |
|||
).bracket(f)(db => UIO(db.close())) |
|||
|
|||
def createSchema(containers: Containers) = { |
|||
implicit val s = Scheduler.global |
|||
containers match { |
|||
case container: PostgreSQLContainer => |
|||
val config = JdbcDatabaseConfig( |
|||
container.jdbcUrl, |
|||
"org.postgresql.Driver", |
|||
Some(username), |
|||
Some(password), |
|||
"flyway_schema_history", |
|||
List("classpath:db/migration/default") |
|||
) |
|||
// (UIO(println("creating db")) >> dbBracket(container.jdbcUrl)( |
|||
// // _.runL(Tables.schema.create) |
|||
// _ => DBMigrations.migrate[Task](config) |
|||
// )) |
|||
DBMigrations.migrate[Task](config).runSyncUnsafe(munitTimeout) |
|||
case _ => () |
|||
} |
|||
} |
|||
|
|||
// val fixture = ResourceFixture( |
|||
// Resource.make( |
|||
// Task( |
|||
// JdbcBackend.Database.forURL( |
|||
// "jdbc:postgresql://localhost:49162/testcontainer-scala?", |
|||
// user = username, |
|||
// password = password, |
|||
// driver = "org.postgresql.Driver" |
|||
// ) |
|||
// ) |
|||
// )(db => Task(db.close())) |
|||
// ) |
|||
|
|||
def withContainersIO[A](pf: PartialFunction[Containers, Task[A]]): Task[A] = { |
|||
withContainers { containers => |
|||
pf.applyOrElse( |
|||
containers, |
|||
(c: Containers) => |
|||
IO.terminate(new Exception(s"Unknown container: ${c.toString}")) |
|||
) |
|||
} |
|||
} |
|||
|
|||
} |
@ -0,0 +1,121 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import com.dimafeng.testcontainers.PostgreSQLContainer |
|||
import monix.bio.UIO |
|||
import wow.doge.http4sdemo.dto.BookSearchMode |
|||
import wow.doge.http4sdemo.dto.NewAuthor |
|||
import wow.doge.http4sdemo.dto.NewBook |
|||
import wow.doge.http4sdemo.implicits._ |
|||
import wow.doge.http4sdemo.services.LibraryDbio |
|||
import wow.doge.http4sdemo.services.LibraryService |
|||
import wow.doge.http4sdemo.services.LibraryServiceImpl |
|||
|
|||
class LibraryServiceSpec extends DatabaseIntegrationTestBase { |
|||
|
|||
override def afterContainersStart(containers: Containers): Unit = { |
|||
super.afterContainersStart(containers) |
|||
createSchema(containers) |
|||
} |
|||
|
|||
test("insert and retrieve book") { |
|||
withContainersIO { case container: PostgreSQLContainer => |
|||
val io = |
|||
withDb(container.jdbcUrl)(db => |
|||
for { |
|||
_ <- UIO.unit |
|||
service: LibraryService = new LibraryServiceImpl( |
|||
profile, |
|||
new LibraryDbio(profile), |
|||
db |
|||
) |
|||
id <- service.insertAuthor(NewAuthor("author1")) |
|||
book <- service.insertBook(NewBook("blah", "Segehwe", id)) |
|||
_ <- service |
|||
.getBookById(book.bookId) |
|||
.assertEquals(Some(book)) |
|||
} yield () |
|||
) |
|||
io |
|||
} |
|||
} |
|||
|
|||
test("author does not exist error on book insertion") { |
|||
withContainersIO { case container: PostgreSQLContainer => |
|||
val io = |
|||
withDb(container.jdbcUrl)(db => |
|||
for { |
|||
_ <- UIO.unit |
|||
service: LibraryService = new LibraryServiceImpl( |
|||
profile, |
|||
new LibraryDbio(profile), |
|||
db |
|||
) |
|||
_ <- service |
|||
.insertBook(NewBook("blah2", "agege", 23)) |
|||
.attempt |
|||
.assertEquals( |
|||
Left( |
|||
LibraryService |
|||
.EntityDoesNotExist("Author with id=23 does not exist") |
|||
) |
|||
) |
|||
} yield () |
|||
) |
|||
io |
|||
} |
|||
} |
|||
|
|||
test("books with isbn already exists error on book insertion") { |
|||
withContainersIO { case container: PostgreSQLContainer => |
|||
val io = |
|||
withDb(container.jdbcUrl)(db => |
|||
for { |
|||
_ <- UIO.unit |
|||
service: LibraryService = new LibraryServiceImpl( |
|||
profile, |
|||
new LibraryDbio(profile), |
|||
db |
|||
) |
|||
_ <- service.insertBook(NewBook("blah2", "agege", 1)) |
|||
_ <- service |
|||
.insertBook(NewBook("blah3", "agege", 1)) |
|||
.attempt |
|||
.assertEquals( |
|||
Left( |
|||
LibraryService |
|||
.EntityAlreadyExists("Book with isbn=agege already exists") |
|||
) |
|||
) |
|||
} yield () |
|||
) |
|||
io |
|||
} |
|||
} |
|||
|
|||
test("search books by author id") { |
|||
withContainersIO { case container: PostgreSQLContainer => |
|||
val io = |
|||
withDb(container.jdbcUrl)(db => |
|||
for { |
|||
_ <- UIO.unit |
|||
service: LibraryService = new LibraryServiceImpl( |
|||
profile, |
|||
new LibraryDbio(profile), |
|||
db |
|||
) |
|||
id <- service.insertAuthor(NewAuthor("bar")) |
|||
book1 <- service.insertBook(NewBook("blah3", "aeaega", id)) |
|||
book2 <- service.insertBook(NewBook("blah4", "afgegg", id)) |
|||
_ <- service |
|||
.searchBook(BookSearchMode.AuthorName, "bar") |
|||
.toListL |
|||
.toIO |
|||
.attempt |
|||
.assertEquals(Right(List(book1, book2))) |
|||
} yield () |
|||
) |
|||
io |
|||
} |
|||
} |
|||
|
|||
} |
@ -0,0 +1,22 @@ |
|||
[ |
|||
{ |
|||
"name":"java.lang.ClassLoader", |
|||
"methods":[{"name":"getPlatformClassLoader","parameterTypes":[] }] |
|||
}, |
|||
{ |
|||
"name":"java.lang.NoSuchMethodError" |
|||
}, |
|||
{ |
|||
"name":"sun.management.VMManagementImpl", |
|||
"fields":[ |
|||
{"name":"compTimeMonitoringSupport"}, |
|||
{"name":"currentThreadCpuTimeSupport"}, |
|||
{"name":"objectMonitorUsageSupport"}, |
|||
{"name":"otherThreadCpuTimeSupport"}, |
|||
{"name":"remoteDiagnosticCommandsSupport"}, |
|||
{"name":"synchronizerUsageSupport"}, |
|||
{"name":"threadAllocatedMemorySupport"}, |
|||
{"name":"threadContentionMonitoringSupport"} |
|||
] |
|||
} |
|||
] |
@ -0,0 +1,19 @@ |
|||
{ |
|||
"resources":{ |
|||
"includes":[ |
|||
{"pattern":"\\QMETA-INF/services/java.sql.Driver\\E"}, |
|||
{"pattern":"\\Qapplication.conf\\E"}, |
|||
{"pattern":"\\Qdb/migration/default/V1__create_users_table.sql\\E"}, |
|||
{"pattern":"\\Qdb/migration/default/V2__add_user.sql\\E"}, |
|||
{"pattern":"\\Qdb/migration/default/V3__create_cars_table.sql\\E"}, |
|||
{"pattern":"\\Qdb/migration/default/V4__add_car.sql\\E"}, |
|||
{"pattern":"\\Qdb/migration/default/V5__authors_books_table.sql\\E"}, |
|||
{"pattern":"\\Qdb/migration/default/V6__insert_books_and_authors.sql\\E"}, |
|||
{"pattern":"\\Qdb/migration/default\\E"}, |
|||
{"pattern":"\\Qlogback.xml\\E"}, |
|||
{"pattern":"\\Qorg/flywaydb/core/internal/version.txt\\E"}, |
|||
{"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"}, |
|||
{"pattern":"\\Qreference.conf\\E"} |
|||
]}, |
|||
"bundles":[] |
|||
} |
@ -0,0 +1,2 @@ |
|||
[ |
|||
] |
@ -1,37 +1,37 @@ |
|||
|
|||
myapp = { |
|||
database = { |
|||
driver = org.postgresql.Driver |
|||
url = "jdbc:postgresql://localhost:5432/test_db" |
|||
user = "test_user" |
|||
password = "password" |
|||
|
|||
// The number of threads determines how many things you can *run* in parallel |
|||
// the number of connections determines how many things you can *keep in memory* at the same time
|||
// on the database server. |
|||
// numThreads = (core_count (hyperthreading included)) |
|||
numThreads = 20 |
|||
|
|||
// queueSize = ((core_count * 2) + effective_spindle_count) |
|||
// on a MBP 13, this is 2 cores * 2 (hyperthreading not included) + 1 hard disk |
|||
queueSize = 10 |
|||
|
|||
// https://blog.knoldus.com/2016/01/01/best-practices-for-using-slick-on-production/ |
|||
// make larger than numThreads + queueSize |
|||
maxConnections = 20 |
|||
|
|||
connectionTimeout = 5000 |
|||
validationTimeout = 5000 |
|||
|
|||
# connectionPool = disabled |
|||
keepAlive = true |
|||
|
|||
migrations-table = "flyway_schema_history" |
|||
|
|||
driver = org.postgresql.Driver |
|||
dbHost = localhost |
|||
dbHost = ${?HTTP4S_DEMO_DB_HOST} |
|||
dbPort = 5432 |
|||
dbPort = ${?HTTP4S_DEMO_DB_PORT} |
|||
dbName = test_db |
|||
dbName = ${?HTTP4S_DEMO_DB_NAME} |
|||
url = "jdbc:postgresql://"${myapp.database.dbHost}":"${myapp.database.dbPort}"/"${myapp.database.dbName} |
|||
user = "test_user" |
|||
user = ${?HTTP4S_DEMO_DB_USER} |
|||
password = "password" |
|||
password = ${?HTTP4S_DEMO_DB_PASSWORD} |
|||
numThreads = 16 |
|||
queueSize = 1000 |
|||
maxConnections = 16 |
|||
connectionTimeout = 5000 |
|||
validationTimeout = 5000 |
|||
# connectionPool = disabled |
|||
keepAlive = true |
|||
migrations-table = "flyway_schema_history" |
|||
migrations-locations = [ |
|||
# "classpath:example/jdbc" |
|||
"classpath:db/migration/default" |
|||
] |
|||
}, |
|||
testDatabase = { |
|||
driver = org.postgresql.Driver |
|||
user = "scala" |
|||
password = "scala" |
|||
numThreads = 16 |
|||
queueSize = 10 |
|||
maxConnections = 36 |
|||
} |
|||
} |
|||
|
|||
|
@ -1,144 +0,0 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import cats.effect.Sync |
|||
import cats.implicits._ |
|||
import fs2.interop.reactivestreams._ |
|||
import io.circe.Codec |
|||
import io.circe.generic.semiauto._ |
|||
import monix.bio.Task |
|||
import monix.reactive.Observable |
|||
import org.http4s.HttpRoutes |
|||
import org.http4s.dsl.Http4sDsl |
|||
import slick.jdbc.JdbcBackend.DatabaseDef |
|||
import slick.jdbc.JdbcProfile |
|||
import wow.doge.http4sdemo.dto.Book |
|||
import wow.doge.http4sdemo.dto.BookUpdate |
|||
import wow.doge.http4sdemo.dto.NewBook |
|||
import wow.doge.http4sdemo.services.LibraryService |
|||
import wow.doge.http4sdemo.slickcodegen.Tables._ |
|||
object Http4sdemoRoutes {

  /** Serves a random joke at GET /joke. */
  def jokeRoutes[F[_]: Sync](J: Jokes[F]): HttpRoutes[F] = {
    val dsl = Http4sDsl[F]
    import dsl._
    HttpRoutes.of[F] { case GET -> Root / "joke" =>
      for {
        joke <- J.get
        resp <- Ok(joke)
      } yield resp
    }
  }

  /** Greets the caller at GET /hello/{name}.
    *
    * Fix: the previous implementation computed the greeting and the Ok
    * response, then discarded both and unconditionally returned
    * BadRequest("Bad request"). It now returns the 200 Ok response
    * carrying the greeting.
    */
  def helloWorldRoutes[F[_]: Sync](H: HelloWorld[F]): HttpRoutes[F] = {
    val dsl = new Http4sDsl[F] {}
    import dsl._
    HttpRoutes.of[F] { case GET -> Root / "hello" / name =>
      for {
        greeting <- H.hello(HelloWorld.Name(name))
        resp <- Ok(greeting)
      } yield resp
    }
  }

  /** Streams every user as a JSON response at GET /users. */
  def userRoutes(userService: UserService): HttpRoutes[Task] = {
    val dsl = Http4sDsl[Task]
    import dsl._
    import org.http4s.circe.CirceEntityCodec._
    HttpRoutes.of[Task] { case GET -> Root / "users" =>
      // deferAction exposes the Scheduler required by the
      // reactive-streams <-> fs2 interop
      Task.deferAction(implicit s =>
        for {
          _ <- Task.unit
          users = userService.users.toReactivePublisher.toStream[Task]
          res <- Ok(users)
        } yield res
      )
    }
  }

  /** CRUD routes for books under /api. */
  def libraryRoutes(libraryService: LibraryService): HttpRoutes[Task] = {
    val dsl = Http4sDsl[Task]
    import dsl._
    HttpRoutes.of[Task] {
      // stream all books as a JSON array
      case GET -> Root / "api" / "get" / "books" =>
        import org.http4s.circe.streamJsonArrayEncoder
        import io.circe.syntax._
        Task.deferAction(implicit s =>
          for {
            books <- Task.pure(
              libraryService.getBooks.toReactivePublisher
                .toStream[Task]
            )
            res <- Ok(books.map(_.asJson))
          } yield res
        )

      // fetch a single book by its id
      case GET -> Root / "api" / "get" / "book" / IntVar(id) =>
        import org.http4s.circe.jsonEncoder
        import io.circe.syntax._
        for {
          bookJson <- libraryService.getBookById(id).map(_.asJson)
          res <- Ok(bookJson)
        } yield res

      // create a new book from the JSON request body
      case req @ POST -> Root / "api" / "post" / "book" =>
        import org.http4s.circe.CirceEntityCodec._
        for {
          newBook <- req.as[NewBook]
          book <- libraryService.insertBook(newBook)
          res <- Created(book)
        } yield res

      // partially update a book; note the error is logged and
      // swallowed, so this route answers 200 even when the update fails
      case req @ PATCH -> Root / "api" / "update" / "book" / IntVar(id) =>
        import org.http4s.circe.CirceEntityCodec._
        for {
          updateData <- req.as[BookUpdate]
          _ <- libraryService
            .updateBook(id, updateData)
            .void
            .onErrorHandleWith(ex =>
              Task(println(s"Handled -> ${ex.getMessage}"))
            )
          res <- Ok()
        } yield res

      // delete a book by id (unused `req @` binding removed)
      case DELETE -> Root / "api" / "delete" / "book" / IntVar(id) =>
        for {
          _ <- libraryService.deleteBook(id)
          res <- Ok()
        } yield res

      // decode a JSON list of books; insertion is not implemented yet
      case req @ POST -> Root / "api" / "post" / "books" / "read" =>
        import org.http4s.circe.CirceEntityCodec.circeEntityDecoder
        for {
          newBook <- req.as[List[Book]]
          res <- Ok("blah")
        } yield res
    }
  }

}
|||
|
|||
// Minimal user projection (id + email) returned by UserService queries.
case class User(id: String, email: String)
object User {
  // tupled constructor, used by Slick's `mapTo[User]` projection
  val tupled = (this.apply _).tupled
  // implicit val decoder: Decoder[User] = deriveDecoder
  // implicit def entityDecoder[F[_]: Sync]: EntityDecoder[F, User] =
  //   jsonOf
  // implicit val encoder: Encoder[User] = deriveEncoder
  // implicit def entityEncoder[F[_]: Applicative]: EntityEncoder[F, User] =
  //   jsonEncoderOf
  // single circe Codec replaces the separate encoder/decoder pair above
  implicit val codec: Codec[User] = deriveCodec
}
|||
|
|||
// Streams users out of the database by bridging Slick's
// reactive-streams publisher into a Monix Observable.
class UserService(profile: JdbcProfile, db: DatabaseDef) {
  import profile.api._
  // All users, projected to (id, email) and mapped onto the User DTO.
  // The query is only executed when the Observable is subscribed to.
  def users: Observable[User] =
    Observable.fromReactivePublisher(
      db.stream(Users.map(u => (u.id, u.email).mapTo[User]).result)
    )

}
@ -1,50 +0,0 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import cats.implicits._ |
|||
import fs2.Stream |
|||
import monix.bio.Task |
|||
import monix.execution.Scheduler |
|||
import org.http4s.client.blaze.BlazeClientBuilder |
|||
import org.http4s.implicits._ |
|||
import org.http4s.server.blaze.BlazeServerBuilder |
|||
import org.http4s.server.middleware.Logger |
|||
import slick.jdbc.JdbcBackend.DatabaseDef |
|||
import slick.jdbc.JdbcProfile |
|||
import wow.doge.http4sdemo.services.LibraryDbio |
|||
import wow.doge.http4sdemo.services.LibraryService |
|||
|
|||
object Http4sdemoServer {

  /** Wires the HTTP client, services and routes together and serves the
    * app on 0.0.0.0:8081 as a never-terminating stream. The Blaze client
    * and server lifetimes are tied to the returned Stream.
    */
  def stream(
      db: DatabaseDef,
      p: JdbcProfile
  )(implicit s: Scheduler): Stream[Task, Nothing] = {
    for {
      client <- BlazeClientBuilder[Task](s).stream
      helloWorldAlg = HelloWorld.impl
      jokeAlg = Jokes.impl(client)
      ss = new UserService(p, db)
      // Combine Service Routes into an HttpApp.
      // Can also be done via a Router if you
      // want to extract a segments not checked
      // in the underlying routes.

      libraryDbio = new LibraryDbio(p)
      libraryService = new LibraryService(p, libraryDbio, db)
      // route order: first match wins across the <+> chain
      httpApp = (
        Http4sdemoRoutes.helloWorldRoutes[Task](helloWorldAlg) <+>
          Http4sdemoRoutes.jokeRoutes[Task](jokeAlg) <+>
          Http4sdemoRoutes.userRoutes(ss) <+>
          Http4sdemoRoutes.libraryRoutes(libraryService)
      ).orNotFound

      // With Middlewares in place: log request/response headers and bodies
      finalHttpApp = Logger.httpApp(true, true)(httpApp)

      exitCode <- BlazeServerBuilder[Task](s)
        .bindHttp(8081, "0.0.0.0")
        .withHttpApp(finalHttpApp)
        .serve
    } yield exitCode
  }.drain
}
@ -1,47 +0,0 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import cats.Applicative |
|||
import cats.effect.Sync |
|||
import cats.implicits._ |
|||
import io.circe.Decoder |
|||
import io.circe.Encoder |
|||
import io.circe.generic.semiauto._ |
|||
import monix.bio.Task |
|||
import org.http4s.Method._ |
|||
import org.http4s._ |
|||
import org.http4s.circe._ |
|||
import org.http4s.client.Client |
|||
import org.http4s.client.dsl.Http4sClientDsl |
|||
import org.http4s.implicits._ |
|||
|
|||
// Tagless-final algebra for fetching a single joke in effect F.
sealed trait Jokes[F[_]] {
  def get: F[Jokes.Joke]
}
|||
|
|||
object Jokes {
  /** Summons the implicit Jokes instance for F. */
  def apply[F[_]](implicit ev: Jokes[F]): Jokes[F] = ev

  /** JSON payload returned by the joke API. */
  final case class Joke(joke: String)
  object Joke {
    implicit val jokeDecoder: Decoder[Joke] = deriveDecoder[Joke]
    implicit def jokeEntityDecoder[F[_]: Sync]: EntityDecoder[F, Joke] =
      jsonOf
    implicit val jokeEncoder: Encoder[Joke] = deriveEncoder[Joke]
    implicit def jokeEntityEncoder[F[_]: Applicative]: EntityEncoder[F, Joke] =
      jsonEncoderOf
  }

  /** Domain error wrapping any client/decoding failure.
    *
    * Fix: the cause is now chained into RuntimeException so stack traces
    * and getMessage retain the underlying failure instead of being empty.
    */
  final case class JokeError(e: Throwable)
      extends RuntimeException(e.getMessage, e)

  /** Live implementation backed by an http4s Client. */
  def impl(C: Client[Task]): Jokes[Task] = new Jokes[Task] {
    val dsl = new Http4sClientDsl[Task] {}
    import dsl._
    def get: Task[Jokes.Joke] = {
      C.expect[Joke](GET(uri"https://icanhazdadjoke.com/"))
        .adaptError { case t =>
          JokeError(t)
        } // Prevent Client Json Decoding Failure Leaking
    }
  }

}
@ -0,0 +1,43 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import cats.implicits._ |
|||
import fs2.Stream |
|||
import io.odin |
|||
import monix.bio.Task |
|||
import monix.execution.Scheduler |
|||
import org.http4s.client.blaze.BlazeClientBuilder |
|||
import org.http4s.implicits._ |
|||
import org.http4s.server.blaze.BlazeServerBuilder |
|||
import org.http4s.server.middleware.Logger |
|||
import slick.jdbc.JdbcBackend.DatabaseDef |
|||
import slick.jdbc.JdbcProfile |
|||
import wow.doge.http4sdemo.routes.LibraryRoutes |
|||
import wow.doge.http4sdemo.services.LibraryDbio |
|||
import wow.doge.http4sdemo.services.LibraryServiceImpl |
|||
|
|||
final class Server(db: DatabaseDef, p: JdbcProfile, logger: odin.Logger[Task]) {

  /** Builds the library routes and serves them on 0.0.0.0:8081 as a
    * never-terminating stream.
    *
    * Fix: a local `val logger = io.odin.consoleLogger...` previously
    * shadowed the injected constructor parameter, silently discarding
    * the caller's logger; the middleware now logs through the injected
    * logger.
    */
  def stream(implicit s: Scheduler): Stream[Task, Nothing] = {
    val log: String => Task[Unit] = str => logger.debug(str)
    for {
      // NOTE(review): `client` is built but currently unused — kept so
      // behavior/resource lifecycle is unchanged; remove if truly dead
      client <- BlazeClientBuilder[Task](s).stream
      libraryDbio = new LibraryDbio(p)
      libraryService = new LibraryServiceImpl(p, libraryDbio, db)
      httpApp = (
        new LibraryRoutes(libraryService, logger).routes
      ).orNotFound
      // log request/response headers and bodies through `log`
      finalHttpApp = Logger.httpApp(
        true,
        true,
        logAction = log.pure[Option]
      )(httpApp)
      exitCode <- BlazeServerBuilder[Task](s)
        .bindHttp(8081, "0.0.0.0")
        .withHttpApp(finalHttpApp)
        .serve
    } yield exitCode
  }.drain
}
@ -0,0 +1,114 @@ |
|||
package wow.doge.http4sdemo.routes |
|||
|
|||
import fs2.interop.reactivestreams._ |
|||
import io.circe.Codec |
|||
import io.circe.generic.semiauto._ |
|||
import io.odin.Logger |
|||
import monix.bio.IO |
|||
import monix.bio.Task |
|||
import org.http4s.HttpRoutes |
|||
import org.http4s.dsl.Http4sDsl |
|||
import wow.doge.http4sdemo.dto.Book |
|||
import wow.doge.http4sdemo.dto.BookSearchMode |
|||
import wow.doge.http4sdemo.dto.BookUpdate |
|||
import wow.doge.http4sdemo.dto.NewBook |
|||
import wow.doge.http4sdemo.implicits._ |
|||
import wow.doge.http4sdemo.services.LibraryService |
|||
|
|||
class LibraryRoutes(libraryService: LibraryService, logger: Logger[Task]) {

  /** HTTP routes for the book library API under /api/books.
    *
    * NOTE: case order matters — the search route with query parameters
    * must be matched before the bare GET /api/books listing route.
    */
  val routes: HttpRoutes[Task] = {
    val dsl = Http4sDsl[Task]
    import dsl._
    // extracts the required "value" query parameter for searches
    object Value extends QueryParamDecoderMatcher[String]("value")
    HttpRoutes.of[Task] {

      // GET /api/books?mode=...&value=... — stream matching books as a
      // JSON array
      case GET -> Root / "api" / "books" :?
          BookSearchMode.Matcher(mode) +& Value(value) =>
        import org.http4s.circe.streamJsonArrayEncoder
        import io.circe.syntax._
        // deferAction exposes the Scheduler required by the
        // reactive-streams <-> fs2 interop
        IO.deferAction(implicit s =>
          for {
            books <- IO.pure(
              libraryService
                .searchBook(mode, value)
                .toReactivePublisher
                .toStream[Task]
            )
            res <- Ok(books.map(_.asJson))
          } yield res
        )

      // GET /api/books — stream every book as a JSON array
      case GET -> Root / "api" / "books" =>
        import org.http4s.circe.streamJsonArrayEncoder
        import io.circe.syntax._
        Task.deferAction(implicit s =>
          for {
            books <- IO.pure(
              libraryService.getBooks.toReactivePublisher
                .toStream[Task]
            )
            res <- Ok(books.map(_.asJson))
          } yield res
        )

      // GET /api/books/{id} — fetch one book; the service's result is
      // serialized as-is
      case GET -> Root / "api" / "books" / IntVar(id) =>
        import org.http4s.circe.CirceEntityCodec._
        for {
          bookJson <- libraryService.getBookById(id)
          res <- Ok(bookJson)
        } yield res

      // PUT /api/books — create a book; domain errors are logged and
      // converted to an HTTP response via toResponse
      case req @ PUT -> Root / "api" / "books" =>
        import org.http4s.circe.CirceEntityCodec._
        for {
          newBook <- req.as[NewBook]
          res <- libraryService
            .insertBook(newBook)
            .tapError(err => logger.errorU(err.toString))
            .flatMap(book => Created(book).hideErrors)
            .onErrorHandleWith(_.toResponse)
        } yield res

      // PATCH /api/books/{id} — partial update; 204 on success, domain
      // errors logged and mapped to an HTTP response
      case req @ PATCH -> Root / "api" / "books" / IntVar(id) =>
        import org.http4s.circe.CirceEntityCodec._
        for {
          updateData <- req.as[BookUpdate]
          res <- libraryService
            .updateBook(id, updateData)
            .flatMap(_ => NoContent().hideErrors)
            .tapError(err => logger.errorU(err.toString))
            .onErrorHandleWith(_.toResponse)
        } yield res

      // DELETE /api/books/{id}
      // NOTE(review): unlike PUT/PATCH, no error handling/logging here —
      // confirm deleteBook cannot fail with a domain error
      case req @ DELETE -> Root / "api" / "books" / IntVar(id) =>
        for {
          _ <- libraryService.deleteBook(id)
          res <- Ok()
        } yield res

      //TODO: use convenience method for decoding json stream
      // POST /api/books — decodes a JSON list of books; insertion is not
      // implemented yet, responds with a placeholder body
      case req @ POST -> Root / "api" / "books" =>
        import org.http4s.circe.CirceEntityCodec.circeEntityDecoder
        for {
          newBooks <- req.as[List[Book]]
          // obs = Observable.fromIterable(newBooks)
          // book <- libraryService.insertBook(newBook)
          res <- Ok("blah")
        } yield res
    }
  }

}
|||
|
|||
// Minimal user projection (id + email) used by user queries.
final case class User(id: String, email: String)
object User {
  // tupled constructor, used by Slick's `mapTo[User]` projection
  val tupled = (this.apply _).tupled
  // implicit val decoder: Decoder[User] = deriveDecoder
  // implicit def entityDecoder[F[_]: Sync]: EntityDecoder[F, User] =
  //   jsonOf
  // implicit val encoder: Encoder[User] = deriveEncoder
  // implicit def entityEncoder[F[_]: Applicative]: EntityEncoder[F, User] =
  //   jsonEncoderOf
  // single circe Codec replaces the separate encoder/decoder pair above
  implicit val codec: Codec[User] = deriveCodec
}
@ -0,0 +1,10 @@ |
|||
<configuration>
  <!-- Console appender printing only the raw log message -->
  <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
      <pattern>%msg%n</pattern>
    </encoder>
  </appender>
  <!-- level OFF silences all logback output during tests;
       raise to DEBUG/INFO when diagnosing failures -->
  <root level="OFF">
    <appender-ref ref="CONSOLE" />
  </root>
</configuration>
@ -1,25 +0,0 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import cats.effect.IO |
|||
import org.http4s._ |
|||
import org.http4s.implicits._ |
|||
import munit.CatsEffectSuite |
|||
// NOTE(review): every test here is commented out — they exercised the
// removed HelloWorld/Http4sdemoRoutes wiring. Port them to the new
// routes or delete this spec.
class HelloWorldSpec extends CatsEffectSuite {

  // test("HelloWorld returns status code 200") {
  //   assertIO(retHelloWorld.map(_.status), Status.Ok)
  // }

  // test("HelloWorld returns hello world message") {
  //   assertIO(
  //     retHelloWorld.flatMap(_.as[String]),
  //     "{\"message\":\"Hello, world\"}"
  //   )
  // }

  // private[this] val retHelloWorld: IO[Response[IO]] = {
  //   val getHW = Request[IO](Method.GET, uri"/hello/world")
  //   val helloWorld = HelloWorld.impl[IO]
  //   Http4sdemoRoutes.helloWorldRoutes(helloWorld).orNotFound(getHW)
  // }
}
@ -0,0 +1,156 @@ |
|||
package wow.doge.http4sdemo |
|||
|
|||
import cats.syntax.all._ |
|||
import monix.bio.IO |
|||
import monix.bio.Task |
|||
import monix.bio.UIO |
|||
import monix.reactive.Observable |
|||
import org.http4s.Method |
|||
import org.http4s.Request |
|||
import org.http4s.Status |
|||
import org.http4s.Uri |
|||
import org.http4s.implicits._ |
|||
import wow.doge.http4sdemo.MonixBioSuite |
|||
import wow.doge.http4sdemo.dto.Book |
|||
import wow.doge.http4sdemo.dto.BookSearchMode |
|||
import wow.doge.http4sdemo.dto.BookUpdate |
|||
import wow.doge.http4sdemo.routes.LibraryRoutes |
|||
import wow.doge.http4sdemo.services.LibraryService |
|||
import wow.doge.http4sdemo.services.NoopLibraryService |
|||
|
|||
// Unit tests for LibraryRoutes using a stubbed NoopLibraryService —
// no database or container involved; each test overrides only the
// service methods the route under test calls.
class LibraryControllerSpec extends MonixBioSuite {

  // empty base URI; request paths are built up with the `/` operator
  val Root = Uri(path = "")

  // GET /api/books returns the stubbed books as a JSON list
  test("get books success") {
    import org.http4s.circe.CirceEntityCodec._
    val book = Book(1, "book1", "adsgq342dsdc", 1, date)
    val service = new NoopLibraryService {

      override def getBooks: Observable[Book] =
        Observable.fromIterable(book :: Nil)

      override def getBookById(id: Int): Task[Option[Book]] =
        Task.some(book)

    }
    for {
      _ <- UIO.unit
      routes = new LibraryRoutes(service, noopLogger).routes
      // .value unwraps OptionT; None would mean the route didn't match
      res <- routes
        .run(Request[Task](Method.GET, uri"/api/books"))
        .value
        .hideErrors
      body <- res.traverse(_.as[List[Book]])
      _ <- UIO(assertEquals(body, Some(List(book))))
      // _ <- logger2.debug(body.toString).hideErrors
    } yield ()
  }

  // PATCH on a missing book maps EntityDoesNotExist to 404 with the
  // domain error serialized in the response body
  test("update book error") {
    import org.http4s.circe.CirceEntityCodec._
    val service = new NoopLibraryService {
      override def updateBook(id: Int, updateData: BookUpdate) =
        IO.raiseError(
          LibraryService.EntityDoesNotExist(s"Book with id=$id does not exist")
        )
    }

    for {
      _ <- UIO.unit
      reqBody = BookUpdate(Some("blah"), None)
      routes = new LibraryRoutes(service, noopLogger).routes
      res <- routes
        .run(
          Request[Task](Method.PATCH, Root / "api" / "books" / "1")
            .withEntity(reqBody)
        )
        .value
        .hideErrors
      _ <- UIO(assertEquals(res.map(_.status), Some(Status.NotFound)))
      body <- res.traverse(_.as[LibraryService.Error])
      _ <- UIO(
        assertEquals(
          body,
          Some(
            LibraryService.EntityDoesNotExist("Book with id=1 does not exist")
          )
        )
      )
      // _ <- logger.debug(res.toString).hideErrors
      // _ <- logger.debug(body.toString).hideErrors
    } yield ()
  }

  // GET /api/books?mode=AuthorName&value=... returns the stubbed books
  test("get books by author name") {
    import org.http4s.circe.CirceEntityCodec._
    val value = "blah"
    val books =
      List(Book(1, "book1", value, 1, date), Book(2, "book1", value, 1, date))
    // stub returns the same list for either mode; the route under test
    // only selects AuthorName
    val service = new NoopLibraryService {
      override def searchBook(mode: BookSearchMode, value: String) =
        mode match {
          case BookSearchMode.BookTitle =>
            Observable.fromIterable(books)
          case BookSearchMode.AuthorName =>
            Observable.fromIterable(books)
        }
    }
    for {
      _ <- UIO.unit
      // logger2 = logger.withConstContext(
      //   Map("Test" -> "get books by author name")
      // )
      routes = new LibraryRoutes(service, noopLogger).routes
      request = Request[Task](
        Method.GET,
        Root / "api" / "books"
          withQueryParams Map(
            "mode" -> BookSearchMode.AuthorName.entryName,
            "value" -> "blah"
          )
      )
      // _ <- logger2.info(s"Request -> $request")
      res <- routes.run(request).value.hideErrors
      body <- res.traverse(_.as[List[Book]])
      _ <- UIO.pure(body).assertEquals(Some(books))
      // _ <- logger2.debug(s"Response body -> $body").hideErrors
    } yield ()
  }

  // GET /api/books?mode=BookTitle&value=... returns the stubbed books
  test("get books by book title") {
    import org.http4s.circe.CirceEntityCodec._
    val value = "blah"
    val books =
      List(Book(1, "book1", value, 1, date), Book(2, "book1", value, 1, date))
    // stub returns the same list for either mode; the route under test
    // only selects BookTitle
    val service = new NoopLibraryService {
      override def searchBook(mode: BookSearchMode, value: String) =
        mode match {
          case BookSearchMode.BookTitle =>
            Observable.fromIterable(books)
          case BookSearchMode.AuthorName =>
            Observable.fromIterable(books)
        }
    }
    for {
      _ <- UIO.unit
      // logger2 = logger.withConstContext(
      //   Map("Test" -> "get books by book title")
      // )
      routes = new LibraryRoutes(service, noopLogger).routes
      request = Request[Task](
        Method.GET,
        Root / "api" / "books"
          withQueryParams Map(
            "mode" -> BookSearchMode.BookTitle.entryName,
            "value" -> "blah"
          )
      )
      // _ <- logger2.info(s"Request -> $request")
      res <- routes.run(request).value.hideErrors
      body <- res.traverse(_.as[List[Book]])
      _ <- UIO.pure(body).assertEquals(Some(books))
      // _ <- logger2.debug(s"Response body -> $body").hideErrors
    } yield ()
  }
}
Write
Preview
Loading…
Cancel
Save
Reference in new issue