temp
This commit is contained in:
parent c702bb0773
commit 479b571201
29  .dockerignore  Normal file
@ -0,0 +1,29 @@
*.class
*.log

# sbt specific
.cache/
.history/
.lib/
dist/*
target/
lib_managed/
src_managed/
project/boot/
project/plugins/project/
metals.sbt
.metals
.bloop
.ammonite
.bsp

# Scala-IDE specific
.scala_dependencies
.worksheet

.idea/
.vscode
assets/
.attach_pid*
hs_err_pid*
*.db
18  .github/workflows/build.yaml  vendored  Normal file
@ -0,0 +1,18 @@
name: Build
on: push

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      # - name: Coursier cache
      #   uses: coursier/cache-action@v6
      - name: Setup
        uses: olafurpg/setup-scala@v10
        with:
          java-version: adopt@1.11
      # - run: sbt compile
      # - run: sbt test
      - run: ./build.sh
1  .gitignore  vendored
@ -27,3 +27,4 @@ assets/
.attach_pid*
hs_err_pid*
*.db
/app/
41  build.sbt
@ -6,6 +6,8 @@ val MunitCatsEffectVersion = "0.13.0"
val FlywayVersion = "7.5.3"
scalaVersion in ThisBuild := "2.13.4"

resolvers += "jitpack" at "https://jitpack.io"

import com.github.tototoshi.sbt.slick.CodegenPlugin.autoImport.{
  slickCodegenDatabasePassword,
  slickCodegenDatabaseUrl,
@ -15,12 +17,15 @@ import com.github.tototoshi.sbt.slick.CodegenPlugin.autoImport.{
import _root_.slick.codegen.SourceCodeGenerator
import _root_.slick.{model => m}

lazy val databaseUrl = sys.env.getOrElse(
  "DB_DEFAULT_URL",
  "jdbc:postgresql://localhost:5432/test_db"
)
lazy val databaseUser = sys.env.getOrElse("DB_DEFAULT_USER", "test_user")
lazy val databasePassword = sys.env.getOrElse("DB_DEFAULT_PASSWORD", "password")
lazy val codegenDbHost = sys.env.getOrElse("CODEGEN_DB_HOST", "localhost")
lazy val codegenDbPort = sys.env.getOrElse("CODEGEN_DB_PORT", "5432")
lazy val codegenDbName = sys.env.getOrElse("CODEGEN_DB_NAME", "test_db")

lazy val databaseUrl =
  s"jdbc:postgresql://$codegenDbHost:$codegenDbPort/$codegenDbName"

lazy val databaseUser = sys.env.getOrElse("CODEGEN_DB_USER", "test_user")
lazy val databasePassword = sys.env.getOrElse("CODEGEN_DB_PASSWORD", "password")

lazy val flyway = (project in file("modules/flyway"))
  .enablePlugins(FlywayPlugin)
@ -34,11 +39,17 @@ lazy val flyway = (project in file("modules/flyway"))
  )

lazy val root = (project in file("."))
  .enablePlugins(CodegenPlugin)
  .enablePlugins(CodegenPlugin, DockerPlugin, JavaAppPackaging)
  .settings(
    organization := "wow.doge",
    name := "http4s-demo",
    version := "0.0.1-SNAPSHOT",
    version in Docker := "0.0.1",
    dockerExposedPorts ++= Seq(9000, 9001),
    dockerBaseImage := "openjdk:11-slim",
    dockerUsername := Some("rohansircar"),
    // dockerVe
    // dockerRepository := ""
    scalacOptions ++= Seq(
      "-encoding",
      "UTF-8",
@ -70,12 +81,13 @@ lazy val root = (project in file("."))
      "org.typelevel" %% "munit-cats-effect-2" % MunitCatsEffectVersion % Test,
      "ch.qos.logback" % "logback-classic" % LogbackVersion,
      "org.scalameta" %% "svm-subs" % "20.2.0",
      "co.fs2" %% "fs2-reactive-streams" % "2.5.0"
      "co.fs2" %% "fs2-reactive-streams" % "2.5.0",
    ),
    //format: on
    libraryDependencies ++= Seq(
      "io.monix" %% "monix" % "3.3.0",
      "io.monix" %% "monix-bio" % "1.1.0",
      // "io.monix" %% "monix-bio" % "1.1.0",
      "com.github.monix" % "monix-bio" % "0a2ad29275",
      "io.circe" %% "circe-core" % "0.13.0",
      "io.circe" %% "circe-generic" % "0.13.0",
      "com.softwaremill.sttp.client" %% "core" % "2.2.9",
@ -103,7 +115,14 @@ lazy val root = (project in file("."))
      "com.github.pureconfig" %% "pureconfig" % "0.14.0",
      "io.scalaland" %% "chimney" % "0.6.0",
      "com.rms.miu" %% "slick-cats" % "0.10.4",
      "com.kubukoz" %% "slick-effect" % "0.3.0"
      "com.kubukoz" %% "slick-effect" % "0.3.0",
      "io.circe" %% "circe-fs2" % "0.13.0",
      // "org.scalameta" %% "munit" % "0.7.23" % Test,
      "de.lolhens" %% "munit-tagless-final" % "0.0.1" % Test,
      "org.scalameta" %% "munit-scalacheck" % "0.7.23" % Test,
      "org.scalacheck" %% "scalacheck" % "1.15.3" % Test,
      "com.dimafeng" %% "testcontainers-scala-munit" % "0.39.3" % Test,
      "com.dimafeng" %% "testcontainers-scala-postgresql" % "0.39.3" % Test
    ),
    addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.10.3"),
    addCompilerPlugin("com.olegpy" %% "better-monadic-for" % "0.3.1"),
@ -162,7 +181,7 @@ lazy val root = (project in file("."))
      override def Column = new Column(_) {
        override def rawType = model.tpe match {
          case "java.sql.Timestamp" =>
            "java.time.Instant" // kill j.s.Timestamp
            "java.time.LocalDateTime" // kill j.s.Timestamp
          case _ =>
            super.rawType
        }
23  build.sh  Executable file
@ -0,0 +1,23 @@
# export POSTGRES_DB=codegen_db
export CODEGEN_DB_HOST=localhost
export CODEGEN_DB_NAME=codegen_db
export CODEGEN_DB_USER=codegen_user
export CODEGEN_DB_PASSWORD=password
export CODEGEN_DB_PORT=5435

cid=$(docker run \
  -e POSTGRES_DB=$CODEGEN_DB_NAME \
  -e POSTGRES_USER=$CODEGEN_DB_USER \
  -e POSTGRES_PASSWORD=$CODEGEN_DB_PASSWORD \
  -p $CODEGEN_DB_PORT:5432 \
  -d postgres:12)

echo "Container id is $cid"
sleep 5s
# ./wait-for-it.sh localhost:5434 -s -t 300 -- echo "db started"
sbtn flyway/flywayMigrate
sbtn docker:publishLocal
sbtn shutdown

docker stop $cid
docker rm $cid
4  captain-definition  Normal file
@ -0,0 +1,4 @@
{
  "schemaVersion": 2,
  "image": "rohansircar/http4s-demo:0.0.1"
}
BIN  lib/monix-bio_2.13.jar  Normal file
Binary file not shown.
@ -1,6 +0,0 @@
create table "users" (
  "id" VARCHAR(255) PRIMARY KEY NOT NULL,
  "email" VARCHAR(1024) NOT NULL,
  created_at TIMESTAMP NOT NULL,
  updated_at TIMESTAMP NULL
);
@ -0,0 +1,36 @@
create table authors (
  author_id SERIAL PRIMARY KEY,
  author_name VARCHAR(30) NOT NULL
);

CREATE TABLE books (
  book_id SERIAL PRIMARY KEY,
  isbn VARCHAR(50) UNIQUE NOT NULL,
  book_title VARCHAR(30) NOT NULL,
  author_id INTEGER REFERENCES authors(author_id) NOT NULL,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
);

create table books_store (
  books_store_id SERIAL PRIMARY KEY,
  book_id INTEGER REFERENCES books(book_id) NOT NULL,
  quantity INTEGER NOT NULL
);

create table book_expiry (
  book_expiry_id SERIAL PRIMARY KEY,
  book_id INTEGER REFERENCES books(book_id) NOT NULL,
  discontinued BOOLEAN NOT NULL
);

create table users (
  user_id SERIAL PRIMARY KEY NOT NULL,
  user_name VARCHAR(30) NOT NULL
);

create table checkouts (
  checkout_id SERIAL PRIMARY KEY,
  book_id INTEGER REFERENCES books(book_id) NOT NULL,
  taken_by INTEGER REFERENCES users(user_id) NOT NULL,
  return_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@ -1,6 +0,0 @@
INSERT INTO "users" VALUES (
  'd074bce8-a8ca-49ec-9225-a50ffe83dc2f',
  'myuser@example.com',
  (TIMESTAMP '2013-03-26T17:50:06Z'),
  (TIMESTAMP '2013-03-26T17:50:06Z')
);
@ -0,0 +1,69 @@
insert into
  authors (author_name)
values
  ('Author1');

insert into
  authors (author_name)
values
  ('Author2');

insert into
  authors (author_name)
values
  ('Author3');

insert into
  books (isbn, book_title, author_id)
values
  ('aebwegbwe', 'book1', 3);

insert into
  books (isbn, book_title, author_id)
values
  ('abeqegbqeg', 'book2', 2);

insert into
  books (isbn, book_title, author_id)
values
  ('aebhqeqegq', 'book3', 1);

insert into
  books_store (book_id, quantity)
values
  (1, 5);

insert into
  books_store (book_id, quantity)
values
  (2, 3);

insert into
  books_store (book_id, quantity)
values
  (3, 8);

insert into
  book_expiry (book_id, discontinued)
values
  (1, false);

insert into
  book_expiry (book_id, discontinued)
values
  (2, false);

insert into
  book_expiry (book_id, discontinued)
values
  (3, false);

insert into
  users (user_name)
values
  ('user1');

insert into
  users (user_name)
values
  ('user2');
@ -1,6 +0,0 @@
create table "cars" (
  "id" VARCHAR(255) PRIMARY KEY NOT NULL,
  "model" VARCHAR(1024) NOT NULL,
  created_at TIMESTAMP NOT NULL,
  updated_at TIMESTAMP NULL
);
@ -1,6 +0,0 @@
INSERT INTO "cars" VALUES (
  'd074bce8-a8ca-49ec-9225-a50ffe83dc2f',
  'gxxer',
  (TIMESTAMP '2013-03-26T17:50:06Z'),
  (TIMESTAMP '2013-03-26T17:50:06Z')
);
@ -1,12 +0,0 @@
create table authors (
  id SERIAL PRIMARY KEY,
  name VARCHAR(15) NOT NULL
);

create table books (
  id SERIAL PRIMARY KEY,
  title VARCHAR(50) NOT NULL,
  author_id INTEGER NOT NULL,
  FOREIGN KEY(author_id) REFERENCES authors(id),
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
);
@ -1,14 +0,0 @@
-- create table authors (
--   id INTEGER PRIMARY KEY NOT NULL,
--   name VARCHAR(15)
-- );

-- create table books (
--   id INTEGER PRIMARY KEY NOT NULL,
--   title VARCHAR(15) NOT NULL,
--   author_id INTEGER NOT NULL,
--   FOREIGN KEY(author_id) REFERENCES authors(id)
-- );

INSERT INTO authors (name) VALUES ('Jane Austen');
INSERT INTO books (title, author_id) VALUES ('Pride and Prejudice', 1);
11  native  Normal file
@ -0,0 +1,11 @@
native-image --trace-class-initialization --static -H:+ReportExceptionStackTraces -H:+AddAllCharsets --allow-incomplete-classpath --no-fallback --initialize-at-build-time --enable-http --enable-https --enable-all-security-services --initialize-at-run-time=org.flywaydb.core.internal.scanner.cloud.s3.AwsS3Scanner \
  --initialize-at-run-time=org.flywaydb.core.internal.scanner.classpath.jboss.JBossVFSv3ClassPathLocationScanner \
  --initialize-at-run-time=org.postgresql.sspi.SSPIClient \
  --initialize-at-build-time=scala.runtime.Statics$VM \
  --initialize-at-run-time=scala.tools.nsc.profile.ExtendedThreadMxBean \
  --verbose -jar "./target/scala-2.13/http4s-demo-assembly-0.0.1-SNAPSHOT.jar" http4s-demoBinaryImage



--initialize-at-run-time=scala.tools.nsc.profile.ExtendedThreadMxBean \
--initialize-at-build-time=scala.tools.nsc.profile.SunThreadMxBean \
@ -5,8 +5,10 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10")
// https://github.com/tototoshi/sbt-slick-codegen
libraryDependencies += "com.h2database" % "h2" % "1.4.196"
libraryDependencies += "org.postgresql" % "postgresql" % "42.2.18"

addSbtPlugin("com.github.tototoshi" % "sbt-slick-codegen" % "1.4.0")
// Database migration
// https://github.com/flyway/flyway-sbt
addSbtPlugin("io.github.davidmweber" % "flyway-sbt" % "7.4.0")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.23")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.8.0")
5  scripts/.env  Normal file
@ -0,0 +1,5 @@
export POSTGRES_DB=codegen_db
export CODEGEN_DB_HOST=localhost
export CODEGEN_DB_NAME=codegen_db
export CODEGEN_DB_USER=codegen_user
export CODEGEN_DB_PASSWORD=password
46  scripts/app.Dockerfile  Normal file
@ -0,0 +1,46 @@
FROM scala/coursier-sbt:0.0.2

ARG DOCKER_TAG

# RUN apt-get update
# RUN apt-get -y install git
# RUN apt-get -y install curl
# RUN sh -c '(echo "#!/usr/bin/env sh" && curl -fLo cs https://git.io/coursier-cli-"$(uname | tr LD ld)") && chmod +x cs'
# RUN ./cs install cs
# ENV PATH=${PATH}:/root/.local/share/coursier/bin
# RUN export PATH="$PATH:/root/.local/share/coursier/bin"
# RUN rm ./cs

# ENV PATH=${PATH}:/root/.local/share/coursier/bin
# RUN export PATH="$PATH:/root/.local/share/coursier/bin"
# RUN mkdir -p /root/.local/share/coursier
# COPY coursier/bin /root/.local/share/coursier/bin
# RUN echo $PATH
# RUN cs install sbt

RUN mkdir -p /usr/src/app/bin
WORKDIR /usr/src/app
COPY ./ /usr/src/app

# RUN cat /etc/hosts

# COPY wait-for-it.sh wait-for-it.sh
# RUN chmod +x wait-for-it.sh
# ENTRYPOINT [ "/bin/bash", "-c" ]
# CMD ["./wait-for-it.sh" , "project_db:5432" , "--strict" , "--timeout=30000" , "--" , "echo 'db has started'"]
# RUN bash ./wait-for-it.sh project_db:5432 --timeout=3000 --strict -- echo "db is up"

# RUN cat /etc/hosts
# CMD [ "sbt" , "flyway/flywayMigrate" ]
# CMD ["sbtn","universal:packageBin"]
# CMD sh sbtn flyway/flywayMigrate; sbtn universal:packageBin
# RUN sbt flyway/flywayMigrate
# RUN sbt docker:stage

CMD sh Docker/app.sh

# CMD ["coursier", "--help"]

# RUN coursier install sbt
# RUN sbt docker:stage
# RUN
7  scripts/app.sh  Executable file
@ -0,0 +1,7 @@
sbtn flyway/flywayMigrate
sbtn universal:packageZipTarball
tar -xf target/universal/http4s-demo-0.0.1-SNAPSHOT.tgz -C bin
# ./http4s-demo-0.0.1-SNAPSHOT/bin/http4s-demo
# sbtn docker:stage
# mv targer/docker/** bin
rm -r target
7  scripts/curl  Normal file
@ -0,0 +1,7 @@
curl -X POST -H "content-type: application/json" http://localhost:8081/api/post/book --data '{"aege":"aaegqE"}'
curl http://localhost:8081/api/get/books
curl http://localhost:8081/api/get/book/1
curl -X POST -H "content-type: application/json" http://localhost:8081/api/post/book --data '{"title":"aaegqE", "authorId": 1}'
curl -X PATCH -H "content-type: application/json" http://localhost:8081/api/update/book/2 --data '{"title":"abwbewe"}'

4  scripts/db.Dockerfile  Normal file
@ -0,0 +1,4 @@
FROM postgres:12
ENV POSTGRES_USER test_user
ENV POSTGRES_PASSWORD password
ENV POSTGRES_DB test_db
6  scripts/db.sh  Normal file
@ -0,0 +1,6 @@
docker run \
  -e POSTGRES_DB=test_db \
  -e POSTGRES_USER=test_user \
  -e POSTGRES_PASSWORD=password \
  -p 5433:5432 \
  postgres:12
41  scripts/docker-compose.yml  Normal file
@ -0,0 +1,41 @@
version: "3.3"
services:


  db:
    container_name: project_db
    image: postgres:12
    # build:
    #   context: ./Docker
    #   dockerfile: db.Dockerfile
    environment:
      POSTGRES_DB: 'codegen_db'
      POSTGRES_USER: 'codegen_user'
      POSTGRES_PASSWORD: 'password'
    # volumes:
    #   - ./var/pgdata:/var/lib/postgresql/data
    ports:
      - "5432:5433"
    # network_mode: host
  backend:
    container_name: project_backend
    build:
      context: .
      dockerfile: app.Dockerfile
    # ports:
    #   - "9000:9001"
    environment:
      POSTGRES_DB: 'codegen_db'
      CODEGEN_DB_HOST: 'project_db'
      CODEGEN_DB_NAME: 'codegen_db'
      CODEGEN_DB_USER: 'codegen_user'
      CODEGEN_DB_PASSWORD: 'password'
    volumes:
      - ./app:/usr/src/app/bin
    # links:
    #   - db
    # # command: ["./wait-for-it.sh", "project_db:5432", "--strict" , "--timeout=30000" , "--" , "echo 'db has started'"]
    # depends_on:
    #   - db
    # #   condition: service_healthy
4  scripts/test.Dockerfile  Normal file
@ -0,0 +1,4 @@
FROM scala/coursier/sbt:v0.0.1
# RUN apt search docker
RUN apt install -y docker.io
RUN docker --help
@ -0,0 +1,22 @@
[
  {
    "name":"java.lang.ClassLoader",
    "methods":[{"name":"getPlatformClassLoader","parameterTypes":[] }]
  },
  {
    "name":"java.lang.NoSuchMethodError"
  },
  {
    "name":"sun.management.VMManagementImpl",
    "fields":[
      {"name":"compTimeMonitoringSupport"},
      {"name":"currentThreadCpuTimeSupport"},
      {"name":"objectMonitorUsageSupport"},
      {"name":"otherThreadCpuTimeSupport"},
      {"name":"remoteDiagnosticCommandsSupport"},
      {"name":"synchronizerUsageSupport"},
      {"name":"threadAllocatedMemorySupport"},
      {"name":"threadContentionMonitoringSupport"}
    ]
  }
]
@ -1,5 +1,4 @@
[
  {
[{
  "name": "org.slf4j.impl.StaticLoggerBinder",
  "allDeclaredConstructors": true
},
@ -176,5 +175,303 @@
{
  "name": "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl",
  "allDeclaredConstructors": true
},
{
  "name": "com.zaxxer.hikari.HikariConfig",
  "allDeclaredFields": true
},
{
  "name": "com.zaxxer.hikari.util.ConcurrentBag$IConcurrentBagEntry[]"
},
{
  "name": "java.io.FilePermission"
},
{
  "name": "java.lang.RuntimePermission"
},
{
  "name": "java.lang.String[]"
},
{
  "name": "java.lang.invoke.VarHandle",
  "methods": [{ "name": "releaseFence", "parameterTypes": [] }]
},
{
  "name": "java.lang.reflect.Method[]"
},
{
  "name": "java.net.NetPermission"
},
{
  "name": "java.net.SocketPermission"
},
{
  "name": "java.net.URLPermission",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.String", "java.lang.String"] }]
},
{
  "name": "java.nio.ByteBuffer[]"
},
{
  "name": "java.security.AlgorithmParametersSpi"
},
{
  "name": "java.security.AllPermission"
},
{
  "name": "java.security.KeyStoreSpi"
},
{
  "name": "java.security.MessageDigestSpi"
},
{
  "name": "java.security.SecureRandomParameters"
},
{
  "name": "java.security.SecurityPermission"
},
{
  "name": "java.sql.Statement[]"
},
{
  "name": "java.util.PropertyPermission"
},
{
  "name": "javax.management.ObjectName"
},
{
  "name": "javax.security.auth.x500.X500Principal",
  "fields": [{ "name": "thisX500Name" }],
  "methods": [{ "name": "<init>", "parameterTypes": ["sun.security.x509.X500Name"] }]
},
{
  "name": "monix.execution.internal.atomic.LeftRight128Java8BoxedObjectImpl",
  "fields": [{ "name": "value", "allowUnsafeAccess": true }]
},
{
  "name": "monix.execution.internal.atomic.NormalJava8BoxedInt",
  "fields": [{ "name": "value", "allowUnsafeAccess": true }]
},
{
  "name": "monix.execution.internal.atomic.NormalJava8BoxedObject",
  "fields": [{ "name": "value", "allowUnsafeAccess": true }]
},
{
  "name": "org.flywaydb.core.api.Location[]"
},
{
  "name": "org.flywaydb.core.internal.logging.slf4j.Slf4jLogCreator",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "org.h2.Driver"
},
{
  "name": "org.http4s.blaze.channel.nio1.SelectorLoop[]"
},
{
  "name": "org.http4s.blaze.util.TickWheelExecutor$Bucket[]"
},
{
  "name": "org.postgresql.Driver",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "org.postgresql.PGProperty",
  "fields": [
    { "name": "ALLOW_ENCODING_CHANGES" },
    { "name": "APPLICATION_NAME" },
    { "name": "ASSUME_MIN_SERVER_VERSION" },
    { "name": "AUTOSAVE" },
    { "name": "BINARY_TRANSFER" },
    { "name": "BINARY_TRANSFER_DISABLE" },
    { "name": "BINARY_TRANSFER_ENABLE" },
    { "name": "CANCEL_SIGNAL_TIMEOUT" },
    { "name": "CLEANUP_SAVEPOINTS" },
    { "name": "CONNECT_TIMEOUT" },
    { "name": "CURRENT_SCHEMA" },
    { "name": "DATABASE_METADATA_CACHE_FIELDS" },
    { "name": "DATABASE_METADATA_CACHE_FIELDS_MIB" },
    { "name": "DEFAULT_ROW_FETCH_SIZE" },
    { "name": "DISABLE_COLUMN_SANITISER" },
    { "name": "ESCAPE_SYNTAX_CALL_MODE" },
    { "name": "GSS_ENC_MODE" },
    { "name": "GSS_LIB" },
    { "name": "HIDE_UNPRIVILEGED_OBJECTS" },
    { "name": "HOST_RECHECK_SECONDS" },
    { "name": "JAAS_APPLICATION_NAME" },
    { "name": "JAAS_LOGIN" },
    { "name": "KERBEROS_SERVER_NAME" },
    { "name": "LOAD_BALANCE_HOSTS" },
    { "name": "LOGGER_FILE" },
    { "name": "LOGGER_LEVEL" },
    { "name": "LOGIN_TIMEOUT" },
    { "name": "LOG_SERVER_ERROR_DETAIL" },
    { "name": "LOG_UNCLOSED_CONNECTIONS" },
    { "name": "MAX_RESULT_BUFFER" },
    { "name": "OPTIONS" },
    { "name": "PASSWORD" },
    { "name": "PG_DBNAME" },
    { "name": "PG_HOST" },
    { "name": "PG_PORT" },
    { "name": "PREFER_QUERY_MODE" },
    { "name": "PREPARED_STATEMENT_CACHE_QUERIES" },
    { "name": "PREPARED_STATEMENT_CACHE_SIZE_MIB" },
    { "name": "PREPARE_THRESHOLD" },
    { "name": "PROTOCOL_VERSION" },
    { "name": "READ_ONLY" },
    { "name": "READ_ONLY_MODE" },
    { "name": "RECEIVE_BUFFER_SIZE" },
    { "name": "REPLICATION" },
    { "name": "REWRITE_BATCHED_INSERTS" },
    { "name": "SEND_BUFFER_SIZE" },
    { "name": "SOCKET_FACTORY" },
    { "name": "SOCKET_FACTORY_ARG" },
    { "name": "SOCKET_TIMEOUT" },
    { "name": "SSL" },
    { "name": "SSL_CERT" },
    { "name": "SSL_FACTORY" },
    { "name": "SSL_FACTORY_ARG" },
    { "name": "SSL_HOSTNAME_VERIFIER" },
    { "name": "SSL_KEY" },
    { "name": "SSL_MODE" },
    { "name": "SSL_PASSWORD" },
    { "name": "SSL_PASSWORD_CALLBACK" },
    { "name": "SSL_ROOT_CERT" },
    { "name": "SSPI_SERVICE_CLASS" },
    { "name": "STRING_TYPE" },
    { "name": "TARGET_SERVER_TYPE" },
    { "name": "TCP_KEEP_ALIVE" },
    { "name": "UNKNOWN_LENGTH" },
    { "name": "USER" },
    { "name": "USE_SPNEGO" },
    { "name": "XML_FACTORY_FACTORY" }
  ]
},
{
  "name": "org.slf4j.Logger"
},
{
  "name": "org.slf4j.impl.StaticLoggerBinder"
},
{
  "name": "scala.Symbol",
  "methods": [{ "name": "apply", "parameterTypes": ["java.lang.String"] }]
},
{
  "name": "scala.concurrent.BlockContext$",
  "allDeclaredMethods": true
},
{
  "name": "scala.util.Either[]"
},
{
  "name": "slick.jdbc.hikaricp.HikariCPJdbcDataSource$",
  "fields": [{ "name": "MODULE$" }]
},
{
  "name": "slick.relational.ResultConverter[]"
},
{
  "name": "sun.misc.Unsafe",
  "fields": [{ "name": "theUnsafe" }],
  "methods": [
    { "name": "fullFence", "parameterTypes": [] },
    { "name": "getAndAddInt", "parameterTypes": ["java.lang.Object", "long", "int"] },
    { "name": "getAndAddLong", "parameterTypes": ["java.lang.Object", "long", "long"] },
    { "name": "getAndSetObject", "parameterTypes": ["java.lang.Object", "long", "java.lang.Object"] }
  ]
},
{
  "name": "sun.security.pkcs12.PKCS12KeyStore",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.pkcs12.PKCS12KeyStore$DualFormatPKCS12",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.provider.JavaKeyStore$JKS",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.provider.MD5",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.provider.NativePRNG",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.provider.SHA",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.provider.X509Factory",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.rsa.RSAKeyFactory$Legacy",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.ssl.KeyManagerFactoryImpl$SunX509",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.ssl.SSLContextImpl$DefaultSSLContext",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.ssl.TrustManagerFactoryImpl$PKIXFactory",
  "methods": [{ "name": "<init>", "parameterTypes": [] }]
},
{
  "name": "sun.security.x509.AuthorityInfoAccessExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.AuthorityKeyIdentifierExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.BasicConstraintsExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.CRLDistributionPointsExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.CertificatePoliciesExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.ExtendedKeyUsageExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.IssuerAlternativeNameExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.KeyUsageExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.NetscapeCertTypeExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.PrivateKeyUsageExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.SubjectAlternativeNameExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
},
{
  "name": "sun.security.x509.SubjectKeyIdentifierExtension",
  "methods": [{ "name": "<init>", "parameterTypes": ["java.lang.Boolean", "java.lang.Object"] }]
}
]
@ -0,0 +1,19 @@
{
  "resources":{
    "includes":[
      {"pattern":"\\QMETA-INF/services/java.sql.Driver\\E"},
      {"pattern":"\\Qapplication.conf\\E"},
      {"pattern":"\\Qdb/migration/default/V1__create_users_table.sql\\E"},
      {"pattern":"\\Qdb/migration/default/V2__add_user.sql\\E"},
      {"pattern":"\\Qdb/migration/default/V3__create_cars_table.sql\\E"},
      {"pattern":"\\Qdb/migration/default/V4__add_car.sql\\E"},
      {"pattern":"\\Qdb/migration/default/V5__authors_books_table.sql\\E"},
      {"pattern":"\\Qdb/migration/default/V6__insert_books_and_authors.sql\\E"},
      {"pattern":"\\Qdb/migration/default\\E"},
      {"pattern":"\\Qlogback.xml\\E"},
      {"pattern":"\\Qorg/flywaydb/core/internal/version.txt\\E"},
      {"pattern":"\\Qorg/slf4j/impl/StaticLoggerBinder.class\\E"},
      {"pattern":"\\Qreference.conf\\E"}
    ]},
  "bundles":[]
}
@ -0,0 +1,2 @@
[
]
@ -10,15 +10,15 @@ myapp = {
  // the number of connections determines how many things you can *keep in memory* at the same time
  // on the database server.
  // numThreads = (core_count (hyperthreading included))
  numThreads = 20
  numThreads = 16

  // queueSize = ((core_count * 2) + effective_spindle_count)
  // on a MBP 13, this is 2 cores * 2 (hyperthreading not included) + 1 hard disk
  queueSize = 10
  queueSize = 1000

  // https://blog.knoldus.com/2016/01/01/best-practices-for-using-slick-on-production/
  // make larger than numThreads + queueSize
  maxConnections = 20
  maxConnections = 16

  connectionTimeout = 5000
  validationTimeout = 5000
@ -32,6 +32,18 @@ myapp = {
      # "classpath:example/jdbc"
      "classpath:db/migration/default"
    ]
  },
  testDatabase = {
    driver = org.postgresql.Driver
    user = "scala"
    password = "scala"

    numThreads = 16

    queueSize = 10

    maxConnections = 36

  }
}

103  src/main/scala/wow/doge/http4sdemo/Http4sdemoRoutes.scala  Normal file → Executable file
@ -5,17 +5,17 @@ import cats.implicits._
import fs2.interop.reactivestreams._
import io.circe.Codec
import io.circe.generic.semiauto._
import monix.bio.IO
import monix.bio.Task
import monix.reactive.Observable
import monix.bio.UIO
import org.http4s.HttpRoutes
import org.http4s.dsl.Http4sDsl
import slick.jdbc.JdbcBackend.DatabaseDef
import slick.jdbc.JdbcProfile
import wow.doge.http4sdemo.dto.Book
import wow.doge.http4sdemo.dto.BookSearchMode
import wow.doge.http4sdemo.dto.BookUpdate
import wow.doge.http4sdemo.dto.NewBook
import wow.doge.http4sdemo.services.LibraryService
import wow.doge.http4sdemo.slickcodegen.Tables._

object Http4sdemoRoutes {

  def jokeRoutes[F[_]: Sync](J: Jokes[F]): HttpRoutes[F] = {
@ -41,44 +41,50 @@ object Http4sdemoRoutes {
    }
  }

  def userRoutes(userService: UserService): HttpRoutes[Task] = {
    val dsl = Http4sDsl[Task]
    import dsl._
    import org.http4s.circe.CirceEntityCodec._
    HttpRoutes.of[Task] { case GET -> Root / "users" =>
      Task.deferAction(implicit s =>
        for {
          _ <- Task.unit
          users = userService.users.toReactivePublisher.toStream[Task]
          res <- Ok(users)
        } yield res
      )
    }
  }

  def libraryRoutes(libraryService: LibraryService): HttpRoutes[Task] = {
    val dsl = Http4sDsl[Task]
    import dsl._
    object Value extends QueryParamDecoderMatcher[String]("value")
    HttpRoutes.of[Task] {
      case GET -> Root / "api" / "get" / "books" =>

      case GET -> Root / "api" / "get" / "book" :?
          BookSearchMode.Matcher(mode) +& Value(value) =>
        import org.http4s.circe.streamJsonArrayEncoder
        import io.circe.syntax._
        Task.deferAction(implicit s =>
        IO.deferAction(implicit s =>
          for {
            books <- Task.pure(
              libraryService.getBooks.toReactivePublisher
            books <- IO.pure(
              libraryService
                .searchBook(mode, value)
                .toReactivePublisher
                .toStream[Task]
            )
            res <- Ok(books.map(_.asJson))
          } yield res
        )

      case GET -> Root / "api" / "get" / "book" / IntVar(id) =>
        // import org.http4s.circe.CirceEntityCodec._
        import org.http4s.circe.jsonEncoder
      case GET -> Root / "api" / "get" / "books" =>
        import org.http4s.circe.streamJsonArrayEncoder
        import io.circe.syntax._
        Task.deferAction(implicit s =>
          for {
            bookJson <- libraryService.getBookById(id).map(_.asJson)
            books <- IO.pure(
              libraryService.getBooks.toReactivePublisher
                .toStream[Task]
            )
            res <- Ok(books.map(_.asJson))
            // res <- Ok(streamJsonArrayEncoderOf[Task, Book].(books))
          } yield res
        )

      case GET -> Root / "blah" => Ok().hideErrors

      case GET -> Root / "api" / "get" / "book" / IntVar(id) =>
        import org.http4s.circe.CirceEntityCodec._
        // import org.http4s.circe.jsonEncoder
        // import io.circe.syntax._
        for {
          bookJson <- libraryService.getBookById(id)
          res <- Ok(bookJson)
        } yield res

@ -86,22 +92,36 @@ object Http4sdemoRoutes {
        import org.http4s.circe.CirceEntityCodec._
        for {
          newBook <- req.as[NewBook]
          book <- libraryService.insertBook(newBook)
          res <- Created(book)
          // .onErrorHandleWith {
          //   case ParseF
          // }
          res <- libraryService
            .insertBook(newBook)
            .flatMap(book => Created(book).hideErrors)
            .mapErrorPartialWith {
              case LibraryService.EntityDoesNotExist(message) =>
                BadRequest(message).hideErrors
              case LibraryService.EntityAlreadyExists(message) =>
                BadRequest(message).hideErrors
              // case LibraryService.MyError2(_) => Ok().hideErrors
              // case C3 => Ok().hideErrors
            }
        } yield res

      case req @ PATCH -> Root / "api" / "update" / "book" / IntVar(id) =>
        import org.http4s.circe.CirceEntityCodec._
        for {
          updateData <- req.as[BookUpdate]
          _ <- libraryService
          res <- libraryService
            .updateBook(id, updateData)
            .void
            .onErrorHandleWith(ex =>
              Task(println(s"Handled -> ${ex.getMessage}"))
            )
            // .mapError(e => new Exception(e))
          res <- Ok()
            .flatMap(_ => Ok().hideErrors)
            .tapError(err => UIO(println(s"Handled -> ${err.toString}")))
            .mapErrorPartialWith {
              case e @ LibraryService.EntityDoesNotExist(message) =>
                BadRequest(e: LibraryService.Error).hideErrors
              // case LibraryService.MyError2(_) => Ok().hideErrors
              // case C3 => Ok().hideErrors
            }
        } yield res

      case req @ DELETE -> Root / "api" / "delete" / "book" / IntVar(id) =>
@ -122,7 +142,7 @@ object Http4sdemoRoutes {

}

case class User(id: String, email: String)
final case class User(id: String, email: String)
object User {
  val tupled = (this.apply _).tupled
  // implicit val decoder: Decoder[User] = deriveDecoder
@ -133,12 +153,3 @@ object User {
  // jsonEncoderOf
  implicit val codec: Codec[User] = deriveCodec
}

class UserService(profile: JdbcProfile, db: DatabaseDef) {
  import profile.api._
  def users: Observable[User] =
    Observable.fromReactivePublisher(
      db.stream(Users.map(u => (u.id, u.email).mapTo[User]).result)
    )

}

|
||||
import slick.jdbc.JdbcBackend.DatabaseDef
|
||||
import slick.jdbc.JdbcProfile
|
||||
import wow.doge.http4sdemo.services.LibraryDbio
|
||||
import wow.doge.http4sdemo.services.LibraryService
|
||||
import wow.doge.http4sdemo.services.LibraryServiceImpl
|
||||
|
||||
object Http4sdemoServer {
|
||||
|
||||
@ -23,23 +23,22 @@ object Http4sdemoServer {
|
||||
client <- BlazeClientBuilder[Task](s).stream
|
||||
helloWorldAlg = HelloWorld.impl
|
||||
jokeAlg = Jokes.impl(client)
|
||||
ss = new UserService(p, db)
|
||||
// Combine Service Routes into an HttpApp.
|
||||
// Can also be done via a Router if you
|
||||
// want to extract a segments not checked
|
||||
// in the underlying routes.
|
||||
|
||||
libraryDbio = new LibraryDbio(p)
|
||||
libraryService = new LibraryService(p, libraryDbio, db)
|
||||
libraryService = new LibraryServiceImpl(p, libraryDbio, db)
|
||||
httpApp = (
|
||||
Http4sdemoRoutes.helloWorldRoutes[Task](helloWorldAlg) <+>
|
||||
Http4sdemoRoutes.jokeRoutes[Task](jokeAlg) <+>
|
||||
Http4sdemoRoutes.userRoutes(ss) <+>
|
||||
Http4sdemoRoutes.libraryRoutes(libraryService)
|
||||
).orNotFound
|
||||
|
||||
// With Middlewares in place
|
||||
finalHttpApp = Logger.httpApp(true, true)(httpApp)
|
||||
// _ = {finalHttpApp.run(Request.)}
|
||||
|
||||
exitCode <- BlazeServerBuilder[Task](s)
|
||||
.bindHttp(8081, "0.0.0.0")
|
||||
|
3  src/main/scala/wow/doge/http4sdemo/Main.scala  Normal file → Executable file
@ -6,10 +6,9 @@ import monix.bio.BIOApp
import monix.bio.Task
import monix.bio.UIO
import slick.jdbc.JdbcProfile
import wow.doge.http4sdemo.SlickResource

object Main extends BIOApp {
  val profile: JdbcProfile = _root_.slick.jdbc.H2Profile
  val profile: JdbcProfile = slick.jdbc.PostgresProfile
  def app = for {
    db <- SlickResource("myapp.database")
    _ <- Resource.liftF(for {
@ -1,45 +1,53 @@
package wow.doge.http4sdemo.dto

import java.time.Instant
import java.time.LocalDateTime

import cats.syntax.either._
import enumeratum.EnumEntry
import enumeratum._
import io.circe.Printer
import io.circe.generic.semiauto._
import io.scalaland.chimney.dsl._
import org.http4s.EntityEncoder
import org.http4s.ParseFailure
import org.http4s.QueryParamDecoder
import org.http4s.circe.streamJsonArrayEncoderWithPrinterOf
import org.http4s.dsl.impl.QueryParamDecoderMatcher
import slick.jdbc.JdbcProfile
import wow.doge.http4sdemo.slickcodegen.Tables

final case class Book(
  id: Int,
  title: String,
  bookId: Int,
  bookTitle: String,
  isbn: String,
  authorId: Int,
  createdAt: Instant
  createdAt: LocalDateTime
)
object Book {
  def tupled = (Book.apply _).tupled
  implicit val ec = deriveCodec[Book]
  def tupled = (apply _).tupled
  implicit val codec = deriveCodec[Book]
  // implicit def streamEntityEncoder[F[_]]
  //     : EntityEncoder[F, fs2.Stream[F, Book]] =
  //   streamJsonArrayEncoderWithPrinterOf(Printer.noSpaces)
  def fromBooksRow(row: Tables.BooksRow) = row.transformInto[Book]
  def fromBooksTableFn(implicit profile: JdbcProfile) = {
    import profile.api._
    (b: Tables.Books) => (b.id, b.title, b.authorId, b.createdAt).mapTo[Book]
    (b: Tables.Books) =>
      (b.bookId, b.bookTitle, b.isbn, b.authorId, b.createdAt).mapTo[Book]
  }
  def fromBooksTable(implicit profile: JdbcProfile) =
    Tables.Books.map(fromBooksTableFn)

}

final case class NewBook(title: String, authorId: Int)
final case class NewBook(bookTitle: String, isbn: String, authorId: Int)
object NewBook {
  def tupled = (NewBook.apply _).tupled
  def tupled = (apply _).tupled
  implicit val decoder = deriveDecoder[NewBook]
  def fromBooksTable(implicit profile: JdbcProfile) = {
    import profile.api._

    Tables.Books.map(b => (b.title, b.authorId).mapTo[NewBook])
    Tables.Books.map(b => (b.bookTitle, b.isbn, b.authorId).mapTo[NewBook])
  }
}

@ -47,36 +55,62 @@ final case class BookUpdate(title: Option[String], authorId: Option[Int]) {
  import com.softwaremill.quicklens._
  def update(row: Tables.BooksRow): Tables.BooksRow =
    row
      .modify(_.title)
      .modify(_.bookTitle)
      .setToIfDefined(title)
      .modify(_.authorId)
      .setToIfDefined(authorId)
}
object BookUpdate {
  implicit val decoder = deriveDecoder[BookUpdate]
  implicit val codec = deriveCodec[BookUpdate]
}

final case class Author(id: Int, name: String)
final case class Author(authorId: Int, authorName: String)
object Author {
  def tupled = (Author.apply _).tupled
  def tupled = (apply _).tupled
  implicit val codec = deriveCodec[Author]
  implicit def streamEntityEncoder[F[_]]
      : EntityEncoder[F, fs2.Stream[F, Author]] =
    streamJsonArrayEncoderWithPrinterOf(Printer.noSpaces)
  def fromAuthorsRow(row: Tables.AuthorsRow) = row.transformInto[Author]
  def fromAuthorsTableFn(implicit profile: JdbcProfile) = {
    import profile.api._
    (a: Tables.Authors) => (a.authorId, a.authorName).mapTo[Author]
  }
}

final case class NewAuthor(name: String)
object NewAuthor {
  // def fromAuthorsTable(implicit profile: JdbcProfile) = {
  //   import profile.api._

  //   Tables.Authors.map(a => (a.authorName).mapTo[NewAuthor])
  // }
}

final case class BookWithAuthor(
  id: Int,
  title: String,
  isbn: String,
  author: Author,
  createdAt: Instant
  createdAt: LocalDateTime
)
object BookWithAuthor {
  def tupled = (BookWithAuthor.apply _).tupled
  def tupled = (apply _).tupled
  implicit val codec = deriveCodec[BookWithAuthor]
  implicit def streamEntityEncoder[F[_]]
      : EntityEncoder[F, fs2.Stream[F, BookWithAuthor]] =
    streamJsonArrayEncoderWithPrinterOf(Printer.noSpaces)
}

sealed trait BookSearchMode extends EnumEntry
object BookSearchMode extends Enum[BookSearchMode] {
  val values = findValues
  case object BookTitle extends BookSearchMode
  case object AuthorName extends BookSearchMode

  implicit val yearQueryParamDecoder: QueryParamDecoder[BookSearchMode] =
    QueryParamDecoder[String].emap(s =>
      withNameEither(s).leftMap(e => ParseFailure(e.getMessage, e.getMessage))
    )
  object Matcher extends QueryParamDecoderMatcher[BookSearchMode]("mode")
}
@ -1,5 +1,7 @@
package wow.doge.http4sdemo

import scala.util.Try

import monix.bio.IO
import monix.bio.Task
import monix.reactive.Observable
@ -13,6 +15,11 @@ package object implicits {
    def runL[R](a: DBIOAction[R, NoStream, Nothing]) =
      Task.deferFuture(db.run(a))

    def runTryL[R, A](a: DBIOAction[R, NoStream, Nothing])(implicit
        ev: R <:< Try[A]
    ) =
      Task.deferFuture(db.run(a)).flatMap(r => IO.fromTry(ev(r)))

    def streamO[T](a: DBIOAction[_, Streaming[T], Nothing]) =
      Observable.fromReactivePublisher(db.stream(a))
  }
206  src/main/scala/wow/doge/http4sdemo/services/LibraryService.scala  Normal file → Executable file
@ -1,34 +1,104 @@
package wow.doge.http4sdemo.services

import io.circe.generic.semiauto._
import monix.bio.IO
import monix.bio.Task
import monix.bio.UIO
import monix.reactive.Observable
import slick.jdbc.JdbcBackend
import slick.jdbc.JdbcProfile
import wow.doge.http4sdemo.dto.Author
import wow.doge.http4sdemo.dto.Book
import wow.doge.http4sdemo.dto.BookSearchMode
import wow.doge.http4sdemo.dto.BookSearchMode.AuthorName
import wow.doge.http4sdemo.dto.BookSearchMode.BookTitle
import wow.doge.http4sdemo.dto.BookUpdate
import wow.doge.http4sdemo.dto.NewAuthor
import wow.doge.http4sdemo.dto.NewBook
import wow.doge.http4sdemo.implicits._
import wow.doge.http4sdemo.slickcodegen.Tables

class LibraryService(
object LibraryService {
  sealed trait Error extends Exception {
    def message: String
    override def getMessage(): String = message
  }
  final case class EntityDoesNotExist(message: String) extends Error
  final case class EntityAlreadyExists(message: String) extends Error
  // final case class MyError2(message: String) extends Error
  // case object C3 extends Error { val message: String = "C3" }

  object Error {
    implicit val codec = deriveCodec[Error]
  }
}

trait LibraryService {

  import LibraryService._

  def getBooks: Observable[Book]

  def getBookById(id: Int): Task[Option[Book]]

  def searchBook(mode: BookSearchMode, value: String): Observable[Book]

  def updateBook(id: Int, updateData: BookUpdate): IO[Error, Unit]

  def deleteBook(id: Int): Task[Int]

  def insertBook(newBook: NewBook): IO[Error, Book]

  def insertAuthor(a: NewAuthor): Task[Int]

  def booksForAuthor(authorId: Int): Observable[Book]

}

class LibraryServiceImpl(
    profile: JdbcProfile,
    dbio: LibraryDbio,
    db: JdbcBackend.DatabaseDef
) {
) extends LibraryService {
  import profile.api._

  def getBooks = db.streamO(dbio.getBooks)
  import LibraryService._

  def getBooks = db.streamO(dbio.getBooks.transactionally)

  def getBookById(id: Int) = db.runL(dbio.getBook(id))

  // .map(b =>
  //   (b.title, b.authorId, b.createdAt).mapTo[BookUpdateEntity]
  // )
  def searchBook(mode: BookSearchMode, value: String): Observable[Book] =
    mode match {
      case BookTitle =>
        db.streamO(dbio.getBooksByTitle(value))

  def updateBook(id: Int, updateData: BookUpdate) =
      case AuthorName =>
        Observable
          .fromTask((for {
            _ <- IO.unit
            id <- IO(value.toInt)
            author <- db.runL(dbio.getAuthor(id)).flatMap {
              case None =>
                IO.raiseError(
                  new EntityDoesNotExist(s"Author with id=$id does not exist")
                )
              case Some(value) => IO.pure(value)
            }
            books = db
              .streamO(dbio.getBooksForAuthor(id))
              .map(Book.fromBooksRow)
          } yield books).toTask)
          .flatten

    }

  def insertAuthor(a: NewAuthor): Task[Int] = db.runL(dbio.insertAuthor(a))

  def updateBook(id: Int, updateData: BookUpdate): IO[Error, Unit] =
    for {
      action <- IO.deferAction { implicit s =>
        Task(for {
      action <- UIO.deferAction(implicit s =>
        UIO(for {
          mbRow <- dbio.selectBook(id).result.headOption
          updatedRow <- mbRow match {
            case Some(value) =>
@ -36,35 +106,72 @@ class LibraryService(
              println(s"Value to be updated with -> $updateData")
              DBIO.successful(updateData.update(value))
            case None =>
              DBIO.failed(new Exception(s"Book with id $id does not exist"))
              DBIO.failed(
                EntityDoesNotExist(s"Book with id=$id does not exist")
              )
          }
          updateAction = dbio.selectBook(id).update(updatedRow)
          _ = println(s"SQL = ${updateAction.statements}")
          _ <- updateAction
        } yield ())
      )
      _ <- db
        .runTryL(action.transactionally.asTry)
        .mapErrorPartial { case e: Error =>
          e
        }
      _ <- db.runL(action.transactionally.asTry).flatMap(Task.fromTry)
    } yield ()

  def deleteBook(id: Int) = db.runL(dbio.deleteBook(id))

  def insertBook(newBook: NewBook) =
    Task.deferFutureAction { implicit s =>
      val action = for {
        id <- dbio.insertBookAndGetId(newBook)
        book <- dbio.getBook(id)
      } yield book.get
      db.run(action.transactionally)
  def insertBook(newBook: NewBook): IO[Error, Book] =
    IO.deferAction { implicit s =>
      for {
        action <- UIO(for {
          _ <- dbio
            .selectBookByIsbn(newBook.isbn)
            .map(Book.fromBooksTableFn)
            .result
            .headOption
            .flatMap {
              case None => DBIO.successful(())
              case Some(_) =>
                DBIO.failed(
                  EntityAlreadyExists(
                    s"Book with isbn=${newBook.isbn} already exists"
                  )
                )
            }
          _ <- dbio.getAuthor(newBook.authorId).flatMap {
            case None =>
              DBIO.failed(
                EntityDoesNotExist(
                  s"Author with id=${newBook.authorId} does not exist"
                )
              )
            case Some(_) => DBIO.successful(())
          }
          book <- dbio.insertBookAndGetBook(newBook)
        } yield book)
        book <- db
          .runTryL(action.transactionally.asTry)
          .mapErrorPartial { case e: Error =>
            e
          }
      } yield book
    }

  def booksForAuthor(authorId: Int) =
    db.streamO(dbio.booksForAuthor(authorId)).map(Book.fromBooksRow)
    db.streamO(dbio.getBooksForAuthor(authorId).transactionally)
      .map(Book.fromBooksRow)

}

class LibraryDbio(val profile: JdbcProfile) {
  import profile.api._

  /* */

  def getBooks: StreamingDBIO[Seq[Book], Book] = Query.getBooksInner.result

  def insertBookAndGetId(newBook: NewBook): DBIO[Int] =
@ -73,7 +180,13 @@ class LibraryDbio(val profile: JdbcProfile) {
  def insertBookAndGetBook(newBook: NewBook): DBIO[Book] =
    Query.insertBookGetBook += newBook

  def selectBook(id: Int) = Tables.Books.filter(_.id === id)
  def insertAuthor(newAuthor: NewAuthor): DBIO[Int] =
    Query.insertAuthorGetId += newAuthor

  def selectBook(id: Int) = Tables.Books.filter(_.bookId === id)

  def getAuthor(id: Int) =
    Query.selectAuthor(id).map(Author.fromAuthorsTableFn).result.headOption

  def deleteBook(id: Int) = selectBook(id).delete

@ -82,22 +195,67 @@ class LibraryDbio(val profile: JdbcProfile) {
      .result
      .headOption

  def booksForAuthor(authorId: Int) = Query.booksForAuthorInner(authorId).result
  def selectBookByIsbn(isbn: String) = Tables.Books.filter(_.isbn === isbn)

  def getBooksByTitle(title: String) =
    Tables.Books.filter(_.bookTitle === title).map(Book.fromBooksTableFn).result

  def getBooksForAuthor(authorId: Int) =
    Query.booksForAuthorInner(authorId).result

  private object Query {

    val getBooksInner = Book.fromBooksTable

    val insertBookGetId =
      NewBook.fromBooksTable.returning(Tables.Books.map(_.id))
      NewBook.fromBooksTable.returning(Tables.Books.map(_.bookId))

    val insertBookGetBook = NewBook.fromBooksTable.returning(getBooksInner)

    val insertAuthorGetId =
      Tables.Authors
        .map(a => (a.authorName).mapTo[NewAuthor])
        .returning(Tables.Authors.map(_.authorId))

    // val insertAuthor = NewAuthor.fromAuthorsTable

    def booksForAuthorInner(authorId: Int) = for {
      b <- Tables.Books
      a <- selectAuthor(authorId) if b.authorId === a.id
      a <- Tables.Authors
      if b.authorId === a.authorId && b.authorId === authorId
    } yield b

    def selectAuthor(authorId: Int) = Tables.Authors.filter(_.id === authorId)
    def selectAuthor(authorId: Int) =
      Tables.Authors.filter(_.authorId === authorId)
  }
}

trait NoopLibraryService extends LibraryService {
  def getBooks: Observable[Book] =
    Observable.raiseError(new NotImplementedError)

  def getBookById(id: Int): Task[Option[Book]] =
    IO.terminate(new NotImplementedError)

  def searchBook(
      mode: BookSearchMode,
      value: String
  ): Observable[Book] = Observable.raiseError(new NotImplementedError)

  def updateBook(
      id: Int,
      updateData: BookUpdate
  ): IO[LibraryService.Error, Unit] = IO.terminate(new NotImplementedError)

  def deleteBook(id: Int): Task[Int] = IO.terminate(new NotImplementedError)

  def insertBook(newBook: NewBook): IO[LibraryService.Error, Book] =
    IO.terminate(new NotImplementedError)

  def insertAuthor(a: NewAuthor): Task[Int] =
    IO.terminate(new NotImplementedError)

  def booksForAuthor(authorId: Int): Observable[Book] =
    Observable.raiseError(new NotImplementedError)

}
@ -0,0 +1,111 @@
package wow.doge.http4sdemo

import com.dimafeng.testcontainers.ContainerDef
import com.dimafeng.testcontainers.PostgreSQLContainer
import com.dimafeng.testcontainers.munit.TestContainerForAll
import com.typesafe.config.ConfigFactory
import monix.bio.IO
import monix.bio.Task
import monix.bio.UIO
import monix.execution.Scheduler
import org.testcontainers.utility.DockerImageName
import slick.jdbc.JdbcBackend
import slick.jdbc.PostgresProfile

trait DatabaseIntegrationTestBase
    extends MonixBioSuite
    with TestContainerForAll {
  def databaseName = "testcontainer-scala"
  def username = "scala"
  def password = "scala"

  override val containerDef: ContainerDef = PostgreSQLContainer.Def(
    dockerImageName = DockerImageName.parse("postgres:13.2"),
    databaseName = databaseName,
    username = username,
    password = password
  )

  lazy val profile = PostgresProfile

  def config(url: String) = ConfigFactory.parseString(s"""|
    |testDatabase = {
    |  url = "$url"
    |  driver = org.postgresql.Driver
    |  user = $username
    |  password = $password
    |
    |  numThreads = 2
    |
    |  queueSize = 10
    |
    |  maxThreads = 2
    |
    |  maxConnections = 2
    |
    }""".stripMargin)

  def withDb[T](url: String)(f: JdbcBackend.DatabaseDef => Task[T]) = Task(
    // JdbcBackend.Database.forURL(
    //   url,
    //   // user = username,
    //   // password = password,
    //   // driver = "org.postgresql.Driver",
    //   prop = Map(
    //     "driver" -> "org.postgresql.Driver",
    //     "user" -> username,
    //     "password" -> password,
    //     "numThreads" -> "16",
    //     "maxThreads" -> "36",
    //     "queueSize" -> "10",
    //     "maxConnections" -> "36"
    //   )
    // )
    JdbcBackend.Database.forConfig("testDatabase", config(url))
  ).bracket(f)(db => UIO(db.close()))

  def createSchema(containers: Containers) = {
    implicit val s = Scheduler.global
    containers match {
      case container: PostgreSQLContainer =>
        val config = JdbcDatabaseConfig(
          container.jdbcUrl,
          "org.postgresql.Driver",
          Some(username),
          Some(password),
          "flyway_schema_history",
          List("classpath:db/migration/default")
        )
        // (UIO(println("creating db")) >> dbBracket(container.jdbcUrl)(
        //   // _.runL(Tables.schema.create)
        //   _ => DBMigrations.migrate[Task](config)
        // ))
        DBMigrations.migrate[Task](config).runSyncUnsafe(munitTimeout)
      case _ => ()
    }
  }

  // val fixture = ResourceFixture(
  //   Resource.make(
  //     Task(
  //       JdbcBackend.Database.forURL(
  //         "jdbc:postgresql://localhost:49162/testcontainer-scala?",
  //         user = username,
  //         password = password,
  //         driver = "org.postgresql.Driver"
  //       )
  //     )
  //   )(db => Task(db.close()))
  // )

  def withContainersIO[A](pf: PartialFunction[Containers, Task[A]]): Task[A] = {
    withContainers { containers =>
      pf.applyOrElse(
        containers,
        (c: Containers) =>
          IO.terminate(new Exception(s"Unknown container: ${c.toString}"))
      )
    }
  }

}
@ -1,25 +0,0 @@
package wow.doge.http4sdemo

import cats.effect.IO
import org.http4s._
import org.http4s.implicits._
import munit.CatsEffectSuite
class HelloWorldSpec extends CatsEffectSuite {

  // test("HelloWorld returns status code 200") {
  //   assertIO(retHelloWorld.map(_.status), Status.Ok)
  // }

  // test("HelloWorld returns hello world message") {
  //   assertIO(
  //     retHelloWorld.flatMap(_.as[String]),
  //     "{\"message\":\"Hello, world\"}"
  //   )
  // }

  // private[this] val retHelloWorld: IO[Response[IO]] = {
  //   val getHW = Request[IO](Method.GET, uri"/hello/world")
  //   val helloWorld = HelloWorld.impl[IO]
  //   Http4sdemoRoutes.helloWorldRoutes(helloWorld).orNotFound(getHW)
  // }
}
166
src/test/scala/wow/doge/http4sdemo/LibraryControllerSpec.scala
Normal file
@ -0,0 +1,166 @@
package wow.doge.http4sdemo

import java.time.LocalDateTime

import cats.syntax.all._
import monix.bio.IO
import monix.bio.Task
import monix.bio.UIO
import monix.reactive.Observable
import org.http4s.Method
import org.http4s.Request
import org.http4s.Uri
import org.http4s.implicits._
import wow.doge.http4sdemo.dto.Book
import wow.doge.http4sdemo.dto.BookSearchMode
import wow.doge.http4sdemo.dto.BookUpdate
import wow.doge.http4sdemo.services.LibraryService
import wow.doge.http4sdemo.services.NoopLibraryService

class LibraryControllerSpec extends MonixBioSuite {

  // "libraryControllerSpec"
  // val fixture = loggerFixture()
  // ResourceFixture

  // override def munitFixtures = List(myFixture)
  // override def munitFixtures: Seq[Fixture[_]] = ???

  val date = LocalDateTime.now()

  // val logger = consoleLogger[Task]()

  val Root = Uri(path = "")

  test("get books success") {
    import org.http4s.circe.CirceEntityCodec._
    val book = Book(1, "book1", "adsgq342dsdc", 1, date)
    val service = new NoopLibraryService {

      override def getBooks: Observable[Book] =
        Observable.fromIterable(book :: Nil)

      override def getBookById(id: Int): Task[Option[Book]] =
        Task.some(book)

    }
    for {
      _ <- UIO.unit
      routes = Http4sdemoRoutes.libraryRoutes(service)
      res <- routes
        .run(Request[Task](Method.GET, uri"/api/get/books"))
        .value
        .hideErrors
      body <- res.map(_.as[List[Book]]).sequence
      _ <- UIO(assertEquals(body, Some(List(book))))
      // _ <- logger2.debug(body.toString).hideErrors
    } yield ()
  }

  test("update book error") {
    import org.http4s.circe.CirceEntityCodec._
    val service = new NoopLibraryService {
      override def updateBook(id: Int, updateData: BookUpdate) =
        IO.raiseError(
          LibraryService.EntityDoesNotExist(s"Book with id=$id does not exist")
        )
    }

    for {
      _ <- UIO.unit
      reqBody = BookUpdate(Some("blah"), None)
      routes = Http4sdemoRoutes.libraryRoutes(service)
      res <- routes
        .run(
          Request[Task](Method.PATCH, Root / "api" / "update" / "book" / "1")
            .withEntity(reqBody)
        )
        .value
        .hideErrors
      body <- res.map(_.as[LibraryService.Error]).sequence
      _ <- UIO(
        assertEquals(
          body,
          Some(
            LibraryService.EntityDoesNotExist("Book with id=1 does not exist")
          )
        )
      )
      // _ <- logger.debug(res.toString).hideErrors
      // _ <- logger.debug(body.toString).hideErrors
    } yield ()
  }

  test("get books by author name") {
    import org.http4s.circe.CirceEntityCodec._
    val value = "blah"
    val books =
      List(Book(1, "book1", value, 1, date), Book(2, "book1", value, 1, date))
    val service = new NoopLibraryService {
      override def searchBook(mode: BookSearchMode, value: String) =
        mode match {
          case BookSearchMode.BookTitle =>
            Observable.fromIterable(books)
          case BookSearchMode.AuthorName =>
            Observable.fromIterable(books)
        }
    }
    for {
      _ <- UIO.unit
      // logger2 = logger.withConstContext(
      //   Map("Test" -> "get books by author name")
      // )
      routes = Http4sdemoRoutes.libraryRoutes(service)
      request = Request[Task](
        Method.GET,
        Root / "api" / "get" / "book"
          withQueryParams Map(
            "mode" -> BookSearchMode.AuthorName.entryName,
            "value" -> "blah"
          )
      )
      // _ <- logger2.info(s"Request -> $request")
      res <- routes.run(request).value.hideErrors
      body <- res.map(_.as[List[Book]]).sequence
      _ <- UIO.pure(body).assertEquals(Some(books))
      // _ <- logger2.debug(s"Response body -> $body").hideErrors
    } yield ()
  }

  test("get books by book title") {
    import org.http4s.circe.CirceEntityCodec._
    val value = "blah"
    val books =
      List(Book(1, "book1", value, 1, date), Book(2, "book1", value, 1, date))
    val service = new NoopLibraryService {
      override def searchBook(mode: BookSearchMode, value: String) =
        mode match {
          case BookSearchMode.BookTitle =>
            Observable.fromIterable(books)
          case BookSearchMode.AuthorName =>
            Observable.fromIterable(books)
        }
    }
    for {
      _ <- UIO.unit
      // logger2 = logger.withConstContext(
      //   Map("Test" -> "get books by book title")
      // )
      routes = Http4sdemoRoutes.libraryRoutes(service)
      request = Request[Task](
        Method.GET,
        Root / "api" / "get" / "book"
          withQueryParams Map(
            "mode" -> BookSearchMode.BookTitle.entryName,
            "value" -> "blah"
          )
      )
      // _ <- logger2.info(s"Request -> $request")
      res <- routes.run(request).value.hideErrors
      body <- res.map(_.as[List[Book]]).sequence
      _ <- UIO.pure(body).assertEquals(Some(books))
      // _ <- logger2.debug(s"Response body -> $body").hideErrors
    } yield ()
  }
}
123
src/test/scala/wow/doge/http4sdemo/LibraryServiceSpec.scala
Normal file
@ -0,0 +1,123 @@
package wow.doge.http4sdemo

import cats.syntax.all._
import com.dimafeng.testcontainers.PostgreSQLContainer
import monix.bio.UIO
import wow.doge.http4sdemo.dto.BookSearchMode
import wow.doge.http4sdemo.dto.NewAuthor
import wow.doge.http4sdemo.dto.NewBook
import wow.doge.http4sdemo.implicits._
import wow.doge.http4sdemo.services.LibraryDbio
import wow.doge.http4sdemo.services.LibraryService
import wow.doge.http4sdemo.services.LibraryServiceImpl

class LibraryServiceSpec extends DatabaseIntegrationTestBase {

  override def afterContainersStart(containers: Containers): Unit = {
    super.afterContainersStart(containers)
    createSchema(containers)
  }

  test("insert and retrieve book") {
    withContainersIO { case container: PostgreSQLContainer =>
      val io =
        withDb(container.jdbcUrl)(db =>
          for {
            _ <- UIO.unit
            service: LibraryService = new LibraryServiceImpl(
              profile,
              new LibraryDbio(profile),
              db
            )
            id <- service.insertAuthor(NewAuthor("author1"))
            book <- service.insertBook(NewBook("blah", "Segehwe", id))
            _ <- service
              .getBookById(book.bookId)
              .flatTap(r => UIO(println(r)))
              .assertEquals(Some(book))
          } yield ()
        )
      io
    }
  }

  test("author does not exist error on book insertion") {
    withContainersIO { case container: PostgreSQLContainer =>
      val io =
        withDb(container.jdbcUrl)(db =>
          for {
            _ <- UIO.unit
            service: LibraryService = new LibraryServiceImpl(
              profile,
              new LibraryDbio(profile),
              db
            )
            _ <- service
              .insertBook(NewBook("blah2", "agege", 23))
              .attempt
              .assertEquals(
                Left(
                  LibraryService
                    .EntityDoesNotExist("Author with id=23 does not exist")
                )
              )
          } yield ()
        )
      io
    }
  }

  test("books with isbn already exists error on book insertion") {
    withContainersIO { case container: PostgreSQLContainer =>
      val io =
        withDb(container.jdbcUrl)(db =>
          for {
            _ <- UIO.unit
            service: LibraryService = new LibraryServiceImpl(
              profile,
              new LibraryDbio(profile),
              db
            )
            _ <- service.insertBook(NewBook("blah2", "agege", 1))
            _ <- service
              .insertBook(NewBook("blah3", "agege", 1))
              .attempt
              .assertEquals(
                Left(
                  LibraryService
                    .EntityAlreadyExists("Book with isbn=agege already exists")
                )
              )
          } yield ()
        )
      io
    }
  }

  test("search books by author id") {
    withContainersIO { case container: PostgreSQLContainer =>
      val io =
        withDb(container.jdbcUrl)(db =>
          for {
            _ <- UIO.unit
            service: LibraryService = new LibraryServiceImpl(
              profile,
              new LibraryDbio(profile),
              db
            )
            id <- service.insertAuthor(NewAuthor("bar"))
            book1 <- service.insertBook(NewBook("blah3", "aeaega", id))
            book2 <- service.insertBook(NewBook("blah4", "afgegg", id))
            _ <- service
              .searchBook(BookSearchMode.AuthorName, id.toString)
              .toListL
              .toIO
              .attempt
              .assertEquals(Right(List(book1, book2)))
          } yield ()
        )
      io
    }
  }

}
51
src/test/scala/wow/doge/http4sdemo/LibrarySpec2.scala
Normal file
@ -0,0 +1,51 @@
package wow.doge.http4sdemo

import com.dimafeng.testcontainers.PostgreSQLContainer
import monix.bio.IO
import monix.bio.UIO
import wow.doge.http4sdemo.services.LibraryDbio
import wow.doge.http4sdemo.services.LibraryServiceImpl

class LibrarySpec2 extends DatabaseIntegrationTestBase {

  override def afterContainersStart(containers: Containers): Unit = {
    createSchema(containers)
  }

  test("blah") {
    withContainers {
      case postgresContainer: PostgreSQLContainer =>
        val io =
          withDb(postgresContainer.jdbcUrl)(db =>
            for {
              // _ <- db.runL(Tables.schema.create)
              _ <- UIO.unit
              service = new LibraryServiceImpl(
                profile,
                new LibraryDbio(profile),
                db
              )
              _ <- service
                .getBookById(1)
                .hideErrors
                .flatMap(r => UIO(println(r)))
            } yield ()
          )
        io
      case other =>
        IO.terminate(new Exception(s"Invalid container ${other.toString}"))
    }
  }

  // override val container: PostgreSQLContainer = PostgreSQLContainer()

  // "PostgreSQL container" should "be started" in {
  //   Class.forName(container.driverClassName)
  //   val connection = DriverManager.getConnection(
  //     container.jdbcUrl,
  //     container.username,
  //     container.password
  //   )
  //   // ...
  // }
}
12
src/test/scala/wow/doge/http4sdemo/LoggerFixtureSpec.scala
Normal file
@ -0,0 +1,12 @@
package wow.doge.http4sdemo

// import sourcecode.File

class LoggerFixtureSpec extends MonixBioSuite {
  // "LoggerFixtureSpec"
  val fixture = loggerFixture()

  loggerFixture().test("blah blah") { logger =>
    logger.debug("blah blah blah")
  }
}
35
src/test/scala/wow/doge/http4sdemo/MonixBioSuite.scala
Normal file
@ -0,0 +1,35 @@
package wow.doge.http4sdemo

import cats.syntax.all._
import io.odin.consoleLogger
import io.odin.fileLogger
import io.odin.syntax._
import monix.bio.Task
import monix.execution.Scheduler

import scala.concurrent.Future
import munit.TestOptions
import cats.effect.Resource
import io.odin.Logger

trait MonixBioSuite extends munit.TaglessFinalSuite[Task] {
  override protected def toFuture[A](f: Task[A]): Future[A] = {
    implicit val s = Scheduler.global
    f.runToFuture
  }

  def loggerFixture(fileName: Option[String] = None)(implicit
      enc: sourcecode.Enclosing
  ) =
    ResourceFixture(
      consoleLogger[Task]().withAsync() |+| fileLogger[Task](
        fileName.getOrElse(enc.value.split("#").head + ".log")
      ),
      (
          options: TestOptions,
          value: Logger[Task]
      ) => Task(options.name),
      (_: Logger[Task]) => Task.unit
    )

}
182
wait-for-it.sh
Executable file
@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

WAITFORIT_cmdname=${0##*/}

echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# Check to see if timeout is from busybox
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)

WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi

if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi
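A typical use of this script is to block a container's entrypoint until the database port accepts connections before starting the packaged app. The invocation below is only an illustrative sketch: the `postgres` hostname and the start command are assumptions, not something defined by this commit.

# Hypothetical example: wait up to 30 seconds for Postgres, then launch the app
./wait-for-it.sh postgres:5432 -t 30 -- ./bin/http4s-demo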