diff --git a/.gitignore b/.gitignore
index db30022..c167088 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,23 +1,124 @@
-dist
-dist-*
-cabal-dev
-*.o
-*.hi
-*.hie
-*.chi
-*.chs.h
-*.dyn_o
-*.dyn_hi
-.hpc
-.hsenv
-.cabal-sandbox/
-cabal.sandbox.config
-*.prof
-*.aux
-*.hp
-*.eventlog
-.stack-work/
-cabal.project.local
-cabal.project.local~
-.HTF/
-.ghc.environment.*
\ No newline at end of file
+# Sass Cache
+.sass-cache
+
+# css map
+*.css.map
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Ansible
+deployment/ansible/roles/azavea.*
+*.retry
+
+# Vagrant
+.vagrant
+
+# NodeJS / Browserify stuff
+node_modules/
+npm-debug.log
+
+# Emacs
+\#*#
+*~
+.#*
+TAGS
+
+# Vim
+.*.swp
+
+# MacOS
+.DS_Store
+
+# Scala
+*.class
+*.log
+
+# sbt specific
+.cache
+.coursier-cache
+.history
+.lib/
+dist/*
+target/
+lib_managed/
+src_managed/
+project/boot/
+project/plugins/project/
+/project/.sbtboot
+/project/.boot/
+/project/.ivy/
+/.sbtopts
+
+# Molecule
+.molecule/
+*__pycache__*
+
+# Scala-IDE specific
+.scala_dependencies
+.worksheet
+.ensime/*
+.ensime
+.metals/*
+.metals
+/.ivy2/*
+.sbt/
+metals.lock.db
+.metals/
+.bloop/
+
+/.env
+/.envrc
+
+.node_modules
+dist/
+.vscode
+
+/data
+.idea
+.ensime_cache
+/app-tasks/jars/rf-batch.jar
+/data
+
+.vscode
+
+# Patch files
+*.patch
+scratch/
+
+stats.json
+app-lambda/opt/*
+/app-lambda/package.sh
+
+# js files from docusaurus
+node_modules/*
+website/build/*
diff --git a/.scalafix.conf b/.scalafix.conf
new file mode 100644
index 0000000..ca4ff87
--- /dev/null
+++ b/.scalafix.conf
@@ -0,0 +1,12 @@
+rules = [
+ ProcedureSyntax,
+ RemoveUnused,
+ SortImports
+]
+
+SortImports.blocks = [
+ "com.azavea",
+ "*",
+ "scala.",
+ "java.",
+]
diff --git a/.scalafmt.conf b/.scalafmt.conf
new file mode 100644
index 0000000..b43bce5
--- /dev/null
+++ b/.scalafmt.conf
@@ -0,0 +1,5 @@
+version = 2.1.1
+align = more // For pretty alignment.
+maxColumn = 120
+rewrite.rules = [SortImports]
+newlines.alwaysBeforeTopLevelStatements = true
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..182f95b
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,13 @@
+## [Unreleased]
+
+### Added
+
+### Changed
+
+### Deprecated
+
+### Removed
+
+### Fixed
+
+### Security
diff --git a/README.md b/README.md
index 99531d0..b9b388e 100644
--- a/README.md
+++ b/README.md
@@ -1,29 +1,25 @@
-# pgSockets4s
+# pgsockets4s
+An API project that uses `tapir`, `http4s`, and `doobie`
-## Setup
+### Development
-### Tools you'll need
+The following tools must be present for a development environment:
+ - Docker
+ - Java 8 or newer -- if this is a problem, try using [`jabba`](https://github.com/shyiko/jabba#jabba--) to manage different java versions
+ - Sbt 1.x -- the newer the better probably, installation instructions can be found [here](https://www.scala-sbt.org/download.html)
+ - [Bloop](https://scalacenter.github.io/bloop/) -- this is a scala build server that should run in the background for building the project; installation instructions can be found [here](https://scalacenter.github.io/bloop/setup)
-- `psql`
-- [`rambler`](https://github.com/elwinar/rambler)
+### STRTA
-### Initial setup
+This project uses [`scripts-to-rule-them-all`](https://github.blog/2015-06-30-scripts-to-rule-them-all/) for managing the developer experience. Below is a quick explanation for the specific usage of each for this particular project.
-- Copy `.env.example` to `.env` and `rambler.json.example` to `rambler.json`
-- `docker-compose up -d postgres`
-- After `docker-compose ps postgres` reports an `Up (healthy)` state, `./scripts/load-development-data` (password in `.env` file)
-- Then apply migrations: `rambler apply`
-
-### Prove you're all set
-
-- `docker-compose exec postgres psql -U pgsockets -d pgsockets`
-- `LISTEN new_city_channel;`
--
-```sql
-insert into city (
- id, name, countrycode, district, population
-) values (
- 4080, 'New City Great Place', 'ABC', 'Good Place District', 12345
-);
-```
-- observe the async notification that gets logged
+| Script | Use |
+|---|---|
+| `setup` | Used to provision the project from scratch |
+| `update` | Update project, assemble, run migrations; assume that you can test any PR after running this script on that PR's branch |
+| `console` | Open an `ammonite` shell |
+| `server` | Start the server |
+| `dbshell` | Open a `psql` shell connected to the database |
+| `migrate` | Run migrations against the database |
+| `tests` | Run tests |
+| `cibuild` | Assemble jars and create containers |
diff --git a/application/Dockerfile b/application/Dockerfile
new file mode 100644
index 0000000..af5dc90
--- /dev/null
+++ b/application/Dockerfile
@@ -0,0 +1,12 @@
+FROM openjdk:11-jre-slim
+
+RUN \
+ addgroup --system pgsockets4s \
+ && adduser --system --disabled-login --disabled-password --home /var/lib/pgsockets4s --shell /sbin/nologin --ingroup pgsockets4s pgsockets4s
+
+COPY ./target/scala-2.12/application-assembly.jar /var/lib/pgsockets4s/
+
+USER pgsockets4s
+WORKDIR /var/lib/pgsockets4s
+
+ENTRYPOINT ["java", "-jar", "application-assembly.jar"]
\ No newline at end of file
diff --git a/application/src/main/resources/logback.xml b/application/src/main/resources/logback.xml
new file mode 100644
index 0000000..a6acb7e
--- /dev/null
+++ b/application/src/main/resources/logback.xml
@@ -0,0 +1,15 @@
+
+
+
+ [%thread] %highlight(%-5level) %cyan(%logger{15}) - %msg %n
+
+
+
+
+
+
+
+
+
+
+
diff --git a/application/src/main/resources/migrations/V1__Add_PostGIS_and_Users.sql b/application/src/main/resources/migrations/V1__Add_PostGIS_and_Users.sql
new file mode 100644
index 0000000..98217d8
--- /dev/null
+++ b/application/src/main/resources/migrations/V1__Add_PostGIS_and_Users.sql
@@ -0,0 +1,7 @@
+CREATE EXTENSION IF NOT EXISTS postgis;
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+
+CREATE TABLE users (
+ id uuid PRIMARY KEY DEFAULT uuid_generate_v4 (),
+ email text not null
+);
diff --git a/application/src/main/scala/api/Server.scala b/application/src/main/scala/api/Server.scala
new file mode 100644
index 0000000..d8f2b69
--- /dev/null
+++ b/application/src/main/scala/api/Server.scala
@@ -0,0 +1,75 @@
+package com.azavea.pgsockets4s.api
+
+import cats.effect._
+import cats.implicits._
+import com.azavea.pgsockets4s.api.commands.{ApiConfig, Commands, DatabaseConfig}
+import com.azavea.pgsockets4s.api.endpoints.UserEndpoints
+import com.azavea.pgsockets4s.api.services.UsersService
+import doobie.hikari.HikariTransactor
+import doobie.util.ExecutionContexts
+import org.http4s.implicits._
+import org.http4s.server.blaze._
+import org.http4s.server.middleware._
+import org.http4s.server.{Router, Server => HTTP4sServer}
+import tapir.docs.openapi._
+import tapir.openapi.circe.yaml._
+import tapir.swagger.http4s.SwaggerHttp4s
+
+object Server extends IOApp {
+
+ private def createServer(
+ apiConfig: ApiConfig,
+ dbConfig: DatabaseConfig
+ ): Resource[IO, HTTP4sServer[IO]] =
+ for {
+ connectionEc <- ExecutionContexts.fixedThreadPool[IO](2)
+ transactionEc <- ExecutionContexts.cachedThreadPool[IO]
+ xa <- HikariTransactor.newHikariTransactor[IO](
+ "org.postgresql.Driver",
+ dbConfig.jdbcUrl,
+ dbConfig.dbUser,
+ dbConfig.dbPass,
+ connectionEc,
+ transactionEc
+ )
+ allEndpoints = UserEndpoints.endpoints
+ docs = allEndpoints.toOpenAPI("pgsockets4s", "0.0.1")
+ docRoutes = new SwaggerHttp4s(docs.toYaml, "open-api", "spec.yaml")
+ .routes[IO]
+ userRoutes = new UsersService[IO](xa).routes
+ router = CORS(
+ Router(
+ "/api" -> ResponseLogger
+ .httpRoutes(false, false)(userRoutes <+> docRoutes)
+ )
+ ).orNotFound
+ server <- {
+ BlazeServerBuilder[IO]
+ .bindHttp(apiConfig.internalPort.value, "0.0.0.0")
+ .withHttpApp(router)
+ .resource
+ }
+ } yield {
+ server
+ }
+
+ override def run(args: List[String]): IO[ExitCode] = {
+ import Commands._
+
+ applicationCommand.parse(args) map {
+ case RunServer(apiConfig, dbConfig) =>
+ createServer(apiConfig, dbConfig)
+ .use(_ => IO.never)
+ .as(ExitCode.Success)
+ case RunMigrations(config) => runMigrations(config)
+ } match {
+ case Left(e) =>
+ IO {
+ println(e.toString())
+ } map { _ =>
+ ExitCode.Error
+ }
+ case Right(s) => s
+ }
+ }
+}
diff --git a/application/src/main/scala/api/commands/ApiConfig.scala b/application/src/main/scala/api/commands/ApiConfig.scala
new file mode 100644
index 0000000..d37bbcb
--- /dev/null
+++ b/application/src/main/scala/api/commands/ApiConfig.scala
@@ -0,0 +1,17 @@
+package com.azavea.pgsockets4s.api.commands
+
+import eu.timepit.refined.types.numeric.PosInt
+import eu.timepit.refined.types.string.NonEmptyString
+
+case class ApiConfig(publicPort: PosInt,
+ internalPort: PosInt,
+ host: String,
+ scheme: String) {
+
+ val apiHost: NonEmptyString = (publicPort.value, scheme) match {
+ case (443, "https") => NonEmptyString.unsafeFrom(s"$scheme://$host")
+ case (80, "http") => NonEmptyString.unsafeFrom(s"$scheme://$host")
+ case _ => NonEmptyString.unsafeFrom(s"$scheme://$host:$publicPort")
+ }
+
+}
diff --git a/application/src/main/scala/api/commands/ApiOptions.scala b/application/src/main/scala/api/commands/ApiOptions.scala
new file mode 100644
index 0000000..550f633
--- /dev/null
+++ b/application/src/main/scala/api/commands/ApiOptions.scala
@@ -0,0 +1,40 @@
+package com.azavea.pgsockets4s.api.commands
+
+import cats.implicits._
+import com.monovore.decline.Opts
+import eu.timepit.refined.types.numeric.PosInt
+import com.monovore.decline.refined._
+
+trait ApiOptions {
+
+ private val externalPort = Opts
+ .option[PosInt]("external-port",
+ help = "Port users/clients hit for requests")
+ .withDefault(PosInt(8080))
+
+ private val internalPort = Opts
+ .option[PosInt](
+ "internal-port",
+ help =
+ "Port server listens on, this will be different from 'external-port' when service is started behind a proxy"
+ )
+ .withDefault(PosInt(8080))
+
+ private val apiHost = Opts
+ .option[String]("api-host",
+                help = "Hostname pgsockets4s is hosted at (e.g. localhost)")
+ .withDefault("localhost")
+
+ private val apiScheme =
+ Opts
+ .option[String]("api-scheme",
+ "Scheme server is exposed to end users with")
+ .withDefault("http")
+ .validate("Scheme must be either 'http' or 'https'")(s =>
+ (s == "http" || s == "https"))
+
+ val apiConfig: Opts[ApiConfig] = (externalPort,
+ internalPort,
+ apiHost,
+ apiScheme) mapN ApiConfig
+}
diff --git a/application/src/main/scala/api/commands/Commands.scala b/application/src/main/scala/api/commands/Commands.scala
new file mode 100644
index 0000000..2cdb940
--- /dev/null
+++ b/application/src/main/scala/api/commands/Commands.scala
@@ -0,0 +1,44 @@
+package com.azavea.pgsockets4s.api.commands
+
+import cats.effect.{ContextShift, ExitCode, IO}
+import com.monovore.decline._
+import org.flywaydb.core.Flyway
+import cats.implicits._
+
+object Commands {
+
+ final case class RunMigrations(databaseConfig: DatabaseConfig)
+
+ final case class RunServer(apiConfig: ApiConfig, dbConfig: DatabaseConfig)
+
+ private def runMigrationsOpts(
+ implicit cs: ContextShift[IO]): Opts[RunMigrations] =
+ Opts.subcommand("migrate", "Runs migrations against database") {
+ Options.databaseConfig map RunMigrations
+ }
+
+ private def runServerOpts(implicit cs: ContextShift[IO]): Opts[RunServer] =
+ Opts.subcommand("serve", "Runs web service") {
+ (Options.apiConfig, Options.databaseConfig) mapN RunServer
+ }
+
+ def runMigrations(dbConfig: DatabaseConfig): IO[ExitCode] = IO {
+ Flyway
+ .configure()
+ .dataSource(
+ s"${dbConfig.jdbcUrl}",
+ dbConfig.dbUser,
+ dbConfig.dbPass
+ )
+ .locations("classpath:migrations/")
+ .load()
+ .migrate()
+ ExitCode.Success
+ }
+
+ def applicationCommand(implicit cs: ContextShift[IO]): Command[Product] =
+ Command("", "Welcome to the jungle") {
+ runServerOpts orElse runMigrationsOpts
+ }
+
+}
diff --git a/application/src/main/scala/api/commands/DatabaseConfig.scala b/application/src/main/scala/api/commands/DatabaseConfig.scala
new file mode 100644
index 0000000..ea58873
--- /dev/null
+++ b/application/src/main/scala/api/commands/DatabaseConfig.scala
@@ -0,0 +1,14 @@
+package com.azavea.pgsockets4s.api.commands
+
+import eu.timepit.refined.types.numeric._
+
+final case class DatabaseConfig(
+ dbUser: String,
+ dbPass: String,
+ dbHost: String,
+ dbPort: PosInt,
+ dbName: String
+) {
+ val jdbcUrl = s"jdbc:postgresql://$dbHost:$dbPort/$dbName"
+ val driver = "org.postgresql.Driver"
+}
diff --git a/application/src/main/scala/api/commands/DatabaseOptions.scala b/application/src/main/scala/api/commands/DatabaseOptions.scala
new file mode 100644
index 0000000..15a30f2
--- /dev/null
+++ b/application/src/main/scala/api/commands/DatabaseOptions.scala
@@ -0,0 +1,63 @@
+package com.azavea.pgsockets4s.api.commands
+
+import cats.implicits._
+import com.monovore.decline.Opts
+import cats.effect._
+import doobie.util.transactor.Transactor
+import doobie.implicits._
+import com.lightbend.emoji.ShortCodes.Implicits._
+import com.lightbend.emoji.ShortCodes.Defaults._
+import com.monovore.decline._
+import eu.timepit.refined.types.numeric._
+import com.monovore.decline.refined._
+
+import scala.util.Try
+
+trait DatabaseOptions {
+
+ private val databasePort = Opts
+ .option[PosInt]("db-port", help = "Port to connect to database on")
+ .withDefault(PosInt(5432))
+
+ private val databaseHost = Opts
+ .option[String]("db-host", help = "Database host to connect to")
+ .withDefault("localhost")
+
+ private val databaseName = Opts
+ .option[String]("db-name", help = "Database name to connect to")
+ .withDefault("pgsockets4s")
+
+ private val databasePassword = Opts
+ .option[String]("db-password", help = "Database password to use")
+ .withDefault("pgsockets4s")
+
+ private val databaseUser = Opts
+ .option[String]("db-user", help = "User to connect with database with")
+ .withDefault("pgsockets4s")
+
+ def databaseConfig(
+ implicit contextShift: ContextShift[IO]): Opts[DatabaseConfig] =
+ ((
+ databaseUser,
+ databasePassword,
+ databaseHost,
+ databasePort,
+ databaseName
+ ) mapN DatabaseConfig).validate(
+ e":boom: Unable to connect to database - please ensure database is configured and listening at entered port"
+ ) { config =>
+ val xa =
+ Transactor
+ .fromDriverManager[IO](config.driver,
+ config.jdbcUrl,
+ config.dbUser,
+ config.dbPass)
+ val select = Try {
+ fr"SELECT 1".query[Int].unique.transact(xa).unsafeRunSync()
+ }
+ select.toEither match {
+ case Right(_) => true
+ case Left(_) => false
+ }
+ }
+}
diff --git a/application/src/main/scala/api/commands/Options.scala b/application/src/main/scala/api/commands/Options.scala
new file mode 100644
index 0000000..4e85030
--- /dev/null
+++ b/application/src/main/scala/api/commands/Options.scala
@@ -0,0 +1,9 @@
+package com.azavea.pgsockets4s.api.commands
+
+import com.monovore.decline.Opts
+
+object Options extends DatabaseOptions with ApiOptions {
+
+ val catalogRoot: Opts[String] = Opts
+ .option[String]("catalog-root", "Root of STAC catalog to import")
+}
diff --git a/application/src/main/scala/api/endpoints/UserEndpoints.scala b/application/src/main/scala/api/endpoints/UserEndpoints.scala
new file mode 100644
index 0000000..7924afc
--- /dev/null
+++ b/application/src/main/scala/api/endpoints/UserEndpoints.scala
@@ -0,0 +1,47 @@
+package com.azavea.pgsockets4s.api.endpoints
+
+import java.util.UUID
+
+import com.azavea.pgsockets4s.datamodel.User
+import tapir._
+import tapir.json.circe._
+
+object UserEndpoints {
+
+ val base = endpoint.in("users")
+
+ val listUsers: Endpoint[Unit, Unit, List[User], Nothing] =
+ base.get
+ .out(jsonBody[List[User]])
+ .description("List Users")
+ .name("User List View")
+
+ val getUser: Endpoint[UUID, Unit, User, Nothing] =
+ base.get
+ .in(path[UUID])
+ .out(jsonBody[User])
+ .description("Retrieve a single user")
+ .name("search-get")
+
+ val createUser: Endpoint[User.Create, Unit, User, Nothing] =
+ base.post
+ .in(jsonBody[User.Create])
+ .out(jsonBody[User])
+ .description("Create a User")
+ .name("create-user")
+
+ val deleteUser: Endpoint[UUID, Unit, Unit, Nothing] =
+ base.delete
+ .in(path[UUID])
+ .description("Delete a user")
+
+ val updateUser: Endpoint[(User, UUID), Unit, Unit, Nothing] =
+ base.put
+ .in(jsonBody[User])
+ .in(path[UUID])
+ .description("Update a user")
+ .name("update-user")
+
+ val endpoints = List(listUsers, getUser, createUser, deleteUser, updateUser)
+
+}
diff --git a/application/src/main/scala/api/implicits/package.scala b/application/src/main/scala/api/implicits/package.scala
new file mode 100644
index 0000000..b53315c
--- /dev/null
+++ b/application/src/main/scala/api/implicits/package.scala
@@ -0,0 +1,13 @@
+package com.azavea.pgsockets4s.api
+
+import eu.timepit.refined.types.string.NonEmptyString
+
+package object implicits {
+
+ implicit class combineNonEmptyString(s: NonEmptyString) {
+
+ def +(otherString: String): NonEmptyString =
+ NonEmptyString.unsafeFrom(s.value.concat(otherString))
+ }
+
+}
diff --git a/application/src/main/scala/api/services/UserService.scala b/application/src/main/scala/api/services/UserService.scala
new file mode 100644
index 0000000..6b4a367
--- /dev/null
+++ b/application/src/main/scala/api/services/UserService.scala
@@ -0,0 +1,55 @@
+package com.azavea.pgsockets4s.api.services
+
+import java.util.UUID
+
+import cats.effect._
+import cats.implicits._
+import com.azavea.pgsockets4s.api.endpoints.UserEndpoints
+import com.azavea.pgsockets4s.database.UserDao
+import com.azavea.pgsockets4s.datamodel.User
+import doobie.util.transactor.Transactor
+import doobie._
+import doobie.implicits._
+import org.http4s._
+import org.http4s.dsl.Http4sDsl
+import tapir.server.http4s._
+import eu.timepit.refined.auto._
+
+class UsersService[F[_]: Sync](xa: Transactor[F])(
+ implicit contextShift: ContextShift[F]
+) extends Http4sDsl[F] {
+
+ def listUsers: F[Either[Unit, List[User]]] =
+ UserDao.query.list.transact(xa).map(Either.right)
+
+ def getUser(id: UUID): F[Either[Unit, User]] =
+ UserDao.query.filter(id).selectOption.transact(xa) map {
+ case Some(user) => Either.right(user)
+ case _ => Either.left(())
+ }
+
+ def createUser(user: User.Create): F[Either[Unit, User]] =
+ UserDao.create(user).transact(xa) map {
+ case user: User => Either.right(user)
+ case _ => Either.left(())
+ }
+
+ def deleteUser(id: UUID): F[Either[Unit, Unit]] =
+ UserDao.query.filter(id).delete.transact(xa) map {
+ case 1 => Either.right(())
+ case _ => Either.left(())
+ }
+
+ def updateUser(id: UUID, user: User): F[Either[Unit, Unit]] =
+ UserDao.update(id, user).transact(xa) map {
+ case 1 => Either.right(())
+ case _ => Either.left(())
+ }
+
+ val routes
+ : HttpRoutes[F] = UserEndpoints.listUsers.toRoutes(_ => listUsers) <+> UserEndpoints.getUser
+ .toRoutes(id => getUser(id)) <+> UserEndpoints.createUser.toRoutes(
+ json => createUser(json)
+ ) <+> UserEndpoints.deleteUser.toRoutes(id => deleteUser(id)) <+> UserEndpoints.updateUser
+ .toRoutes { case (json, id) => updateUser(id, json) }
+}
diff --git a/application/src/main/scala/database/UserDao.scala b/application/src/main/scala/database/UserDao.scala
new file mode 100644
index 0000000..4fef636
--- /dev/null
+++ b/application/src/main/scala/database/UserDao.scala
@@ -0,0 +1,31 @@
+package com.azavea.pgsockets4s.database
+
+import java.util.UUID
+
+import com.azavea.pgsockets4s.datamodel.User
+import com.azavea.pgsockets4s.database.util.Dao
+import doobie.util.fragment.Fragment
+import doobie._
+import doobie.implicits._
+import doobie.postgres.implicits._
+
+object UserDao extends Dao[User] {
+ val tableName: String = "users"
+
+ /** An abstract select statement to be used for constructing queries */
+ def selectF: Fragment = fr"SELECT id, email FROM" ++ tableF
+
+ def create(user: User.Create): ConnectionIO[User] = {
+ (fr"INSERT INTO" ++ tableF ++ fr"""
+ (id, email)
+ VALUES
+ (uuid_generate_v4(), ${user.email})
+ """).update.withUniqueGeneratedKeys[User]("id", "email")
+ }
+
+ def update(id: UUID, user: User): ConnectionIO[Int] = {
+    val updateQuery = fr"UPDATE" ++ tableF ++ fr"SET email = ${user.email} WHERE id = ${id}"
+ updateQuery.update.run
+ }
+
+}
diff --git a/application/src/main/scala/database/database.scala b/application/src/main/scala/database/database.scala
new file mode 100644
index 0000000..79ea391
--- /dev/null
+++ b/application/src/main/scala/database/database.scala
@@ -0,0 +1,12 @@
+package com.azavea.pgsockets4s
+
+import com.azavea.pgsockets4s.database.util.{
+ CirceJsonbMeta,
+ Filterables,
+ GeotrellisWktMeta
+}
+
+package object database
+ extends CirceJsonbMeta
+ with GeotrellisWktMeta
+ with Filterables
diff --git a/application/src/main/scala/database/util/CirceJsonbMeta.scala b/application/src/main/scala/database/util/CirceJsonbMeta.scala
new file mode 100644
index 0000000..69580c1
--- /dev/null
+++ b/application/src/main/scala/database/util/CirceJsonbMeta.scala
@@ -0,0 +1,23 @@
+package com.azavea.pgsockets4s.database.util
+
+import cats.implicits._
+import doobie._
+import doobie.postgres.circe.jsonb.implicits._
+import io.circe._
+import io.circe.syntax._
+
+import scala.reflect.runtime.universe.TypeTag
+
+object CirceJsonbMeta {
+
+ def apply[Type: TypeTag: Encoder: Decoder] = {
+ val get = Get[Json].tmap[Type](_.as[Type].valueOr(throw _))
+ val put = Put[Json].tcontramap[Type](_.asJson)
+ new Meta[Type](get, put)
+ }
+}
+
+trait CirceJsonbMeta {
+ // If you want to be able to put Json in the database and there is a circe codec available
+ // you can do the following: implicit val - Meta: Meta[Item] = CirceJsonBMeta[Item]
+}
diff --git a/application/src/main/scala/database/util/Dao.scala b/application/src/main/scala/database/util/Dao.scala
new file mode 100644
index 0000000..d4475bd
--- /dev/null
+++ b/application/src/main/scala/database/util/Dao.scala
@@ -0,0 +1,138 @@
+package com.azavea.pgsockets4s.database.util
+
+import java.util.UUID
+
+import doobie.implicits._
+import doobie.util.{Read, Write}
+import doobie.{LogHandler => _, _}
+import doobie.postgres.implicits._
+
+/**
+ * This is abstraction over the listing of arbitrary types from the DB with filters/pagination
+ */
+abstract class Dao[Model: Read: Write] extends Filterables {
+
+ val tableName: String
+
+ /** The fragment which holds the associated table's name */
+ def tableF: Fragment = Fragment.const(tableName)
+
+ /** An abstract select statement to be used for constructing queries */
+ def selectF: Fragment
+
+ /** Begin construction of a complex, filtered query */
+ def query: Dao.QueryBuilder[Model] =
+ Dao.QueryBuilder[Model](selectF, tableF, List.empty)
+}
+
+object Dao {
+
+ final case class QueryBuilder[Model: Read: Write](
+ selectF: Fragment,
+ tableF: Fragment,
+ filters: List[Option[Fragment]],
+ countFragment: Option[Fragment] = None
+ ) {
+
+ val countF: Fragment =
+ countFragment.getOrElse(fr"SELECT count(id) FROM" ++ tableF)
+ val deleteF: Fragment = fr"DELETE FROM" ++ tableF
+ val existF: Fragment = fr"SELECT 1 FROM" ++ tableF
+
+ /** Add another filter to the query being constructed */
+ def filter[M >: Model, T](
+ thing: T
+ )(implicit filterable: Filterable[M, T]): QueryBuilder[Model] =
+ this.copy(filters = filters ++ filterable.toFilters(thing))
+
+ def filter[M >: Model](
+ thing: Fragment
+ )(implicit filterable: Filterable[M, Fragment]): QueryBuilder[Model] =
+ thing match {
+ case Fragment.empty => this
+ case _ => this.copy(filters = filters ++ filterable.toFilters(thing))
+ }
+
+ def filter[M >: Model](id: UUID)(
+ implicit filterable: Filterable[M, Option[Fragment]]
+ ): QueryBuilder[Model] = {
+ this.copy(filters = filters ++ filterable.toFilters(Some(fr"id = ${id}")))
+ }
+
+ def filter[M >: Model](
+ fragments: List[Option[Fragment]]
+ ): QueryBuilder[Model] = {
+ this.copy(filters = filters ::: fragments)
+ }
+
+ def listQ(limit: Int): Query0[Model] =
+ (selectF ++ Fragments.whereAndOpt(filters: _*) ++ fr"LIMIT $limit")
+ .query[Model]
+
+ /** Provide a list of responses */
+ def list(limit: Int): ConnectionIO[List[Model]] = {
+ listQ(limit).to[List]
+ }
+
+ def listQ(offset: Int, limit: Int): Query0[Model] =
+ (selectF ++ Fragments.whereAndOpt(filters: _*) ++ fr"OFFSET $offset" ++ fr"LIMIT $limit")
+ .query[Model]
+
+ def listQ(offset: Int, limit: Int, orderClause: Fragment): Query0[Model] =
+ (selectF ++ Fragments.whereAndOpt(filters: _*) ++ orderClause ++ fr"OFFSET $offset" ++ fr"LIMIT $limit")
+ .query[Model]
+
+ /** Provide a list of responses */
+ def list: ConnectionIO[List[Model]] = {
+ (selectF ++ Fragments.whereAndOpt(filters: _*))
+ .query[Model]
+ .to[List]
+ }
+
+ /** Provide a list of responses */
+ def list(offset: Int, limit: Int): ConnectionIO[List[Model]] = {
+ listQ(offset, limit).to[List]
+ }
+
+ def list(offset: Int,
+ limit: Int,
+ orderClause: Fragment): ConnectionIO[List[Model]] = {
+ listQ(offset, limit, orderClause).to[List]
+ }
+
+ def selectQ: Query0[Model] =
+ (selectF ++ Fragments.whereAndOpt(filters: _*)).query[Model]
+
+ /** Select a single value - returning an Optional value */
+ def selectOption: ConnectionIO[Option[Model]] =
+ selectQ.option
+
+ /** Select a single value - throw on failure */
+ def select: ConnectionIO[Model] = {
+ selectQ.unique
+ }
+
+ def deleteQOption: Option[Update0] = {
+ if (filters.isEmpty) {
+ None
+ } else {
+ Some((deleteF ++ Fragments.whereAndOpt(filters: _*)).update)
+ }
+ }
+
+ def delete: ConnectionIO[Int] = {
+ deleteQOption
+ .getOrElse(
+ throw new Exception("Unsafe delete - delete requires filters")
+ )
+ .run
+ }
+
+ def exists: ConnectionIO[Boolean] = {
+ (existF ++ Fragments.whereAndOpt(filters: _*) ++ fr"LIMIT 1")
+ .query[Int]
+ .to[List]
+ .map(_.nonEmpty)
+ }
+ }
+}
diff --git a/application/src/main/scala/database/util/Filterable.scala b/application/src/main/scala/database/util/Filterable.scala
new file mode 100644
index 0000000..ab5bb0e
--- /dev/null
+++ b/application/src/main/scala/database/util/Filterable.scala
@@ -0,0 +1,13 @@
+package com.azavea.pgsockets4s.database.util
+
+import doobie.Fragment
+
+import scala.annotation.implicitNotFound
+
+/**
+ * This case class is provided to allow the production of rules for transforming datatypes to doobie fragments
+ */
+@implicitNotFound(
+ "No instance of Filterable[${Model}, ${T}] in scope, check imports and make sure one is defined"
+)
+final case class Filterable[-Model, T](toFilters: T => List[Option[Fragment]])
diff --git a/application/src/main/scala/database/util/Filterables.scala b/application/src/main/scala/database/util/Filterables.scala
new file mode 100644
index 0000000..5f5a065
--- /dev/null
+++ b/application/src/main/scala/database/util/Filterables.scala
@@ -0,0 +1,31 @@
+package com.azavea.pgsockets4s.database.util
+
+import doobie._
+
+trait Filterables {
+
+ implicit val fragmentFilter: Filterable[Any, doobie.Fragment] =
+ Filterable[Any, Fragment] { fragment: Fragment =>
+ List(Some(fragment))
+ }
+
+ implicit def maybeTFilter[T](
+ implicit filterable: Filterable[Any, T]
+ ): Filterable[Any, Option[T]] = Filterable[Any, Option[T]] {
+ case None => List.empty[Option[Fragment]]
+ case Some(thing) => filterable.toFilters(thing)
+ }
+
+ implicit def listTFilter[T](
+ implicit filterable: Filterable[Any, T]
+ ): Filterable[Any, List[T]] = Filterable[Any, List[T]] {
+ someFilterables: List[T] =>
+ {
+ someFilterables
+ .map(filterable.toFilters)
+ .foldLeft(List.empty[Option[Fragment]])(_ ++ _)
+ }
+ }
+}
+
+object Filterables extends Filterables
diff --git a/application/src/main/scala/database/util/GeotrellisWktMeta.scala b/application/src/main/scala/database/util/GeotrellisWktMeta.scala
new file mode 100644
index 0000000..fd42f0c
--- /dev/null
+++ b/application/src/main/scala/database/util/GeotrellisWktMeta.scala
@@ -0,0 +1,76 @@
+package com.azavea.pgsockets4s.database.util
+
+import doobie.Meta
+import doobie.postgres.pgisimplicits.PGgeometryType
+import doobie.util.invariant.InvalidObjectMapping
+import geotrellis.vector.io.wkt.WKT
+import geotrellis.vector.{
+ Geometry,
+ GeometryCollection,
+ Line,
+ MultiLine,
+ MultiPoint,
+ MultiPolygon,
+ Point,
+ Polygon,
+ Projected
+}
+import org.postgis.PGgeometry
+
+import scala.reflect.ClassTag
+import scala.reflect.runtime.universe.TypeTag
+
+trait GeotrellisWktMeta {
+
+ implicit val pgMeta: Meta[PGgeometry] =
+ Meta.Advanced.other[PGgeometry]("geometry")
+
+ // Constructor for geometry types via WKT reading/writing
+ @SuppressWarnings(Array("AsInstanceOf"))
+ private def geometryType[A >: Null <: Geometry: TypeTag](
+ implicit A: ClassTag[A]
+ ): Meta[Projected[A]] =
+ PGgeometryType.timap[Projected[A]](pgGeom => {
+ val split = PGgeometry.splitSRID(pgGeom.getValue)
+ val srid = split(0).splitAt(5)._2.toInt
+ val geom = WKT.read(split(1))
+ try Projected[A](A.runtimeClass.cast(geom).asInstanceOf[A], srid)
+ catch {
+ case _: ClassCastException =>
+ throw InvalidObjectMapping(
+ A.runtimeClass,
+ pgGeom.getGeometry.getClass
+ )
+ }
+ })(geom => {
+ val wkt = s"SRID=${geom.srid};" + WKT.write(geom)
+ val pgGeom = PGgeometry.geomFromString(wkt)
+ new PGgeometry(pgGeom)
+ })
+
+ implicit val GeometryType: Meta[Projected[Geometry]] =
+ geometryType[Geometry]
+
+ implicit val GeometryCollectionType: Meta[Projected[GeometryCollection]] =
+ geometryType[GeometryCollection]
+
+ implicit val MultiLineStringType: Meta[Projected[MultiLine]] =
+ geometryType[MultiLine]
+
+ implicit val MultiPolygonType: Meta[Projected[MultiPolygon]] =
+ geometryType[MultiPolygon]
+ implicit val LineStringType: Meta[Projected[Line]] = geometryType[Line]
+
+ implicit val MultiPointType: Meta[Projected[MultiPoint]] =
+ geometryType[MultiPoint]
+
+ implicit val PolygonType: Meta[Projected[Polygon]] =
+ geometryType[Polygon]
+
+ implicit val PointType: Meta[Projected[Point]] =
+ geometryType[Point]
+
+ implicit val ComposedGeomType: Meta[Projected[GeometryCollection]] =
+ geometryType[GeometryCollection]
+
+}
diff --git a/application/src/main/scala/datamodel/User.scala b/application/src/main/scala/datamodel/User.scala
new file mode 100644
index 0000000..9c2f5e3
--- /dev/null
+++ b/application/src/main/scala/datamodel/User.scala
@@ -0,0 +1,22 @@
+package com.azavea.pgsockets4s.datamodel
+
+import java.util.UUID
+
+import io.circe._
+import io.circe.generic.semiauto._
+
+case class User(id: UUID, email: String)
+
+object User {
+ implicit val userDecoder: Decoder[User] = deriveDecoder[User]
+ implicit val userEncoder: Encoder[User] = deriveEncoder[User]
+
+ case class Create(email: String)
+
+ object Create {
+ implicit val userCreateDecoder: Decoder[Create] = deriveDecoder[Create]
+ implicit val userCreateEncoder: Encoder[Create] = deriveEncoder[Create]
+
+ }
+
+}
diff --git a/application/src/main/scala/error/CrudError.scala b/application/src/main/scala/error/CrudError.scala
new file mode 100644
index 0000000..617cfba
--- /dev/null
+++ b/application/src/main/scala/error/CrudError.scala
@@ -0,0 +1,27 @@
+package com.azavea.pgsockets4s.error
+
+import cats.implicits._
+import io.circe._
+import io.circe.generic.semiauto._
+import io.circe.syntax._
+
+sealed abstract class CrudError
+
+object CrudError {
+
+ implicit val decCrudErrror: Decoder[CrudError] = Decoder[NotFound].widen
+
+ implicit val encCrudError: Encoder[CrudError] = new Encoder[CrudError] {
+
+ def apply(thing: CrudError): Json = thing match {
+ case t: NotFound => t.asJson
+ }
+ }
+}
+
+case class NotFound(msg: String = "Not found") extends CrudError
+
+object NotFound {
+ implicit val encNotFound: Encoder[NotFound] = deriveEncoder
+ implicit val decNotFound: Decoder[NotFound] = deriveDecoder
+}
diff --git a/application/src/main/scala/tile/ImageNode.scala b/application/src/main/scala/tile/ImageNode.scala
new file mode 100644
index 0000000..1cbef69
--- /dev/null
+++ b/application/src/main/scala/tile/ImageNode.scala
@@ -0,0 +1,92 @@
+package com.azavea.pgsockets4s.tile
+
+import java.net.URI
+
+import cats.data.NonEmptyList
+import cats.effect._
+import geotrellis.contrib.vlm.TargetRegion
+import geotrellis.contrib.vlm.geotiff.GeoTiffRasterSource
+import geotrellis.proj4.{CRS, WebMercator}
+import geotrellis.raster._
+import geotrellis.raster.io.geotiff.AutoHigherResolution
+import geotrellis.raster.resample.{NearestNeighbor, ResampleMethod}
+import geotrellis.server._
+import geotrellis.server.vlm._
+import geotrellis.spark.SpatialKey
+import geotrellis.vector.Extent
+
+case class ImageNode(uri: URI)
+
+object ImageNode extends RasterSourceUtils {
+
+ def getRasterSource(uri: String): GeoTiffRasterSource = GeoTiffRasterSource(uri)
+
+ implicit val cogNodeRasterExtents: HasRasterExtents[ImageNode] =
+ new HasRasterExtents[ImageNode] {
+
+ def rasterExtents(self: ImageNode)(implicit contextShift: ContextShift[IO]): IO[NonEmptyList[RasterExtent]] =
+ getRasterExtents(self.uri.toString)
+ }
+
+ private val invisiTile: ByteArrayTile = ByteArrayTile.empty(256, 256)
+
+ private val invisiRaster: Raster[MultibandTile] = Raster(
+ MultibandTile(invisiTile, invisiTile, invisiTile),
+ Extent(0, 0, 256, 256)
+ )
+
+ override def fetchTile(
+ uri: String,
+ zoom: Int,
+ x: Int,
+ y: Int,
+ crs: CRS = WebMercator,
+ method: ResampleMethod = NearestNeighbor
+ ): IO[Raster[MultibandTile]] =
+ IO {
+ val key = SpatialKey(x, y)
+ val layoutDefinition = tmsLevels(zoom)
+ val rasterSource =
+ getRasterSource(uri).reproject(crs, method).tileToLayout(layoutDefinition, method)
+
+ rasterSource.read(key).map(Raster(_, layoutDefinition.mapTransform(key)))
+ } flatMap {
+ case Some(t) =>
+ IO.pure(t)
+ case _ =>
+ IO.pure(invisiRaster)
+ }
+
+ implicit val imageNodeTmsReification: TmsReification[ImageNode] =
+ new TmsReification[ImageNode] {
+
+ def tmsReification(self: ImageNode, buffer: Int)(
+ implicit contextShift: ContextShift[IO]
+ ): (Int, Int, Int) => IO[ProjectedRaster[MultibandTile]] = (z: Int, x: Int, y: Int) => {
+ def fetch(xCoord: Int, yCoord: Int): IO[Raster[MultibandTile]] =
+ fetchTile(self.uri.toString, z, xCoord, yCoord, WebMercator)
+
+ fetch(x, y).map { tile =>
+ val extent = tmsLevels(z).mapTransform.keyToExtent(x, y)
+ ProjectedRaster(tile.tile, extent, WebMercator)
+ }
+ }
+ }
+
+ implicit val imageNodeExtentReification: ExtentReification[ImageNode] =
+ new ExtentReification[ImageNode] {
+
+ def extentReification(
+ self: ImageNode
+ )(implicit contextShift: ContextShift[IO]): (Extent, CellSize) => IO[ProjectedRaster[MultibandTile]] =
+ (extent: Extent, cs: CellSize) => {
+ getRasterSource(self.uri.toString)
+ .resample(TargetRegion(new GridExtent[Long](extent, cs)), NearestNeighbor, AutoHigherResolution)
+ .read(extent)
+ .map { ProjectedRaster(_, WebMercator) }
+ .toIO {
+ new Exception(s"No tile available for RasterExtent: ${RasterExtent(extent, cs)}")
+ }
+ }
+ }
+}
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000..8ea92ce
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,154 @@
+cancelable in Global := true
+onLoad in Global ~= (_ andThen ("project application" :: _))
+
+import sbt._
+
+// Versions
+val CirceFs2Version = "0.11.0"
+val CirceVersion = "0.11.1"
+val DeclineVersion = "0.6.2"
+val DoobieVersion = "0.7.1"
+val EmojiVersion = "1.2.1"
+val FlywayVersion = "5.2.4"
+val GeotrellisServer = "3.4.0-9-geeb1ede-SNAPSHOT"
+val Http4sVersion = "0.20.10"
+val Log4CatsVersion = "0.3.0"
+val LogbackVersion = "1.2.3"
+val PostGISVersion = "2.2.1"
+val PureConfigVersion = "0.12.1"
+val RefinedVersion = "0.9.3"
+val ScapegoatVersion = "1.3.8"
+val SparkVersion = "2.4.2"
+val Specs2Version = "4.6.0"
+val TapirVersion = "0.10.1"
+
+// Dependencies
+val circeCore = "io.circe" %% "circe-core" % CirceVersion
+val circeFs2 = "io.circe" %% "circe-fs2" % CirceFs2Version
+val circeGeneric = "io.circe" %% "circe-generic" % CirceVersion
+val circeRefined = "io.circe" %% "circe-refined" % CirceVersion
+val decline = "com.monovore" %% "decline" % DeclineVersion
+val declineRefined = "com.monovore" %% "decline-refined" % DeclineVersion
+val doobie = "org.tpolecat" %% "doobie-core" % DoobieVersion
+val doobieHikari = "org.tpolecat" %% "doobie-hikari" % DoobieVersion
+val doobiePostgres = "org.tpolecat" %% "doobie-postgres" % DoobieVersion
+val doobiePostgresCirce = "org.tpolecat" %% "doobie-postgres-circe" % DoobieVersion
+val doobieRefined = "org.tpolecat" %% "doobie-refined" % DoobieVersion
+val doobieScalatest = "org.tpolecat" %% "doobie-scalatest" % DoobieVersion % "test"
+val doobieSpecs2 = "org.tpolecat" %% "doobie-specs2" % DoobieVersion % "test"
+val emoji = "com.lightbend" %% "emoji" % EmojiVersion
+val flyway = "org.flywaydb" % "flyway-core" % FlywayVersion
+val geotrellisServerCore = "com.azavea.geotrellis" %% "geotrellis-server-core" % GeotrellisServer
+val geotrellisServerStac = "com.azavea.geotrellis" %% "geotrellis-server-stac" % GeotrellisServer
+val http4s = "org.http4s" %% "http4s-blaze-server" % Http4sVersion
+val http4sCirce = "org.http4s" %% "http4s-circe" % Http4sVersion
+val http4sDsl = "org.http4s" %% "http4s-dsl" % Http4sVersion
+val http4sServer = "org.http4s" %% "http4s-blaze-server" % Http4sVersion
+val log4cats = "io.chrisdavenport" %% "log4cats-slf4j" % Log4CatsVersion
+val logbackClassic = "ch.qos.logback" % "logback-classic" % LogbackVersion
+val postgis = "net.postgis" % "postgis-jdbc" % PostGISVersion
+val pureConfig = "com.github.pureconfig" %% "pureconfig" % PureConfigVersion
+val refined = "eu.timepit" %% "refined" % RefinedVersion
+val refinedCats = "eu.timepit" %% "refined-cats" % RefinedVersion
+val spark = "org.apache.spark" %% "spark-core" % SparkVersion
+val specs2Core = "org.specs2" %% "specs2-core" % Specs2Version % "test"
+val tapir = "com.softwaremill.tapir" %% "tapir-core" % TapirVersion
+val tapirCirce = "com.softwaremill.tapir" %% "tapir-json-circe" % TapirVersion
+val tapirHttp4sServer = "com.softwaremill.tapir" %% "tapir-http4s-server" % TapirVersion
+val tapirOpenAPICirceYAML = "com.softwaremill.tapir" %% "tapir-openapi-circe-yaml" % TapirVersion
+val tapirOpenAPIDocs = "com.softwaremill.tapir" %% "tapir-openapi-docs" % TapirVersion
+val tapirSwaggerUIHttp4s = "com.softwaremill.tapir" %% "tapir-swagger-ui-http4s" % TapirVersion
+
+// Enable a basic import sorter -- rules are defined in .scalafix.conf
+scalafixDependencies in ThisBuild +=
+ "com.nequissimus" %% "sort-imports" % "0.3.2"
+
+lazy val settings = Seq(
+ organization := "com.azavea",
+ name := "pgsockets4s",
+ version := "0.0.1-SNAPSHOT",
+ scalaVersion := "2.12.10",
+ scalafmtOnCompile := true,
+ scapegoatVersion in ThisBuild := ScapegoatVersion,
+ addCompilerPlugin("org.spire-math" %% "kind-projector" % "0.9.6"),
+ addCompilerPlugin("com.olegpy" %% "better-monadic-for" % "0.2.4"),
+ addCompilerPlugin(
+ "org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full
+ ),
+ addCompilerPlugin(scalafixSemanticdb),
+ autoCompilerPlugins := true,
+ assemblyJarName in assembly := "application-assembly.jar",
+ assemblyMergeStrategy in assembly := {
+ case "reference.conf" => MergeStrategy.concat
+ case "application.conf" => MergeStrategy.concat
+ case n if n.startsWith("META-INF/services") => MergeStrategy.concat
+ case n if n.endsWith(".SF") || n.endsWith(".RSA") || n.endsWith(".DSA") =>
+ MergeStrategy.discard
+ case "META-INF/MANIFEST.MF" => MergeStrategy.discard
+ case _ => MergeStrategy.first
+ },
+ excludeDependencies ++= Seq(
+ "log4j" % "log4j",
+ "org.slf4j" % "slf4j-log4j12",
+ "org.slf4j" % "slf4j-nop"
+ ),
+ externalResolvers := Seq(
+ DefaultMavenRepository,
+ Resolver.sonatypeRepo("snapshots"),
+ Resolver.typesafeIvyRepo("releases"),
+ Resolver.bintrayRepo("azavea", "maven"),
+ Resolver.bintrayRepo("azavea", "geotrellis"),
+ "locationtech-releases" at "https://repo.locationtech.org/content/groups/releases",
+ "locationtech-snapshots" at "https://repo.locationtech.org/content/groups/snapshots",
+ Resolver.bintrayRepo("guizmaii", "maven"),
+ Resolver.bintrayRepo("colisweb", "maven"),
+ "jitpack".at("https://jitpack.io"),
+ Resolver.file("local", file(Path.userHome.absolutePath + "/.ivy2/local"))(
+ Resolver.ivyStylePatterns
+ )
+ )
+)
+
+lazy val dependencies = Seq(
+ specs2Core,
+ logbackClassic,
+ circeCore,
+ circeGeneric,
+ circeRefined,
+ decline,
+ declineRefined,
+ doobie,
+ doobieHikari,
+ doobiePostgres,
+ doobiePostgresCirce,
+ doobieRefined,
+ doobieScalatest,
+ doobieSpecs2,
+ emoji,
+ flyway,
+ geotrellisServerCore,
+ geotrellisServerStac,
+ http4s,
+ http4sCirce,
+ http4sDsl,
+ http4sServer,
+ log4cats,
+ postgis,
+ pureConfig,
+ refined,
+ refinedCats,
+ spark,
+ tapir,
+ tapirCirce,
+ tapirHttp4sServer,
+ tapirOpenAPICirceYAML,
+ tapirOpenAPIDocs,
+ tapirSwaggerUIHttp4s
+)
+
+lazy val application = (project in file("application"))
+ .settings(settings: _*)
+ .settings({
+ libraryDependencies ++= dependencies
+ })
+lazy val applicationRef = LocalProject("application")
diff --git a/docker-compose.yml b/docker-compose.yml
index 2124e5d..1f4ae84 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,19 +1,16 @@
-version: "2.4"
+version: '2.3'
services:
database:
image: quay.io/azavea/postgis:2.3-postgres9.6-slim
- volumes:
- - ./data/:/tmp/data/
- ports:
- - 5432:5432
environment:
- - POSTGRES_USER=pgsockets
- - POSTGRES_PASSWORD=pgsockets
- - POSTGRES_DB=pgsockets
+ - POSTGRES_USER=pgsockets4s
+ - POSTGRES_PASSWORD=pgsockets4s
+ - POSTGRES_DB=pgsockets4s
+ ports:
+ - "5432:5432"
healthcheck:
- test: ["CMD", "pg_isready", "-U", "pgsockets"]
+ test: ["CMD", "pg_isready", "-U", "pgsockets4s"]
interval: 3s
timeout: 3s
retries: 3
start_period: 5s
-
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
new file mode 100644
index 0000000..688d62c
--- /dev/null
+++ b/project/Dependencies.scala
@@ -0,0 +1,59 @@
+import scala.util.Properties
+
+import sbt._
+
+// Versions
+object Versions {
+ val CirceFs2Version = "0.11.0"
+ val CirceVersion = "0.11.1"
+ val DeclineVersion = "0.6.2"
+ val DoobieVersion = "0.7.1"
+ val EmojiVersion = "1.2.1"
+ val Flyway = "5.2.4"
+ val GeotrellisServer = "3.4.0-8-g362e95f-SNAPSHOT"
+ val Http4sVersion = "0.20.10"
+ val Log4CatsVersion = "0.3.0"
+ val LogbackVersion = "1.2.3"
+ val Postgis = "2.2.1"
+ val PureConfig = "0.12.1"
+ val Refined = "0.9.3"
+ val ScapegoatVersion = "1.3.8"
+ val Specs2Version = "4.6.0"
+ val TapirVersion = "0.10.1"
+}
+
+object Dependencies {
+ val circeCore = "io.circe" %% "circe-core" % Versions.CirceVersion
+ val circeFs2 = "io.circe" %% "circe-fs2" % Versions.CirceFs2Version
+ val circeGeneric = "io.circe" %% "circe-generic" % Versions.CirceVersion
+ val circeRefined = "io.circe" %% "circe-refined" % Versions.CirceVersion
+ val decline = "com.monovore" %% "decline" % Versions.DeclineVersion
+ val declineRefined = "com.monovore" %% "decline-refined" % Versions.DeclineVersion
+ val doobie = "org.tpolecat" %% "doobie-core" % Versions.DoobieVersion
+ val doobieHikari = "org.tpolecat" %% "doobie-hikari" % Versions.DoobieVersion
+ val doobiePostgres = "org.tpolecat" %% "doobie-postgres" % Versions.DoobieVersion
+ val doobiePostgresCirce = "org.tpolecat" %% "doobie-postgres-circe" % Versions.DoobieVersion
+ val doobieRefined = "org.tpolecat" %% "doobie-refined" % Versions.DoobieVersion
+ val doobieScalatest = "org.tpolecat" %% "doobie-scalatest" % Versions.DoobieVersion % "test"
+ val emoji = "com.lightbend" %% "emoji" % Versions.EmojiVersion
+ val doobieSpecs2 = "org.tpolecat" %% "doobie-specs2" % Versions.DoobieVersion % "test"
+ val flyway = "org.flywaydb" % "flyway-core" % Versions.Flyway
+ val geotrellisServer = "com.azavea.geotrellis" %% "geotrellis-server-stac" % Versions.GeotrellisServer
+ val http4s = "org.http4s" %% "http4s-blaze-server" % Versions.Http4sVersion
+ val http4sCirce = "org.http4s" %% "http4s-circe" % Versions.Http4sVersion
+ val http4sDsl = "org.http4s" %% "http4s-dsl" % Versions.Http4sVersion
+ val http4sServer = "org.http4s" %% "http4s-blaze-server" % Versions.Http4sVersion
+ val log4cats = "io.chrisdavenport" %% "log4cats-slf4j" % Versions.Log4CatsVersion
+ val logbackClassic = "ch.qos.logback" % "logback-classic" % Versions.LogbackVersion
+ val postgis = "net.postgis" % "postgis-jdbc" % Versions.Postgis
+ val pureConfig = "com.github.pureconfig" %% "pureconfig" % Versions.PureConfig
+ val refined = "eu.timepit" %% "refined" % Versions.Refined
+ val refinedCats = "eu.timepit" %% "refined-cats" % Versions.Refined
+ val specs2Core = "org.specs2" %% "specs2-core" % Versions.Specs2Version % "test"
+ val tapir = "com.softwaremill.tapir" %% "tapir-core" % Versions.TapirVersion
+ val tapirCirce = "com.softwaremill.tapir" %% "tapir-json-circe" % Versions.TapirVersion
+ val tapirHttp4sServer = "com.softwaremill.tapir" %% "tapir-http4s-server" % Versions.TapirVersion
+ val tapirOpenAPICirceYAML = "com.softwaremill.tapir" %% "tapir-openapi-circe-yaml" % Versions.TapirVersion
+ val tapirOpenAPIDocs = "com.softwaremill.tapir" %% "tapir-openapi-docs" % Versions.TapirVersion
+ val tapirSwaggerUIHttp4s = "com.softwaremill.tapir" %% "tapir-swagger-ui-http4s" % Versions.TapirVersion
+}
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000..c0bab04
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1 @@
+sbt.version=1.2.8
diff --git a/project/metals.sbt b/project/metals.sbt
new file mode 100644
index 0000000..187c96a
--- /dev/null
+++ b/project/metals.sbt
@@ -0,0 +1,4 @@
+// DO NOT EDIT! This file is auto-generated.
+// This file enables sbt-bloop to create bloop config files.
+
+addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.4.0-RC1-90-70cfd9e2")
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000..8cc54d2
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1,8 @@
+addSbtCoursier
+addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
+addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "1.5.1")
+addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.0.9")
+addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.8")
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9")
+addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.8")
+addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.3.4")
\ No newline at end of file
diff --git a/project/project/plugins.sbt b/project/project/plugins.sbt
new file mode 100644
index 0000000..e36e26b
--- /dev/null
+++ b/project/project/plugins.sbt
@@ -0,0 +1 @@
+addSbtPlugin("io.get-coursier" % "sbt-coursier" % "2.0.0-RC4-1")
\ No newline at end of file
diff --git a/rambler.json.example b/rambler.json.example
index def71ce..9c4daa6 100644
--- a/rambler.json.example
+++ b/rambler.json.example
@@ -3,9 +3,9 @@
"protocol": "tcp",
"host": "localhost",
"port": 5432,
- "user": "pgsockets",
- "password": "pgsockets",
- "database": "pgsockets",
+ "user": "pgsockets4s",
+ "password": "pgsockets4s",
+ "database": "pgsockets4s",
"directory": "./migrations",
"table": "migrations"
}
diff --git a/scripts/cibuild b/scripts/cibuild
new file mode 100755
index 0000000..eb7b284
--- /dev/null
+++ b/scripts/cibuild
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+set -e
+
+if [[ -n "${PGSOCKETS4S_DEBUG}" ]]; then
+ set -x
+fi
+
+if [[ -n "${GIT_COMMIT}" ]]; then
+ GIT_COMMIT="${GIT_COMMIT:0:7}"
+else
+ GIT_COMMIT="$(git rev-parse --short HEAD)"
+fi
+
+function usage() {
+ echo -n \
+"Usage: $(basename "$0")
+
+Build application for staging or a release.
+"
+}
+
+function build_assembly() {
+ echo -e "\e[32m[pgsockets4s] Building Scala assembly JAR\e[0m"
+ sbt application/assembly
+}
+
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ ./scripts/tests
+
+ build_assembly
+ echo -e "\e[32m[pgsockets4s] Building scala container image\e[0m"
+ docker build -t "pgsockets4s:${GIT_COMMIT}" application
+ esac
+fi
diff --git a/scripts/console b/scripts/console
new file mode 100755
index 0000000..2615912
--- /dev/null
+++ b/scripts/console
@@ -0,0 +1,22 @@
+#!/bin/bash
+set -e
+
+function usage() {
+ echo -n \
+"Usage: $(basename "$0") SERVICE COMMAND[S]
+Use Docker Compose to run a command for a service, or drop into a console.
+Example: ./scripts/console
+"
+}
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]
+then
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ bloop console application
+ ;;
+ esac
+fi
diff --git a/scripts/dbshell b/scripts/dbshell
new file mode 100755
index 0000000..cd786a8
--- /dev/null
+++ b/scripts/dbshell
@@ -0,0 +1,22 @@
+#!/bin/bash
+set -eu
+
+function usage() {
+ echo -n \
+"Usage: $(basename "$0")
+Use Docker Compose to enter a psql shell on the local database container
+"
+}
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]
+then
+ case "${1:-}" in
+ --help)
+ usage
+ ;;
+ *)
+ echo -e "\e[32m[pgsockets4s] Starting dbshell...\e[0m"
+ docker-compose run --rm -e PGPASSWORD=pgsockets4s database psql -U pgsockets4s -h database
+ ;;
+ esac
+fi
diff --git a/scripts/load-development-data b/scripts/load-development-data
index c512926..1cf6fe6 100755
--- a/scripts/load-development-data
+++ b/scripts/load-development-data
@@ -20,7 +20,7 @@ function download_database() {
}
function initdb() {
- psql -q -U pgsockets -d pgsockets -h localhost -p 5432 < data/world.sql
+ psql -q -U pgsockets4s -d pgsockets4s -h localhost -p 5432 < data/world.sql
}
if [ "${BASH_SOURCE[0]}" = "${0}" ]
diff --git a/scripts/migrate b/scripts/migrate
new file mode 100755
index 0000000..d1b3cc4
--- /dev/null
+++ b/scripts/migrate
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -e
+
+if [[ -n "${PGSOCKETS4S_DEBUG}" ]]; then
+ set -x
+fi
+
+function usage() {
+ echo -n \
+ "Usage: $(basename "$0") [rambler command]
+
+Runs pgsockets4s migrations via rambler CLI
+
+Available rambler commands are apply|reverse|help
+
+"
+}
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]
+then
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ echo -e "\e[32m[pgsockets4s] Running migrations\e[0m"
+ rambler "$@"
+ esac
+fi
diff --git a/scripts/server b/scripts/server
new file mode 100755
index 0000000..cfde88a
--- /dev/null
+++ b/scripts/server
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -e
+
+
+if [[ -n "${PGSOCKETS4S_DEBUG}" ]]; then
+ set -x
+fi
+
+function usage() {
+ echo -n \
+ "Usage: $(basename "$0")
+
+Starts servers using docker-compose.
+"
+}
+
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]
+then
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ echo -e "\e[32m[pgsockets4s] Starting API service\e[0m"
+ bloop run application --args serve
+ ;;
+ esac
+fi
diff --git a/scripts/setup b/scripts/setup
new file mode 100755
index 0000000..90b385a
--- /dev/null
+++ b/scripts/setup
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+set -e
+
+if [[ -n "${PGSOCKETS4S_DEBUG}" ]]; then
+ set -x
+fi
+
+function usage() {
+ echo -n \
+ "Usage: $(basename "$0")
+Attempts to setup the project's development environment.
+"
+}
+
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]
+then
+
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ echo -e "\e[32m[pgsockets4s] Running Setup...\e[0m"
+ ./scripts/update
+ esac
+fi
diff --git a/scripts/tests b/scripts/tests
new file mode 100755
index 0000000..474aaa5
--- /dev/null
+++ b/scripts/tests
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -e
+
+if [[ -n "${PGSOCKETS4S_DEBUG}" ]]; then
+ set -x
+fi
+
+function usage() {
+ echo -ne \
+ "Usage: $(basename "${0}")
+Run linters and tests.
+"
+}
+
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ echo -e "\e[32m[pgsockets4s] Execute Scalafix\e[0m"
+ sbt "scalafix --check"
+
+ echo -e "\e[32m[pgsockets4s] Execute Scala tests\e[0m"
+ bloop test application
+ ;;
+ esac
+fi
diff --git a/scripts/update b/scripts/update
new file mode 100755
index 0000000..7baaee0
--- /dev/null
+++ b/scripts/update
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -e
+
+
+if [[ -n "${PGSOCKETS4S_DEBUG}" ]]; then
+ set -x
+fi
+
+function usage() {
+ echo -n \
+ "Usage: $(basename "$0")
+Setup external project dependencies.
+"
+}
+
+
+if [ "${BASH_SOURCE[0]}" = "${0}" ]
+then
+ case "${1}" in
+ --help)
+ usage
+ ;;
+ *)
+ echo -e "\e[32m[pgsockets4s] Configuring bloop...\e[0m"
+ sbt ";bloopInstall; scalafmt; scalafmtSbt"
+ docker-compose up -d database
+ ./scripts/migrate
+ esac
+fi