16 changes: 15 additions & 1 deletion .github/workflows/deploy.yml
@@ -7,6 +7,20 @@ on:

jobs:
deploy:
services:
postgres:
image: postgres:15
env:
POSTGRES_PASSWORD: test_password
POSTGRES_USER: test_user
POSTGRES_DB: micro_test
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432

runs-on: ubuntu-latest

@@ -103,7 +117,7 @@ jobs:
with:
images: snowplow/snowplow-micro
tags: |
-type=raw,value=latest-distroless,enable=${{ !contains(steps.ver.outputs.MICRO_VERSION, 'rc') }}
+type=raw,value=latest-distroless,enable=${{ !contains(steps.version.outputs.MICRO_VERSION, 'rc') }}
type=raw,value=${{ steps.version.outputs.MICRO_VERSION }}-distroless
flavor: |
latest=false
15 changes: 15 additions & 0 deletions .github/workflows/test.yml
@@ -7,6 +7,21 @@ jobs:

runs-on: ubuntu-latest

services:
postgres:
image: postgres:15
env:
POSTGRES_PASSWORD: test_password
POSTGRES_USER: test_user
POSTGRES_DB: micro_test
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432

steps:
- uses: actions/checkout@v2

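Both workflows now provision the same Postgres service container (`test_user` / `test_password`, database `micro_test`, published on `localhost:5432`). As a rough sketch of how a test suite might reach it with the doobie/Hikari dependencies added in this PR — the object name and its placement are assumptions, not taken from the diff:

```scala
import cats.effect.{IO, Resource}
import doobie.hikari.HikariTransactor
import doobie.implicits._
import doobie.util.ExecutionContexts

object TestTransactor {
  // Connection details mirror the CI service container above.
  val resource: Resource[IO, HikariTransactor[IO]] =
    for {
      connectEC <- ExecutionContexts.fixedThreadPool[IO](4) // pool used while awaiting connections
      xa <- HikariTransactor.newHikariTransactor[IO](
        "org.postgresql.Driver",
        "jdbc:postgresql://localhost:5432/micro_test",
        "test_user",
        "test_password",
        connectEC
      )
    } yield xa

  // Quick readiness probe a spec could run before the suite proper.
  val ping: IO[Int] = resource.use(xa => sql"select 1".query[Int].unique.transact(xa))
}
```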
1 change: 1 addition & 0 deletions .gitignore
@@ -3,3 +3,4 @@ config/
.DS_Store
.bsp/
.idea/
stream-collector
9 changes: 9 additions & 0 deletions CHANGELOG
@@ -1,3 +1,12 @@
Version 4.0.0 (2026-01-30)
--------------------------
Integrate with Snowplow Console SSO (#176)
Move UI filters, sorting and pagination to the backend (#178)
Add PostgreSQL persistent storage (#177, #183)
Support cloud storage blob URLs (#179)
Add 7-day timeline view alongside existing 30-minute timeline (#181)
Add a more prominent button to copy JSON (#186)

Version 3.0.1 (2025-10-08)
--------------------------
Add --yauaa command line flag to enable YAUAA enrichment (#170)
18 changes: 18 additions & 0 deletions README.md
@@ -32,10 +32,28 @@ sbt +publishLocal && cd ..

To run the tests:

**Prerequisites**: PostgreSQL tests require a local PostgreSQL instance. Start one with Docker:

```bash
docker run --name micro-postgres-test \
-e POSTGRES_PASSWORD=test_password \
-e POSTGRES_USER=test_user \
-e POSTGRES_DB=micro_test \
-p 5432:5432 \
-d postgres:15
```

Then run the tests:

```bash
sbt test
```

To stop the test database:
```bash
docker stop micro-postgres-test && docker rm micro-postgres-test
```

To build a Docker image for local testing:
```
cd ui
13 changes: 12 additions & 1 deletion build.sbt
@@ -35,12 +35,23 @@ lazy val dependencies = Seq(
Dependencies.snowplowAnalyticsSdk,
Dependencies.decline,
Dependencies.http4sCirce,
Dependencies.http4sClient,
Dependencies.circeJawn,
Dependencies.circeGeneric,
Dependencies.doobieCore,
Dependencies.doobieHikari,
Dependencies.doobiePostgres,
Dependencies.doobiePostgresCirce,
Dependencies.postgresqlJdbc,
Dependencies.specs2,
Dependencies.specs2CE,
Dependencies.badRows,
-Dependencies.slf4jSimple
+Dependencies.slf4jSimple,
Dependencies.awsS3,
Dependencies.awsSts,
Dependencies.googleCloudStorage,
Dependencies.azureStorageBlob,
Dependencies.azureIdentity
)
)

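The doobie modules pulled in here suggest how the new PostgreSQL storage is put together; in particular, `doobie-postgres-circe` supplies `Meta[Json]` instances so event payloads can be written to `jsonb` columns without manual encoding. A minimal sketch with an assumed table and column name (the actual schema is not part of this diff):

```scala
import doobie._
import doobie.implicits._
import doobie.postgres.circe.jsonb.implicits._ // Meta[Json] / Put[Json] for jsonb columns
import io.circe.Json

// Hypothetical table and column names; illustration only.
def insertGoodEvent(event: Json): ConnectionIO[Int] =
  sql"insert into good_events (event) values ($event)".update.run
```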
51 changes: 37 additions & 14 deletions project/Dependencies.scala
@@ -19,17 +19,23 @@ object Dependencies {
val snowplowStreamCollector = "3.7.0"
val snowplowCommonEnrich = "6.1.2"
val snowplowAnalyticsSdk = "3.2.0"
-val http4sCirce = "0.23.23"
-
-val decline = "2.4.1"
-val slf4j = "2.0.17"
-
-// circe
-val circe = "0.14.2"
+val http4sCirce = "0.23.23"
+val circe = "0.14.2"
+val decline = "2.4.1"
+val slf4j = "2.0.17"
+val doobie = "1.0.0-RC10"
+val postgresql = "42.7.8"

// specs2
val specs2 = "4.12.2"
val specs2CE = "1.5.0"

// cloud SDKs
val awsSdk = "2.33.1"
val gcpSdk = "2.45.0"
val azureStorageBlob = "12.25.1"
val azureIdentity = "1.13.3"

// force versions of transitive dependencies
val badRows = "2.2.0"
@@ -39,18 +45,35 @@
val snowplowCommonEnrich = "com.snowplowanalytics" %% "snowplow-common-enrich" % V.snowplowCommonEnrich
val snowplowAnalyticsSdk = "com.snowplowanalytics" %% "snowplow-scala-analytics-sdk" % V.snowplowAnalyticsSdk

val http4sCirce = "org.http4s" %% "http4s-circe" % V.http4sCirce
val http4sClient = "org.http4s" %% "http4s-ember-client" % V.http4sCirce
val decline = "com.monovore" %% "decline-effect" % V.decline
val slf4jSimple = "org.slf4j" % "slf4j-simple" % V.slf4j

// circe
val circeJawn = "io.circe" %% "circe-jawn" % V.circe
val circeGeneric = "io.circe" %% "circe-generic" % V.circe

// doobie
val doobieCore = "org.tpolecat" %% "doobie-core" % V.doobie
val doobieHikari = "org.tpolecat" %% "doobie-hikari" % V.doobie
val doobiePostgres = "org.tpolecat" %% "doobie-postgres" % V.doobie
val doobiePostgresCirce = "org.tpolecat" %% "doobie-postgres-circe" % V.doobie

// postgresql
val postgresqlJdbc = "org.postgresql" % "postgresql" % V.postgresql

// specs2
val specs2 = "org.specs2" %% "specs2-core" % V.specs2 % Test
val specs2CE = "org.typelevel" %% "cats-effect-testing-specs2" % V.specs2CE % Test

// cloud SDKs
val awsS3 = "software.amazon.awssdk" % "s3" % V.awsSdk
val awsSts = "software.amazon.awssdk" % "sts" % V.awsSdk % Runtime
val googleCloudStorage = "com.google.cloud" % "google-cloud-storage" % V.gcpSdk
val azureStorageBlob = "com.azure" % "azure-storage-blob" % V.azureStorageBlob
val azureIdentity = "com.azure" % "azure-identity" % V.azureIdentity

// transitive
val badRows = "com.snowplowanalytics" %% "snowplow-badrows" % V.badRows
}
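The cloud SDK coordinates map to the "Support cloud storage blob URLs" changelog entry: files referenced by `s3://`, `gs://` or Azure blob URLs can be fetched at startup. A sketch of the S3 branch only, with an assumed helper name; how Micro actually dispatches on the URL scheme is not shown in this PR:

```scala
import software.amazon.awssdk.services.s3.S3Client
import software.amazon.awssdk.services.s3.model.GetObjectRequest

// Hypothetical helper; the GCS and Azure branches would use their respective SDKs added above.
def readS3Object(bucket: String, key: String): Array[Byte] = {
  val s3 = S3Client.create() // default region/credentials chain; the STS module sits on the runtime classpath for assumed roles
  try s3.getObjectAsBytes(GetObjectRequest.builder().bucket(bucket).key(key).build()).asByteArray()
  finally s3.close()
}
```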
78 changes: 78 additions & 0 deletions src/main/scala/com.snowplowanalytics.snowplow.micro/Auth.scala
@@ -0,0 +1,78 @@
/*
* Copyright (c) 2019-present Snowplow Analytics Ltd. All rights reserved.
*
* This software is made available by Snowplow Analytics, Ltd.,
* under the terms of the Snowplow Limited Use License Agreement, Version 1.1
* located at https://docs.snowplow.io/limited-use-license-1.1
* BY INSTALLING, DOWNLOADING, ACCESSING, USING OR DISTRIBUTING ANY PORTION
* OF THE SOFTWARE, YOU AGREE TO THE TERMS OF SUCH LICENSE AGREEMENT.
*/

package com.snowplowanalytics.snowplow.micro

import cats.data.{EitherT, Kleisli, OptionT}
import cats.effect.IO
import com.snowplowanalytics.snowplow.micro.Configuration.AuthConfig
import org.http4s.{AuthedRoutes, Headers, MediaType, Method, Request, Status, Uri}
import org.http4s.dsl.Http4sDsl
import org.http4s.headers.{Accept, Authorization}
import org.http4s.server.AuthMiddleware
import org.http4s.client.Client
import org.http4s.ember.client.EmberClientBuilder
import org.typelevel.log4cats.Logger
import org.typelevel.log4cats.slf4j.Slf4jLogger

import scala.concurrent.duration._

object Auth extends Http4sDsl[IO] {
implicit private def logger: Logger[IO] = Slf4jLogger.getLogger[IO]

private val clientResource = EmberClientBuilder.default[IO]
.withTimeout(10.seconds)
.withIdleTimeInPool(30.seconds)
.build

private def authorize(authHeader: Authorization, authConfig: AuthConfig, client: Client[IO]): IO[Either[String, Unit]] = {
val uri = Uri.unsafeFromString(s"https://${authConfig.apiDomain}/api/msc/internal/authz/query/v1/${authConfig.organizationId}/minis/list")
val headers = Headers(authHeader, Accept(MediaType.application.json))
val request = Request[IO](Method.POST, uri)
.withHeaders(headers)
.withEntity("{}")

client.run(request).use { response =>
if (response.status == Status.Ok) {
response.bodyText.compile.string.map { body =>
if (body.trim == "true") Right(())
else Left("Authorization denied")
}
} else {
IO.pure(Left(s"Authorization API returned ${response.status}"))
}
}.handleError { error =>
Left(s"Authorization API error: ${error.getMessage}")
}
}

def authMiddleware(authConfig: AuthConfig): AuthMiddleware[IO, Unit] = {
val authUser: Kleisli[IO, Request[IO], Either[String, Unit]] = Kleisli({ request =>
clientResource.use { client =>
(for {
authHeader <- EitherT.fromOption[IO](request.headers.get[Authorization], "Missing Authorization header")
// We are proxying the token to Console API without reading or using it in any way.
// As such, there is no point in running the same validation the Console would do.
// However, if in the future we need to access the token in Micro, it must be validated:
// - use https://github.com/auth0/jwks-rsa-java to automatically fetch the public keys from `authConfig.domain`
// - parse the token manually to extract the `kid` and pick the correct key
// - use https://github.com/jwt-scala/jwt-scala to check signature, issuer, audience and time validity
_ <- EitherT(authorize(authHeader, authConfig, client))
} yield ()).leftSemiflatMap { error =>
logger.warn(s"Authentication failed: $error").as("Authentication denied")
}.value
}
})

val onFailure: AuthedRoutes[String, IO] = Kleisli(_ => OptionT.liftF(Forbidden()))

AuthMiddleware(authUser, onFailure)
}
}
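For orientation, this is roughly how an `AuthMiddleware[IO, Unit]` like the one above is applied to http4s routes. A sketch only — the routes Micro actually protects and the place where `authMiddleware` is invoked are not part of this file:

```scala
import cats.effect.IO
import org.http4s.{AuthedRoutes, HttpRoutes}
import org.http4s.dsl.io._

// Hypothetical wiring; route paths are assumptions.
def protectedApi(authConfig: Configuration.AuthConfig): HttpRoutes[IO] = {
  val routes = AuthedRoutes.of[Unit, IO] {
    case GET -> Root / "micro" / "good" as _ => Ok("[]")
  }
  Auth.authMiddleware(authConfig)(routes) // requests without a valid Console token fall through to the Forbidden handler
}
```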