Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
108 changes: 100 additions & 8 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ on:
branches:
- main
- staging
- 'task-*'
workflow_dispatch:

env:
Expand Down Expand Up @@ -56,8 +57,8 @@ jobs:
&& github.event.pull_request.base.repo.full_name == github.event.pull_request.head.repo.full_name
with:
commit_message: automatically run ormolu
build-exe:
name: Build share-api executable
build-exes:
name: Build executables
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
Expand All @@ -83,12 +84,19 @@ jobs:
--copy-bins \
${{ (env.is_published_build && '--ghc-options -O2') || '--fast' }}

- name: Save exes for docker build
- name: Save exes for share-api docker build
uses: actions/upload-artifact@v4
with:
name: share-api-exe
path: ${{env.share_local_bin}}

- name: Save exes for share-task-runner docker build
if: ${{ (github.event_name == 'push' && startsWith(github.ref, 'refs/heads/task-')) || (github.event_name == 'pull_request' && startsWith(github.head_ref, 'task-')) }}
uses: actions/upload-artifact@v4
with:
name: share-task-runner-exe
path: ${{env.share_local_bin}}

- name: save stack caches
if: |
!cancelled()
Expand All @@ -97,12 +105,95 @@ jobs:
with:
cache-prefix: ${{env.exe_cache_prefix}}

# A separate job for docker build because it requires elevated github token permissions.
docker-build:
# Separate jobs for the docker builds because they require elevated github token permissions.
share-task-runner-docker-build:
if: ${{ (github.event_name == 'push' && startsWith(github.ref, 'refs/heads/task-')) || (github.event_name == 'pull_request' && startsWith(github.head_ref, 'task-')) }}
env:
container_registry: ghcr.io
docker_image_name: unisoncomputing/share-task-runner
needs: [build-exes]
runs-on: ubuntu-24.04
# Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
permissions:
contents: read
# Allow uploading the docker image to the container registry
packages: write
# Allow creating and updating the artifact attestation
attestations: write
# Required to get user information for building attestations
id-token: write

steps:
- uses: actions/checkout@v4
with:
# Don't need unison submodule for docker image build
submodules: false

# Downloads the artifact that contains the share-api-exe from the previous job.
- uses: actions/download-artifact@v4
with:
name: share-task-runner-exe
path: ./docker/tmp/

# Configure Docker's builder,
# This seems necessary to support docker cache layers.
- name: Setup Docker buildx
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3.3.0


# Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
- name: Log in to the Container registry
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
registry: ${{ env.container_registry }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

# This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
with:
images: ${{ env.container_registry }}/${{ env.docker_image_name }}
tags: |
type=sha,priority=1000,format=short,prefix={{branch}}_{{date 'YYYY-MM-DD-HH-mm'}}_gitref-
type=sha,format=long

# This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
# It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
# It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
- name: Build and push Docker image
id: push
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0
with:
context: ./docker/
file: ./docker/share-task-runner.Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
# Use github actions cache for docker image layers
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
SHARE_COMMIT=${{ github.sha }}
# Also export the image as a local tarball. NOTE(review): in docker/build-push-action an explicit `outputs` overrides the implicit registry export implied by `push: true` — confirm the image is still pushed, or add `type=registry` to `outputs`.
outputs: type=docker,dest=/tmp/share-docker-image.tar # export docker image

# This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)."
- name: Generate artifact attestation
uses: actions/[email protected]
with:
subject-name: ${{ env.container_registry }}/${{ env.docker_image_name}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true


# Separate jobs for the docker builds because they require elevated github token permissions.
share-api-docker-build:
env:
container_registry: ghcr.io
docker_image_name: ${{ github.repository }}
needs: [build-exe]
docker_image_name: unisoncomputing/share-api
needs: [build-exes]
runs-on: ubuntu-24.04
# Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
permissions:
Expand Down Expand Up @@ -158,6 +249,7 @@ jobs:
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0
with:
context: ./docker/
file: ./docker/share-api.Dockerfile
push: ${{ env.is_published_build }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
Expand Down Expand Up @@ -186,7 +278,7 @@ jobs:

# A separate job for docker build because it requires elevated github token permissions.
transcript-tests:
needs: [build-exe, docker-build]
needs: [build-exes, share-api-docker-build]
runs-on: ubuntu-24.04

steps:
Expand Down
File renamed without changes.
6 changes: 3 additions & 3 deletions docker/Dockerfile → docker/share-api.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ RUN openssl genrsa -out /etc/ssl/private/share.key &&\
update-locale LANG=C.UTF-8
ENV LANG=C.UTF-8

COPY share-entrypoint.sh /usr/local/bin/share-entrypoint
RUN chmod 555 /usr/local/bin/share-entrypoint
COPY share-api-entrypoint.sh /usr/local/bin/share-api-entrypoint
RUN chmod 555 /usr/local/bin/share-api-entrypoint

COPY tmp/share-api /usr/local/bin/share
RUN chmod 555 /usr/local/bin/share

ENTRYPOINT /usr/local/bin/share-entrypoint
ENTRYPOINT /usr/local/bin/share-api-entrypoint

ARG SHARE_COMMIT
ENV SHARE_COMMIT=$SHARE_COMMIT
14 changes: 14 additions & 0 deletions docker/share-task-runner-entrypoint.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!/bin/sh

# Entrypoint for the share-task-runner container image: logs config, adapts
# to a Nomad-assigned port if present, then exec's the runner binary.

# Fail fast on any error and trace each command into the container log.
set -ex

# Surface the Redis endpoint in the container log for debugging.
echo SHARE_REDIS: "$SHARE_REDIS"

# When running under Nomad, forward the dynamically-allocated HTTP port to
# the variable the server reads.
# NOTE(review): the "enlil_http" port label looks inherited from the
# share-api entrypoint — confirm the task runner's Nomad job uses this label.
if [ -n "$NOMAD_PORT_enlil_http" ]; then
  export SHARE_SERVER_PORT="$NOMAD_PORT_enlil_http"
fi

# Bind on all interfaces so the container port mapping works.
export SHARE_IP=0.0.0.0

# Merge stderr into stdout so everything lands in one log stream, then
# replace this shell with the runner so it receives signals directly.
exec 2>&1
exec /usr/local/bin/share-task-runner
18 changes: 18 additions & 0 deletions docker/share-task-runner.Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
FROM debian:trixie
RUN apt-get update && apt-get install -y ssl-cert libpq5 ca-certificates curl locales
RUN openssl genrsa -out /etc/ssl/private/share.key &&\
echo "C.UTF-8 UTF-8" > /etc/locale.gen &&\
dpkg-reconfigure --frontend=noninteractive locales &&\
update-locale LANG=C.UTF-8
ENV LANG=C.UTF-8

COPY share-task-runner-entrypoint.sh /usr/local/bin/share-task-runner-entrypoint
RUN chmod 555 /usr/local/bin/share-task-runner-entrypoint

COPY tmp/share-task-runner /usr/local/bin/share-task-runner
RUN chmod 555 /usr/local/bin/share-task-runner

ENTRYPOINT /usr/local/bin/share-task-runner-entrypoint

ARG SHARE_COMMIT
ENV SHARE_COMMIT=$SHARE_COMMIT
5 changes: 5 additions & 0 deletions share-api/src/Share/Utils/Tags.hs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ module Share.Utils.Tags
where

import Control.Monad.Trans.Maybe (mapMaybeT)
import Control.Monad.Writer.CPS
import Share.Prelude

type Tags = Map Text Text
Expand Down Expand Up @@ -35,6 +36,10 @@ instance (MonadTags m) => MonadTags (ReaderT e m) where
askTags = lift askTags
withTags newTags = mapReaderT (withTags newTags)

-- | Pass tag operations through a CPS 'WriterT' layer: 'askTags' delegates to
-- the underlying monad, and 'withTags' maps over the wrapped computation,
-- leaving the accumulated output @w@ untouched.
instance (Monoid w, MonadTags m) => MonadTags (WriterT w m) where
  askTags = lift askTags
  withTags newTags = mapWriterT (withTags newTags)

instance (MonadTags m) => MonadTags (MaybeT m) where
askTags = lift askTags
withTags newTags = mapMaybeT (withTags newTags)
Expand Down
11 changes: 3 additions & 8 deletions share-task-runner/app/Main.hs
Original file line number Diff line number Diff line change
@@ -1,14 +1,9 @@
module Main where

import Share.Env (withEnv)
import Share.BackgroundJobs.Monad
import UnliftIO
import Share.Env (withEnv)
import Share.Tasks.AmbiguousComponentCheck qualified as AmbiguousComponentCheck

main :: IO ()
main = do
withEnv \env -> runBackground env "share-task-runner" task

task :: Background ()
task = do
liftIO $ putStrLn "Hello from the task runner!"
pure ()
withEnv \env -> runBackground env "share-task-runner" AmbiguousComponentCheck.run
6 changes: 6 additions & 0 deletions share-task-runner/package.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,12 @@ dependencies:
- base >= 4.7 && < 5
- share-api
- unliftio
- unison-share-api
- unison-codebase-sqlite
- unison-hash
- serialise
- transformers
- cborg

default-extensions:
- ApplicativeDo
Expand Down
14 changes: 14 additions & 0 deletions share-task-runner/share-task-runner.cabal
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ source-repository head
location: https://github.com/unisoncomputing/share-task-runner

library
exposed-modules:
Share.Tasks.AmbiguousComponentCheck
hs-source-dirs:
src
default-extensions:
Expand Down Expand Up @@ -57,7 +59,13 @@ library
ghc-options: -Wall -Werror -Wname-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -Wincomplete-uni-patterns -Widentities -Wredundant-constraints -Wpartial-fields -fprint-expanded-synonyms -fwrite-ide-info -O2 -funbox-strict-fields
build-depends:
base >=4.7 && <5
, cborg
, serialise
, share-api
, transformers
, unison-codebase-sqlite
, unison-hash
, unison-share-api
, unliftio
default-language: Haskell2010

Expand Down Expand Up @@ -99,7 +107,13 @@ executable share-task-runner
ghc-options: -Wall -Werror -Wname-shadowing -Wno-type-defaults -Wno-missing-pattern-synonym-signatures -Wincomplete-uni-patterns -Widentities -Wredundant-constraints -Wpartial-fields -fprint-expanded-synonyms -fwrite-ide-info -O2 -funbox-strict-fields -threaded -rtsopts "-with-rtsopts=-N -A32m -qn2 -T"
build-depends:
base >=4.7 && <5
, cborg
, serialise
, share-api
, share-task-runner
, transformers
, unison-codebase-sqlite
, unison-hash
, unison-share-api
, unliftio
default-language: Haskell2010
80 changes: 80 additions & 0 deletions share-task-runner/src/Share/Tasks/AmbiguousComponentCheck.hs
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
module Share.Tasks.AmbiguousComponentCheck (run) where

import Codec.Serialise qualified as CBOR
import Share.BackgroundJobs.Errors (reportError)
import Share.BackgroundJobs.Monad
import Share.Postgres qualified as PG
import Share.Postgres.Cursors qualified as PG
import Share.Prelude
import Share.Utils.Logging (Loggable (..))
import Share.Utils.Logging qualified as Logging
import U.Codebase.Sqlite.TempEntity
import Unison.Hash32
import Unison.Sync.EntityValidation qualified as EV
import Unison.Sync.Types qualified as Sync
import Unison.Util.Servant.CBOR
import Unison.Util.Servant.CBOR qualified as CBOR

-- | Failures surfaced while scanning serialized components, each tagged with
-- the base32 hash of the offending component.
data AmbiguousComponentCheckError
  = -- | A component hash with conflicting definitions.
    -- NOTE(review): never constructed by 'run' below — confirm it is still needed.
    TaskAmbiguousComponentCheckError Hash32
  | -- | The entity decoded, but failed hash/structure validation.
    TaskEntityValidationError Hash32 Sync.EntityValidationError
  | -- | The stored CBOR bytes could not be decoded at all.
    TaskEntityDecodingError Hash32 CBOR.DeserialiseFailure
  deriving (Show, Eq)

-- | Render every check failure as an error-severity log line; the three
-- constructors differ only in the message text.
instance Loggable AmbiguousComponentCheckError where
  toLog err =
    Logging.textLog (describe err) & Logging.withSeverity Logging.Error
    where
      describe = \case
        TaskAmbiguousComponentCheckError hash32 ->
          "Ambiguous component found for hash: " <> into @Text hash32
        TaskEntityValidationError hash32 validationError ->
          "Entity validation error for hash: "
            <> into @Text hash32
            <> ", error: "
            <> into @Text (show validationError)
        TaskEntityDecodingError hash32 decodeError ->
          "Entity decoding error for hash: "
            <> into @Text hash32
            <> ", error: "
            <> into @Text (show decodeError)

-- | Scan every serialized term component in Postgres, attempt to decode and
-- re-validate each against its recorded hash, and report any failures.
run :: Background ()
run = withWorkerName "ambiguous-component-task" do
  Logging.logInfoText "Starting ambiguous component check task."
  errs <- PG.runTransaction $ do
    -- Stream one serialized copy per component hash (DISTINCT ON) together
    -- with the component's base32 hash for error reporting.
    cursor <-
      PG.newRowCursor @(CBORBytes TempEntity, Hash32)
        "component_cursor"
        [PG.sql|
        (SELECT DISTINCT ON (t.component_hash_id) bytes.bytes, ch.base32
        FROM terms t
        JOIN serialized_components sc ON t.component_hash_id = sc.component_hash_id
        JOIN bytes ON sc.bytes_id = bytes.id
        JOIN component_hashes ch ON t.component_hash_id = ch.id
        )
        |]
    -- Fold over the cursor 100 rows at a time, accumulating one error per
    -- bad component (decode failures take precedence over validation).
    -- NOTE(review): all errors are accumulated in memory before reporting —
    -- confirm the expected error volume is small.
    PG.foldBatched cursor 100 \rows -> do
      rows
        & foldMap
          ( \(bytes, hash32) -> do
              case unpackEntity bytes of
                Left err -> [TaskEntityDecodingError hash32 err]
                Right entity -> do
                  case EV.validateTempEntity hash32 entity of
                    -- Nothing means the entity validated cleanly.
                    Nothing -> []
                    Just validationError -> [TaskEntityValidationError hash32 validationError]
          )
        & pure
  for_ errs reportError
  Logging.logInfoText "Finished ambiguous component check task."

-- | Decode a component's stored CBOR bytes back into a 'TempEntity',
-- returning the deserialisation failure rather than throwing.
-- (The original wrapped the call in a @case@ that rebuilt the same
-- 'Left'/'Right' values — an identity transformation, removed here.)
unpackEntity :: (CBORBytes TempEntity) -> Either CBOR.DeserialiseFailure TempEntity
unpackEntity = CBOR.deserialiseOrFailCBORBytes
Loading