Skip to content
This repository is currently being migrated. It's locked while the migration is in progress.

Commit a7b3a49

Browse files
committed
Try to push to ecr
1 parent 145c57d commit a7b3a49

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

41 files changed

+3219
-131
lines changed

.github/workflows/deploy.yml

Lines changed: 25 additions & 98 deletions
Original file line numberDiff line numberDiff line change
@@ -2,108 +2,35 @@ name: Deploy
22
on:
33
push:
44
branches:
5-
- main
6-
env:
7-
TF_CLOUD_ORGANIZATION: "shughesuk"
8-
CONFIG_DIRECTORY: "./"
5+
- singhals/add-some-cube
6+
permissions:
7+
id-token: write # This is required for requesting the JWT
8+
contents: read # This is required for actions/checkout
99
jobs:
10-
deploy-backend:
11-
outputs:
12-
sha: ${{ steps.short_sha.outputs.sha }}
13-
environment: backend-production
14-
concurrency: backend-production
15-
permissions:
16-
id-token: write
17-
contents: read
10+
deploy-cube:
1811
runs-on: ubuntu-latest
1912
steps:
20-
- name: Checkout code
13+
- name: Checkout repo
2114
uses: actions/checkout@v4
22-
- name: Set up QEMU
23-
uses: docker/setup-qemu-action@v3
24-
- name: Set up Docker Buildx
25-
uses: docker/setup-buildx-action@v3
26-
- name: Login to Docker Hub
27-
uses: docker/login-action@v3
15+
16+
- name: Configure AWS credentials
17+
uses: aws-actions/configure-aws-credentials@v4
2818
with:
29-
username: ${{ secrets.DOCKERHUB_USERNAME }}
30-
password: ${{ secrets.DOCKERHUB_TOKEN }}
31-
- name: Get short SHA
32-
id: short_sha
33-
run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
34-
- name: Build and push Docker image
35-
uses: docker/build-push-action@v5
19+
role-to-assume: arn:aws:iam::471881062455:role/system/github_actions_role
20+
role-session-name: GitHub_to_AWS_sync_svc_cube
21+
aws-region: us-east-1
22+
23+
- name: Login to Amazon ECR
24+
id: login-ecr
25+
uses: aws-actions/[email protected]
3626
with:
37-
context: .
38-
push: true
39-
tags: shughesuk/backend:${{ steps.short_sha.outputs.sha }}
40-
run-migrations:
41-
name: "Run Migrations"
42-
runs-on: ubuntu-latest
43-
needs: deploy-backend
44-
permissions:
45-
contents: read
46-
id-token: write
47-
steps:
48-
- name: Checkout
49-
uses: actions/checkout@v4
50-
- name: configure aws credentials
51-
uses: aws-actions/[email protected]
52-
with:
53-
role-to-assume: arn:aws:iam::905418398753:role/github-actions-role
54-
role-session-name: GitHub_to_AWS_via_FederatedOIDC
55-
aws-region: "us-east-1"
56-
- uses: prefix-dev/[email protected]
57-
with:
58-
cache: true
59-
locked: true
60-
- name: Update task
61-
run: pixi run python scripts/update_task.py --task-definition production --container-name backend-api --image shughesuk/backend:${{ needs.deploy-backend.outputs.sha }}
62-
- name: Run migrations
63-
run: pixi run python scripts/run_task.py --task-definition production --cluster production --command "pixi run python manage.py migrate"
64-
terraform:
65-
needs:
66-
- deploy-backend
67-
- run-migrations
68-
name: "Terraform Apply"
69-
runs-on: ubuntu-latest
70-
permissions:
71-
contents: read
72-
steps:
73-
- name: Checkout
74-
uses: actions/checkout@v4
75-
- name: terraform-apply
76-
uses: dflook/[email protected]
77-
with:
78-
path: ./terraform
79-
auto_approve: true
80-
workspace: resume-workspace
81-
variables: |
82-
app_image = "shughesuk/backend:${{ needs.deploy-backend.outputs.sha }}"
27+
mask-password: "false"
28+
29+
- name: Build, tag, and push docker image to Amazon ECR
8330
env:
84-
TERRAFORM_CLOUD_TOKENS: app.terraform.io=${{ secrets.TF_API_TOKEN }}
85-
deploy-frontend:
86-
needs: terraform
87-
environment: frontend-production
88-
concurrency: frontend-production
89-
permissions:
90-
id-token: write
91-
contents: read
92-
runs-on: ubuntu-latest
93-
steps:
94-
- name: Checkout
95-
uses: actions/checkout@v4
96-
- name: configure aws credentials
97-
uses: aws-actions/[email protected]
98-
with:
99-
role-to-assume: arn:aws:iam::905418398753:role/github-actions-role
100-
role-session-name: GitHub_to_AWS_via_FederatedOIDC
101-
aws-region: "us-east-1"
102-
- uses: prefix-dev/[email protected]
103-
env:
104-
ACTIONS_STEP_DEBUG: true
105-
with:
106-
cache: true
107-
locked: true
108-
- name: Deploy
109-
run: pixi run frontend-deploy
31+
REGISTRY: ${{ steps.login-ecr.outputs.registry }}
32+
REPOSITORY: sync-svc-cube-prod
33+
IMAGE_TAG: ${{ github.sha }}
34+
run: |
35+
docker build -t $REGISTRY/$REPOSITORY:$IMAGE_TAG .
36+
docker push $REGISTRY/$REPOSITORY:$IMAGE_TAG

Dockerfile

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
1-
FROM ghcr.io/prefix-dev/pixi:0.18.0-bookworm-slim
1+
FROM cubejs/cube:v1.1.9
22

3-
COPY ./backend /opt/backend
4-
COPY ./pixi.lock /opt/backend/pixi.lock
5-
COPY ./pixi.toml /opt/backend/pixi.toml
6-
WORKDIR /opt/backend/
7-
RUN pixi install
8-
CMD pixi run uvicorn --port 80 --host 0.0.0.0 resume.asgi:application --log-config logging.yaml
3+
COPY cube.js cube.js
4+
COPY fetch.js fetch.js
5+
RUN mkdir model
6+
COPY model/ model/

cube.js

Lines changed: 145 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,145 @@
1+
// Multi-tenant Cube.js configuration. Tenant identity comes from custom JWT
// claims; fetchUniqueTenants() (./fetch.js) enumerates tenants for refresh.
const fetchUniqueTenants = require("./fetch").fetchUniqueTenants;
const fetch = require("node-fetch");
// JWT claim carrying the tenant id, plus an override claim that takes
// precedence when present (see contextToAppId below).
const tenantIdClaim = "https://synccomputing.com/sync_tenant_id";
const tenantIdOverrideClaim = "https://synccomputing.com/sync_tenant_id_override";
// All Cube API routes are served under this prefix (see exports.basePath).
const cubeBasePath = "/sync-query";
// Port the Cube API listens on; used by getCubeMeta to call our own /v1/meta.
const port = process.env.PORT || 4000;
7+
8+
exports.logger = (message, params) => {
9+
console.log(JSON.stringify({ message, params }));
10+
};
11+
12+
exports.telemetry = false;
13+
exports.basePath = cubeBasePath;
14+
exports.http = {
15+
"cors": {
16+
"allowedHeaders": ["*"],
17+
}
18+
};
19+
exports.scheduledRefreshTimer = 60 * 60 * 24; // this refreshs our data models every 24 hours
20+
21+
22+
exports.contextToAppId = ({ securityContext }) => {
23+
const syncTenantId = securityContext[tenantIdOverrideClaim] ||
24+
securityContext[tenantIdClaim];
25+
26+
if (!syncTenantId) {
27+
throw new Error("You shall not pass! 🧙");
28+
}
29+
30+
return `tenant_${syncTenantId}`;
31+
};
32+
33+
exports.extendContext = (req) => {
34+
return {
35+
securityContext: {
36+
...req.securityContext,
37+
token: req.headers.authorization,
38+
}
39+
}
40+
}
41+
42+
exports.scheduledRefreshContexts = async () => {
43+
console.log("Running refresh contexts");
44+
const uniqueTenants = await fetchUniqueTenants();
45+
console.log(uniqueTenants);
46+
return uniqueTenants;
47+
};
48+
49+
50+
// Fetch /v1/meta from the local Cube API using the caller's auth token.
// Returns the parsed metadata, or undefined when the request fails — the
// error is logged and swallowed so query rewriting can degrade gracefully.
async function getCubeMeta(token) {
  const CUBEJS_API_URL = `http://localhost:${port}`;

  try {
    const response = await fetch(`${CUBEJS_API_URL}${cubeBasePath}/v1/meta`, {
      method: "GET",
      headers: { Authorization: token },
    });

    if (response.ok) {
      return await response.json();
    }
    throw new Error(`HTTP error trying to retrieve cube metadata: ${response.status}`);
  } catch (err) {
    console.error("Error fetching cube metadata:", err.message);
  }
}
71+
72+
// Look up the auxiliary sort dimension `<cube>._sort_<dim>` on a cube, if one
// is modeled. Returns the matching dimension object, or undefined.
const findAuxiliarySortDimFromCube = (cube, dim) => {
  const wanted = `${cube.name}._sort_${dim}`;
  const auxDim = cube.dimensions.find((cubeDim) => {
    console.debug(`comparing ${wanted} to ${cubeDim.name}`);
    return cubeDim.name == wanted;
  });
  console.log(`Found aux sort dim ${JSON.stringify(auxDim)}`);
  return auxDim;
};
80+
81+
// Resolve the auxiliary sort dimension for a cube-or-view member. For views,
// the dimension is traced back (via aliasMember) to its originating cube
// before looking up `<cube>._sort_<dim>`. Returns undefined when the model,
// the view dimension, or the auxiliary dimension cannot be found — previously
// an unknown model name crashed with a TypeError on `cube.type`.
const findAuxiliarySortDim = (cubeOrViewName, dimension, metadata) => {
  console.debug(`finding aux dims for: ${cubeOrViewName} ${dimension}`);

  let cube = metadata.cubes.find((model) => model.name == cubeOrViewName);
  if (!cube) {
    // Unknown cube/view name — nothing to substitute.
    return undefined;
  }
  if (cube.type == "view") {
    // get the og cube from the view's dimension
    const viewDimension = cube.dimensions.find(
      (cubeDim) => cubeDim.name == `${cubeOrViewName}.${dimension}`
    );
    if (!viewDimension || !viewDimension.aliasMember) {
      // Dimension not exposed by the view (or not aliased to a cube member).
      return undefined;
    }
    const parts = viewDimension.aliasMember.split(".");
    const cubeFromAliasMember = parts[0];
    cube = metadata.cubes.find((model) => model.name == cubeFromAliasMember);
    dimension = parts[1];
    if (!cube) {
      // The alias points at a cube we cannot see in the metadata.
      return undefined;
    }
    console.debug(`Found og cube from view: ${cube.name} ${dimension}`);
  }

  return findAuxiliarySortDimFromCube(cube, dimension);
};
99+
100+
// For a single orderBy clause [memberPath, direction], substitute the
// auxiliary sort dimension when one exists and the sort is descending.
// (Ascending order already sorts nulls last, so no substitution is needed.)
const maybeUseAuxilarySortDim = (orderByClause, metadata) => {
  const [orderByPath, orderByDirection] = orderByClause;
  const [cubeOrViewName, dim] = orderByPath.split(".");
  const auxSortByDim = findAuxiliarySortDim(cubeOrViewName, dim, metadata);

  if (!auxSortByDim || orderByDirection != "desc") {
    return orderByClause;
  }
  return [auxSortByDim.name, orderByDirection];
};
114+
115+
// Rewrite every orderBy clause of a query, substituting auxiliary sort
// dimensions where applicable. Returns a fresh array; empty when there is no
// ordering. The input array is not mutated.
const replaceOrderBy = (order, metadata) => {
  if (!order || order.length === 0) {
    return [];
  }
  return order.map((orderByClause) => {
    const rewritten = maybeUseAuxilarySortDim(orderByClause, metadata);
    return rewritten || orderByClause;
  });
};
130+
131+
exports.queryRewrite = async (query, { securityContext }) => {
132+
if (query.order && query.order.length > 0 && query.ungrouped) { // we can skip if customer isn't ordering by anything
133+
const metadata = await getCubeMeta(securityContext.token);
134+
query.order = replaceOrderBy(query.order, metadata);
135+
query.order.forEach((orderClause) => {
136+
let orderByDimName = orderClause[0];
137+
if (!query.dimensions.includes(orderByDimName)) {
138+
// We must add the auxiliary sort dimension in order for sorting to work
139+
query.dimensions.push(orderByDimName);
140+
}
141+
});
142+
console.log(`Rewritten query: ${JSON.stringify(query)}`)
143+
}
144+
return query;
145+
};

docker-compose.yml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
version: "2.2"
2+
3+
services:
4+
cube:
5+
image: sync-cube-image
6+
ports:
7+
- 4000:4000
8+
- 15432:15432
9+
volumes:
10+
- .:/cube/conf

fetch.js

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
const { Pool } = require("pg");
2+
3+
// Shared Postgres connection pool, configured from the same CUBEJS_DB_* env
// vars that Cube itself reads.
const pool = new Pool({
  host: process.env.CUBEJS_DB_HOST,
  port: process.env.CUBEJS_DB_PORT,
  user: process.env.CUBEJS_DB_USER,
  password: process.env.CUBEJS_DB_PASS,
  database: process.env.CUBEJS_DB_NAME,
  // NOTE(review): cert verification is disabled — acceptable only for the
  // Heroku-style managed Postgres mentioned below; confirm for other targets.
  ssl: {
    rejectUnauthorized: false // Heroku requires SSL but with this flag off
  },
});
// JWT claim name under which each tenant id is stored in a security context;
// must match the claim used in cube.js.
const tenantIdClaim = "https://synccomputing.com/sync_tenant_id";
14+
15+
16+
exports.fetchUniqueTagKeys = async (sync_tenant_id) => {
17+
let client;
18+
let tagKeys = [];
19+
try {
20+
console.log("looking up tags for: ", sync_tenant_id);
21+
client = await pool.connect();
22+
const uniqueTagKeysQuery = `
23+
SELECT DISTINCT tag_key
24+
FROM public.databricks_cluster_tags
25+
WHERE sync_tenant_id = '${sync_tenant_id}'
26+
`;
27+
const result = await client.query(uniqueTagKeysQuery);
28+
// remove special characters from the tag key name so we can expose as a dimension
29+
tagKeys = result.rows.map((row) => row.tag_key.replace(/[^a-zA-Z0-9_]/g, '_'));
30+
} catch(error) {
31+
console.error(error)
32+
} finally {
33+
if (client) {
34+
client.release();
35+
}
36+
}
37+
38+
return tagKeys;
39+
};
40+
41+
exports.fetchUniqueTenants = async () => {
42+
console.log("trying to fetch unique tenants")
43+
let client;
44+
let uniqueTenants = [];
45+
try {
46+
client = await pool.connect();
47+
const uniqueTenantsQuery = `
48+
SELECT DISTINCT sync_tenant_id
49+
FROM public.user
50+
WHERE sync_tenant_id IS NOT NULL and last_login > NOW() - INTERVAL '30 days';
51+
`;
52+
const result = await client.query(uniqueTenantsQuery);
53+
console.log(result);
54+
uniqueTenants = result.rows.map((row) => {
55+
const secContext = { "securityContext": {}};
56+
secContext["securityContext"][tenantIdClaim] = row.sync_tenant_id;
57+
58+
return secContext;
59+
});
60+
} catch(error) {
61+
console.error('Error fetching unique tenants:', error);
62+
} finally {
63+
if (client) {
64+
client.release();
65+
}
66+
}
67+
console.log("Found tenants: " + uniqueTenants)
68+
return uniqueTenants
69+
}
70+

0 commit comments

Comments
 (0)