Commit c65519d

konard and claude committed
Implement GraphQL comparison: PostgreSQL+Hasura vs Doublets+GQL
- Add comprehensive GraphQL schema covering all benchmark operations
- Set up PostgreSQL+Hasura with Docker Compose and metadata configuration
- Set up Doublets GraphQL server with Docker
- Implement k6 load testing scripts for both GraphQL endpoints
- Add visualization and results processing similar to existing benchmarks
- Create GitHub Actions workflow for automated benchmarking
- Add local benchmark runner script for development
- Update README with GraphQL benchmark results section

Addresses issue #1 for GraphQL-based performance comparison.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <[email protected]>
1 parent e265cdd commit c65519d

File tree

14 files changed: +995 -0 lines changed

Lines changed: 98 additions & 0 deletions
@@ -0,0 +1,98 @@
name: GraphQL Benchmark Comparison

on: [push, pull_request]

env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

jobs:
  graphql-benchmark:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install k6
        run: |
          sudo gpg -k
          sudo gpg --no-default-keyring --keyring /usr/share/keyrings/k6-archive-keyring.gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys C5AD17C747E3415A3642D57D77C6C491D6AC1D69
          echo "deb [signed-by=/usr/share/keyrings/k6-archive-keyring.gpg] https://dl.k6.io/deb stable main" | sudo tee /etc/apt/sources.list.d/k6.list
          sudo apt-get update
          sudo apt-get install k6

      - name: Install Docker Compose
        run: |
          sudo curl -L "https://github.com/docker/compose/releases/download/v2.20.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
          sudo chmod +x /usr/local/bin/docker-compose

      - name: Start PostgreSQL + Hasura stack
        run: |
          docker-compose -f docker-compose.postgresql-hasura.yml up -d
          echo "Waiting for Hasura to be ready..."
          timeout 120s bash -c 'until curl -f http://localhost:8080/healthz; do sleep 2; done'

      - name: Apply Hasura metadata
        run: |
          # Install Hasura CLI
          curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | bash
          export PATH=$PATH:$HOME/.hasura/bin
          # Apply metadata
          cd postgresql-hasura
          hasura metadata apply --endpoint http://localhost:8080

      - name: Build and start Doublets GraphQL stack
        run: |
          docker-compose -f docker-compose.doublets-gql.yml up -d --build
          echo "Waiting for Doublets GraphQL to be ready..."
          timeout 180s bash -c 'until curl -f http://localhost:60341/v1/graphql; do sleep 5; done'

      - name: Run Hasura GraphQL benchmarks
        run: |
          cd benchmarks/k6
          k6 run --out json=hasura-results.json hasura-benchmark.js
        continue-on-error: true

      - name: Run Doublets GraphQL benchmarks
        run: |
          cd benchmarks/k6
          k6 run --out json=doublets-results.json doublets-benchmark.js
        continue-on-error: true

      - name: Process benchmark results
        run: |
          cd benchmarks
          pip install matplotlib numpy
          python process-results.py k6/hasura-results.json k6/doublets-results.json | tee results.txt

      - name: Publish benchmark results to gh-pages
        run: |
          git config --global user.email "[email protected]"
          git config --global user.name "LinksPlatformBencher"
          cd benchmarks
          git fetch
          git checkout gh-pages || git checkout --orphan gh-pages
          mkdir -p Docs
          mv -f bench_graphql.png Docs/
          mv -f bench_graphql_log_scale.png Docs/
          mv -f results.txt Docs/graphql-results.txt
          git add Docs/
          git commit -m "Publish GraphQL benchmark results" || echo "No changes to commit"
          git push origin gh-pages || echo "No changes to push"

      - name: Save benchmark artifacts
        uses: actions/upload-artifact@v4
        with:
          name: GraphQL benchmark results
          path: |
            benchmarks/bench_graphql.png
            benchmarks/bench_graphql_log_scale.png
            benchmarks/results.txt
            benchmarks/k6/hasura-results.json
            benchmarks/k6/doublets-results.json

      - name: Stop services
        if: always()
        run: |
          docker-compose -f docker-compose.postgresql-hasura.yml down
          docker-compose -f docker-compose.doublets-gql.yml down

README.md

Lines changed: 4 additions & 0 deletions
@@ -28,6 +28,10 @@ The results below represent the amount of time (ns) the operation takes per iteration
 ![Image of Rust benchmark (pixel scale)](https://github.com/linksplatform/Comparisons.PostgreSQLVSDoublets/blob/gh-pages/Docs/bench_rust.png?raw=true)
 ![Image of Rust benchmark (log scale)](https://github.com/linksplatform/Comparisons.PostgreSQLVSDoublets/blob/gh-pages/Docs/bench_rust_log_scale.png?raw=true)

+### GraphQL
+![Image of GraphQL benchmark (pixel scale)](https://github.com/linksplatform/Comparisons.PostgreSQLVSDoublets/blob/gh-pages/Docs/bench_graphql.png?raw=true)
+![Image of GraphQL benchmark (log scale)](https://github.com/linksplatform/Comparisons.PostgreSQLVSDoublets/blob/gh-pages/Docs/bench_graphql_log_scale.png?raw=true)
+
 ### Raw benchmark results (all numbers are in nanoseconds)

 | Operation | Doublets United Volatile | Doublets United NonVolatile | Doublets Split Volatile | Doublets Split NonVolatile | PSQL NonTransaction | PSQL Transaction |

benchmarks/k6/common.js

Lines changed: 137 additions & 0 deletions
@@ -0,0 +1,137 @@
// Common utilities for GraphQL benchmarking with k6
import http from 'k6/http';
import { check } from 'k6';

export class GraphQLBenchmark {
  constructor(endpoint, headers = {}) {
    this.endpoint = endpoint;
    this.headers = {
      'Content-Type': 'application/json',
      ...headers
    };
  }

  query(query, variables = {}) {
    const payload = JSON.stringify({
      query: query,
      variables: variables
    });

    const response = http.post(this.endpoint, payload, { headers: this.headers });

    check(response, {
      'GraphQL request successful': (r) => r.status === 200,
      'No GraphQL errors': (r) => {
        const body = JSON.parse(r.body);
        return !body.errors;
      }
    });

    return response;
  }
}

// GraphQL queries and mutations
export const queries = {
  // Create operations
  createPointLink: `
    mutation CreatePointLink {
      createPointLink {
        id
        source
        target
      }
    }
  `,

  createLink: `
    mutation CreateLink($source: ID!, $target: ID!) {
      createLink(input: { source: $source, target: $target }) {
        id
        source
        target
      }
    }
  `,

  // Update operation
  updateLink: `
    mutation UpdateLink($id: ID!, $source: ID, $target: ID) {
      updateLink(input: { id: $id, source: $source, target: $target }) {
        id
        source
        target
      }
    }
  `,

  // Delete operation
  deleteLink: `
    mutation DeleteLink($id: ID!) {
      deleteLink(id: $id)
    }
  `,

  // Read operations - Each variants
  allLinks: `
    query AllLinks($limit: Int, $offset: Int) {
      allLinks(limit: $limit, offset: $offset) {
        id
        source
        target
      }
    }
  `,

  linkById: `
    query LinkById($id: ID!) {
      linkById(id: $id) {
        id
        source
        target
      }
    }
  `,

  concreteLinks: `
    query ConcreteLinks($source: ID!, $target: ID!, $limit: Int, $offset: Int) {
      concreteLinks(source: $source, target: $target, limit: $limit, offset: $offset) {
        id
        source
        target
      }
    }
  `,

  outgoingLinks: `
    query OutgoingLinks($source: ID!, $limit: Int, $offset: Int) {
      outgoingLinks(source: $source, limit: $limit, offset: $offset) {
        id
        source
        target
      }
    }
  `,

  incomingLinks: `
    query IncomingLinks($target: ID!, $limit: Int, $offset: Int) {
      incomingLinks(target: $target, limit: $limit, offset: $offset) {
        id
        source
        target
      }
    }
  `
};

// Generate random variables for testing
export function randomVariables() {
  const id = Math.floor(Math.random() * 1000) + 1;
  return {
    id: id.toString(),
    source: (Math.floor(Math.random() * 1000) + 1).toString(),
    target: (Math.floor(Math.random() * 1000) + 1).toString(),
    limit: 10,
    offset: 0
  };
}
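
The update, delete, and read scenarios in the benchmark scripts draw random IDs in the 1 to 1000 range, so they assume links with those IDs already exist. This excerpt does not show a seeding step; the sketch below is a hypothetical k6 `setup()` stage (not part of this commit; the endpoint and the link count are assumptions) that could pre-populate the store using the helpers above.

```javascript
// Hypothetical seeding stage (not part of this commit). k6 runs setup() once
// before any scenario starts, so it can create the links that the random-ID
// update/delete/read scenarios expect to find.
import { GraphQLBenchmark, queries } from './common.js';

// The endpoint is an assumption; point it at whichever stack is under test.
const seedClient = new GraphQLBenchmark('http://localhost:60341/v1/graphql');

export function setup() {
  // Create 1000 point links so the IDs produced by randomVariables() resolve.
  for (let i = 0; i < 1000; i++) {
    seedClient.query(queries.createPointLink);
  }
}

// Minimal default scenario so the sketch is runnable on its own;
// the real scripts use named exec functions instead.
export default function () {
  seedClient.query(queries.allLinks, { limit: 10, offset: 0 });
}
```
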
benchmarks/k6/doublets-benchmark.js

Lines changed: 119 additions & 0 deletions
@@ -0,0 +1,119 @@
// k6 benchmark script for Doublets GraphQL
import { GraphQLBenchmark, queries, randomVariables } from './common.js';
import { group, sleep } from 'k6';

export let options = {
  scenarios: {
    create_load: {
      executor: 'constant-arrival-rate',
      rate: 100, // 100 requests per second
      timeUnit: '1s',
      duration: '30s',
      preAllocatedVUs: 10,
      maxVUs: 50,
      exec: 'createScenario',
    },
    update_load: {
      executor: 'constant-arrival-rate',
      rate: 100,
      timeUnit: '1s',
      duration: '30s',
      preAllocatedVUs: 10,
      maxVUs: 50,
      exec: 'updateScenario',
      startTime: '35s',
    },
    delete_load: {
      executor: 'constant-arrival-rate',
      rate: 100,
      timeUnit: '1s',
      duration: '30s',
      preAllocatedVUs: 10,
      maxVUs: 50,
      exec: 'deleteScenario',
      startTime: '70s',
    },
    read_load: {
      executor: 'constant-arrival-rate',
      rate: 200,
      timeUnit: '1s',
      duration: '60s',
      preAllocatedVUs: 20,
      maxVUs: 100,
      exec: 'readScenario',
      startTime: '105s',
    }
  },
  thresholds: {
    http_req_duration: ['p(95)<1000'], // 95% of requests should be below 1s
    http_req_failed: ['rate<0.1'], // Error rate should be below 10%
  },
};

const doublets = new GraphQLBenchmark('http://localhost:60341/v1/graphql');

export function createScenario() {
  group('Create Operations', function() {
    // Create point link
    doublets.query(queries.createPointLink);

    // Create regular link
    const vars = randomVariables();
    doublets.query(queries.createLink, {
      source: vars.source,
      target: vars.target
    });
  });
}

export function updateScenario() {
  group('Update Operations', function() {
    const vars = randomVariables();
    doublets.query(queries.updateLink, {
      id: vars.id,
      source: vars.source,
      target: vars.target
    });
  });
}

export function deleteScenario() {
  group('Delete Operations', function() {
    const vars = randomVariables();
    doublets.query(queries.deleteLink, { id: vars.id });
  });
}

export function readScenario() {
  group('Read Operations', function() {
    const vars = randomVariables();

    // Each All
    doublets.query(queries.allLinks, { limit: 10, offset: 0 });

    // Each Identity
    doublets.query(queries.linkById, { id: vars.id });

    // Each Concrete
    doublets.query(queries.concreteLinks, {
      source: vars.source,
      target: vars.target,
      limit: 10,
      offset: 0
    });

    // Each Outgoing
    doublets.query(queries.outgoingLinks, {
      source: vars.source,
      limit: 10,
      offset: 0
    });

    // Each Incoming
    doublets.query(queries.incomingLinks, {
      target: vars.target,
      limit: 10,
      offset: 0
    });
  });
}
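
The workflow also runs hasura-benchmark.js, which is part of the commit but not shown in this excerpt. Assuming it mirrors the Doublets script, the main differences would be the endpoint (the Hasura stack answers on port 8080, matching the workflow's health check) and, if the compose file configures an admin secret, an extra request header; a minimal sketch under those assumptions:

```javascript
// Hypothetical sketch of benchmarks/k6/hasura-benchmark.js (the real file is in
// the commit but not shown here); it is assumed to reuse the shared helpers and
// the same kind of scenarios, pointed at the Hasura endpoint instead.
import { GraphQLBenchmark, queries, randomVariables } from './common.js';
import { group } from 'k6';

export const options = {
  thresholds: {
    http_req_duration: ['p(95)<1000'], // same thresholds as the Doublets script
    http_req_failed: ['rate<0.1'],
  },
};

// Port 8080 matches the workflow's Hasura health check; the admin-secret header
// is an assumption and only needed if the compose file sets one.
const headers = __ENV.HASURA_ADMIN_SECRET
  ? { 'x-hasura-admin-secret': __ENV.HASURA_ADMIN_SECRET }
  : {};
const hasura = new GraphQLBenchmark('http://localhost:8080/v1/graphql', headers);

export default function () {
  group('Read Operations', function () {
    const vars = randomVariables();
    hasura.query(queries.allLinks, { limit: 10, offset: 0 });
    hasura.query(queries.linkById, { id: vars.id });
    hasura.query(queries.outgoingLinks, { source: vars.source, limit: 10, offset: 0 });
  });
}
```
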
