
Commit fb2ebcc

Merge pull request #1308 from hirosystems/beta
merge beta into master

2 parents: a10ac03 + 1a70fa7

33 files changed: +2113 / -904 lines

.github/workflows/ci.yml

Lines changed: 1 addition & 0 deletions
@@ -512,6 +512,7 @@ jobs:
             @semantic-release/changelog
             @semantic-release/git
             @semantic-release/exec
+            conventional-changelog-conventionalcommits

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v1
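
The added line makes the `conventional-changelog-conventionalcommits` preset available to the semantic-release plugins configured in `package.json` below. For a local run of semantic-release, the rough equivalent would be installing the preset as a dev dependency (an illustration, not a step taken from this repository):

```shell
# Install the preset package so @semantic-release/commit-analyzer and
# @semantic-release/release-notes-generator can load "preset": "conventionalcommits"
npm install --save-dev conventional-changelog-conventionalcommits
```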

CHANGELOG.md

Lines changed: 53 additions & 0 deletions
@@ -1,3 +1,56 @@
+## [5.0.0-beta.7](https://github.com/hirosystems/stacks-blockchain-api/compare/v5.0.0-beta.6...v5.0.0-beta.7) (2022-09-07)
+
+### Bug Fixes
+
+* filter BNS processing for successful txs only ([#1309](https://github.com/hirosystems/stacks-blockchain-api/issues/1309)) ([6a12936](https://github.com/hirosystems/stacks-blockchain-api/commit/6a129369c6d9fcdc79b5a7ad288d37784cbe77cc))
+
+## [5.0.0-beta.6](https://github.com/hirosystems/stacks-blockchain-api/compare/v5.0.0-beta.5...v5.0.0-beta.6) (2022-09-01)
+
+### Features
+
+* add indexes for index_block_hash on BNS tables ([#1304](https://github.com/hirosystems/stacks-blockchain-api/issues/1304)) ([bbf4b2d](https://github.com/hirosystems/stacks-blockchain-api/commit/bbf4b2d2b8c7f6ed30bfda6eaa430d5c2e84cdf5))
+
+## [5.0.0-beta.5](https://github.com/hirosystems/stacks-blockchain-api/compare/v5.0.0-beta.4...v5.0.0-beta.5) (2022-08-31)
+
+### Bug Fixes
+
+* detect name transfers and renewals in special circumstances ([#1303](https://github.com/hirosystems/stacks-blockchain-api/issues/1303)) ([cd381a9](https://github.com/hirosystems/stacks-blockchain-api/commit/cd381a95b4d0d3f4bb08e447500153c3f652eff6))
+
+## [5.0.0-beta.4](https://github.com/hirosystems/stacks-blockchain-api/compare/v5.0.0-beta.3...v5.0.0-beta.4) (2022-08-31)
+
+### Bug Fixes
+
+* add postgres connection error checking for ECONNRESET code ([03a1896](https://github.com/hirosystems/stacks-blockchain-api/commit/03a1896cff8937a5f39a8b75e5adf51a6344592c))
+
+## [5.0.0-beta.3](https://github.com/hirosystems/stacks-blockchain-api/compare/v5.0.0-beta.2...v5.0.0-beta.3) (2022-08-31)
+
+### Bug Fixes
+
+* import BNS v1 data during event replay ([#1301](https://github.com/hirosystems/stacks-blockchain-api/issues/1301)) ([bc59817](https://github.com/hirosystems/stacks-blockchain-api/commit/bc59817aa98dd3a978a27b73d14738b64eb823f9))
+
+## [5.0.0-beta.2](https://github.com/hirosystems/stacks-blockchain-api/compare/v5.0.0-beta.1...v5.0.0-beta.2) (2022-08-26)
+
+### Bug Fixes
+
+* bump version ([3863cce](https://github.com/hirosystems/stacks-blockchain-api/commit/3863cce1a64cf7a4c6cffd4f888c049cfd3ada65))
+
+## [5.0.0-beta.1](https://github.com/hirosystems/stacks-blockchain-api/compare/v4.1.2...v5.0.0-beta.1) (2022-08-26)
+
+### ⚠ BREAKING CHANGES
+
+* optimize tables and improve canonical treatment of BNS data (#1287)
+
+### Features
+
+* optimize tables and improve canonical treatment of BNS data ([#1287](https://github.com/hirosystems/stacks-blockchain-api/issues/1287)) ([1f64818](https://github.com/hirosystems/stacks-blockchain-api/commit/1f648187b8c701e802a06bac52b077fd10571ff7))
+
 ## [4.1.2](https://github.com/hirosystems/stacks-blockchain-api/compare/v4.1.1...v4.1.2) (2022-08-18)
package.json

Lines changed: 6 additions & 2 deletions
@@ -56,8 +56,12 @@
   "engineStrict": true,
   "release": {
     "plugins": [
-      "@semantic-release/commit-analyzer",
-      "@semantic-release/release-notes-generator",
+      ["@semantic-release/commit-analyzer", {
+        "preset": "conventionalcommits"
+      }],
+      ["@semantic-release/release-notes-generator", {
+        "preset": "conventionalcommits"
+      }],
       [
         "@semantic-release/exec",
         {
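
With the `conventionalcommits` preset, the commit analyzer and release-notes generator derive the next version and the changelog sections from the commit message prefix. Roughly, using hypothetical commit messages modeled on the changelog entries above:

```shell
# "fix:" commits normally trigger a patch release and land under "Bug Fixes"
git commit -m "fix: filter BNS processing for successful txs only"

# "feat:" commits normally trigger a minor release and land under "Features"
git commit -m "feat: add indexes for index_block_hash on BNS tables"

# "feat!:" (or a "BREAKING CHANGE:" footer) normally triggers a major release
git commit -m "feat!: optimize tables and improve canonical treatment of BNS data"
```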

readme.md

Lines changed: 53 additions & 15 deletions
@@ -98,19 +98,51 @@ For running offline mode set an environment variable `STACKS_API_MODE=offline`
 
 ## Event Replay
 
-The stacks-node is only able to emit events live as they happen. This poses a problem in the scenario where the stacks-blockchain-api needs to
-be upgraded and its database cannot be migrated to a new schema. One way to handle this upgrade is to wipe the stacks-blockchain-api's database
-and stacks-node working directory, and re-sync from scratch.
+The stacks-node is only able to emit events live as they happen. This poses a problem in the
+scenario where the stacks-blockchain-api needs to be upgraded and its database cannot be migrated to
+a new schema. One way to handle this upgrade is to wipe the stacks-blockchain-api's database and
+stacks-node working directory, and re-sync from scratch.
 
-Alternatively, an event-replay feature is available where the API records the HTTP POST requests from the stacks-node event emitter, then streams
-these events back to itself. Essentially simulating a wipe & full re-sync, but much quicker.
+Alternatively, an event-replay feature is available where the API records the HTTP POST requests
+from the stacks-node event emitter, then streams these events back to itself. Essentially simulating
+a wipe & full re-sync, but much quicker.
 
-The feature can be used via program args. For example, if there are breaking changes in the API's SQL schema, like adding a new column which requires
-events to be replayed, the following steps could be run:
+The feature can be used via program args. For example, if there are breaking changes in the API's
+SQL schema, like adding a new column which requires events to be replayed, the following steps
+could be run:
 
 ### Event Replay Instructions
 
-1. Ensure the API process is not running. When stopping the API, let the process exit gracefully so that any in-progress SQL writes can finish.
+#### V1 BNS Data
+
+**Optional but recommended** - If you want the V1 BNS data, there are going to be a few extra steps:
+
+1. Download BNS data:
+   ```shell
+   curl -L https://storage.googleapis.com/blockstack-v1-migration-data/export-data.tar.gz -o /stacks-node/bns/export-data.tar.gz
+   ```
+1. Extract it:
+   ```shell
+   tar -xzvf ./bns/export-data.tar.gz -C /stacks-node/bns/
+   ```
+1. Each file in `./bns` will have a corresponding `sha256` value. To verify, run a script like the
+   following to check the sha256sum:
+
+   ```bash
+   for file in `ls /stacks-node/bns/* | grep -v sha256 | grep -v .tar.gz`; do
+     if [ $(sha256sum $file | awk {'print $1'}) == $(cat ${file}.sha256 ) ]; then
+       echo "sha256 Matched $file"
+     else
+       echo "sha256 Mismatch $file"
+     fi
+   done
+   ```
+1. Set the data's location as the value of `BNS_IMPORT_DIR` in your `.env` file.
+
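For example, if the data was extracted to `/stacks-node/bns` as in the steps above, the corresponding `.env` entry would look like this (a small sketch):

```shell
BNS_IMPORT_DIR=/stacks-node/bns
```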
+#### Export and Import
+
+1. Ensure the API process is not running. When stopping the API, let the process exit gracefully so
+   that any in-progress SQL writes can finish.
 1. Export event data to disk with the `export-events` command:
 
    ```shell
@@ -119,19 +151,25 @@ events to be replayed, the following steps could be run:
 1. Update to the new stacks-blockchain-api version.
 1. Perform the event playback using the `import-events` command:
 
-   **WARNING**: This will **drop _all_ tables** from the configured Postgres database, including any tables not automatically added by the API.
+   **WARNING**: This will **drop _all_ tables** from the configured Postgres database, including any
+   tables not automatically added by the API.
 
    ```shell
    node ./lib/index.js import-events --file /tmp/stacks-node-events.tsv --wipe-db --force
    ```
 
   This command has two modes of operation, specified by the `--mode` option:
-   * `archival` (default): The process will import and ingest *all* blockchain events that have happened since the first block.
-   * `pruned`: The import process will ignore some prunable events (mempool, microblocks) until the import block height has reached `chain tip - 256` blocks. This saves a considerable amount of time during import, but sacrifices some historical data. You can use this mode if you're mostly interested in running an API that prioritizes real time information.
-
-Alternatively, instead of performing the `export-events` command in step 1, an environment variable can be set which enables events to be streamed to a file
-as they are received, while the application is running normally. To enable this feature, set the `STACKS_EXPORT_EVENTS_FILE` env var to the file path where
-events should be appended. Example:
+   * `archival` (default): The process will import and ingest *all* blockchain events that have
+     happened since the first block.
+   * `pruned`: The import process will ignore some prunable events (mempool, microblocks) until the
+     import block height has reached `chain tip - 256` blocks. This saves a considerable amount of
+     time during import, but sacrifices some historical data. You can use this mode if you're mostly
+     interested in running an API that prioritizes real time information.
+
+Alternatively, instead of performing the `export-events` command in step 1, an environment
+variable can be set which enables events to be streamed to a file as they are received, while the
+application is running normally. To enable this feature, set the `STACKS_EXPORT_EVENTS_FILE` env var
+to the file path where events should be appended. Example:
 ```
 STACKS_EXPORT_EVENTS_FILE=/tmp/stacks-node-events.tsv
 ```
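
For example, a pruned-mode replay of the exported file would add the `--mode` flag to the import command shown above (a sketch based on the options described in this section):

```shell
# Same import as above, but prunable events (mempool, microblocks) are skipped
# until the import is within 256 blocks of the chain tip
node ./lib/index.js import-events --file /tmp/stacks-node-events.tsv --wipe-db --force --mode pruned
```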

running_an_api.md

Lines changed: 1 addition & 29 deletions
@@ -78,34 +78,14 @@ Since we'll need to create some files/dirs for persistent data we'll first creat
 We'll be using:
 
 ```bash
-$ mkdir -p ./stacks-node/{persistent-data/postgres,persistent-data/stacks-blockchain,bns,config}
+$ mkdir -p ./stacks-node/{persistent-data/postgres,persistent-data/stacks-blockchain,config}
 $ docker pull blockstack/stacks-blockchain-api \
   && docker pull blockstack/stacks-blockchain \
   && docker pull postgres:alpine
 $ docker network create stacks-blockchain > /dev/null 2>&1
 $ cd ./stacks-node
 ```
 
-**Optional but recommended**: If you need the v1 BNS data, there are going to be a few extra steps.
-
-1. Download the BNS data:
-   `curl -L https://storage.googleapis.com/blockstack-v1-migration-data/export-data.tar.gz -o ./bns/export-data.tar.gz`
-2. Extract the data:
-   `tar -xzvf ./bns/export-data.tar.gz -C ./bns/`
-3. Each file in `./bns` will have a corresponding `sha256` value.
-
-   To Verify, run a script like the following to check the sha256sum:
-
-   ```bash
-   for file in `ls ./bns/* | grep -v sha256 | grep -v .tar.gz`; do
-     if [ $(sha256sum $file | awk {'print $1'}) == $(cat ${file}.sha256 ) ]; then
-       echo "sha256 Matched $file"
-     else
-       echo "sha256 Mismatch $file"
-     fi
-   done
-   ```
-
 ## Postgres
 
 The `postgres:alpine` image can be run with default settings, the only requirement is that a password Environment Variable is set for the `postgres` user: `POSTGRES_PASSWORD=postgres`
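
For reference, starting that container on the network and data directory created above might look like the following (a sketch; the published port and the mount into `/var/lib/postgresql/data` are assumptions, not taken from this diff):

```shell
# Run postgres on the stacks-blockchain network, persisting data to the
# directory created earlier and exposing the default postgres port
docker run -d --rm \
  --name postgres \
  --net=stacks-blockchain \
  -e POSTGRES_PASSWORD=postgres \
  -v $(pwd)/persistent-data/postgres:/var/lib/postgresql/data \
  -p 5432:5432 \
  postgres:alpine
```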
@@ -161,16 +141,9 @@ STACKS_BLOCKCHAIN_API_PORT=3999
 STACKS_BLOCKCHAIN_API_HOST=0.0.0.0
 STACKS_CORE_RPC_HOST=stacks-blockchain
 STACKS_CORE_RPC_PORT=20443
-BNS_IMPORT_DIR=/bns-data
 API_DOCS_URL=https://docs.hiro.so/api
 ```
 
-**Note** that here we are importing the bns data with the env var `BNS_IMPORT`.
-
-To Disable this import, simply comment the line: `#BNS_IMPORT_DIR=/bns-data`
-
-***If you leave this enabled***: please allow several minutes for the one-time import to complete before continuing.
-
 The other Environment Variables to pay attention to:
 
 - `PG_HOST`: Set this to your **postgres** instance. In this guide, we'll be using a container named `postgres`.

@@ -184,7 +157,6 @@ docker run -d --rm \
   --name stacks-blockchain-api \
   --net=stacks-blockchain \
   --env-file $(pwd)/.env \
-  -v $(pwd)/bns:/bns-data \
   -p 3700:3700 \
   -p 3999:3999 \
   blockstack/stacks-blockchain-api

running_api_from_source.md

Lines changed: 2 additions & 25 deletions
@@ -35,15 +35,15 @@ Since we'll need to create some files/dirs for persistent data,
 we'll first create a base directory structure and set some permissions:
 
 ```bash
-$ sudo mkdir -p /stacks-node/{persistent-data/stacks-blockchain,bns,config,binaries}
+$ sudo mkdir -p /stacks-node/{persistent-data/stacks-blockchain,config,binaries}
 $ sudo chown -R $(whoami) /stacks-node
 $ cd /stacks-node
 ```
 
 ## Install Requirements
 
 ```bash
-$ PG_VERSION=12 \
+$ PG_VERSION=14 \
   && NODE_VERSION=16 \
   && sudo apt-get update \
   && sudo apt-get install -y \

@@ -65,26 +65,6 @@ $ PG_VERSION=12 \
   nodejs
 ```
 
-**Optional but recommended** - If you want the V1 BNS data, there are going to be a few extra steps:
-
-1. Download the BNS data:
-   `curl -L https://storage.googleapis.com/blockstack-v1-migration-data/export-data.tar.gz -o /stacks-node/bns/export-data.tar.gz`
-2. Extract the data:
-   `tar -xzvf ./bns/export-data.tar.gz -C /stacks-node/bns/`
-3. Each file in `./bns` will have a corresponding `sha256` value.
-
-   To Verify, run a script like the following to check the sha256sum:
-
-   ```bash
-   for file in `ls /stacks-node/bns/* | grep -v sha256 | grep -v .tar.gz`; do
-     if [ $(sha256sum $file | awk {'print $1'}) == $(cat ${file}.sha256 ) ]; then
-       echo "sha256 Matched $file"
-     else
-       echo "sha256 Mismatch $file"
-     fi
-   done
-   ```
-
 ## postgres
 
 ### postgres permissions

@@ -127,8 +107,6 @@ $ git clone https://github.com/hirosystems/stacks-blockchain-api /stacks-node/st
 The stacks blockchain api requires several Environment Variables to be set in order to run properly.
 To reduce complexity, we're going to create a `.env` file that we'll use for these env vars.
 
-** Note: ** to enable BNS names, uncomment `BNS_IMPORT_DIR` in the below `.env` file.
-
 Create a new file: `/stacks-node/stacks-blockchain-api/.env` with the following content:
 
 ```bash

@@ -148,7 +126,6 @@ STACKS_BLOCKCHAIN_API_PORT=3999
 STACKS_BLOCKCHAIN_API_HOST=0.0.0.0
 STACKS_CORE_RPC_HOST=localhost
 STACKS_CORE_RPC_PORT=20443
-#BNS_IMPORT_DIR=/stacks-node/bns
 EOF
 $ cd /stacks-node/stacks-blockchain-api && nohup node ./lib/index.js &
 ```
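
Once the process is running, a quick smoke test is to query the API's status endpoint on the port configured in the `.env` above (assuming the standard `/extended/v1/status` route of the stacks-blockchain-api):

```shell
# Should return a small JSON payload with the server status once the API is up
curl -s http://localhost:3999/extended/v1/status
```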

src/api/routes/bns/addresses.ts

Lines changed: 8 additions & 0 deletions
@@ -3,13 +3,20 @@ import { asyncHandler } from '../../async-handler';
 import { DataStore } from '../../../datastore/common';
 import { isUnanchoredRequest } from '../../query-helpers';
 import { ChainID } from '@stacks/transactions';
+import {
+  getETagCacheHandler,
+  setETagCacheHeaders,
+} from '../../../api/controllers/cache-controller';
 
 const SUPPORTED_BLOCKCHAINS = ['stacks'];
 
 export function createBnsAddressesRouter(db: DataStore, chainId: ChainID): express.Router {
   const router = express.Router();
+  const cacheHandler = getETagCacheHandler(db);
+
   router.get(
     '/:blockchain/:address',
+    cacheHandler,
     asyncHandler(async (req, res, next) => {
       // Retrieves a list of names owned by the address provided.
       const { blockchain, address } = req.params;

@@ -23,6 +30,7 @@ export function createBnsAddressesRouter(db: DataStore, chainId: ChainID): expre
         includeUnanchored,
         chainId,
       });
+      setETagCacheHeaders(res);
       if (namesByAddress.found) {
         res.json({ names: namesByAddress.result });
       } else {
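
The effect of the `cacheHandler` middleware and `setETagCacheHeaders(res)` is that responses from this route carry an `ETag`, so clients can revalidate with `If-None-Match` instead of re-fetching the full body. A rough client-side illustration, assuming the router is mounted at `/v1/addresses` like the other BNS routes and using a placeholder address:

```shell
# First request: inspect the response headers and note the ETag value
curl -s -D - -o /dev/null http://localhost:3999/v1/addresses/stacks/SP000000000000000000002Q6VF78

# Second request: send the ETag back; an unchanged chain tip should yield 304 Not Modified
curl -s -o /dev/null -w "%{http_code}\n" \
  -H 'If-None-Match: "<etag-from-first-response>"' \
  http://localhost:3999/v1/addresses/stacks/SP000000000000000000002Q6VF78
```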
