/*****************************************************************************************************************/

// @author Michael Roberts <michael@observerly.com>
// @package @observerly/fits
// @license Copyright © 2021-2025 observerly

/*****************************************************************************************************************/

import { afterAll, beforeAll, describe, expect, it } from 'vitest'

import { ImageData, createCanvas } from 'canvas'
import * as fs from 'node:fs'
import * as path from 'node:path'

import { FITS, ZScaleInterval } from '../..'

/*****************************************************************************************************************/

const filename = 'Rosetta_Nebula_[Ha]_Monochrome_M_300s_2024-11-26T17_20_00Z.fits'

/*****************************************************************************************************************/

const input = path.join(__dirname, `../../../samples/RosettaNebula/${filename}`)

/*****************************************************************************************************************/

const output = path.join(__dirname, '../../../output')

/*****************************************************************************************************************/

let file: File

/*****************************************************************************************************************/

beforeAll(() => {
  // Read the FITS file and create a File object
  const fileBuffer = fs.readFileSync(input)
  file = new File([fileBuffer], 'sample.fit', { type: 'application/octet-stream' })
})

/*****************************************************************************************************************/

afterAll(() => {
  // Clean up the output directory if required:
  // fs.rmSync(output, { recursive: true, force: true })
})

/*****************************************************************************************************************/

describe('FITS', () => {
  it('should be defined', () => {
    expect(FITS).toBeDefined()
  })

  it('should create an instance of FITS', () => {
    const fits = new FITS()
    expect(fits).toBeInstanceOf(FITS)
  })

  it('should read a FITS file from a File object', async () => {
    const fits = new FITS()
    await fits.readFromFile(file)
    expect(fits.getHeaders()).toBeInstanceOf(Map)
  })

  it('should read a FITS file from a Blob object', async () => {
    const blob = new Blob([file], { type: 'application/octet-stream' })
    const fits = new FITS()
    await fits.readFromBlob(blob, filename)
    expect(fits.getHeaders()).toBeInstanceOf(Map)
  })

  it('should read a FITS file from an ArrayBuffer', async () => {
    const buffer = await file.arrayBuffer()
    const fits = new FITS()
    await fits.readFromBuffer(buffer, filename)
    expect(fits.getHeaders()).toBeInstanceOf(Map)
  })

  it('should correctly parse headers from a FITS file', async () => {
    const fits = new FITS()
    await fits.readFromFile(file)
    const headers = fits.getHeaders()

    expect(headers).toBeInstanceOf(Map)
    expect(headers.size).toBeGreaterThan(0)

    expect(headers.get('SIMPLE')).toEqual({
      key: 'SIMPLE',
      value: true,
      comment: 'FITS Standard 4.0'
    })

    expect(headers.get('BITPIX')).toEqual({
      key: 'BITPIX',
      value: -32,
      comment: 'Number of bits per data pixel'
    })

    expect(headers.get('NAXIS')).toEqual({
      key: 'NAXIS',
      value: 2,
      comment: '[1] Number of array dimensions'
    })

    expect(headers.get('NAXIS1')).toEqual({
      key: 'NAXIS1',
      value: 1463,
      comment: '[1] Length of data axis 1'
    })

    expect(headers.get('NAXIS2')).toEqual({
      key: 'NAXIS2',
      value: 1168,
      comment: '[1] Length of data axis 2'
    })

    expect(headers.get('BSCALE')).toEqual({
      key: 'BSCALE',
      value: 1,
      comment: ''
    })

    expect(headers.get('BZERO')).toEqual({
      key: 'BZERO',
      value: 0,
      comment: ''
    })
  })

  it('should correctly destroy the FITS instance', async () => {
    const fits = new FITS()
    await fits.readFromFile(file)
    fits.destroy()
    expect(fits.getHeaders()).toBeInstanceOf(Map)
    expect(fits.getHeaders().size).toBe(0)
    expect(fits.width).toBe(0)
    expect(fits.height).toBe(0)
    expect(fits.getImageHDU()).toBeInstanceOf(Float32Array)
    expect(fits.getImageHDU().length).toBe(0)
    expect(fits.bitpix).toBe(0)
    expect(fits.bzero).toBe(0)
    expect(fits.bscale).toBe(1)
  })
})

/*****************************************************************************************************************/

describe('FITS', () => {
  it('should parse FITS from file, retrieve normalized data, and write a JPEG', async () => {
    // Read the file from disk into a Buffer:
    const fitsBuffer = fs.readFileSync(input)
    const file = new File([fitsBuffer], 'test.fits')
    const fits = await new FITS().readFromFile(file)
    expect(fits.width).toBeGreaterThan(0)
    expect(fits.height).toBeGreaterThan(0)

    // getImageHDU() returns the image data as a Float32Array:
    let data = fits.getImageHDU()

    // Compute the zscale interval, which gives us the vmin and vmax limits for normalization:
    const { vmin, vmax } = ZScaleInterval(data)

    const resolution = fits.width * fits.height

    // Normalize the data to the [0..255] range using the zscale limits:
    const normalizedData = new Float32Array(resolution)

    for (let i = 0; i < resolution; i++) {
      normalizedData[i] = ((data[i] - vmin) / (vmax - vmin)) * 255
    }

    data = normalizedData

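    // Note (assuming ZScaleInterval follows the usual zscale convention): vmin and vmax are display
    // limits estimated from the pixel distribution, so individual pixels can fall outside
    // [vmin, vmax] and their normalized values can fall outside [0..255]; the Uint8ClampedArray
    // built below clamps any such values back into range.
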
    // For consistency, confirm we have width*height pixels:
    const width = fits.width
    const height = fits.height
    const pixels = width * height
    expect(data.length).toBe(pixels)

    // Create a canvas and 2D context to paint the ImageData:
    const canvas = createCanvas(width, height)
    const ctx = canvas.getContext('2d')
    if (!ctx) {
      throw new Error('Could not get 2D context from node-canvas.')
    }

    // Build an RGBA buffer from the normalized grayscale data:
    const rgba = new Uint8ClampedArray(pixels * 4)
    for (let i = 0; i < pixels; i++) {
      // Already scaled to [0..255] by the zscale normalization above:
      const gray = Math.floor(data[i])

      const idx = i * 4
      rgba[idx + 0] = gray // R
      rgba[idx + 1] = gray // G
      rgba[idx + 2] = gray // B
      rgba[idx + 3] = 255 // A
    }
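
    // The ImageData constructor expects one RGBA quadruplet per pixel (a Uint8ClampedArray of
    // length width * height * 4), hence the grayscale value is replicated across the R, G and B
    // channels with a fully opaque alpha channel.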

    // Paint the ImageData on the canvas at (0, 0):
    const imageData = new ImageData(rgba, width, height)
    ctx.putImageData(imageData, 0, 0)

    // Convert to JPEG buffer with 90% quality:
    const image = canvas.toBuffer('image/jpeg', { quality: 0.9 })

    // Ensure the output directory exists before writing the JPEG file:
    if (!fs.existsSync(output)) {
      fs.mkdirSync(output, { recursive: true })
    }

    // Write the final JPEG file to disk:
    const outputPath = path.join(output, 'output.jpg')

    // Wrap the Buffer in a Uint8Array view before writing it with fs.writeFileSync():
    const uint8Array = new Uint8Array(image.buffer, image.byteOffset, image.byteLength)
    fs.writeFileSync(outputPath, uint8Array)

    // Confirm the output file is not empty:
    const stats = fs.statSync(outputPath)
    expect(stats.size).toBeGreaterThan(0)
  })
})

/*****************************************************************************************************************/