diff --git a/.babelrc b/.babelrc deleted file mode 100644 index 34bc6d2..0000000 --- a/.babelrc +++ /dev/null @@ -1,3 +0,0 @@ -{ - "plugins": ["@babel/plugin-transform-modules-commonjs"] -} diff --git a/.eslintrc.yml b/.eslintrc.yml index f40ac5c..bc57d11 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -1,3 +1 @@ -extends: cheminfo -parserOptions: - sourceType: module +extends: cheminfo-typescript diff --git a/.github/workflows/documentationjs.yml b/.github/workflows/documentationjs.yml deleted file mode 100644 index e93dc38..0000000 --- a/.github/workflows/documentationjs.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Deploy documentation.js on GitHub pages - -on: - workflow_dispatch: - release: - types: [published] - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Build documentation - uses: zakodium/documentationjs-action@v1 - - name: Deploy to GitHub pages - uses: JamesIves/github-pages-deploy-action@releases/v4 - with: - token: ${{ secrets.BOT_TOKEN }} - branch: gh-pages - folder: docs - clean: true diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index 12c81fa..e3689a3 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -10,3 +10,5 @@ jobs: nodejs: # Documentation: https://github.com/zakodium/workflows#nodejs-ci uses: zakodium/workflows/.github/workflows/nodejs.yml@nodejs-v1 + with: + lint-check-types: true diff --git a/.github/workflows/typedoc.yml b/.github/workflows/typedoc.yml new file mode 100644 index 0000000..f69a9d2 --- /dev/null +++ b/.github/workflows/typedoc.yml @@ -0,0 +1,32 @@ +name: Deploy TypeDoc on GitHub pages + +on: + workflow_dispatch: + release: + types: [published] + +env: + NODE_VERSION: 16.x + ENTRY_FILE: 'src/index.ts' + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install dependencies + run: npm install + - name: Build documentation + uses: zakodium/typedoc-action@v2 + with: + entry: ${{ env.ENTRY_FILE }} + - name: Deploy to GitHub pages + uses: JamesIves/github-pages-deploy-action@releases/v4 + with: + token: ${{ secrets.BOT_TOKEN }} + branch: gh-pages + folder: docs + clean: true diff --git a/.gitignore b/.gitignore index baf9e1e..b70b9f2 100755 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ dist/ .vscode docs lib +lib-esm +.eslintcache diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 0000000..2924a55 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,7 @@ +{ + "arrowParens": "always", + "semi": true, + "singleQuote": true, + "tabWidth": 2, + "trailingComma": "all" +} \ No newline at end of file diff --git a/README.md b/README.md index 5dcc560..fe349e0 100755 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # netcdfjs [![NPM version][npm-image]][npm-url] -[![build status][travis-image]][travis-url] -[![Test coverage][coveralls-image]][coveralls-url] +[![build status][ci-image]][ci-url] +[![Test coverage][codecov-image]][codecov-url] [![npm download][download-image]][download-url] Read and explore NetCDF v3 files. 
@@ -32,9 +32,10 @@ reader.getDataVariable("wmoId"); // go to offset and read it [MIT](./LICENSE) -[npm-image]: https://img.shields.io/npm/v/netcdfjs.svg?style=flat-square +[npm-image]: https://img.shields.io/npm/v/netcdfjs.svg [npm-url]: https://www.npmjs.com/package/netcdfjs -[coveralls-image]: https://img.shields.io/coveralls/cheminfo/netcdfjs.svg?style=flat-square -[coveralls-url]: https://coveralls.io/github/cheminfo/netcdfjs -[download-image]: https://img.shields.io/npm/dm/netcdfjs.svg?style=flat-square -[download-url]: https://www.npmjs.com/package/netcdfjs +[ci-image]: https://github.com/cheminfo/netcdfjs/workflows/Node.js%20CI/badge.svg?branch=main +[ci-url]: https://github.com/cheminfo/netcdfjs/actions?query=workflow%3A%22Node.js+CI%22 +[codecov-image]: https://img.shields.io/codecov/c/github/cheminfo/netcdfjs.svg +[codecov-url]: https://codecov.io/gh/cheminfo/netcdfjs +[download-image]: https://img.shields.io/npm/dm/netcdfjs.svg diff --git a/demo/agilent.js b/demo/agilent.js deleted file mode 100644 index d6993fe..0000000 --- a/demo/agilent.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict'; - -const fs = require('fs'); -const join = require('path').join; - -import NetCDFReader from '../index.js' - -const data = fs.readFileSync( - join(__dirname, '../src/__tests__/files/agilent_hplc.cdf') -); - -let reader = new NetCDFReader(data); - -let selectedVariable = reader.variables[4]; - -reader.getDataVariable(selectedVariable); - -for (let variable of reader.variables) { - console.log(variable.name, reader.getDataVariable(variable)); -} - -let ordinates = reader.getDataVariable(reader.variables[5]); -console.log(Math.max(...ordinates)); -console.log(Math.min(...ordinates)); diff --git a/demo/agilent.ts b/demo/agilent.ts new file mode 100644 index 0000000..89fb5d8 --- /dev/null +++ b/demo/agilent.ts @@ -0,0 +1,20 @@ +import { readFileSync as rfs } from 'node:fs'; +import { join } from 'node:path'; + +import { NetCDFReader } from '../src/index'; + +const data = rfs(join(__dirname, '../src/__tests__/files/agilent_hplc.cdf')); + +let reader = new NetCDFReader(data); + +let selectedVariable = reader.variables[4]; + +reader.getDataVariable(selectedVariable); + +for (let variable of reader.variables) { + console.log(variable.name, reader.getDataVariable(variable)); +} + +let ordinates = reader.getDataVariable(reader.variables[5]); +console.log(Math.max(...(ordinates as number[]))); +console.log(Math.min(...(ordinates as number[]))); \ No newline at end of file diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..f47bd57 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,4 @@ +module.exports = { + preset: 'ts-jest/presets/js-with-ts', + testEnvironment: 'node', +}; diff --git a/package.json b/package.json index f2fcc6f..dd12376 100755 --- a/package.json +++ b/package.json @@ -2,18 +2,20 @@ "name": "netcdfjs", "version": "2.0.2", "description": "Read and explore NetCDF files", - "main": "lib/index.js", - "module": "src/index.js", + "main": "./lib/index.js", + "module": "./lib-esm/index.js", + "types": "./lib/index.d.ts", + "files": [ + "src", + "lib", + "lib-esm" + ], "keywords": [ "netcdf", "nc", "data", "format" ], - "files": [ - "src", - "lib" - ], "author": "Miguel Asencio (https://github.com/maasencioh)", "repository": "cheminfo/netcdfjs", "bugs": { @@ -22,24 +24,30 @@ "homepage": "https://github.com/cheminfo/netcdfjs", "license": "MIT", "scripts": { - "build": "cheminfo-build --entry src/index.js --root NetCDF", - "eslint": "eslint src", - "eslint-fix": "npm run eslint 
-- --fix", - "prepack": "rollup -c", + "check-types": "tsc --noEmit", + "clean": "rimraf lib lib-esm", + "eslint": "eslint src --cache", + "eslint:fix": "npm run eslint -- --fix", + "prepack": "npm run tsc", "prettier": "prettier --check src", - "prettier-write": "prettier --write src", - "test": "npm run test-only && npm run eslint && npm run prettier", - "test-only": "jest --coverage" + "prettier:fix": "prettier --write src", + "test": "npm run test-only && npm run eslint && npm run prettier && npm run check-types", + "test-only": "jest --coverage", + "tsc": "npm run clean && npm run tsc-cjs && npm run tsc-esm", + "tsc-cjs": "tsc --project tsconfig.cjs.json", + "tsc-esm": "tsc --project tsconfig.esm.json" }, "devDependencies": { - "@babel/plugin-transform-modules-commonjs": "^7.19.6", - "@types/jest": "^29.2.0", + "@types/jest": "^29.2.3", "cheminfo-build": "^1.1.11", + "cheminfo-types": "^1.4.0", "eslint": "^8.25.0", - "eslint-config-cheminfo": "^8.0.2", - "jest": "^29.2.1", + "eslint-config-cheminfo-typescript": "^11.2.2", + "eslint-plugin-import": "^2.28.0", + "jest": "^29.3.1", "prettier": "^2.7.1", - "rollup": "^3.2.3" + "ts-jest": "^29.0.3", + "typescript": "^4.9.3" }, "dependencies": { "iobuffer": "^5.2.1" diff --git a/rollup.config.mjs b/rollup.config.mjs deleted file mode 100644 index 31d63af..0000000 --- a/rollup.config.mjs +++ /dev/null @@ -1,10 +0,0 @@ -const config = { - input: 'src/index.js', - output: { - file: 'lib/index.js', - format: 'cjs', - exports: 'named', - }, -}; - -export default config; diff --git a/src/__tests__/__snapshots__/toString.test.js.snap b/src/__tests__/__snapshots__/toString.test.ts.snap similarity index 100% rename from src/__tests__/__snapshots__/toString.test.js.snap rename to src/__tests__/__snapshots__/toString.test.ts.snap diff --git a/src/__tests__/attributeExists.test.js b/src/__tests__/attributeExists.test.js deleted file mode 100755 index 267dda2..0000000 --- a/src/__tests__/attributeExists.test.js +++ /dev/null @@ -1,13 +0,0 @@ -import { readFileSync } from "fs"; - -const { NetCDFReader } = require(".."); - -const pathFiles = `${__dirname}/files/`; - -test("attributeExists", () => { - const data = readFileSync(`${pathFiles}P071.CDF`); - - let reader = new NetCDFReader(data); - expect(reader.attributeExists("operator_name")).toBe(true); - expect(reader.attributeExists("operator_nameXX")).toBe(false); -}); diff --git a/src/__tests__/attributeExists.test.ts b/src/__tests__/attributeExists.test.ts new file mode 100755 index 0000000..78ed5f7 --- /dev/null +++ b/src/__tests__/attributeExists.test.ts @@ -0,0 +1,13 @@ +import { readFileSync } from 'fs'; + +import { NetCDFReader } from '../parser'; + +const pathFiles = `${__dirname}/files/`; + +test('attributeExists', () => { + const data = readFileSync(`${pathFiles}P071.CDF`); + + let reader = new NetCDFReader(data); + expect(reader.attributeExists('operator_name')).toBe(true); + expect(reader.attributeExists('operator_nameXX')).toBe(false); +}); diff --git a/src/__tests__/dataVariableExists.test.js b/src/__tests__/dataVariableExists.test.js deleted file mode 100755 index dacdb41..0000000 --- a/src/__tests__/dataVariableExists.test.js +++ /dev/null @@ -1,13 +0,0 @@ -import { readFileSync } from "fs"; - -const { NetCDFReader } = require(".."); - -const pathFiles = `${__dirname}/files/`; - -test("dataVariableExists", () => { - const data = readFileSync(`${pathFiles}P071.CDF`); - - let reader = new NetCDFReader(data); - expect(reader.dataVariableExists("instrument_name")).toBe(true); - 
expect(reader.dataVariableExists("instrument_nameXX")).toBe(false); -}); diff --git a/src/__tests__/dataVariableExists.test.ts b/src/__tests__/dataVariableExists.test.ts new file mode 100755 index 0000000..7ccac0d --- /dev/null +++ b/src/__tests__/dataVariableExists.test.ts @@ -0,0 +1,13 @@ +import { readFileSync } from 'fs'; + +import { NetCDFReader } from '../parser'; + +const pathFiles = `${__dirname}/files/`; + +test('dataVariableExists', () => { + const data = readFileSync(`${pathFiles}P071.CDF`); + + let reader = new NetCDFReader(data); + expect(reader.dataVariableExists('instrument_name')).toBe(true); + expect(reader.dataVariableExists('instrument_nameXX')).toBe(false); +}); diff --git a/src/__tests__/getAttribute.test.js b/src/__tests__/getAttribute.test.js deleted file mode 100755 index 43d17df..0000000 --- a/src/__tests__/getAttribute.test.js +++ /dev/null @@ -1,12 +0,0 @@ -import { readFileSync } from "fs"; - -const { NetCDFReader } = require(".."); - -const pathFiles = `${__dirname}/files/`; - -test("getAttribute", () => { - const data = readFileSync(`${pathFiles}P071.CDF`); - - let reader = new NetCDFReader(data); - expect(reader.getAttribute("operator_name")).toBe("SC"); -}); diff --git a/src/__tests__/getAttribute.test.ts b/src/__tests__/getAttribute.test.ts new file mode 100755 index 0000000..a690da9 --- /dev/null +++ b/src/__tests__/getAttribute.test.ts @@ -0,0 +1,12 @@ +import { readFileSync } from 'fs'; + +import { NetCDFReader } from '../parser'; + +const pathFiles = `${__dirname}/files/`; + +test('getAttribute', () => { + const data = readFileSync(`${pathFiles}P071.CDF`); + + let reader = new NetCDFReader(data); + expect(reader.getAttribute('operator_name')).toBe('SC'); +}); diff --git a/src/__tests__/getDataVariableAsString.test.js b/src/__tests__/getDataVariableAsString.test.js deleted file mode 100755 index 9684900..0000000 --- a/src/__tests__/getDataVariableAsString.test.js +++ /dev/null @@ -1,14 +0,0 @@ -import { readFileSync } from "fs"; - -const { NetCDFReader } = require(".."); - -const pathFiles = `${__dirname}/files/`; - -test("getDataVariableAsString", () => { - const data = readFileSync(`${pathFiles}P071.CDF`); - - let reader = new NetCDFReader(data); - expect(reader.getDataVariableAsString("instrument_name")).toBe( - "Gas Chromatograph" - ); -}); diff --git a/src/__tests__/getDataVariableAsString.test.ts b/src/__tests__/getDataVariableAsString.test.ts new file mode 100755 index 0000000..b37067f --- /dev/null +++ b/src/__tests__/getDataVariableAsString.test.ts @@ -0,0 +1,14 @@ +import { readFileSync } from 'fs'; + +import { NetCDFReader } from '../parser'; + +const pathFiles = `${__dirname}/files/`; + +test('getDataVariableAsString', () => { + const data = readFileSync(`${pathFiles}P071.CDF`); + + let reader = new NetCDFReader(data); + expect(reader.getDataVariableAsString('instrument_name')).toBe( + 'Gas Chromatograph', + ); +}); diff --git a/src/__tests__/index.test.js b/src/__tests__/index.test.js deleted file mode 100755 index c062925..0000000 --- a/src/__tests__/index.test.js +++ /dev/null @@ -1,169 +0,0 @@ -import { readFileSync } from "fs"; - -const { NetCDFReader } = require(".."); - -const pathFiles = `${__dirname}/files/`; - -describe("Read file", () => { - it("Throws on non NetCDF file", () => { - const data = readFileSync(`${pathFiles}not_nc.txt`); - expect(function notValid() { - return new NetCDFReader(data); - }).toThrow("Not a valid NetCDF v3.x file: should start with CDF"); - }); - - it("read header information", () => { - // 
http://www.unidata.ucar.edu/software/netcdf/examples/files.html - // http://www.unidata.ucar.edu/software/netcdf/examples/madis-sao.cdl - const data = readFileSync(`${pathFiles}madis-sao.nc`); - - let reader = new NetCDFReader(data); - expect(reader.version).toBe("classic format"); - expect(reader.recordDimension).toStrictEqual({ - length: 178, - id: 21, - name: "recNum", - recordStep: 1220, - }); - expect(reader.dimensions).toStrictEqual([ - { name: "maxAutoStaLen", size: 6 }, - { name: "maxAutoWeather", size: 5 }, - { name: "maxAutoWeaLen", size: 12 }, - { name: "maxCldTypeLen", size: 5 }, - { name: "maxCloudTypes", size: 5 }, - { name: "maxDataSrcLen", size: 8 }, - { name: "maxRepLen", size: 5 }, - { name: "maxSAOLen", size: 256 }, - { name: "maxSkyCover", size: 5 }, - { name: "maxSkyLen", size: 8 }, - { name: "maxSkyMethLen", size: 3 }, - { name: "maxStaNamLen", size: 5 }, - { name: "maxWeatherNum", size: 5 }, - { name: "maxWeatherLen", size: 40 }, - { name: "QCcheckNum", size: 10 }, - { name: "QCcheckNameLen", size: 60 }, - { name: "ICcheckNum", size: 55 }, - { name: "ICcheckNameLen", size: 72 }, - { name: "maxStaticIds", size: 350 }, - { name: "totalIdLen", size: 6 }, - { name: "nInventoryBins", size: 24 }, - { name: "recNum", size: 0 }, - ]); - - expect(reader.globalAttributes[0]).toStrictEqual({ - name: "cdlDate", - type: "char", - value: "20010327", - }); - expect(reader.globalAttributes[3]).toStrictEqual({ - name: "filePeriod", - type: "int", - value: 3600, - }); - - expect(reader.variables[0]).toStrictEqual({ - name: "nStaticIds", - dimensions: [], - attributes: [ - { - name: "_FillValue", - type: "int", - value: 0, - }, - ], - type: "int", - size: 4, - offset: 39208, - record: false, - }); - expect(reader.variables[11]).toStrictEqual({ - name: "wmoId", - dimensions: [21], - attributes: [ - { name: "long_name", type: "char", value: "WMO numeric station ID" }, - { name: "_FillValue", type: "int", value: -2147483647 }, - { name: "valid_range", type: "int", value: [1, 89999] }, - { name: "reference", type: "char", value: "station table" }, - ], - type: "int", - size: 4, - offset: 48884, - record: true, - }); - }); - - it("read non-record variable", () => { - const data = readFileSync(`${pathFiles}madis-sao.nc`); - let reader = new NetCDFReader(data); - - expect(reader.getDataVariable("nStaticIds")[0]).toBe(145); - }); - - it("read 2 dimensional variable", () => { - const data = readFileSync(`${pathFiles}ichthyop.nc`); - let reader = new NetCDFReader(data); - expect(reader.getDataVariable("time")).toHaveLength(49); - expect(reader.getDataVariable("time")[0]).toBe(1547070300); - expect(reader.getDataVariable("lat")).toHaveLength(49); - expect(reader.getDataVariable("lat")[0]).toHaveLength(1000); - expect(reader.getDataVariable("lat")[0][0]).toBe(53.26256561279297); - }); - - it("read record variable with string", () => { - const data = readFileSync(`${pathFiles}madis-sao.nc`); - let reader = new NetCDFReader(data); - - let record = reader.getDataVariable("wmoId"); - expect(record[0]).toBe(71419); - expect(record[1]).toBe(71415); - expect(record[2]).toBe(71408); - }); - - it("read non-record variable with object", () => { - const data = readFileSync(`${pathFiles}madis-sao.nc`); - let reader = new NetCDFReader(data); - let variables = reader.variables; - - let withString = reader.getDataVariable("staticIds"); - let withObject = reader.getDataVariable(variables[1]); - expect(withString[0]).toBe("W"); - expect(withString[1]).toBe("A"); - expect(withString[2]).toBe("F"); - 
expect(withString[0]).toBe(withObject[0]); - expect(withString[1]).toBe(withObject[1]); - expect(withString[2]).toBe(withObject[2]); - }); - - it("read non-existent variable string", () => { - const data = readFileSync(`${pathFiles}madis-sao.nc`); - let reader = new NetCDFReader(data); - - expect(reader.getDataVariable.bind(reader, "n'importe quoi")).toThrow( - "Not a valid NetCDF v3.x file: variable not found" - ); - }); - - it("read 64 bit offset file", () => { - const data = readFileSync(`${pathFiles}model1_md2.nc`); - let reader = new NetCDFReader(data); - expect(reader.version).toBe("64-bit offset format"); - expect(reader.getDataVariable("cell_angular")[0]).toBe("a"); - expect(reader.getDataVariable("cell_spatial")[0]).toBe("a"); - }); - - it("read agilent hplc file file", () => { - const data = readFileSync(`${pathFiles}agilent_hplc.cdf`); - let reader = new NetCDFReader(data); - - expect(reader.version).toBe("classic format"); - - let variables = []; - for (let variable of reader.variables) { - variables.push(variable); - variable.value = reader.getDataVariable(variable); - } - expect(variables[3].value).toStrictEqual([0.012000000104308128]); - expect(variables).toHaveLength(24); - expect(reader.getDataVariable("ordinate_values")).toHaveLength(4651); - }); -}); diff --git a/src/__tests__/index.test.ts b/src/__tests__/index.test.ts new file mode 100755 index 0000000..86bf0b0 --- /dev/null +++ b/src/__tests__/index.test.ts @@ -0,0 +1,171 @@ +import { readFileSync } from 'fs'; + +import { NetCDFReader } from '../parser'; + +const pathFiles = `${__dirname}/files/`; + +describe('Read file', () => { + it('Throws on non NetCDF file', () => { + const data = readFileSync(`${pathFiles}not_nc.txt`); + expect(function notValid() { + return new NetCDFReader(data); + }).toThrow('Not a valid NetCDF v3.x file: should start with CDF'); + }); + + it('read header information', () => { + // http://www.unidata.ucar.edu/software/netcdf/examples/files.html + // http://www.unidata.ucar.edu/software/netcdf/examples/madis-sao.cdl + const data = readFileSync(`${pathFiles}madis-sao.nc`); + + let reader = new NetCDFReader(data); + expect(reader.version).toBe('classic format'); + expect(reader.recordDimension).toStrictEqual({ + length: 178, + id: 21, + name: 'recNum', + recordStep: 1220, + }); + expect(reader.dimensions).toStrictEqual([ + { name: 'maxAutoStaLen', size: 6 }, + { name: 'maxAutoWeather', size: 5 }, + { name: 'maxAutoWeaLen', size: 12 }, + { name: 'maxCldTypeLen', size: 5 }, + { name: 'maxCloudTypes', size: 5 }, + { name: 'maxDataSrcLen', size: 8 }, + { name: 'maxRepLen', size: 5 }, + { name: 'maxSAOLen', size: 256 }, + { name: 'maxSkyCover', size: 5 }, + { name: 'maxSkyLen', size: 8 }, + { name: 'maxSkyMethLen', size: 3 }, + { name: 'maxStaNamLen', size: 5 }, + { name: 'maxWeatherNum', size: 5 }, + { name: 'maxWeatherLen', size: 40 }, + { name: 'QCcheckNum', size: 10 }, + { name: 'QCcheckNameLen', size: 60 }, + { name: 'ICcheckNum', size: 55 }, + { name: 'ICcheckNameLen', size: 72 }, + { name: 'maxStaticIds', size: 350 }, + { name: 'totalIdLen', size: 6 }, + { name: 'nInventoryBins', size: 24 }, + { name: 'recNum', size: 0 }, + ]); + + expect(reader.globalAttributes[0]).toStrictEqual({ + name: 'cdlDate', + type: 'char', + value: '20010327', + }); + expect(reader.globalAttributes[3]).toStrictEqual({ + name: 'filePeriod', + type: 'int', + value: 3600, + }); + + expect(reader.variables[0]).toStrictEqual({ + name: 'nStaticIds', + dimensions: [], + attributes: [ + { + name: '_FillValue', + type: 'int', 
+          value: 0,
+        },
+      ],
+      type: 'int',
+      size: 4,
+      offset: 39208,
+      record: false,
+    });
+    expect(reader.variables[11]).toStrictEqual({
+      name: 'wmoId',
+      dimensions: [21],
+      attributes: [
+        { name: 'long_name', type: 'char', value: 'WMO numeric station ID' },
+        { name: '_FillValue', type: 'int', value: -2147483647 },
+        { name: 'valid_range', type: 'int', value: [1, 89999] },
+        { name: 'reference', type: 'char', value: 'station table' },
+      ],
+      type: 'int',
+      size: 4,
+      offset: 48884,
+      record: true,
+    });
+  });
+
+  it('read non-record variable', () => {
+    const data = readFileSync(`${pathFiles}madis-sao.nc`);
+    let reader = new NetCDFReader(data);
+
+    expect(reader.getDataVariable('nStaticIds')[0]).toBe(145);
+  });
+
+  it('read 2 dimensional variable', () => {
+    const data = readFileSync(`${pathFiles}ichthyop.nc`);
+    let reader = new NetCDFReader(data);
+    expect(reader.getDataVariable('time')).toHaveLength(49);
+    expect(reader.getDataVariable('time')[0]).toBe(1547070300);
+    expect(reader.getDataVariable('lat')).toHaveLength(49);
+    expect(reader.getDataVariable('lat')[0]).toHaveLength(1000);
+    const lat = reader.getDataVariable('lat')[0] as number[];
+    expect(lat[0]).toBe(53.26256561279297);
+  });
+
+  it('read record variable with string', () => {
+    const data = readFileSync(`${pathFiles}madis-sao.nc`);
+    let reader = new NetCDFReader(data);
+
+    let record = reader.getDataVariable('wmoId');
+    expect(record[0]).toBe(71419);
+    expect(record[1]).toBe(71415);
+    expect(record[2]).toBe(71408);
+  });
+
+  it('read non-record variable with object', () => {
+    const data = readFileSync(`${pathFiles}madis-sao.nc`);
+    let reader = new NetCDFReader(data);
+    let variables = reader.variables;
+
+    let withString = reader.getDataVariable('staticIds');
+    let withObject = reader.getDataVariable(variables[1]);
+    expect(withString[0]).toBe('W');
+    expect(withString[1]).toBe('A');
+    expect(withString[2]).toBe('F');
+    expect(withString[0]).toBe(withObject[0]);
+    expect(withString[1]).toBe(withObject[1]);
+    expect(withString[2]).toBe(withObject[2]);
+  });
+
+  it('read non-existent variable string', () => {
+    const data = readFileSync(`${pathFiles}madis-sao.nc`);
+    let reader = new NetCDFReader(data);
+
+    expect(reader.getDataVariable.bind(reader, "n'importe quoi")).toThrow(
+      'Not a valid NetCDF v3.x file: variable not found',
+    );
+  });
+
+  it('read 64 bit offset file', () => {
+    const data = readFileSync(`${pathFiles}model1_md2.nc`);
+    let reader = new NetCDFReader(data);
+    expect(reader.version).toBe('64-bit offset format');
+    expect(reader.getDataVariable('cell_angular')[0]).toBe('a');
+    expect(reader.getDataVariable('cell_spatial')[0]).toBe('a');
+  });
+
+  it('read agilent hplc file', () => {
+    const data = readFileSync(`${pathFiles}agilent_hplc.cdf`);
+    let reader = new NetCDFReader(data);
+
+    expect(reader.version).toBe('classic format');
+
+    let variables = [];
+
+    for (let variable of reader.variables) {
+      const value = reader.getDataVariable(variable);
+      variables.push({ value, ...variable });
+    }
+    expect(variables[3].value).toStrictEqual([0.012000000104308128]);
+    expect(variables).toHaveLength(24);
+    expect(reader.getDataVariable('ordinate_values')).toHaveLength(4651);
+  });
+});
diff --git a/src/__tests__/toString.test.js b/src/__tests__/toString.test.ts
similarity index 64%
rename from src/__tests__/toString.test.js
rename to src/__tests__/toString.test.ts
index 790971d..96ffc46 100755
--- a/src/__tests__/toString.test.js
+++ b/src/__tests__/toString.test.ts
@@ -1,10 +1,10 @@
-import {
readFileSync } from "fs"; +import { readFileSync } from 'fs'; -const { NetCDFReader } = require(".."); +import { NetCDFReader } from '../parser'; const pathFiles = `${__dirname}/files/`; -test("toString", () => { +test('toString', () => { const data = readFileSync(`${pathFiles}P071.CDF`); let reader = new NetCDFReader(data); diff --git a/src/__tests__/types.test.ts b/src/__tests__/types.test.ts new file mode 100644 index 0000000..513fa06 --- /dev/null +++ b/src/__tests__/types.test.ts @@ -0,0 +1,31 @@ +import { num2bytes, num2str, str2num } from '../types'; + +describe('test type mappings', () => { + test('number to string', () => { + expect(num2str(1)).toBe('byte'); + expect(num2str(2)).toBe('char'); + expect(num2str(3)).toBe('short'); + expect(num2str(4)).toBe('int'); + expect(num2str(5)).toBe('float'); + expect(num2str(6)).toBe('double'); + expect(num2str(7)).toBe('undefined'); + }); + test('num to bytes', () => { + expect(num2bytes(1)).toBe(1); + expect(num2bytes(2)).toBe(1); + expect(num2bytes(3)).toBe(2); + expect(num2bytes(4)).toBe(4); + expect(num2bytes(5)).toBe(4); + expect(num2bytes(6)).toBe(8); + expect(num2bytes(7)).toBe(-1); + }); + test('string to number', () => { + expect(str2num('byte')).toBe(1); + expect(str2num('char')).toBe(2); + expect(str2num('short')).toBe(3); + expect(str2num('int')).toBe(4); + expect(str2num('float')).toBe(5); + expect(str2num('double')).toBe(6); + expect(str2num('undefined')).toBe(-1); + }); +}); diff --git a/src/data.js b/src/data.js deleted file mode 100644 index 1b7f1a7..0000000 --- a/src/data.js +++ /dev/null @@ -1,56 +0,0 @@ -import { num2bytes, str2num, readType } from "./types.js"; - -// const STREAMING = 4294967295; - -/** - * Read data for the given non-record variable - * @ignore - * @param {IOBuffer} buffer - Buffer for the file data - * @param {object} variable - Variable metadata - * @return {Array} - Data of the element - */ -export function nonRecord(buffer, variable) { - // variable type - const type = str2num(variable.type); - - // size of the data - let size = variable.size / num2bytes(type); - - // iterates over the data - let data = new Array(size); - for (let i = 0; i < size; i++) { - data[i] = readType(buffer, type, 1); - } - - return data; -} - -/** - * Read data for the given record variable - * @ignore - * @param {IOBuffer} buffer - Buffer for the file data - * @param {object} variable - Variable metadata - * @param {object} recordDimension - Record dimension metadata - * @return {Array} - Data of the element - */ -export function record(buffer, variable, recordDimension) { - // variable type - const type = str2num(variable.type); - const width = variable.size ? 
variable.size / num2bytes(type) : 1;
-
-  // size of the data
-  // TODO streaming data
-  let size = recordDimension.length;
-
-  // iterates over the data
-  let data = new Array(size);
-  const step = recordDimension.recordStep;
-
-  for (let i = 0; i < size; i++) {
-    let currentOffset = buffer.offset;
-    data[i] = readType(buffer, type, width);
-    buffer.seek(currentOffset + step);
-  }
-
-  return data;
-}
diff --git a/src/data.ts b/src/data.ts
new file mode 100644
index 0000000..e394164
--- /dev/null
+++ b/src/data.ts
@@ -0,0 +1,66 @@
+import { IOBuffer } from 'iobuffer';
+
+import { Header } from './header';
+import { num2bytes, str2num, readType } from './types';
+// const STREAMING = 4294967295;
+
+/**
+ * Read data for the given non-record variable
+ * @param buffer - Buffer for the file data
+ * @param variable - Variable metadata
+ * @return - Data of the element
+ */
+export function nonRecord(
+  buffer: IOBuffer,
+  variable: Header['variables'][number],
+): ReturnType<typeof readType>[] {
+  // variable type
+  const type = str2num(variable.type);
+
+  // size of the data
+  const size = variable.size / num2bytes(type);
+
+  // iterates over the data
+  let data = new Array(size);
+  for (let i = 0; i < size; i++) {
+    data[i] = readType(buffer, type, 1);
+  }
+
+  return data;
+}
+
+/**
+ * Read data for the given record variable
+ * @param buffer - Buffer for the file data
+ * @param variable - Variable metadata
+ * @param recordDimension - Record dimension metadata
+ * @return - Data of the element
+ */
+export function record(
+  buffer: IOBuffer,
+  variable: Header['variables'][number],
+  recordDimension: Header['recordDimension'],
+): ReturnType<typeof readType>[] {
+  // variable type
+  const type = str2num(variable.type);
+  const width = variable.size ? variable.size / num2bytes(type) : 1;
+
+  // size of the data
+  // TODO streaming data
+  const size = recordDimension.length;
+
+  // iterates over the data
+  let data = new Array(size);
+  const step = recordDimension.recordStep;
+  if (step) {
+    for (let i = 0; i < size; i++) {
+      let currentOffset = buffer.offset;
+      data[i] = readType(buffer, type, width);
+      buffer.seek(currentOffset + step);
+    }
+  } else {
+    throw new Error('recordDimension.recordStep is undefined');
+  }
+
+  return data;
+}
diff --git a/src/header.js b/src/header.js
deleted file mode 100644
index 66accca..0000000
--- a/src/header.js
+++ /dev/null
@@ -1,236 +0,0 @@
-import { num2str, readType } from "./types.js";
-import { padding, notNetcdf, readName } from "./utils.js";
-
-// Grammar constants
-const ZERO = 0;
-const NC_DIMENSION = 10;
-const NC_VARIABLE = 11;
-const NC_ATTRIBUTE = 12;
-
-/**
- * Read the header of the file
- * @ignore
- * @param {IOBuffer} buffer - Buffer for the file data
- * @param {number} version - Version of the file
- * @return {object} - Object with the fields:
- *  * `recordDimension`: Number with the length of record dimension
- *  * `dimensions`: List of dimensions
- *  * `globalAttributes`: List of global attributes
- *  * `variables`: List of variables
- */
-export function header(buffer, version) {
-  // Length of record dimension
-  // sum of the varSize's of all the record variables.
- let header = { recordDimension: { length: buffer.readUint32() } }; - - // Version - header.version = version; - - // List of dimensions - let dimList = dimensionsList(buffer); - header.recordDimension.id = dimList.recordId; // id of the unlimited dimension - header.recordDimension.name = dimList.recordName; // name of the unlimited dimension - header.dimensions = dimList.dimensions; - - // List of global attributes - header.globalAttributes = attributesList(buffer); - - // List of variables - let variables = variablesList(buffer, dimList.recordId, version); - header.variables = variables.variables; - header.recordDimension.recordStep = variables.recordStep; - - return header; -} - -const NC_UNLIMITED = 0; - -/** - * List of dimensions - * @ignore - * @param {IOBuffer} buffer - Buffer for the file data - * @return {object} - Ojbect containing the following properties: - * * `dimensions` that is an array of dimension object: - * * `name`: String with the name of the dimension - * * `size`: Number with the size of the dimension dimensions: dimensions - * * `recordId`: the id of the dimension that has unlimited size or undefined, - * * `recordName`: name of the dimension that has unlimited size - */ -function dimensionsList(buffer) { - let recordId, recordName; - const dimList = buffer.readUint32(); - let dimensions; - if (dimList === ZERO) { - notNetcdf( - buffer.readUint32() !== ZERO, - "wrong empty tag for list of dimensions" - ); - return []; - } else { - notNetcdf(dimList !== NC_DIMENSION, "wrong tag for list of dimensions"); - - // Length of dimensions - const dimensionSize = buffer.readUint32(); - dimensions = new Array(dimensionSize); - for (let dim = 0; dim < dimensionSize; dim++) { - // Read name - let name = readName(buffer); - - // Read dimension size - const size = buffer.readUint32(); - if (size === NC_UNLIMITED) { - // in netcdf 3 one field can be of size unlimmited - recordId = dim; - recordName = name; - } - - dimensions[dim] = { - name, - size, - }; - } - } - return { - dimensions, - recordId, - recordName, - }; -} - -/** - * List of attributes - * @ignore - * @param {IOBuffer} buffer - Buffer for the file data - * @return {Array} - List of attributes with: - * * `name`: String with the name of the attribute - * * `type`: String with the type of the attribute - * * `value`: A number or string with the value of the attribute - */ -function attributesList(buffer) { - const gAttList = buffer.readUint32(); - let attributes; - if (gAttList === ZERO) { - notNetcdf( - buffer.readUint32() !== ZERO, - "wrong empty tag for list of attributes" - ); - return []; - } else { - notNetcdf(gAttList !== NC_ATTRIBUTE, "wrong tag for list of attributes"); - - // Length of attributes - const attributeSize = buffer.readUint32(); - attributes = new Array(attributeSize); - for (let gAtt = 0; gAtt < attributeSize; gAtt++) { - // Read name - let name = readName(buffer); - - // Read type - let type = buffer.readUint32(); - notNetcdf(type < 1 || type > 6, `non valid type ${type}`); - - // Read attribute - let size = buffer.readUint32(); - let value = readType(buffer, type, size); - - // Apply padding - padding(buffer); - - attributes[gAtt] = { - name, - type: num2str(type), - value, - }; - } - } - return attributes; -} - -/** - * List of variables - * @ignore - * @param {IOBuffer} buffer - Buffer for the file data - * @param {number} recordId - Id of the unlimited dimension (also called record dimension) - * This value may be undefined if there is no unlimited dimension - * @param {number} version - 
Version of the file - * @return {object} - Number of recordStep and list of variables with: - * * `name`: String with the name of the variable - * * `dimensions`: Array with the dimension IDs of the variable - * * `attributes`: Array with the attributes of the variable - * * `type`: String with the type of the variable - * * `size`: Number with the size of the variable - * * `offset`: Number with the offset where of the variable begins - * * `record`: True if is a record variable, false otherwise (unlimited size) - */ - -function variablesList(buffer, recordId, version) { - const varList = buffer.readUint32(); - let recordStep = 0; - let variables; - if (varList === ZERO) { - notNetcdf( - buffer.readUint32() !== ZERO, - "wrong empty tag for list of variables" - ); - return []; - } else { - notNetcdf(varList !== NC_VARIABLE, "wrong tag for list of variables"); - - // Length of variables - const variableSize = buffer.readUint32(); - variables = new Array(variableSize); - for (let v = 0; v < variableSize; v++) { - // Read name - let name = readName(buffer); - - // Read dimensionality of the variable - const dimensionality = buffer.readUint32(); - - // Index into the list of dimensions - let dimensionsIds = new Array(dimensionality); - for (let dim = 0; dim < dimensionality; dim++) { - dimensionsIds[dim] = buffer.readUint32(); - } - - // Read variables size - let attributes = attributesList(buffer); - - // Read type - let type = buffer.readUint32(); - notNetcdf(type < 1 && type > 6, `non valid type ${type}`); - - // Read variable size - // The 32-bit varSize field is not large enough to contain the size of variables that require - // more than 2^32 - 4 bytes, so 2^32 - 1 is used in the varSize field for such variables. - const varSize = buffer.readUint32(); - - // Read offset - let offset = buffer.readUint32(); - if (version === 2) { - notNetcdf(offset > 0, "offsets larger than 4GB not supported"); - offset = buffer.readUint32(); - } - - let record = false; - // Count amount of record variables - if (typeof recordId !== "undefined" && dimensionsIds[0] === recordId) { - recordStep += varSize; - record = true; - } - variables[v] = { - name, - dimensions: dimensionsIds, - attributes, - type: num2str(type), - size: varSize, - offset, - record, - }; - } - } - - return { - variables, - recordStep, - }; -} diff --git a/src/header.ts b/src/header.ts new file mode 100644 index 0000000..0687939 --- /dev/null +++ b/src/header.ts @@ -0,0 +1,287 @@ +import { IOBuffer } from 'iobuffer'; + +import { num2str, readType } from './types'; +import { padding, notNetcdf, readName } from './utils'; + +// Grammar constants +const ZERO = 0; +const NC_DIMENSION = 10; +const NC_VARIABLE = 11; +const NC_ATTRIBUTE = 12; +const NC_UNLIMITED = 0; + +export interface Header { + recordDimension: { + /** + Length of the record dimension + sum of the varSize's of all the record variables. + */ + length: number; + id?: number; + name?: string; + recordStep?: number; + }; + // Version + version: number; + /* List of dimensions*/ + dimensions: Dimensions['dimensions']; + /* List of global attributes */ + globalAttributes: Attribute[]; + /* List of variables*/ + variables: Variables['variables']; +} +/** + * Reads the file header as @see {@link Header} + * @param buffer - Buffer for the file data + * @param version - Version of the file + * @returns + */ +export function header(buffer: IOBuffer, version: number): Header { + const header: Partial
<Header> = { version };
+
+  const recordDimension: Header['recordDimension'] = {
+    length: buffer.readUint32(),
+  };
+
+  const dimList = dimensionsList(buffer);
+
+  if (!Array.isArray(dimList)) {
+    recordDimension.id = dimList.recordId;
+    recordDimension.name = dimList.recordName;
+    header.dimensions = dimList.dimensions;
+  }
+
+  header.globalAttributes = attributesList(buffer);
+
+  const variables = variablesList(buffer, recordDimension?.id, version);
+  if (!Array.isArray(variables)) {
+    header.variables = variables.variables;
+    recordDimension.recordStep = variables.recordStep;
+  }
+
+  header.recordDimension = recordDimension;
+
+  return header as Header;
+}
+
+export interface Dimensions {
+  /* array of dimension objects */
+  dimensions: {
+    /* name of the dimension */
+    name: string;
+    /* size of the dimension */
+    size: number;
+  }[];
+  /* id of the dimension that has unlimited size, if any */
+  recordId?: number;
+  /* name of the dimension that has unlimited size */
+  recordName?: string;
+}
+
+/**
+ * List of dimensions
+ * @param buffer - Buffer for the file data
+ * @return List of dimensions
+ */
+function dimensionsList(buffer: IOBuffer): Dimensions | [] {
+  const result: Partial<Dimensions> = {};
+  let recordId: number | undefined, recordName: string | undefined;
+
+  const dimList = buffer.readUint32();
+
+  let dimensions: Dimensions['dimensions'];
+
+  if (dimList === ZERO) {
+    notNetcdf(
+      buffer.readUint32() !== ZERO,
+      'wrong empty tag for list of dimensions',
+    );
+    return [];
+  } else {
+    notNetcdf(dimList !== NC_DIMENSION, 'wrong tag for list of dimensions');
+
+    // Length of dimensions
+    const dimensionSize = buffer.readUint32();
+    dimensions = new Array(dimensionSize);
+
+    // populate `name` and `size` for each dimension
+    for (let dim = 0; dim < dimensionSize; dim++) {
+      // Read name
+      const name = readName(buffer);
+
+      // Read dimension size
+      const size = buffer.readUint32();
+      if (size === NC_UNLIMITED) {
+        // in netcdf 3 one field can be of size unlimited
+        recordId = dim;
+        recordName = name;
+      }
+
+      dimensions[dim] = {
+        name,
+        size,
+      };
+    }
+  }
+  if (recordId !== undefined) {
+    result.recordId = recordId;
+  }
+  if (recordName !== undefined) {
+    result.recordName = recordName;
+  }
+  result.dimensions = dimensions;
+  return result as Dimensions;
+}
+
+export interface Attribute {
+  /* name of the attribute */
+  name: string;
+  /* type of the attribute */
+  type: string;
+  /* value of the attribute */
+  value: number | string;
+}
+/**
+ * List of attributes
+ * @param buffer - Buffer for the file data
+ * @return - List of attributes
+ */
+function attributesList(buffer: IOBuffer): Attribute[] {
+  const gAttList = buffer.readUint32();
+  let attributes;
+  if (gAttList === ZERO) {
+    notNetcdf(
+      buffer.readUint32() !== ZERO,
+      'wrong empty tag for list of attributes',
+    );
+    return [];
+  } else {
+    notNetcdf(gAttList !== NC_ATTRIBUTE, 'wrong tag for list of attributes');
+
+    // Length of attributes
+    const attributeSize = buffer.readUint32();
+    attributes = new Array(attributeSize);
+    // Populate `name`, `type` and `value` for each attribute
+    for (let gAtt = 0; gAtt < attributeSize; gAtt++) {
+      // Read name
+      const name = readName(buffer);
+
+      // Read type
+      const type = buffer.readUint32();
+      notNetcdf(type < 1 || type > 6, `non valid type ${type}`);
+
+      // Read attribute
+      const size = buffer.readUint32();
+      const value = readType(buffer, type, size);
+
+      // Apply padding
+      padding(buffer);
+
+      attributes[gAtt] = {
+        name,
+        type: num2str(type),
+        value,
+      };
+    }
+  }
+  return attributes;
+}
+
+export interface Variable {
+  /* name of the variable */
+  name: string;
+  /* Array with the dimension IDs of the variable */
+  dimensions: number[];
+  /* Array with the attributes of the variable */
+  attributes: Attribute[];
+  /* type of the variable */
+  type: string;
+  /* size of the variable */
+  size: number;
+  /* offset where the variable begins */
+  offset: number;
+  /* true if it is a record variable, false otherwise (unlimited size) */
+  record: boolean;
+}
+type Variables = { variables: Variable[]; recordStep: number };
+/**
+ * @param buffer - Buffer for the file data
+ * @param recordId - Id of the unlimited dimension (also called record dimension)
+ * This value may be undefined if there is no unlimited dimension
+ * @param version - Version of the file
+ * @return - The recordStep and the list of variables, @see {@link Variables}
+ */
+function variablesList(
+  buffer: IOBuffer,
+  recordId: number | undefined,
+  version: number,
+): Variables | [] {
+  const varList = buffer.readUint32();
+  let recordStep = 0;
+  let variables;
+  if (varList === ZERO) {
+    notNetcdf(
+      buffer.readUint32() !== ZERO,
+      'wrong empty tag for list of variables',
+    );
+    return [];
+  } else {
+    notNetcdf(varList !== NC_VARIABLE, 'wrong tag for list of variables');
+
+    // Length of variables
+    const variableSize = buffer.readUint32();
+    variables = new Array(variableSize);
+    for (let v = 0; v < variableSize; v++) {
+      // Read name
+      let name = readName(buffer);
+
+      // Read dimensionality of the variable
+      const dimensionality = buffer.readUint32();
+
+      // Index into the list of dimensions
+      let dimensionsIds = new Array(dimensionality);
+      for (let dim = 0; dim < dimensionality; dim++) {
+        dimensionsIds[dim] = buffer.readUint32();
+      }
+
+      // Read the attributes of the variable
+      let attributes = attributesList(buffer);
+
+      // Read type
+      let type = buffer.readUint32();
+      notNetcdf(type < 1 || type > 6, `non valid type ${type}`);
+
+      // Read variable size
+      // The 32-bit varSize field is not large enough to contain the size of variables that require
+      // more than 2^32 - 4 bytes, so 2^32 - 1 is used in the varSize field for such variables.
+ const varSize = buffer.readUint32(); + + // Read offset + let offset = buffer.readUint32(); + if (version === 2) { + notNetcdf(offset > 0, 'offsets larger than 4GB not supported'); + offset = buffer.readUint32(); + } + + let record = false; + // Count amount of record variables + if (typeof recordId !== 'undefined' && dimensionsIds[0] === recordId) { + recordStep += varSize; + record = true; + } + variables[v] = { + name, + dimensions: dimensionsIds, + attributes, + type: num2str(type), + size: varSize, + offset, + record, + }; + } + } + return { + variables, + recordStep, + }; +} diff --git a/src/index.ts b/src/index.ts new file mode 100755 index 0000000..c4af3c8 --- /dev/null +++ b/src/index.ts @@ -0,0 +1,2 @@ +export * from './parser'; +export type { Header, Attribute, Variable } from './header'; diff --git a/src/index.js b/src/parser.ts old mode 100755 new mode 100644 similarity index 53% rename from src/index.js rename to src/parser.ts index 8e330bf..814ae82 --- a/src/index.js +++ b/src/parser.ts @@ -1,27 +1,30 @@ -import { IOBuffer } from "iobuffer"; +import { IOBuffer } from 'iobuffer'; -import { record, nonRecord } from "./data.js"; -import { header } from "./header.js"; -import { toString } from "./toString.js"; -import { notNetcdf } from "./utils.js"; +import { record, nonRecord } from './data'; +import { Header, header } from './header'; +import { toString } from './toString'; +import { notNetcdf } from './utils'; /** * Reads a NetCDF v3.x file - * https://www.unidata.ucar.edu/software/netcdf/docs/file_format_specifications.html - * @param {ArrayBuffer} data - ArrayBuffer or any Typed Array (including Node.js' Buffer from v4) with the data + * [See specification](https://www.unidata.ucar.edu/software/netcdf/docs/file_format_specifications.html) + * @param data - ArrayBuffer or any Typed Array (including Node.js' Buffer from v4) with the data * @constructor */ export class NetCDFReader { - constructor(data) { + public header: Header; + public buffer: IOBuffer; + + constructor(data: BinaryData) { const buffer = new IOBuffer(data); buffer.setBigEndian(); // Validate that it's a NetCDF file - notNetcdf(buffer.readChars(3) !== "CDF", "should start with CDF"); + notNetcdf(buffer.readChars(3) !== 'CDF', 'should start with CDF'); // Check the NetCDF format const version = buffer.readByte(); - notNetcdf(version > 2, "unknown version"); + notNetcdf(version > 2, 'unknown version'); // Read the header this.header = header(buffer, version); @@ -29,13 +32,13 @@ export class NetCDFReader { } /** - * @return {string} - Version for the NetCDF format + * @return - Version for the NetCDF format */ get version() { if (this.header.version === 1) { - return "classic format"; + return 'classic format'; } else { - return "64-bit offset format"; + return '64-bit offset format'; } } @@ -51,7 +54,7 @@ export class NetCDFReader { } /** - * @return {Array} - List of dimensions with: + * @return - Array - List of dimensions with: * * `name`: String with the name of the dimension * * `size`: Number with the size of the dimension */ @@ -60,23 +63,23 @@ export class NetCDFReader { } /** - * @return {Array} - List of global attributes with: + * @return - Array - List of global attributes with: * * `name`: String with the name of the attribute * * `type`: String with the type of the attribute * * `value`: A number or string with the value of the attribute */ - get globalAttributes() { + get globalAttributes(): Header['globalAttributes'] { return this.header.globalAttributes; } /** * Returns the value of an 
attribute - * @param {string} attributeName - * @return {string} Value of the attributeName or null + * @param - AttributeName + * @return - Value of the attributeName or null */ - getAttribute(attributeName) { + getAttribute(attributeName: string) { const attribute = this.globalAttributes.find( - (val) => val.name === attributeName + (val) => val.name === attributeName, ); if (attribute) return attribute.value; return null; @@ -84,41 +87,29 @@ export class NetCDFReader { /** * Returns the value of a variable as a string - * @param {string} variableName - * @return {string} Value of the variable as a string or null + * @param - variableName + * @return - Value of the variable as a string or null */ - getDataVariableAsString(variableName) { + getDataVariableAsString(variableName: string) { const variable = this.getDataVariable(variableName); - if (variable) return variable.join(""); + if (variable) return variable.join(''); return null; } - /** - * @return {Array} - List of variables with: - * * `name`: String with the name of the variable - * * `dimensions`: Array with the dimension IDs of the variable - * * `attributes`: Array with the attributes of the variable - * * `type`: String with the type of the variable - * * `size`: Number with the size of the variable - * * `offset`: Number with the offset where of the variable begins - * * `record`: True if is a record variable, false otherwise - */ get variables() { return this.header.variables; } - toString() { - return toString.call(this); - } + toString = toString; /** * Retrieves the data for a given variable - * @param {string|object} variableName - Name of the variable to search or variable object - * @return {Array} - List with the variable values + * @param variableName - Name of the variable to search or variable object + * @return The variable values */ - getDataVariable(variableName) { + getDataVariable(variableName: string | Header['variables'][number]) { let variable; - if (typeof variableName === "string") { + if (typeof variableName === 'string') { // search the variable variable = this.header.variables.find((val) => { return val.name === variableName; @@ -128,7 +119,9 @@ export class NetCDFReader { } // throws if variable not found - notNetcdf(variable === undefined, `variable not found: ${variableName}`); + if (variable === undefined) { + throw new Error('Not a valid NetCDF v3.x file: variable not found'); + } // go to the offset position this.buffer.seek(variable.offset); @@ -144,10 +137,10 @@ export class NetCDFReader { /** * Check if a dataVariable exists - * @param {string} variableName - Name of the variable to find - * @return {boolean} + * @param variableName - Name of the variable to find + * @return boolean */ - dataVariableExists(variableName) { + dataVariableExists(variableName: string) { const variable = this.header.variables.find((val) => { return val.name === variableName; }); @@ -156,12 +149,12 @@ export class NetCDFReader { /** * Check if an attribute exists - * @param {string} attributeName - Name of the attribute to find - * @return {boolean} + * @param attributeName - Name of the attribute to find + * @return boolean */ - attributeExists(attributeName) { + attributeExists(attributeName: string) { const attribute = this.globalAttributes.find( - (val) => val.name === attributeName + (val) => val.name === attributeName, ); return attribute !== undefined; } diff --git a/src/toString.js b/src/toString.ts similarity index 74% rename from src/toString.js rename to src/toString.ts index b72b8b9..6e72946 100644 --- 
a/src/toString.js +++ b/src/toString.ts @@ -1,20 +1,21 @@ -export function toString() { - let result = []; +import { NetCDFReader } from './parser'; - result.push("DIMENSIONS"); +export function toString(this: NetCDFReader) { + let result = []; + result.push('DIMENSIONS'); for (let dimension of this.dimensions) { result.push(` ${dimension.name.padEnd(30)} = size: ${dimension.size}`); } - result.push(""); - result.push("GLOBAL ATTRIBUTES"); + result.push(''); + result.push('GLOBAL ATTRIBUTES'); for (let attribute of this.globalAttributes) { result.push(` ${attribute.name.padEnd(30)} = ${attribute.value}`); } let variables = JSON.parse(JSON.stringify(this.variables)); - result.push(""); - result.push("VARIABLES:"); + result.push(''); + result.push('VARIABLES:'); for (let variable of variables) { variable.value = this.getDataVariable(variable); let stringify = JSON.stringify(variable.value); @@ -24,5 +25,5 @@ export function toString() { } result.push(` ${variable.name.padEnd(30)} = ${stringify}`); } - return result.join("\n"); + return result.join('\n'); } diff --git a/src/types.js b/src/types.ts similarity index 55% rename from src/types.js rename to src/types.ts index 5856a8a..5833dd1 100644 --- a/src/types.js +++ b/src/types.ts @@ -1,4 +1,4 @@ -import { notNetcdf } from "./utils.js"; +import { IOBuffer } from 'iobuffer'; const types = { BYTE: 1, @@ -11,37 +11,34 @@ const types = { /** * Parse a number into their respective type - * @ignore - * @param {number} type - integer that represents the type - * @return {string} - parsed value of the type + * @param type - integer that represents the type + * @return - parsed value of the type */ -export function num2str(type) { +export function num2str(type: number): string { switch (Number(type)) { case types.BYTE: - return "byte"; + return 'byte'; case types.CHAR: - return "char"; + return 'char'; case types.SHORT: - return "short"; + return 'short'; case types.INT: - return "int"; + return 'int'; case types.FLOAT: - return "float"; + return 'float'; case types.DOUBLE: - return "double"; - /* istanbul ignore next */ + return 'double'; default: - return "undefined"; + return 'undefined'; } } /** * Parse a number type identifier to his size in bytes - * @ignore - * @param {number} type - integer that represents the type - * @return {number} -size of the type + * @param type - integer that represents the type + * @return size of the type */ -export function num2bytes(type) { +export function num2bytes(type: number): number { switch (Number(type)) { case types.BYTE: return 1; @@ -55,7 +52,6 @@ export function num2bytes(type) { return 4; case types.DOUBLE: return 8; - /* istanbul ignore next */ default: return -1; } @@ -63,23 +59,22 @@ export function num2bytes(type) { /** * Reverse search of num2str - * @ignore - * @param {string} type - string that represents the type - * @return {number} - parsed value of the type + * @param type - string that represents the type + * @return parsed value of the type */ -export function str2num(type) { +export function str2num(type: string) { switch (String(type)) { - case "byte": + case 'byte': return types.BYTE; - case "char": + case 'char': return types.CHAR; - case "short": + case 'short': return types.SHORT; - case "int": + case 'int': return types.INT; - case "float": + case 'float': return types.FLOAT; - case "double": + case 'double': return types.DOUBLE; /* istanbul ignore next */ default: @@ -89,12 +84,14 @@ export function str2num(type) { /** * Auxiliary function to read numeric data - * @ignore - * 
@param {number} size - Size of the element to read
- * @param {function} bufferReader - Function to read next value
- * @return {Array|number}
+ * @param size - Size of the element to read
+ * @param bufferReader - Function to read next value
+ * @return
  */
-function readNumber(size, bufferReader) {
+function readNumber(
+  size: number,
+  bufferReader: () => number,
+): number | number[] {
   if (size !== 1) {
     let numbers = new Array(size);
     for (let i = 0; i < size; i++) {
@@ -108,16 +105,19 @@
 /**
  * Given a type and a size reads the next element
- * @ignore
- * @param {IOBuffer} buffer - Buffer for the file data
- * @param {number} type - Type of the data to read
- * @param {number} size - Size of the element to read
- * @return {string|Array|number}
+ * @param buffer - Buffer for the file data
+ * @param type - Type of the data to read
+ * @param size - Size of the element to read
+ * @return
  */
-export function readType(buffer, type, size) {
+export function readType(
+  buffer: IOBuffer,
+  type: number,
+  size: number,
+): string | number | number[] {
   switch (type) {
     case types.BYTE:
-      return buffer.readBytes(size);
+      return Array.from(buffer.readBytes(size));
    case types.CHAR:
       return trimNull(buffer.readChars(size));
     case types.SHORT:
@@ -128,20 +128,17 @@
       return readNumber(size, buffer.readFloat32.bind(buffer));
     case types.DOUBLE:
       return readNumber(size, buffer.readFloat64.bind(buffer));
-    /* istanbul ignore next */
     default:
-      notNetcdf(true, `non valid type ${type}`);
-      return undefined;
+      throw new Error(`non valid type ${type}`);
   }
 }
 
 /**
  * Removes the null terminating value
- * @ignore
- * @param {string} value - String to trim
- * @return {string} - Trimmed string
+ * @param value - String to trim
+ * @return - Trimmed string
  */
-function trimNull(value) {
+function trimNull(value: string): string {
   if (value.charCodeAt(value.length - 1) === 0) {
     return value.substring(0, value.length - 1);
   }
diff --git a/src/utils.js b/src/utils.js
deleted file mode 100644
index dd8c911..0000000
--- a/src/utils.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Throws a non-valid NetCDF exception if the statement it's true
- * @ignore
- * @param {boolean} statement - Throws if true
- * @param {string} reason - Reason to throw
- */
-export function notNetcdf(statement, reason) {
-  if (statement) {
-    throw new TypeError(`Not a valid NetCDF v3.x file: ${reason}`);
-  }
-}
-
-/**
- * Moves 1, 2, or 3 bytes to next 4-byte boundary
- * @ignore
- * @param {IOBuffer} buffer - Buffer for the file data
- */
-export function padding(buffer) {
-  if (buffer.offset % 4 !== 0) {
-    buffer.skip(4 - (buffer.offset % 4));
-  }
-}
-
-/**
- * Reads the name
- * @ignore
- * @param {IOBuffer} buffer - Buffer for the file data
- * @return {string} - Name
- */
-export function readName(buffer) {
-  // Read name
-  let nameLength = buffer.readUint32();
-  let name = buffer.readChars(nameLength);
-
-  // validate name
-  // TODO
-
-  // Apply padding
-  padding(buffer);
-  return name;
-}
diff --git a/src/utils.ts b/src/utils.ts
new file mode 100644
index 0000000..cc2fee6
--- /dev/null
+++ b/src/utils.ts
@@ -0,0 +1,39 @@
+import { IOBuffer } from 'iobuffer';
+/**
+ * Throws a non-valid NetCDF exception if the statement is true
+ * @ignore
+ * @param statement - Throws if true
+ * @param reason - Reason to throw
+ */
+export function notNetcdf(statement: boolean, reason: string) {
+  if (statement) {
+    throw new TypeError(`Not a valid NetCDF v3.x file: 
${reason}`); + } +} + +/** + * Moves 1, 2, or 3 bytes to next 4-byte boundary + * @param buffer - Buffer for the file data + */ +export function padding(buffer: IOBuffer) { + if (buffer.offset % 4 !== 0) { + buffer.skip(4 - (buffer.offset % 4)); + } +} + +/** + * Reads the name + * @param buffer - Buffer for the file data + * @return Name + */ +export function readName(buffer: IOBuffer) { + // Read name + const nameLength = buffer.readUint32(); + const name = buffer.readChars(nameLength); + + // validate name + // TODO + // Apply padding + padding(buffer); + return name; +} diff --git a/tsconfig.cjs.json b/tsconfig.cjs.json new file mode 100644 index 0000000..3b9e100 --- /dev/null +++ b/tsconfig.cjs.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "commonjs", + "declaration": true, + "declarationMap": true + }, + "exclude": ["./src/**/__tests__"] +} diff --git a/tsconfig.esm.json b/tsconfig.esm.json new file mode 100644 index 0000000..050b45d --- /dev/null +++ b/tsconfig.esm.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.cjs.json", + "compilerOptions": { + "module": "es2020", + "outDir": "lib-esm" + } +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..4a1e339 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "esModuleInterop": true, + "moduleResolution": "node", + "outDir": "lib", + "sourceMap": true, + "strict": true, + "target": "es2020" + }, + "include": ["./src/**/*"] +}
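
For reference, here is a minimal sketch of how the migrated package can be consumed once this build is published. It is illustrative only: it assumes `netcdfjs` is installed from npm and that a NetCDF-3 file is available on disk (the `madis-sao.nc` test fixture and its `cdlDate` attribute, both referenced by the tests above, are used as example names). Only exports visible in this diff are used: `NetCDFReader` and the `Variable` type re-exported from `src/index.ts`.

```ts
import { readFileSync } from 'node:fs';

import { NetCDFReader, Variable } from 'netcdfjs';

// A Node.js Buffer is a typed array view, so it satisfies the
// BinaryData parameter of the new constructor signature.
const reader = new NetCDFReader(readFileSync('madis-sao.nc'));

// 'classic format' or '64-bit offset format'
console.log(reader.version);

// Global attributes are Attribute objects ({ name, type, value }).
if (reader.attributeExists('cdlDate')) {
  console.log(reader.getAttribute('cdlDate'));
}

// reader.variables is typed as Variable[]; getDataVariable accepts
// either a variable name or the Variable object itself.
const variables: Variable[] = reader.variables;
for (const variable of variables) {
  if (!variable.record) {
    console.log(variable.name, reader.getDataVariable(variable));
  }
}
```

Because of the dual `tsc-cjs`/`tsc-esm` build wired up in `package.json` (`main` pointing at `lib`, `module` at `lib-esm`, `types` at the generated declarations), the same import should work from both `require`-based and ESM consumers.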