diff --git a/.github/workflows/pull-request.yaml b/.github/workflows/pull-request.yaml index 0d2d4bd..c8f9029 100644 --- a/.github/workflows/pull-request.yaml +++ b/.github/workflows/pull-request.yaml @@ -38,6 +38,7 @@ jobs: runs-on: ubuntu-latest env: DUNE_API_KEY: ${{ secrets.DUNE_API_KEY }} + DUNE_API_KEY_OWNER_HANDLE: ${{ secrets.DUNE_API_KEY_OWNER_HANDLE }} steps: - name: Checkout code diff --git a/src/api/client.ts b/src/api/client.ts index aba2f93..1e81c9f 100644 --- a/src/api/client.ts +++ b/src/api/client.ts @@ -26,6 +26,8 @@ import { TableAPI } from "./table"; import { CustomAPI } from "./custom"; import { UsageAPI } from "./usage"; import { PipelineAPI } from "./pipeline"; +import { DatasetAPI } from "./dataset"; +import { UploadsAPI } from "./uploads"; import { deprecationWarning } from "../deprecation"; /// Various states of query execution that are "terminal". @@ -45,7 +47,7 @@ export class DuneClient { exec: ExecutionAPI; /// Query Management Interface. query: QueryAPI; - /// Table Management Interface + /// Table Management Interface (deprecated, use uploads instead) table: TableAPI; /// Custom Endpoint Interface custom: CustomAPI; @@ -53,6 +55,10 @@ export class DuneClient { usage: UsageAPI; /// Pipeline Interface pipeline: PipelineAPI; + /// Dataset Interface + dataset: DatasetAPI; + /// Uploads Interface + uploads: UploadsAPI; constructor(apiKey: string) { this.exec = new ExecutionAPI(apiKey); @@ -61,6 +67,8 @@ export class DuneClient { this.custom = new CustomAPI(apiKey); this.usage = new UsageAPI(apiKey); this.pipeline = new PipelineAPI(apiKey); + this.dataset = new DatasetAPI(apiKey); + this.uploads = new UploadsAPI(apiKey); } /** diff --git a/src/api/dataset.ts b/src/api/dataset.ts new file mode 100644 index 0000000..b014763 --- /dev/null +++ b/src/api/dataset.ts @@ -0,0 +1,12 @@ +import { Router } from "./router"; +import { DatasetResponse, ListDatasetsArgs, ListDatasetsResponse } from "../types"; + +export class DatasetAPI extends Router { 
+ async list(args?: ListDatasetsArgs): Promise<ListDatasetsResponse> { + return this._get<ListDatasetsResponse>("datasets", args); + } + + async getBySlug(slug: string): Promise<DatasetResponse> { + return this._get<DatasetResponse>(`datasets/${slug}`); + } +} diff --git a/src/api/index.ts b/src/api/index.ts index 6dedadf..d883733 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -1,8 +1,10 @@ export * from "./client"; export * from "./custom"; +export * from "./dataset"; export * from "./execution"; export * from "./pipeline"; export * from "./query"; export * from "./router"; export * from "./table"; +export * from "./uploads"; export * from "./usage"; diff --git a/src/api/table.ts b/src/api/table.ts index c01425a..dda4583 100644 --- a/src/api/table.ts +++ b/src/api/table.ts @@ -11,6 +11,7 @@ import { DeleteTableResult, } from "../types"; import { withDefaults } from "../utils"; +import { deprecationWarning } from "../deprecation"; /** * Table Management Interface (includes uploadCSV) @@ -24,8 +25,10 @@ export class TableAPI extends Router { * * @param args UploadCSVParams relevant fields related to dataset upload. * @returns boolean representing if upload was successful. + * @deprecated Use uploads.uploadCsv() instead. The /v1/table endpoints are deprecated in favor of /v1/uploads. */ async uploadCsv(args: UploadCSVArgs): Promise<boolean> { + deprecationWarning("table.uploadCsv", "uploads.uploadCsv"); const response = await this.post<SuccessResponse>("table/upload/csv", args); try { return Boolean(response.success); @@ -44,10 +47,12 @@ * * The only limitations are: * - If a table already exists with the same name, the request will fail. - * - Column names in the table can’t start with a special character or a digit. + * - Column names in the table can't start with a special character or a digit. * @param args + * @deprecated Use uploads.create() instead. The /v1/table endpoints are deprecated in favor of /v1/uploads. 
*/ async create(args: CreateTableArgs): Promise<CreateTableResult> { + deprecationWarning("table.create", "uploads.create"); return this.post<CreateTableResult>( "table/create", withDefaults(args, { description: "", is_private: false }), @@ -59,8 +64,10 @@ * Delete a Dune table with the specified name and namespace. * * To be able to delete a table, it must have been created with the /create endpoint. + * @deprecated Use uploads.delete() instead. The /v1/table endpoints are deprecated in favor of /v1/uploads. */ async delete(args: DeleteTableArgs): Promise<DeleteTableResult> { + deprecationWarning("table.delete", "uploads.delete"); const route = `table/${args.namespace}/${args.table_name}`; return this._delete<DeleteTableResult>(route); } @@ -73,8 +80,10 @@ * - The file has to have the same schema as the table * @param args * @returns + * @deprecated Use uploads.insert() instead. The /v1/table endpoints are deprecated in favor of /v1/uploads. */ async insert(args: InsertTableArgs): Promise<InsertTableResult> { + deprecationWarning("table.insert", "uploads.insert"); return this.post<InsertTableResult>( `table/${args.namespace}/${args.table_name}/insert`, args.data, diff --git a/src/api/uploads.ts b/src/api/uploads.ts new file mode 100644 index 0000000..f8fe2eb --- /dev/null +++ b/src/api/uploads.ts @@ -0,0 +1,60 @@ +import { Router } from "./router"; +import { + CreateTableResult, + SuccessResponse, + UploadCSVArgs, + CreateTableArgs, + InsertTableArgs, + InsertTableResult, + DeleteTableArgs, + DeleteTableResult, + TableListResponse, + ListUploadsArgs, + ClearTableArgs, + TableClearResponse, + DuneError, +} from "../types"; +import { withDefaults } from "../utils"; + +export class UploadsAPI extends Router { + async list(args?: ListUploadsArgs): Promise<TableListResponse> { + return this._get<TableListResponse>("uploads", args); + } + + async create(args: CreateTableArgs): Promise<CreateTableResult> { + return this.post<CreateTableResult>( + "uploads", + withDefaults(args, { description: "", is_private: false }), + ); + } + + async uploadCsv(args: UploadCSVArgs): Promise<boolean> { + 
const response = await this.post<SuccessResponse>("uploads/csv", args); + try { + return Boolean(response.success); + } catch (error: unknown) { + console.error( + `Upload CSV Error ${error instanceof Error ? error.message : String(error)}`, + ); + throw new DuneError(`UploadCsvResponse ${JSON.stringify(response)}`); + } + } + + async delete(args: DeleteTableArgs): Promise<DeleteTableResult> { + const route = `uploads/${args.namespace}/${args.table_name}`; + return this._delete<DeleteTableResult>(route); + } + + async clear(args: ClearTableArgs): Promise<TableClearResponse> { + const route = `uploads/${args.namespace}/${args.table_name}/clear`; + return this.post<TableClearResponse>(route); + } + + async insert(args: InsertTableArgs): Promise<InsertTableResult> { + return this.post<InsertTableResult>( + `uploads/${args.namespace}/${args.table_name}/insert`, + args.data, + args.content_type, + ); + } +} diff --git a/src/index.ts b/src/index.ts index a78e0fe..f6290c8 100644 --- a/src/index.ts +++ b/src/index.ts @@ -6,6 +6,8 @@ export { TableAPI, UsageAPI, PipelineAPI, + DatasetAPI, + UploadsAPI, } from "./api"; export * from "./types"; export { Paginator } from "./paginator"; diff --git a/src/types/requestArgs.ts b/src/types/requestArgs.ts index 8e50a8d..c801181 100644 --- a/src/types/requestArgs.ts +++ b/src/types/requestArgs.ts @@ -304,6 +304,31 @@ export interface InsertTableArgs { content_type: ContentType; } +export interface ListDatasetsArgs { + /// Number of results to return (default 50, max 250) + limit?: number; + /// Offset for pagination + offset?: number; + /// Filter by owner handle + owner_handle?: string; + /// Filter by dataset types (comma-separated: transformation_view, transformation_table, uploaded_table, decoded_table, spell, dune_table) + type?: string; +} + +export interface ListUploadsArgs { + /// Number of tables to return on a page. Default: 50, max: 10000 + limit?: number; + /// Offset used for pagination. Negative values are treated as 0 + offset?: number; +} + +export interface ClearTableArgs { + /// The namespace of the table to clear (e.g. my_user). 
+ namespace: string; + /// The name of the table to clear (e.g. interest_rates). + table_name: string; +} + export interface Options { /// The page size when retriving results. batchSize?: number; diff --git a/src/types/response.ts b/src/types/response.ts index 196f9f2..d41ac9d 100644 --- a/src/types/response.ts +++ b/src/types/response.ts @@ -247,3 +247,68 @@ export interface UsageResponse { bytes_allowed: number; billing_periods: BillingPeriod[]; } + +export interface DatasetColumnMetadata { + description?: string; + filtering_column?: boolean; +} + +export interface DatasetColumn { + name: string; + type: string; + nullable?: boolean; + metadata?: DatasetColumnMetadata; +} + +export interface DatasetOwner { + handle: string; + type: string; +} + +export interface DatasetResponse { + full_name: string; + type: string; + columns: DatasetColumn[]; + owner: DatasetOwner; + is_private: boolean; + created_at: string; + updated_at: string; + metadata?: Record<string, unknown>; +} + +export interface ListDatasetsResponse { + datasets: DatasetResponse[]; + total: number; +} + +export interface TableOwner { + handle: string; + type: string; +} + +export interface TableColumnInfo { + name: string; + type: string; + nullable?: boolean; + metadata?: Record<string, unknown>; +} + +export interface TableListElement { + full_name: string; + is_private: boolean; + owner: TableOwner; + columns: TableColumnInfo[]; + table_size_bytes?: string; + created_at: string; + updated_at: string; + purged_at?: string; +} + +export interface TableListResponse { + tables: TableListElement[]; + next_offset?: number; +} + +export interface TableClearResponse { + message: string; +} diff --git a/tests/e2e/dataset.spec.ts b/tests/e2e/dataset.spec.ts new file mode 100644 index 0000000..817c242 --- /dev/null +++ b/tests/e2e/dataset.spec.ts @@ -0,0 +1,60 @@ +import log from "loglevel"; +import { DatasetAPI } from "../../src/api"; + +log.setLevel("silent", true); + +const API_KEY = process.env.DUNE_API_KEY!; + +describe("Dataset API", 
() => { + let datasetClient: DatasetAPI; + + beforeAll(() => { + datasetClient = new DatasetAPI(API_KEY); + }); + + it("lists datasets with owner filter", async () => { + const response = await datasetClient.list({ + limit: 5, + owner_handle: "dune", + }); + + expect(response.datasets).toBeDefined(); + expect(Array.isArray(response.datasets)).toBe(true); + + if (response.datasets.length > 0) { + expect(response.datasets[0].owner.handle).toBe("dune"); + } + }); + + it("lists datasets with type filter", async () => { + const response = await datasetClient.list({ + limit: 1, + type: "materialized_view", + }); + + expect(response.datasets).toBeDefined(); + expect(Array.isArray(response.datasets)).toBe(true); + + expect(response.datasets[0].type).toBe("materialized_view"); + }); + + it("gets dataset by slug", async () => { + const dataset = await datasetClient.getBySlug("dex.trades"); + + expect(dataset).toHaveProperty("full_name"); + expect(dataset.full_name).toContain("dex.trades"); + expect(dataset).toHaveProperty("type"); + expect(dataset).toHaveProperty("columns"); + expect(dataset).toHaveProperty("owner"); + expect(dataset).toHaveProperty("is_private"); + expect(dataset).toHaveProperty("created_at"); + expect(dataset).toHaveProperty("updated_at"); + + expect(Array.isArray(dataset.columns)).toBe(true); + expect(dataset.columns.length).toBeGreaterThan(0); + + const firstColumn = dataset.columns[0]; + expect(firstColumn).toHaveProperty("name"); + expect(firstColumn).toHaveProperty("type"); + }); +}); diff --git a/tests/e2e/uploads.spec.ts b/tests/e2e/uploads.spec.ts new file mode 100644 index 0000000..a41a201 --- /dev/null +++ b/tests/e2e/uploads.spec.ts @@ -0,0 +1,119 @@ +import log from "loglevel"; +import * as fs from "fs/promises"; +import { UploadsAPI } from "../../src/api"; +import { ColumnType, ContentType } from "../../src"; + +log.setLevel("silent", true); + +const API_KEY = process.env.DUNE_API_KEY!; +const USER_NAME = process.env.DUNE_API_KEY_OWNER_HANDLE 
|| "your_username"; + +describe("Uploads API", () => { + let uploadsClient: UploadsAPI; + let namespace: string; + const table_name = "uploads_e2e_test"; + + beforeAll(() => { + uploadsClient = new UploadsAPI(API_KEY); + namespace = USER_NAME; + }); + + beforeEach((done) => { + setTimeout(done, 1000); + }); + + it("lists uploaded tables", async () => { + const response = await uploadsClient.list({ limit: 10 }); + + expect(response).toHaveProperty("tables"); + expect(Array.isArray(response.tables)).toBe(true); + + if (response.tables.length > 0) { + const firstTable = response.tables[0]; + expect(firstTable).toHaveProperty("full_name"); + expect(firstTable).toHaveProperty("is_private"); + expect(firstTable).toHaveProperty("owner"); + expect(firstTable).toHaveProperty("columns"); + expect(firstTable).toHaveProperty("created_at"); + expect(firstTable).toHaveProperty("updated_at"); + } + }); + + it("uploads CSV", async () => { + const public_success = await uploadsClient.uploadCsv({ + table_name: "ts_client_uploads_test", + description: "testing csv upload from node via uploads API", + data: "column1,column2\nvalue1,value2\nvalue3,value4", + }); + expect(public_success).toEqual(true); + + const private_success = await uploadsClient.uploadCsv({ + table_name: "ts_client_uploads_test_private", + data: "column1,column2\nvalue1,value2\nvalue3,value4", + is_private: true, + }); + expect(private_success).toEqual(true); + }); + + it("creates table", async () => { + const createResult = await uploadsClient.create({ + namespace, + table_name, + description: "e2e test table via uploads API", + schema: [ + { name: "date", type: ColumnType.Timestamp }, + { name: "dgs10", type: ColumnType.Double }, + ], + is_private: false, + }); + + expect(createResult).toMatchObject({ + namespace, + table_name, + full_name: `dune.${namespace}.${table_name}`, + example_query: `select * from dune.${namespace}.${table_name} limit 10`, + }); + }); + + it("inserts JSON to Table", async () => { + const 
data: Buffer = await fs.readFile("./tests/fixtures/sample_table_insert.json"); + const insertResult = await uploadsClient.insert({ + namespace, + table_name, + data, + content_type: ContentType.NDJson, + }); + + expect(insertResult).toMatchObject({ rows_written: 1 }); + }); + + it("inserts CSV to Table", async () => { + const data = await fs.readFile("./tests/fixtures/sample_table_insert.csv"); + const insertResult = await uploadsClient.insert({ + namespace, + table_name, + data, + content_type: ContentType.Csv, + }); + expect(insertResult).toMatchObject({ rows_written: 1 }); + }); + + it("clears table data", async () => { + const result = await uploadsClient.clear({ + namespace, + table_name, + }); + expect(result).toHaveProperty("message"); + expect(result.message).toContain("successfully cleared"); + }); + + it("deletes table", async () => { + const result = await uploadsClient.delete({ + namespace, + table_name, + }); + expect(result).toEqual({ + message: `Table ${namespace}.${table_name} successfully deleted`, + }); + }); +});