diff --git a/api/src/DuckDBAppender.ts b/api/src/DuckDBAppender.ts new file mode 100644 index 00000000..657b9500 --- /dev/null +++ b/api/src/DuckDBAppender.ts @@ -0,0 +1,18 @@ +import * as ddb from '../..'; +import { throwOnFailure } from './throwOnFailure'; + +export class DuckDBAppender { + private readonly appender: ddb.duckdb_appender; + constructor(appender: ddb.duckdb_appender) { + this.appender = appender; + } + public async dispose() { + throwOnFailure(await ddb.duckdb_appender_destroy(this.appender), + 'Failed to destroy appender', () => ddb.duckdb_appender_error(this.appender)); + } + public async flush() { + throwOnFailure(await ddb.duckdb_appender_flush(this.appender), + 'Failed to flush appender', () => ddb.duckdb_appender_error(this.appender)); + } + // TODO +} diff --git a/api/src/DuckDBConnection.ts b/api/src/DuckDBConnection.ts new file mode 100644 index 00000000..001fc4bd --- /dev/null +++ b/api/src/DuckDBConnection.ts @@ -0,0 +1,63 @@ +import * as ddb from '../..'; +import { DuckDBAppender } from './DuckDBAppender'; +import { DuckDBExtractedStatements } from './DuckDBExtractedStatements'; +import { DuckDBInstance } from './DuckDBInstance'; +import { DuckDBPreparedStatement } from './DuckDBPreparedStatement'; +import { DuckDBMaterializedResult } from './DuckDBResult'; +import { throwOnFailure } from './throwOnFailure'; + +export class DuckDBConnection { + private readonly connection: ddb.duckdb_connection; + constructor(connection: ddb.duckdb_connection) { + this.connection = connection; + } + public static async create(instance: DuckDBInstance): Promise { + return instance.connect(); + } + public dispose(): Promise { + return ddb.duckdb_disconnect(this.connection); + } + public interrupt() { + ddb.duckdb_interrupt(this.connection); + } + /** Returns -1 if no progress or a percentage of the progress. 
*/
+  public get progress(): number {
+    return ddb.duckdb_query_progress(this.connection);
+  }
+  public get finished(): boolean {
+    return ddb.duckdb_execution_is_finished(this.connection);
+  }
+  public async run(sql: string): Promise<DuckDBMaterializedResult> {
+    const result = new ddb.duckdb_result;
+    throwOnFailure(await ddb.duckdb_query(this.connection, sql, result),
+      'Failed to query', () => ddb.duckdb_result_error(result),
+      () => ddb.duckdb_destroy_result(result));
+    return new DuckDBMaterializedResult(result);
+  }
+  public async prepare(sql: string): Promise<DuckDBPreparedStatement> {
+    const prepared_statement = new ddb.duckdb_prepared_statement;
+    throwOnFailure(await ddb.duckdb_prepare(this.connection, sql, prepared_statement),
+      'Failed to prepare', () => ddb.duckdb_prepare_error(prepared_statement),
+      () => ddb.duckdb_destroy_prepare(prepared_statement));
+    return new DuckDBPreparedStatement(prepared_statement);
+  }
+  public async extractStatements(sql: string): Promise<DuckDBExtractedStatements> {
+    const extracted_statements = new ddb.duckdb_extracted_statements;
+    const statementCount = await ddb.duckdb_extract_statements(this.connection, sql, extracted_statements);
+    if (statementCount === 0) {
+      try {
+        throw new Error(`Failed to extract statements: ${ddb.duckdb_extract_statements_error(extracted_statements)}`);
+      } finally {
+        ddb.duckdb_destroy_extracted(extracted_statements);
+      }
+    }
+    return new DuckDBExtractedStatements(this.connection, extracted_statements, statementCount);
+  }
+  public async createAppender(schema: string, table: string): Promise<DuckDBAppender> {
+    const appender = new ddb.duckdb_appender;
+    throwOnFailure(await ddb.duckdb_appender_create(this.connection, schema, table, appender),
+      'Failed to create appender', () => ddb.duckdb_appender_error(appender),
+      () => ddb.duckdb_appender_destroy(appender));
+    return new DuckDBAppender(appender);
+  }
+}
diff --git a/api/src/DuckDBDataChunk.ts b/api/src/DuckDBDataChunk.ts
new file mode 100644
index 00000000..4b6dec96
--- /dev/null
+++ b/api/src/DuckDBDataChunk.ts
@@ -0,0 +1,32 @@
+import * as ddb from '../..';
+import { DuckDBVector } from './DuckDBVector';
+
+export class DuckDBDataChunk {
+  private readonly chunk: ddb.duckdb_data_chunk;
+  constructor(chunk: ddb.duckdb_data_chunk) {
+    this.chunk = chunk;
+  }
+  public static create(): DuckDBDataChunk {
+    // TODO: C API takes raw pointer (list of types)
+    throw new Error('not implemented');
+  }
+  public dispose() {
+    ddb.duckdb_destroy_data_chunk(this.chunk);
+  }
+  public reset() {
+    ddb.duckdb_data_chunk_reset(this.chunk);
+  }
+  public get columnCount(): number {
+    return ddb.duckdb_data_chunk_get_column_count(this.chunk);
+  }
+  public getColumn(columnIndex: number): DuckDBVector<any> {
+    // TODO: cache vectors?
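  // Editorial usage sketch, not part of this diff: one way a caller might read a
  // materialized result chunk by chunk and pull values out of a column vector.
  // `connection` is assumed to be an already-connected DuckDBConnection.
  //
  //   const result = await connection.run('select 42 as num');
  //   for (let chunkIndex = 0; chunkIndex < result.chunkCount; chunkIndex++) {
  //     const chunk = result.getChunk(chunkIndex);
  //     const column = chunk.getColumn(0);
  //     for (let row = 0; row < chunk.rowCount; row++) {
  //       console.log(column.getItem(row)); // null when the validity mask marks the row as NULL
  //     }
  //     chunk.dispose();
  //   }
  //   result.dispose();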
+ return DuckDBVector.create(ddb.duckdb_data_chunk_get_vector(this.chunk, columnIndex), this.rowCount); + } + public get rowCount(): number { + return ddb.duckdb_data_chunk_get_size(this.chunk); + } + public set rowCount(count: number) { + ddb.duckdb_data_chunk_set_size(this.chunk, count); + } +} diff --git a/api/src/DuckDBExtractedStatements.ts b/api/src/DuckDBExtractedStatements.ts new file mode 100644 index 00000000..9b429e32 --- /dev/null +++ b/api/src/DuckDBExtractedStatements.ts @@ -0,0 +1,28 @@ +import * as ddb from '../..'; +import { DuckDBPreparedStatement } from './DuckDBPreparedStatement'; +import { throwOnFailure } from './throwOnFailure'; + +export class DuckDBExtractedStatements { + private readonly connection: ddb.duckdb_connection; + private readonly extracted_statements: ddb.duckdb_extracted_statements; + private readonly statementCount: number; + constructor(connection: ddb.duckdb_connection, extracted_statements: ddb.duckdb_extracted_statements, statementCount: number) { + this.connection = connection; + this.extracted_statements = extracted_statements; + this.statementCount = statementCount; + } + public dispose() { + ddb.duckdb_destroy_extracted(this.extracted_statements); + } + public get count(): number { + return this.statementCount; + } + public async prepare(index: number): Promise { + const prepared_statement = new ddb.duckdb_prepared_statement; + throwOnFailure(await ddb.duckdb_prepare_extracted_statement(this.connection, this.extracted_statements, index, prepared_statement), + 'Failed to prepare extracted statement', () => ddb.duckdb_prepare_error(prepared_statement), + () => ddb.duckdb_destroy_prepare(prepared_statement)); + return new DuckDBPreparedStatement(prepared_statement); + } + // TODO +} diff --git a/api/src/DuckDBInstance.ts b/api/src/DuckDBInstance.ts new file mode 100644 index 00000000..d5485670 --- /dev/null +++ b/api/src/DuckDBInstance.ts @@ -0,0 +1,40 @@ +import * as ddb from '../..'; +import { DuckDBConnection } from './DuckDBConnection'; +import { throwOnFailure } from './throwOnFailure'; + +export class DuckDBInstance { + private readonly db: ddb.duckdb_database; + constructor(db: ddb.duckdb_database) { + this.db = db; + } + public static async create(path: string = ':memory:', options?: Record): Promise { + const db = new ddb.duckdb_database; + if (options) { + const config = new ddb.duckdb_config; + throwOnFailure(ddb.duckdb_create_config(config), + 'Failed to create config', undefined, + () => ddb.duckdb_destroy_config(config)); + for (const optionName in options) { + const optionValue = options[optionName]; + throwOnFailure(ddb.duckdb_set_config(config, optionName, optionValue), + 'Failed to set config option', () => optionName, + () => ddb.duckdb_destroy_config(config)); + } + const errorWrapper = new ddb.out_string_wrapper; + throwOnFailure(await ddb.duckdb_open_ext(path, db, config, errorWrapper), + 'Failed to open', () => ddb.out_get_string(errorWrapper), + () => ddb.duckdb_destroy_config(config)); + } else { + throwOnFailure(await ddb.duckdb_open(path, db), 'Failed to open'); + } + return new DuckDBInstance(db); + } + public dispose(): Promise { + return ddb.duckdb_close(this.db); + } + public async connect(): Promise { + const connection = new ddb.duckdb_connection; + throwOnFailure(await ddb.duckdb_connect(this.db, connection), 'Failed to connect'); + return new DuckDBConnection(connection); + } +} diff --git a/api/src/DuckDBLogicalType.ts b/api/src/DuckDBLogicalType.ts new file mode 100644 index 00000000..e5426c38 --- /dev/null 
+++ b/api/src/DuckDBLogicalType.ts
@@ -0,0 +1,281 @@
+import * as ddb from '../..';
+import {
+  DuckDBBigIntType,
+  DuckDBBitType,
+  DuckDBBlobType,
+  DuckDBBooleanType,
+  DuckDBDateType,
+  DuckDBDecimalType,
+  DuckDBDoubleType,
+  DuckDBEnumType,
+  DuckDBFloatType,
+  DuckDBHugeIntType,
+  DuckDBIntegerType,
+  DuckDBIntervalType,
+  DuckDBListType,
+  DuckDBMapType,
+  DuckDBSmallIntType,
+  DuckDBStructType,
+  DuckDBTimeType,
+  DuckDBTimestampMillisecondsType,
+  DuckDBTimestampNanosecondsType,
+  DuckDBTimestampSecondsType,
+  DuckDBTimestampType,
+  DuckDBTinyIntType,
+  DuckDBType,
+  DuckDBUBigIntType,
+  DuckDBUIntegerType,
+  DuckDBUSmallIntType,
+  DuckDBUTinyIntType,
+  DuckDBUUIDType,
+  DuckDBUnionType,
+  DuckDBVarCharType,
+} from './DuckDBType';
+import { DuckDBTypeId } from './DuckDBTypeId';
+
+export class DuckDBLogicalType {
+  readonly logical_type: ddb.duckdb_logical_type;
+  protected constructor(logical_type: ddb.duckdb_logical_type) {
+    this.logical_type = logical_type;
+  }
+  static consumeAsType(logical_type: ddb.duckdb_logical_type): DuckDBType {
+    const logicalType = DuckDBLogicalType.create(logical_type);
+    const type = logicalType.asType();
+    logicalType.dispose();
+    return type;
+  }
+  static create(logical_type: ddb.duckdb_logical_type): DuckDBLogicalType {
+    switch (ddb.duckdb_get_type_id(logical_type)) {
+      case ddb.duckdb_type.DUCKDB_TYPE_DECIMAL:
+        return new DuckDBDecimalLogicalType(logical_type);
+      case ddb.duckdb_type.DUCKDB_TYPE_ENUM:
+        return new DuckDBEnumLogicalType(logical_type);
+      case ddb.duckdb_type.DUCKDB_TYPE_LIST:
+        return new DuckDBListLogicalType(logical_type);
+      case ddb.duckdb_type.DUCKDB_TYPE_MAP:
+        return new DuckDBMapLogicalType(logical_type);
+      case ddb.duckdb_type.DUCKDB_TYPE_STRUCT:
+        return new DuckDBStructLogicalType(logical_type);
+      case ddb.duckdb_type.DUCKDB_TYPE_UNION:
+        return new DuckDBUnionLogicalType(logical_type);
+      default:
+        return new DuckDBLogicalType(logical_type);
+    }
+  }
+  public static createDecimal(width: number, scale: number): DuckDBDecimalLogicalType {
+    return new DuckDBDecimalLogicalType(ddb.duckdb_create_decimal_type(width, scale));
+  }
+  public static createEnum(values: readonly string[]): DuckDBEnumLogicalType {
+    // TODO: missing C API
+    throw new Error('not implemented');
+  }
+  public static createList(valueType: DuckDBLogicalType): DuckDBListLogicalType {
+    return new DuckDBListLogicalType(ddb.duckdb_create_list_type(valueType.logical_type));
+  }
+  public static createMap(keyType: DuckDBLogicalType, valueType: DuckDBLogicalType): DuckDBMapLogicalType {
+    return new DuckDBMapLogicalType(ddb.duckdb_create_map_type(keyType.logical_type, valueType.logical_type));
+  }
+  public static createStruct(entries: readonly DuckDBLogicalStructEntry[]): DuckDBStructLogicalType {
+    // TODO: C API takes raw pointers (lists of names and types)
+    throw new Error('not implemented');
+  }
+  public static createUnion(alternatives: readonly DuckDBLogicalUnionAlternative[]): DuckDBUnionLogicalType {
+    // TODO: C API takes raw pointers (lists of tags and types)
+    throw new Error('not implemented');
+  }
+  public dispose() {
+    ddb.duckdb_destroy_logical_type(this.logical_type);
+  }
+  public get typeId(): DuckDBTypeId {
+    return ddb.duckdb_get_type_id(this.logical_type) as unknown as DuckDBTypeId;
+  }
+  public asType(): DuckDBType {
+    switch (this.typeId) {
+      case DuckDBTypeId.BOOLEAN:
+        return DuckDBBooleanType.instance;
+      case DuckDBTypeId.TINYINT:
+        return DuckDBTinyIntType.instance;
+      case DuckDBTypeId.SMALLINT:
+        return DuckDBSmallIntType.instance;
+      case DuckDBTypeId.INTEGER:
+        return DuckDBIntegerType.instance;
+      case DuckDBTypeId.BIGINT:
+        return DuckDBBigIntType.instance;
+      case DuckDBTypeId.UTINYINT:
+        return DuckDBUTinyIntType.instance;
+      case DuckDBTypeId.USMALLINT:
+        return DuckDBUSmallIntType.instance;
+      case DuckDBTypeId.UINTEGER:
+        return DuckDBUIntegerType.instance;
+      case DuckDBTypeId.UBIGINT:
+        return DuckDBUBigIntType.instance;
+      case DuckDBTypeId.FLOAT:
+        return DuckDBFloatType.instance;
+      case DuckDBTypeId.DOUBLE:
+        return DuckDBDoubleType.instance;
+      case DuckDBTypeId.TIMESTAMP:
+        return DuckDBTimestampType.instance;
+      case DuckDBTypeId.DATE:
+        return DuckDBDateType.instance;
+      case DuckDBTypeId.TIME:
+        return DuckDBTimeType.instance;
+      case DuckDBTypeId.INTERVAL:
+        return DuckDBIntervalType.instance;
+      case DuckDBTypeId.HUGEINT:
+        return DuckDBHugeIntType.instance;
+      case DuckDBTypeId.VARCHAR:
+        return DuckDBVarCharType.instance;
+      case DuckDBTypeId.BLOB:
+        return DuckDBBlobType.instance;
+      case DuckDBTypeId.DECIMAL:
+        throw new Error('Expected override');
+      case DuckDBTypeId.TIMESTAMP_S:
+        return DuckDBTimestampSecondsType.instance;
+      case DuckDBTypeId.TIMESTAMP_MS:
+        return DuckDBTimestampMillisecondsType.instance;
+      case DuckDBTypeId.TIMESTAMP_NS:
+        return DuckDBTimestampNanosecondsType.instance;
+      case DuckDBTypeId.ENUM:
+        throw new Error('Expected override');
+      case DuckDBTypeId.LIST:
+        throw new Error('Expected override');
+      case DuckDBTypeId.STRUCT:
+        throw new Error('Expected override');
+      case DuckDBTypeId.MAP:
+        throw new Error('Expected override');
+      case DuckDBTypeId.UUID:
+        return DuckDBUUIDType.instance;
+      case DuckDBTypeId.UNION:
+        throw new Error('Expected override');
+      case DuckDBTypeId.BIT:
+        return DuckDBBitType.instance;
+      default:
+        throw new Error('Unexpected type id');
+    }
+  }
+}
+
+export class DuckDBDecimalLogicalType extends DuckDBLogicalType {
+  public get width(): number {
+    return ddb.duckdb_decimal_width(this.logical_type);
+  }
+  public get scale(): number {
+    return ddb.duckdb_decimal_scale(this.logical_type);
+  }
+  public get internalTypeId(): DuckDBTypeId {
+    return ddb.duckdb_decimal_internal_type(this.logical_type) as unknown as DuckDBTypeId;
+  }
+  public override asType(): DuckDBDecimalType {
+    return new DuckDBDecimalType(this.width, this.scale);
+  }
+}
+
+export class DuckDBEnumLogicalType extends DuckDBLogicalType {
+  public get valueCount(): number {
+    return ddb.duckdb_enum_dictionary_size(this.logical_type);
+  }
+  public value(index: number): string {
+    return ddb.duckdb_enum_dictionary_value(this.logical_type, index);
+  }
+  public values(): readonly string[] {
+    const values: string[] = [];
+    const count = this.valueCount;
+    for (let i = 0; i < count; i++) {
+      values.push(this.value(i));
+    }
+    return values;
+  }
+  public get internalTypeId(): DuckDBTypeId {
+    return ddb.duckdb_enum_internal_type(this.logical_type) as unknown as DuckDBTypeId;
+  }
+  public override asType(): DuckDBEnumType {
+    return new DuckDBEnumType(this.values());
+  }
+}
+
+export class DuckDBListLogicalType extends DuckDBLogicalType {
+  public get valueType(): DuckDBLogicalType {
+    return DuckDBLogicalType.create(ddb.duckdb_list_type_child_type(this.logical_type));
+  }
+  public override asType(): DuckDBListType {
+    return new DuckDBListType(this.valueType.asType());
+  }
+}
+
+export class DuckDBMapLogicalType extends DuckDBLogicalType {
+  public get keyType(): DuckDBLogicalType {
+    return DuckDBLogicalType.create(ddb.duckdb_map_type_key_type(this.logical_type));
+  }
+  public get valueType(): DuckDBLogicalType {
+    return
DuckDBLogicalType.create(ddb.duckdb_map_type_value_type(this.logical_type)); + } + public override asType(): DuckDBMapType { + return new DuckDBMapType(this.keyType.asType(), this.valueType.asType()); + } +} + +export interface DuckDBLogicalStructEntry { + readonly name: string; + readonly valueType: DuckDBLogicalType; +} + +export class DuckDBStructLogicalType extends DuckDBLogicalType { + public get entryCount(): number { + return ddb.duckdb_struct_type_child_count(this.logical_type); + } + public entryName(index: number): string { + return ddb.duckdb_struct_type_child_name(this.logical_type, index); + } + public entryValueType(index: number): DuckDBLogicalType { + return DuckDBLogicalType.create(ddb.duckdb_struct_type_child_type(this.logical_type, index)); + } + public entries(): readonly DuckDBLogicalStructEntry[] { + const entries: DuckDBLogicalStructEntry[] = []; + const count = this.entryCount; + for (let i = 0; i < count; i++) { + const name = this.entryName(i); + const valueType = this.entryValueType(i); + entries.push({ name, valueType }); + } + return entries; + } + public override asType(): DuckDBStructType { + return new DuckDBStructType(this.entries().map(({ name, valueType }) => ({ + name, + valueType: valueType.asType(), + }))); + } +} + +export interface DuckDBLogicalUnionAlternative { + readonly tag: string; + readonly valueType: DuckDBLogicalType; +} + +export class DuckDBUnionLogicalType extends DuckDBLogicalType { + public get alternativeCount(): number { + return ddb.duckdb_union_type_member_count(this.logical_type); + } + public alternativeTag(index: number): string { + return ddb.duckdb_union_type_member_name(this.logical_type, index); + } + public alternativeValueType(index: number): DuckDBLogicalType { + return DuckDBLogicalType.create(ddb.duckdb_union_type_member_type(this.logical_type, index)); + } + public alternatives(): readonly DuckDBLogicalUnionAlternative[] { + const alternatives: DuckDBLogicalUnionAlternative[] = []; + const count = this.alternativeCount; + for (let i = 0; i < count; i++) { + const tag = this.alternativeTag(i); + const valueType = this.alternativeValueType(i); + alternatives.push({ tag, valueType }); + } + return alternatives; + } + public override asType(): DuckDBUnionType { + return new DuckDBUnionType(this.alternatives().map(({ tag, valueType }) => ({ + tag, + valueType: valueType.asType(), + }))); + } +} diff --git a/api/src/DuckDBPendingResult.ts b/api/src/DuckDBPendingResult.ts new file mode 100644 index 00000000..93120716 --- /dev/null +++ b/api/src/DuckDBPendingResult.ts @@ -0,0 +1,53 @@ +import * as ddb from '../..'; +import { DuckDBMaterializedResult, DuckDBStreamingResult } from './DuckDBResult'; +import { throwOnFailure } from './throwOnFailure'; + +// Values match similar enum in C API. 
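// Editorial usage sketch, not part of this diff: a pending result is typically driven
// by calling runTask() until it reports RESULT_READY, then materialized with getResult().
// `prepared` is assumed to be a DuckDBPreparedStatement obtained from DuckDBConnection.prepare.
//
//   const pending = prepared.start();
//   while (pending.runTask() !== DuckDBPendingResultState.RESULT_READY) {
//     // Yield between tasks; NO_TASKS_AVAILABLE may warrant a short wait.
//     await new Promise((resolve) => setTimeout(resolve, 0));
//   }
//   const result = await pending.getResult();
//
// DUCKDB_PENDING_ERROR has no counterpart below because runTask() converts it into a thrown Error.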
+export enum DuckDBPendingResultState { + RESULT_READY = 0, + RESULT_NOT_READY = 1, + NO_TASKS_AVAILABLE = 3, +} + +export abstract class DuckDBPendingResult { + protected readonly pending_result: ddb.duckdb_pending_result; + constructor(pending_result: ddb.duckdb_pending_result) { + this.pending_result = pending_result; + } + public dispose() { + ddb.duckdb_destroy_pending(this.pending_result); + } + public runTask(): DuckDBPendingResultState { + const pending_state = ddb.duckdb_pending_execute_task(this.pending_result); + switch (pending_state) { + case ddb.duckdb_pending_state.DUCKDB_PENDING_RESULT_READY: + return DuckDBPendingResultState.RESULT_READY; + case ddb.duckdb_pending_state.DUCKDB_PENDING_RESULT_NOT_READY: + return DuckDBPendingResultState.RESULT_NOT_READY; + case ddb.duckdb_pending_state.DUCKDB_PENDING_ERROR: + throw new Error(`Failure running pending result task: ${ddb.duckdb_pending_error(this.pending_result)}`); + case ddb.duckdb_pending_state.DUCKDB_PENDING_NO_TASKS_AVAILABLE: + return DuckDBPendingResultState.NO_TASKS_AVAILABLE; + default: + throw new Error(`Unexpected pending state: ${pending_state}`); + } + } +} + +export class DuckDBPendingMaterializedResult extends DuckDBPendingResult { + public async getResult(): Promise { + const result = new ddb.duckdb_result; + throwOnFailure(await ddb.duckdb_execute_pending(this.pending_result, result), + 'Failed to execute pending materialized result', () => ddb.duckdb_pending_error(this.pending_result)); + return new DuckDBMaterializedResult(result); + } +} + +export class DuckDBPendingStreamingResult extends DuckDBPendingResult { + public async getResult(): Promise { + const result = new ddb.duckdb_result; + throwOnFailure(await ddb.duckdb_execute_pending(this.pending_result, result), + 'Failed to execute pending streaming result', () => ddb.duckdb_pending_error(this.pending_result)); + return new DuckDBStreamingResult(result); + } +} diff --git a/api/src/DuckDBPreparedStatement.ts b/api/src/DuckDBPreparedStatement.ts new file mode 100644 index 00000000..a41f68df --- /dev/null +++ b/api/src/DuckDBPreparedStatement.ts @@ -0,0 +1,114 @@ +import * as ddb from '../..'; +import { DuckDBPendingMaterializedResult, DuckDBPendingStreamingResult } from './DuckDBPendingResult'; +import { DuckDBMaterializedResult } from './DuckDBResult'; +import { DuckDBTypeId } from './DuckDBTypeId'; +import { throwOnFailure } from './throwOnFailure'; + +export class DuckDBPreparedStatement { + private readonly prepared_statement: ddb.duckdb_prepared_statement; + constructor(prepared_statement: ddb.duckdb_prepared_statement) { + this.prepared_statement = prepared_statement; + } + public dispose() { + ddb.duckdb_destroy_prepare(this.prepared_statement); + } + public get parameterCount(): number { + return ddb.duckdb_nparams(this.prepared_statement); + } + public parameterName(parameterIndex: number): string { + return ddb.duckdb_parameter_name(this.prepared_statement, parameterIndex); + } + public parameterTypeId(parameterIndex: number): DuckDBTypeId { + return ddb.duckdb_param_type(this.prepared_statement, parameterIndex) as unknown as DuckDBTypeId; + } + public clearBindings() { + throwOnFailure(ddb.duckdb_clear_bindings(this.prepared_statement), 'Failed to clear bindings'); + } + // TODO: is duckdb_bind_value useful? 
+  // TODO: get parameter index from name (duckdb_bind_parameter_index)
+  public bindBoolean(parameterIndex: number, value: boolean) {
+    throwOnFailure(ddb.duckdb_bind_boolean(this.prepared_statement, parameterIndex, value),
+      'Failed to bind boolean parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindTinyInt(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_int8(this.prepared_statement, parameterIndex, value),
+      'Failed to bind tinyint parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindSmallInt(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_int16(this.prepared_statement, parameterIndex, value),
+      'Failed to bind smallint parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindInteger(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_int32(this.prepared_statement, parameterIndex, value),
+      'Failed to bind integer parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindBigInt(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_int64(this.prepared_statement, parameterIndex, value),
+      'Failed to bind bigint parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  // TODO: bind HUGEINT
+  // TODO: bind DECIMAL
+  public bindUTinyInt(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_uint8(this.prepared_statement, parameterIndex, value),
+      'Failed to bind utinyint parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindUSmallInt(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_uint16(this.prepared_statement, parameterIndex, value),
+      'Failed to bind usmallint parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindUInteger(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_uint32(this.prepared_statement, parameterIndex, value),
+      'Failed to bind uinteger parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindUBigInt(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_uint64(this.prepared_statement, parameterIndex, value),
+      'Failed to bind ubigint parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindFloat(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_float(this.prepared_statement, parameterIndex, value),
+      'Failed to bind float parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  public bindDouble(parameterIndex: number, value: number) {
+    throwOnFailure(ddb.duckdb_bind_double(this.prepared_statement, parameterIndex, value),
+      'Failed to bind double parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  // TODO: bind DATE
+  // TODO: bind TIME
+  // TODO: bind TIMESTAMP
+  // TODO: bind TIMESTAMP_S/_MS/_NS?
+  // TODO: bind INTERVAL
+  public bindVarchar(parameterIndex: number, value: string) {
+    throwOnFailure(ddb.duckdb_bind_varchar(this.prepared_statement, parameterIndex, value),
+      'Failed to bind varchar parameter', () => `index: ${parameterIndex}, value: ${value}`);
+  }
+  // TODO: bind BLOB
+  // TODO: bind ENUM?
+  // TODO: bind nested types? (LIST, STRUCT, MAP, UNION)
+  // TODO: bind UUID?
+  // TODO: bind BIT?
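  // Editorial usage sketch, not part of this diff: binding positional parameters and
  // executing. The SQL text and values are invented for illustration; parameter indexes
  // are 1-based, following the C API's duckdb_bind_* functions. `connection` is assumed
  // to be an already-connected DuckDBConnection.
  //
  //   const prepared = await connection.prepare('select $1::integer + $2::integer as sum');
  //   prepared.bindInteger(1, 2);
  //   prepared.bindInteger(2, 3);
  //   const result = await prepared.run();
  //   prepared.dispose();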
+ public bindNull(parameterIndex: number) { + throwOnFailure(ddb.duckdb_bind_null(this.prepared_statement, parameterIndex), + 'Failed to bind null parameter', () => `index: ${parameterIndex}`); + } + public async run(): Promise { + const result = new ddb.duckdb_result; + throwOnFailure(await ddb.duckdb_execute_prepared(this.prepared_statement, result), + 'Failed to execute prepared statement', () => ddb.duckdb_result_error(result), + () => ddb.duckdb_destroy_result(result)); + return new DuckDBMaterializedResult(result); + } + public start(): DuckDBPendingMaterializedResult { + const pending_result = new ddb.duckdb_pending_result; + throwOnFailure(ddb.duckdb_pending_prepared(this.prepared_statement, pending_result), + 'Failed to start prepared statement', () => ddb.duckdb_pending_error(pending_result), + () => ddb.duckdb_destroy_pending(pending_result)); + return new DuckDBPendingMaterializedResult(pending_result); + } + public startStreaming(): DuckDBPendingStreamingResult { + const pending_result = new ddb.duckdb_pending_result; + throwOnFailure(ddb.duckdb_pending_prepared_streaming(this.prepared_statement, pending_result), + 'Failed to start prepared statement (streaming)', () => ddb.duckdb_pending_error(pending_result), + () => ddb.duckdb_destroy_pending(pending_result)); + return new DuckDBPendingStreamingResult(pending_result); + } +} diff --git a/api/src/DuckDBResult.ts b/api/src/DuckDBResult.ts new file mode 100644 index 00000000..a0317479 --- /dev/null +++ b/api/src/DuckDBResult.ts @@ -0,0 +1,54 @@ +import * as ddb from '../..'; +import { DuckDBDataChunk } from './DuckDBDataChunk'; +import { DuckDBLogicalType } from './DuckDBLogicalType'; +import { DuckDBType } from './DuckDBType'; +import { DuckDBTypeId } from './DuckDBTypeId'; + +export abstract class DuckDBResult { + protected readonly result: ddb.duckdb_result; + constructor(result: ddb.duckdb_result) { + this.result = result; + } + public dispose() { + ddb.duckdb_destroy_result(this.result); + } + public get isStreaming(): boolean { + return ddb.duckdb_result_is_streaming(this.result); + } + public get columnCount(): number { + return ddb.duckdb_column_count(this.result); + } + public columnName(columnIndex: number): string { + return ddb.duckdb_column_name(this.result, columnIndex); + } + public columnTypeId(columnIndex: number): DuckDBTypeId { + return ddb.duckdb_column_type(this.result, columnIndex) as unknown as DuckDBTypeId; + } + public columnLogicalType(columnIndex: number): DuckDBLogicalType { + return DuckDBLogicalType.create(ddb.duckdb_column_logical_type(this.result, columnIndex)); + } + public columnType(columnIndex: number): DuckDBType { + return DuckDBLogicalType.consumeAsType(ddb.duckdb_column_logical_type(this.result, columnIndex)); + } +} + +export class DuckDBMaterializedResult extends DuckDBResult { + public get rowCount(): number { + return ddb.duckdb_row_count(this.result); + } + public get rowsChanged(): number { + return ddb.duckdb_rows_changed(this.result); + } + public get chunkCount(): number { + return ddb.duckdb_result_chunk_count(this.result); + } + public getChunk(chunkIndex: number): DuckDBDataChunk { + return new DuckDBDataChunk(ddb.duckdb_result_get_chunk(this.result, chunkIndex)); + } +} + +export class DuckDBStreamingResult extends DuckDBResult { + public async fetchChunk(): Promise { + return new DuckDBDataChunk(await ddb.duckdb_stream_fetch_chunk(this.result)); + } +} diff --git a/api/src/DuckDBType.ts b/api/src/DuckDBType.ts new file mode 100644 index 00000000..79adee86 --- /dev/null +++ 
b/api/src/DuckDBType.ts @@ -0,0 +1,266 @@ +import { DuckDBTypeId } from './DuckDBTypeId'; + +export abstract class BaseDuckDBType { + public readonly typeId: DuckDBTypeId; + protected constructor(typeId: DuckDBTypeId) { + this.typeId = typeId; + } +} + +export class DuckDBBooleanType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.BOOLEAN); + } + public static readonly instance = new DuckDBBooleanType(); +} + +export class DuckDBTinyIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.TINYINT); + } + public static readonly instance = new DuckDBTinyIntType(); +} + +export class DuckDBSmallIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.SMALLINT); + } + public static readonly instance = new DuckDBSmallIntType(); +} + +export class DuckDBIntegerType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.INTEGER); + } + public static readonly instance = new DuckDBIntegerType(); +} + +export class DuckDBBigIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.BIGINT); + } + public static readonly instance = new DuckDBBigIntType(); +} + +export class DuckDBUTinyIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.UTINYINT); + } + public static readonly instance = new DuckDBUTinyIntType(); +} + +export class DuckDBUSmallIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.USMALLINT); + } + public static readonly instance = new DuckDBUSmallIntType(); +} + +export class DuckDBUIntegerType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.UINTEGER); + } + public static readonly instance = new DuckDBUIntegerType(); +} + +export class DuckDBUBigIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.UBIGINT); + } + public static readonly instance = new DuckDBUBigIntType(); +} + +export class DuckDBFloatType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.FLOAT); + } + public static readonly instance = new DuckDBFloatType(); +} + +export class DuckDBDoubleType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.DOUBLE); + } + public static readonly instance = new DuckDBDoubleType(); +} + +export class DuckDBTimestampType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.TIMESTAMP); + } + public static readonly instance = new DuckDBTimestampType(); +} + +export type DuckDBTimestampMicrosecondsType = DuckDBTimestampType; + +export class DuckDBDateType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.DATE); + } + public static readonly instance = new DuckDBDateType(); +} + +export class DuckDBTimeType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.TIME); + } + public static readonly instance = new DuckDBTimeType(); +} + +export class DuckDBIntervalType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.INTERVAL); + } + public static readonly instance = new DuckDBIntervalType(); +} + +export class DuckDBHugeIntType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.HUGEINT); + } + public static readonly instance = new DuckDBHugeIntType(); +} + +export class DuckDBVarCharType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.VARCHAR); + } + public static readonly instance = new DuckDBVarCharType(); +} + +export class DuckDBBlobType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.BLOB); + 
} + public static readonly instance = new DuckDBBlobType(); +} + +export class DuckDBDecimalType extends BaseDuckDBType { + public readonly width: number; + public readonly scale: number; + public constructor(width: number, scale: number) { + super(DuckDBTypeId.DECIMAL); + this.width = width; + this.scale = scale; + } + public static readonly default = new DuckDBDecimalType(18, 3); +} + +export class DuckDBTimestampSecondsType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.TIMESTAMP_S); + } + public static readonly instance = new DuckDBTimestampSecondsType(); +} + +export class DuckDBTimestampMillisecondsType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.TIMESTAMP_MS); + } + public static readonly instance = new DuckDBTimestampMillisecondsType(); +} + +export class DuckDBTimestampNanosecondsType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.TIMESTAMP_NS); + } + public static readonly instance = new DuckDBTimestampNanosecondsType(); +} + +export class DuckDBEnumType extends BaseDuckDBType { + public readonly values: readonly string[]; + public constructor(values: readonly string[]) { + super(DuckDBTypeId.ENUM); + this.values = values; + } +} + +export class DuckDBListType extends BaseDuckDBType { + public readonly valueType: DuckDBType; + public constructor(valueType: DuckDBType) { + super(DuckDBTypeId.LIST); + this.valueType = valueType; + } +} + +export interface DuckDBStructEntryType { + readonly name: string; + readonly valueType: DuckDBType; +} + +export class DuckDBStructType extends BaseDuckDBType { + public readonly entries: readonly DuckDBStructEntryType[]; + public constructor(entries: readonly DuckDBStructEntryType[]) { + super(DuckDBTypeId.STRUCT); + this.entries = entries; + } +} + +export class DuckDBMapType extends BaseDuckDBType { + public readonly keyType: DuckDBType; + public readonly valueType: DuckDBType; + public constructor(keyType: DuckDBType, valueType: DuckDBType) { + super(DuckDBTypeId.MAP); + this.keyType = keyType; + this.valueType = valueType; + } +} + +export class DuckDBUUIDType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.UUID); + } + public static readonly instance = new DuckDBUUIDType(); +} + +export interface DuckDBUnionAlternativeType { + readonly tag: string; + readonly valueType: DuckDBType; +} + +export class DuckDBUnionType extends BaseDuckDBType { + public readonly alternatives: readonly DuckDBUnionAlternativeType[]; + public constructor(alternatives: readonly DuckDBUnionAlternativeType[]) { + super(DuckDBTypeId.UNION); + this.alternatives = alternatives; + } +} + +export class DuckDBBitType extends BaseDuckDBType { + private constructor() { + super(DuckDBTypeId.BIT); + } + public static readonly instance = new DuckDBBitType(); +} + +export type DuckDBType = + | DuckDBBooleanType + | DuckDBTinyIntType + | DuckDBSmallIntType + | DuckDBIntegerType + | DuckDBBigIntType + | DuckDBUTinyIntType + | DuckDBUSmallIntType + | DuckDBUIntegerType + | DuckDBUBigIntType + | DuckDBFloatType + | DuckDBDoubleType + | DuckDBTimestampType + | DuckDBDateType + | DuckDBTimeType + | DuckDBIntervalType + | DuckDBHugeIntType + | DuckDBVarCharType + | DuckDBBlobType + | DuckDBDecimalType + | DuckDBTimestampSecondsType + | DuckDBTimestampMillisecondsType + | DuckDBTimestampNanosecondsType + | DuckDBEnumType + | DuckDBListType + | DuckDBStructType + | DuckDBMapType + | DuckDBUUIDType + | DuckDBUnionType + | DuckDBBitType + ; diff --git a/api/src/DuckDBTypeId.ts 
b/api/src/DuckDBTypeId.ts new file mode 100644 index 00000000..ee027a9a --- /dev/null +++ b/api/src/DuckDBTypeId.ts @@ -0,0 +1,33 @@ +// copy of duckdb_type, with names shortened +export enum DuckDBTypeId { + INVALID = 0, + BOOLEAN = 1, + TINYINT = 2, + SMALLINT = 3, + INTEGER = 4, + BIGINT = 5, + UTINYINT = 6, + USMALLINT = 7, + UINTEGER = 8, + UBIGINT = 9, + FLOAT = 10, + DOUBLE = 11, + TIMESTAMP = 12, + DATE = 13, + TIME = 14, + INTERVAL = 15, + HUGEINT = 16, + VARCHAR = 17, + BLOB = 18, + DECIMAL = 19, + TIMESTAMP_S = 20, + TIMESTAMP_MS = 21, + TIMESTAMP_NS = 22, + ENUM = 23, + LIST = 24, + STRUCT = 25, + MAP = 26, + UUID = 27, + UNION = 28, + BIT = 29, +} diff --git a/api/src/DuckDBVector.ts b/api/src/DuckDBVector.ts new file mode 100644 index 00000000..686e2e0f --- /dev/null +++ b/api/src/DuckDBVector.ts @@ -0,0 +1,631 @@ +import * as ddb from '../..'; +import { DuckDBLogicalType } from './DuckDBLogicalType'; +import { + DuckDBBigIntType, + DuckDBDoubleType, + DuckDBFloatType, + DuckDBIntegerType, + DuckDBListType, + DuckDBMapType, + DuckDBSmallIntType, + DuckDBStructEntryType, + DuckDBStructType, + DuckDBTinyIntType, + DuckDBType, + DuckDBUBigIntType, + DuckDBUIntegerType, + DuckDBUSmallIntType, + DuckDBUTinyIntType, + DuckDBUnionType, +} from './DuckDBType'; +import { DuckDBTypeId } from './DuckDBTypeId'; + +function vectorData(vector: ddb.duckdb_vector, byteCount: number): Uint8Array { + const pointer = ddb.duckdb_vector_get_data(vector); + const buffer = ddb.copy_buffer(pointer, byteCount); + if (!buffer) { + throw new Error('Failed to get buffer for vector'); + } + return buffer; +} + +class DuckDBValidity { + private readonly validity_pointer: ddb.uint64_pointer; + private readonly offset: number; + private constructor(validity_pointer: ddb.uint64_pointer, offset: number = 0) { + this.validity_pointer = validity_pointer; + this.offset = offset; + } + public static fromVector(vector: ddb.duckdb_vector, offset: number = 0): DuckDBValidity { + const validity_pointer = ddb.duckdb_vector_get_validity(vector); + return new DuckDBValidity(validity_pointer, offset); + } + public itemValid(itemIndex: number): boolean { + return ddb.duckdb_validity_row_is_valid(this.validity_pointer, itemIndex - this.offset); + } + public slice(offset: number): DuckDBValidity { + return new DuckDBValidity(this.validity_pointer, this.offset + offset); + } +} + +export abstract class DuckDBVector { + public static standardSize(): number { + return ddb.duckdb_vector_size(); + } + public static create(vector: ddb.duckdb_vector, itemCount: number, knownType?: DuckDBType): DuckDBVector { + const vectorType = knownType ? 
knownType : DuckDBLogicalType.consumeAsType(ddb.duckdb_vector_get_column_type(vector)); + switch (vectorType.typeId) { + case DuckDBTypeId.BOOLEAN: // TODO: sizeof(bool) is not guaranteed to be 1 + throw new Error('not yet implemented'); + case DuckDBTypeId.TINYINT: + return DuckDBTinyIntVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.SMALLINT: + return DuckDBSmallIntVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.INTEGER: + return DuckDBIntegerVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.BIGINT: + return DuckDBBigIntVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.UTINYINT: + return DuckDBUTinyIntVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.USMALLINT: + return DuckDBUSmallIntVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.UINTEGER: + return DuckDBUIntegerVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.UBIGINT: + return DuckDBUBigIntVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.FLOAT: + return DuckDBFloatVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.DOUBLE: + return DuckDBDoubleVector.fromRawVector(vector, itemCount); + case DuckDBTypeId.TIMESTAMP: + throw new Error('not yet implemented'); + case DuckDBTypeId.DATE: + throw new Error('not yet implemented'); + case DuckDBTypeId.TIME: + throw new Error('not yet implemented'); + case DuckDBTypeId.INTERVAL: // Int32, Int32, Int64 + throw new Error('not yet implemented'); + case DuckDBTypeId.HUGEINT: // Int128 + throw new Error('not yet implemented'); + case DuckDBTypeId.VARCHAR: // string + throw new Error('not yet implemented'); + case DuckDBTypeId.BLOB: // binary + throw new Error('not yet implemented'); + case DuckDBTypeId.DECIMAL: // variable: Int8, Int16, Int32, Int64 + throw new Error('not yet implemented'); + case DuckDBTypeId.TIMESTAMP_S: + throw new Error('not yet implemented'); + case DuckDBTypeId.TIMESTAMP_MS: + throw new Error('not yet implemented'); + case DuckDBTypeId.TIMESTAMP_NS: + throw new Error('not yet implemented'); + case DuckDBTypeId.ENUM: // variable: Uint8, Uint16, Uint32, Uint64 + throw new Error('not yet implemented'); + case DuckDBTypeId.LIST: + if (vectorType instanceof DuckDBListType) { + return DuckDBListVector.fromRawVector(vectorType, vector, itemCount); + } + throw new Error('DuckDBType has LIST type id but is not an instance of DuckDBListType'); + case DuckDBTypeId.STRUCT: + if (vectorType instanceof DuckDBStructType) { + return DuckDBStructVector.fromRawVector(vectorType, vector, itemCount); + } + throw new Error('DuckDBType has STRUCT type id but is not an instance of DuckDBStructType'); + case DuckDBTypeId.MAP: + if (vectorType instanceof DuckDBMapType) { + return DuckDBMapVector.fromRawVector(vectorType, vector, itemCount); + } + throw new Error('DuckDBType has MAP type id but is not an instance of DuckDBMapType'); + case DuckDBTypeId.UUID: // Int128 + throw new Error('not yet implemented'); + case DuckDBTypeId.UNION: + throw new Error('not yet implemented'); + // return DuckDBUnionVector.fromRawVector(vectorType, vector, itemCount); + case DuckDBTypeId.BIT: // binary + throw new Error('not yet implemented'); + default: + throw new Error('Invalid type id'); + } + } + public abstract get type(): DuckDBType; + public abstract get itemCount(): number; + public abstract getItem(itemIndex: number): T | null; + public abstract slice(offset: number, length: number): DuckDBVector; +} + +export class DuckDBTinyIntVector extends DuckDBVector { + private readonly items: Int8Array; + private 
readonly validity: DuckDBValidity; + constructor(items: Int8Array, validity: DuckDBValidity) { + super(); + this.items = items + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBTinyIntVector { + const data = vectorData(vector, itemCount * Int8Array.BYTES_PER_ELEMENT); + const items = new Int8Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBTinyIntVector(items, validity); + } + public override get type(): DuckDBTinyIntType { + return DuckDBTinyIntType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBTinyIntVector { + return new DuckDBTinyIntVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBSmallIntVector extends DuckDBVector { + private readonly items: Int16Array; + private readonly validity: DuckDBValidity; + constructor(items: Int16Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBSmallIntVector { + const data = vectorData(vector, itemCount * Int16Array.BYTES_PER_ELEMENT); + const items = new Int16Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBSmallIntVector(items, validity); + } + public override get type(): DuckDBSmallIntType { + return DuckDBSmallIntType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBSmallIntVector { + return new DuckDBSmallIntVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBIntegerVector extends DuckDBVector { + private readonly items: Int32Array; + private readonly validity: DuckDBValidity; + constructor(items: Int32Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBIntegerVector { + const data = vectorData(vector, itemCount * Int32Array.BYTES_PER_ELEMENT); + const items = new Int32Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBIntegerVector(items, validity); + } + public override get type(): DuckDBIntegerType { + return DuckDBIntegerType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? 
this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBIntegerVector { + return new DuckDBIntegerVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBBigIntVector extends DuckDBVector { + private readonly items: BigInt64Array; + private readonly validity: DuckDBValidity; + constructor(items: BigInt64Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBBigIntVector { + const data = vectorData(vector, itemCount * BigInt64Array.BYTES_PER_ELEMENT); + const items = new BigInt64Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBBigIntVector(items, validity); + } + public override get type(): DuckDBBigIntType { + return DuckDBBigIntType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): bigint | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBBigIntVector { + return new DuckDBBigIntVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBUTinyIntVector extends DuckDBVector { + private readonly items: Uint8Array; + private readonly validity: DuckDBValidity; + constructor(items: Uint8Array, validity: DuckDBValidity) { + super(); + this.items = items + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBUTinyIntVector { + const data = vectorData(vector, itemCount * Uint8Array.BYTES_PER_ELEMENT); + const items = new Uint8Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBUTinyIntVector(items, validity); + } + public override get type(): DuckDBUTinyIntType { + return DuckDBUTinyIntType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBUTinyIntVector { + return new DuckDBUTinyIntVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBUSmallIntVector extends DuckDBVector { + private readonly items: Uint16Array; + private readonly validity: DuckDBValidity; + constructor(items: Uint16Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBUSmallIntVector { + const data = vectorData(vector, itemCount * Uint16Array.BYTES_PER_ELEMENT); + const items = new Uint16Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBUSmallIntVector(items, validity); + } + public override get type(): DuckDBUSmallIntType { + return DuckDBUSmallIntType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? 
this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBUSmallIntVector { + return new DuckDBUSmallIntVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBUIntegerVector extends DuckDBVector { + private readonly items: Uint32Array; + private readonly validity: DuckDBValidity; + constructor(items: Uint32Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBUIntegerVector { + const data = vectorData(vector, itemCount * Uint32Array.BYTES_PER_ELEMENT); + const items = new Uint32Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBUIntegerVector(items, validity); + } + public override get type(): DuckDBUIntegerType { + return DuckDBUIntegerType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBUIntegerVector { + return new DuckDBUIntegerVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBUBigIntVector extends DuckDBVector { + private readonly items: BigUint64Array; + private readonly validity: DuckDBValidity; + constructor(items: BigUint64Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBUBigIntVector { + const data = vectorData(vector, itemCount * BigUint64Array.BYTES_PER_ELEMENT); + const items = new BigUint64Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBUBigIntVector(items, validity); + } + public override get type(): DuckDBUBigIntType { + return DuckDBUBigIntType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): bigint | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBUBigIntVector { + return new DuckDBUBigIntVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBFloatVector extends DuckDBVector { + private readonly items: Float32Array; + private readonly validity: DuckDBValidity; + constructor(items: Float32Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBFloatVector { + const data = vectorData(vector, itemCount * Float32Array.BYTES_PER_ELEMENT); + const items = new Float32Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBFloatVector(items, validity); + } + public override get type(): DuckDBFloatType { + return DuckDBFloatType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? 
this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBFloatVector { + return new DuckDBFloatVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBDoubleVector extends DuckDBVector { + private readonly items: Float64Array; + private readonly validity: DuckDBValidity; + constructor(items: Float64Array, validity: DuckDBValidity) { + super(); + this.items = items; + this.validity = validity; + } + static fromRawVector(vector: ddb.duckdb_vector, itemCount: number): DuckDBDoubleVector { + const data = vectorData(vector, itemCount * Float64Array.BYTES_PER_ELEMENT); + const items = new Float64Array(data.buffer, data.byteOffset, itemCount); + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBDoubleVector(items, validity); + } + public override get type(): DuckDBDoubleType { + return DuckDBDoubleType.instance; + } + public override get itemCount(): number { + return this.items.length; + } + public override getItem(itemIndex: number): number | null { + return this.validity.itemValid(itemIndex) ? this.items[itemIndex] : null; + } + public override slice(offset: number, length: number): DuckDBDoubleVector { + return new DuckDBDoubleVector(this.items.slice(offset, offset + length), this.validity.slice(offset)); + } +} + +export class DuckDBListVector extends DuckDBVector> { + private readonly listType: DuckDBListType; + private readonly entryData: BigUint64Array; + private readonly validity: DuckDBValidity; + private readonly childData: DuckDBVector; + constructor(listType: DuckDBListType, entryData: BigUint64Array, validity: DuckDBValidity, childData: DuckDBVector) { + super(); + this.listType = listType; + this.entryData = entryData; + this.validity = validity; + this.childData = childData; + } + static fromRawVector(listType: DuckDBListType, vector: ddb.duckdb_vector, itemCount: number): DuckDBListVector { + const data = vectorData(vector, itemCount * BigUint64Array.BYTES_PER_ELEMENT * 2); + const entryData = new BigUint64Array(data.buffer, data.byteOffset, itemCount * 2); + + const validity = DuckDBValidity.fromVector(vector); + + const child_vector = ddb.duckdb_list_vector_get_child(vector); + const child_vector_size = ddb.duckdb_list_vector_get_size(vector); + const childData = DuckDBVector.create(child_vector, child_vector_size, listType.valueType); + + return new DuckDBListVector(listType, entryData, validity, childData); + } + public override get type(): DuckDBListType { + return this.listType; + } + public override get itemCount(): number { + return this.entryData.length >> 1; + } + public override getItem(itemIndex: number): DuckDBVector | null { + if (!this.validity.itemValid(itemIndex)) { + return null; + } + const entryDataStartIndex = itemIndex * 2; + const offset = Number(this.entryData[entryDataStartIndex]); + const length = Number(this.entryData[entryDataStartIndex + 1]); + return this.childData.slice(offset, length); + } + public override slice(offset: number, length: number): DuckDBListVector { + const entryDataStartIndex = offset * 2; + return new DuckDBListVector( + this.listType, + this.entryData.slice(entryDataStartIndex, entryDataStartIndex + length * 2), + this.validity.slice(offset), + this.childData.slice(offset, offset + length), + ); + } +} + +export interface DuckDBStructEntry { + readonly name: string; + readonly value: any; +} + +export class DuckDBStructVector extends DuckDBVector { + private readonly structType: DuckDBStructType; + private readonly 
_itemCount: number; + private readonly entryVectors: readonly DuckDBVector[]; + private readonly validity: DuckDBValidity; + constructor(structType: DuckDBStructType, itemCount: number, entryVectors: readonly DuckDBVector[], validity: DuckDBValidity) { + super(); + this.structType = structType; + this._itemCount = itemCount; + this.entryVectors = entryVectors; + this.validity = validity; + } + static fromRawVector(structType: DuckDBStructType, vector: ddb.duckdb_vector, itemCount: number): DuckDBStructVector { + const entryCount = structType.entries.length; + const entryVectors: DuckDBVector[] = []; + for (let i = 0; i < entryCount; i++) { + const entry = structType.entries[i]; + const child_vector = ddb.duckdb_struct_vector_get_child(vector, i); + entryVectors.push(DuckDBVector.create(child_vector, itemCount, entry.valueType)); + } + const validity = DuckDBValidity.fromVector(vector); + return new DuckDBStructVector(structType, itemCount, entryVectors, validity); + } + public override get type(): DuckDBStructType { + return this.structType; + } + public override get itemCount(): number { + return this._itemCount; + } + public override getItem(itemIndex: number): readonly DuckDBStructEntry[] | null { + if (!this.validity.itemValid(itemIndex)) { + return null; + } + const entries: DuckDBStructEntry[] = []; + const entryCount = this.structType.entries.length; + for (let i = 0; i < entryCount; i++) { + const entry = this.structType.entries[i]; + const entryVector = this.entryVectors[i]; + entries.push({ name: entry.name, value: entryVector.getItem(itemIndex) }); + } + return entries; + } + public getItemValue(itemIndex: number, entryIndex: number): any | null { + if (!this.validity.itemValid(itemIndex)) { + return null; + } + const entryVector = this.entryVectors[entryIndex]; + return entryVector.getItem(itemIndex); + } + public override slice(offset: number, length: number): DuckDBStructVector { + return new DuckDBStructVector( + this.structType, + length, + this.entryVectors.map(entryVector => entryVector.slice(offset, length)), + this.validity.slice(offset), + ); + } +} + +export interface DuckDBMapEntry { + readonly key: any; + readonly value: any; +} + +// MAP = LIST(STRUCT(key KEY_TYPE, value VALUE_TYPE)) +export class DuckDBMapVector extends DuckDBVector { + private readonly mapType: DuckDBMapType; + private readonly listVector: DuckDBListVector; + constructor(mapType: DuckDBMapType, listVector: DuckDBListVector) { + super(); + this.mapType = mapType; + this.listVector = listVector; + } + static fromRawVector(mapType: DuckDBMapType, vector: ddb.duckdb_vector, itemCount: number): DuckDBMapVector { + const listVectorType = new DuckDBListType(new DuckDBStructType([ + { name: 'key', valueType: mapType.keyType }, + { name: 'value', valueType: mapType.valueType } + ])); + return new DuckDBMapVector(mapType, DuckDBListVector.fromRawVector(listVectorType, vector, itemCount)); + } + public override get type(): DuckDBType { + return this.mapType; + } + public override get itemCount(): number { + return this.listVector.itemCount; + } + public override getItem(itemIndex: number): readonly DuckDBMapEntry[] | null { + const itemVector = this.listVector.getItem(itemIndex); + if (!itemVector) { + return null; + } + if (!(itemVector instanceof DuckDBStructVector)) { + throw new Error('item in map list vector is not a struct'); + } + const entries: DuckDBMapEntry[] = []; + const itemEntryCount = itemVector.itemCount; + for (let i = 0; i < itemEntryCount; i++) { + const entry = itemVector.getItem(i); + 
if (!entry) {
+        throw new Error('null entry in map struct');
+      }
+      const keyEntry = entry[0];
+      const valueEntry = entry[1];
+      entries.push({ key: keyEntry.value, value: valueEntry.value });
+    }
+    return entries;
+  }
+  public override slice(offset: number, length: number): DuckDBMapVector {
+    return new DuckDBMapVector(
+      this.mapType,
+      this.listVector.slice(offset, length),
+    );
+  }
+}
+
+export interface DuckDBUnionAlternative {
+  readonly tag: string;
+  readonly value: any;
+}
+
+// UNION = STRUCT with first entry named "tag"
+export class DuckDBUnionVector extends DuckDBVector<DuckDBUnionAlternative> {
+  private readonly unionType: DuckDBUnionType;
+  private readonly structVector: DuckDBStructVector;
+  constructor(unionType: DuckDBUnionType, structVector: DuckDBStructVector) {
+    super();
+    this.unionType = unionType;
+    this.structVector = structVector;
+  }
+  static fromRawVector(unionType: DuckDBUnionType, vector: ddb.duckdb_vector, itemCount: number): DuckDBUnionVector {
+    const structEntryTypes: DuckDBStructEntryType[] = [{ name: 'tag', valueType: DuckDBUTinyIntType.instance }];
+    for (const alternative of unionType.alternatives) {
+      structEntryTypes.push({ name: alternative.tag, valueType: alternative.valueType });
+    }
+    const structVectorType = new DuckDBStructType(structEntryTypes);
+    return new DuckDBUnionVector(unionType, DuckDBStructVector.fromRawVector(structVectorType, vector, itemCount));
+  }
+  public override get type(): DuckDBUnionType {
+    return this.unionType;
+  }
+  public override get itemCount(): number {
+    return this.structVector.itemCount;
+  }
+  public override getItem(itemIndex: number): DuckDBUnionAlternative | null {
+    const tagValue = this.structVector.getItemValue(itemIndex, 0);
+    if (tagValue == null) {
+      return null;
+    }
+    const alternativeIndex = Number(tagValue);
+    const tag = this.unionType.alternatives[alternativeIndex].tag;
+    const entryIndex = alternativeIndex + 1;
+    const value = this.structVector.getItemValue(itemIndex, entryIndex);
+    return { tag, value };
+  }
+  public override slice(offset: number, length: number): DuckDBUnionVector {
+    return new DuckDBUnionVector(
+      this.unionType,
+      this.structVector.slice(offset, length),
+    );
+  }
+}
diff --git a/api/src/configurationOptionDescriptions.ts b/api/src/configurationOptionDescriptions.ts
new file mode 100644
index 00000000..15058e04
--- /dev/null
+++ b/api/src/configurationOptionDescriptions.ts
@@ -0,0 +1,16 @@
+import * as ddb from '../..';
+import { throwOnFailure } from './throwOnFailure';
+
+export function configurationOptionDescriptions(): Readonly<Record<string, string>> {
+  const descriptions: Record<string, string> = {};
+  const count = ddb.duckdb_config_count();
+  for (let i = 0; i < count; i++) {
+    const nameWrapper = new ddb.out_string_wrapper;
+    const descriptionWrapper = new ddb.out_string_wrapper;
+    throwOnFailure(ddb.duckdb_get_config_flag(i, nameWrapper, descriptionWrapper), 'Failed to get config option description');
+    const name = ddb.out_get_string(nameWrapper);
+    const description = ddb.out_get_string(descriptionWrapper);
+    descriptions[name] = description;
+  }
+  return descriptions;
+}
diff --git a/api/src/index.ts b/api/src/index.ts
new file mode 100644
index 00000000..62edde8f
--- /dev/null
+++ b/api/src/index.ts
@@ -0,0 +1,14 @@
+export * from './configurationOptionDescriptions';
+export * from './DuckDBAppender';
+export * from './DuckDBConnection';
+export * from './DuckDBDataChunk';
+export * from './DuckDBExtractedStatements';
+export * from './DuckDBInstance';
+export * from './DuckDBLogicalType';
+export * from 
'./DuckDBPendingResult'; +export * from './DuckDBPreparedStatement'; +export * from './DuckDBResult'; +export * from './DuckDBType'; +export * from './DuckDBTypeId'; +export * from './DuckDBVector'; +export * from './version'; diff --git a/api/src/throwOnFailure.ts b/api/src/throwOnFailure.ts new file mode 100644 index 00000000..5a415926 --- /dev/null +++ b/api/src/throwOnFailure.ts @@ -0,0 +1,13 @@ +import * as ddb from '../..'; + +export function throwOnFailure(state: ddb.duckdb_state, message: string, getError?: () => string, dispose?: () => void) { + if (state !== ddb.duckdb_state.DuckDBSuccess) { + try { + throw new Error(getError ? `${message}: ${getError()}` : message); + } finally { + if (dispose) { + dispose(); + } + } + } +} diff --git a/api/src/version.ts b/api/src/version.ts new file mode 100644 index 00000000..b2ebb193 --- /dev/null +++ b/api/src/version.ts @@ -0,0 +1,5 @@ +import * as ddb from '../..'; + +export function version(): string { + return ddb.duckdb_library_version(); +} diff --git a/api/test/api.ts b/api/test/api.ts new file mode 100644 index 00000000..f3ff27fb --- /dev/null +++ b/api/test/api.ts @@ -0,0 +1,114 @@ +import assert from 'assert'; +import { + DuckDBBooleanType, + DuckDBConnection, + DuckDBDataChunk, + DuckDBInstance, + DuckDBIntegerType, + DuckDBIntegerVector, + DuckDBResult, + DuckDBType, + DuckDBTypeId, + DuckDBVarCharType, + DuckDBVector, + configurationOptionDescriptions, + version +} from '../src'; + +async function withConnection(fn: (connection: DuckDBConnection) => Promise) { + const instance = await DuckDBInstance.create(); + const connection = await instance.connect(); + await fn(connection); + connection.dispose(); + instance.dispose(); +} + +interface ExpectedColumn { + readonly name: string; + readonly type: DuckDBType; +} + +function assertColumns(result: DuckDBResult, expectedColumns: readonly ExpectedColumn[]) { + assert.equal(result.columnCount, expectedColumns.length, 'column count'); + for (let i = 0; i < expectedColumns.length; i++) { + const { name, type } = expectedColumns[i]; + assert.equal(result.columnName(i), name, 'column name'); + assert.equal(result.columnTypeId(i), type.typeId, 'column type id'); + assert.equal(result.columnType(i), type, 'column type'); + } +} + +function assertIntegerValue(chunk: DuckDBDataChunk, columnIndex: number, rowIndex: number, expectedValue: number) { + const column = chunk.getColumn(columnIndex); + if (!(column instanceof DuckDBIntegerVector)) { + assert.fail('column not integer vector'); + } + const value = column.getItem(rowIndex); + assert.equal(value, expectedValue); +} + +function assertNullValue(chunk: DuckDBDataChunk, columnIndex: number, rowIndex: number) { + const column = chunk.getColumn(columnIndex); + const value = column.getItem(rowIndex); + assert.equal(value, null); +} + +describe('api', () => { + it('should expose version', () => { + const ver = version(); + assert.ok(ver.startsWith('v'), `version starts with 'v'`); + }); + it('should expose configuration option descriptions', () => { + const descriptions = configurationOptionDescriptions(); + assert.ok(descriptions['memory_limit'], `descriptions has 'memory_limit'`); + }); + it('should support creating, connecting, running a basic query, and reading results', async () => { + const instance = await DuckDBInstance.create(); + const connection = await instance.connect(); + const result = await connection.run('select 42 as num'); + assertColumns(result, [{ name: 'num', type: DuckDBIntegerType.instance }]); + 
assert.equal(result.rowCount, 1); + assert.equal(result.chunkCount, 1); + const chunk = result.getChunk(0); + assert.equal(chunk.columnCount, 1); + assert.equal(chunk.rowCount, 1); + assertIntegerValue(chunk, 0, 0, 42); + chunk.dispose(); + result.dispose(); + connection.dispose(); + instance.dispose(); + }); + it('should support running prepared statements', async () => { + await withConnection(async (connection) => { + const prepared = await connection.prepare('select $num as a, $str as b, $bool as c, $null as d'); + assert.equal(prepared.parameterCount, 4); + assert.equal(prepared.parameterName(1), 'num'); + assert.equal(prepared.parameterName(2), 'str'); + assert.equal(prepared.parameterName(3), 'bool'); + assert.equal(prepared.parameterName(4), 'null'); + prepared.bindInteger(1, 10); + prepared.bindVarchar(2, 'abc'); + prepared.bindBoolean(3, true); + prepared.bindNull(4); + const result = await prepared.run(); + assertColumns(result, [ + { name: 'a', type: DuckDBIntegerType.instance }, + { name: 'b', type: DuckDBVarCharType.instance }, + { name: 'c', type: DuckDBBooleanType.instance }, + { name: 'd', type: DuckDBIntegerType.instance }, + ]); + assert.equal(result.rowCount, 1); + assert.equal(result.chunkCount, 1); + const chunk = result.getChunk(0); + assert.equal(chunk.columnCount, 4); + assert.equal(chunk.rowCount, 1); + assertIntegerValue(chunk, 0, 0, 10); + // TODO: validate varchar + // TODO: validate boolean + assertNullValue(chunk, 3, 0); + chunk.dispose(); + result.dispose(); + prepared.dispose(); + }); + }); +}); diff --git a/generate-wrapper.py b/generate-wrapper.py index d4f8b593..c9d865be 100644 --- a/generate-wrapper.py +++ b/generate-wrapper.py @@ -7,12 +7,77 @@ import zipfile # those functions return promises asynchronously since they may block and/or do IO -async_functions = ['duckdb_open', 'duckdb_open_ext', 'duckdb_close', 'duckdb_connect', 'duckdb_disconnect', 'duckdb_query', 'duckdb_prepare', 'duckdb_execute_prepared', 'duckdb_stream_fetch_chunk', 'duckdb_execute_tasks', 'duckdb_appender_create', 'duckdb_appender_flush', 'duckdb_appender_close', 'duckdb_appender_destroy', 'duckdb_execute_prepared', 'duckdb_extract_statements', 'duckdb_prepare_extracted_statement', 'duckdb_execute_pending'] - -pointer_wrappers = ['duckdb_appender', - 'duckdb_config', 'duckdb_connection', 'duckdb_data_chunk', 'duckdb_database', 'duckdb_extracted_statements', 'duckdb_logical_type', 'duckdb_pending_result', 'duckdb_prepared_statement', 'duckdb_value', 'duckdb_vector'] # 'duckdb_arrow', 'duckdb_arrow_array', 'duckdb_arrow_schema', 'duckdb_arrow_stream' - -deprecated_functions = ['duckdb_column_data', 'duckdb_nullmask_data', 'duckdb_validity_row_is_valid', 'duckdb_validity_set_row_invalid', 'duckdb_validity_set_row_valid', 'duckdb_validity_set_row_validity', 'duckdb_value_blob', 'duckdb_value_boolean', 'duckdb_value_date', 'duckdb_value_decimal', 'duckdb_value_double', 'duckdb_value_float', 'duckdb_value_hugeint', 'duckdb_value_int16', 'duckdb_value_int32', 'duckdb_value_int64', 'duckdb_value_int8', 'duckdb_value_interval', 'duckdb_value_is_null', 'duckdb_value_string', 'duckdb_value_string_internal', 'duckdb_value_time', 'duckdb_value_timestamp', 'duckdb_value_uint16', 'duckdb_value_uint32', 'duckdb_value_uint64', 'duckdb_value_uint8', 'duckdb_value_varchar', 'duckdb_value_varchar_internal'] +async_functions = [ + 'duckdb_open', + 'duckdb_open_ext', + 'duckdb_close', + 'duckdb_connect', + 'duckdb_disconnect', + 'duckdb_query', + 'duckdb_prepare', + 'duckdb_execute_prepared', + 
'duckdb_stream_fetch_chunk', + 'duckdb_execute_tasks', + 'duckdb_appender_create', + 'duckdb_appender_flush', + 'duckdb_appender_close', + 'duckdb_appender_destroy', + 'duckdb_execute_prepared', + 'duckdb_extract_statements', + 'duckdb_prepare_extracted_statement', + 'duckdb_execute_pending', +] + +pointer_wrappers = [ + 'duckdb_appender', + 'duckdb_config', + 'duckdb_connection', + 'duckdb_data_chunk', + 'duckdb_database', + 'duckdb_extracted_statements', + 'duckdb_logical_type', + 'duckdb_pending_result', + 'duckdb_prepared_statement', + 'duckdb_value', + 'duckdb_vector', + #'duckdb_arrow', + #'duckdb_arrow_array', + #'duckdb_arrow_schema', + #'duckdb_arrow_stream', +] + +deprecated_functions = [ + 'duckdb_column_data', + 'duckdb_nullmask_data', + #'duckdb_validity_row_is_valid', + 'duckdb_validity_set_row_invalid', + 'duckdb_validity_set_row_valid', + 'duckdb_validity_set_row_validity', + 'duckdb_vector_ensure_validity_writable', + 'duckdb_value_blob', + 'duckdb_value_boolean', + 'duckdb_value_date', + 'duckdb_value_decimal', + 'duckdb_value_double', + 'duckdb_value_float', + 'duckdb_value_hugeint', + 'duckdb_value_int16', + 'duckdb_value_int32', + 'duckdb_value_int64', + 'duckdb_value_int8', + 'duckdb_value_interval', + 'duckdb_value_is_null', + 'duckdb_value_string', + 'duckdb_value_string_internal', + 'duckdb_value_time', + 'duckdb_value_timestamp', + 'duckdb_value_uint16', + 'duckdb_value_uint32', + 'duckdb_value_uint64', + 'duckdb_value_uint8', + 'duckdb_value_varchar', + 'duckdb_value_varchar_internal', +] def typename(decl): const = '' @@ -28,7 +93,32 @@ def typename(decl): raise ValueError(decl) class DuckDBHeaderVisitor(pycparser.c_ast.NodeVisitor): - result = '' + cpp_result = '' + types_result = '' + c_type_to_ts_type = { + "bool": "boolean", + "double": "number", + "char*": "string", + "char**": "out_string_wrapper", + "const char*": "string", + "const char**": "out_string_wrapper", + "float": "number", + "idx_t": "number", + "idx_t*": "idx_pointer", + "int8_t": "number", + "int16_t": "number", + "int32_t": "number", + "int64_t": "number", # should use bigint because max safe int in JS is 2^53-1 + "uint8_t": "number", + "uint16_t": "number", + "uint32_t": "number", + "uint64_t": "number", # should use bigint because max safe int in JS is 2^53-1 + "uint64_t*": "uint64_pointer", + "size_t": "number", + "void": "void", + "void*": "pointer", + "const void*": "pointer", + } def visit_TypeDecl(self, node): name = node.declname @@ -36,17 +126,34 @@ def visit_TypeDecl(self, node): return if isinstance(node.type, pycparser.c_ast.Struct): - self.result += f'exports.Set(Napi::String::New(env, "{name}"), duckdb_node::PointerHolder<{name}>::Init(env, "{name}")->Value());\n' - - if isinstance(node.type, pycparser.c_ast.Enum): - self.result += f'auto {name}_enum = Napi::Object::New(env);\n' + self.cpp_result += f'exports.Set(Napi::String::New(env, "{name}"), duckdb_node::PointerHolder<{name}>::Init(env, "{name}")->Value());\n' + self.types_result += f'export class {name} {{}}\n' + self.c_type_to_ts_type[name] = name + self.c_type_to_ts_type[f'{name}*'] = name + + elif isinstance(node.type, pycparser.c_ast.Enum): + self.cpp_result += f'auto {name}_enum = Napi::Object::New(env);\n' + self.types_result += f'export enum {name} {{\n' + self.c_type_to_ts_type[name] = name enum_idx = 0 for enum in node.type.values.enumerators: if enum.value is not None: enum_idx = int(enum.value.value) - self.result += f'{name}_enum.Set("{enum.name}", {enum_idx});\n' + self.cpp_result += 
f'{name}_enum.Set("{enum.name}", {enum_idx});\n' + self.types_result += f' {enum.name} = {enum_idx},\n' enum_idx += 1 - self.result += f'exports.DefineProperty(Napi::PropertyDescriptor::Value("{name}", {name}_enum, static_cast(napi_enumerable | napi_configurable)));\n' + self.cpp_result += f'exports.DefineProperty(Napi::PropertyDescriptor::Value("{name}", {name}_enum, static_cast(napi_enumerable | napi_configurable)));\n' + self.types_result += f'}}\n' + + elif typename(node.type) == 'void': + # Do these void* types need any corresponding Napi code? + self.types_result += f'export class {name} {{}}\n' + self.c_type_to_ts_type[name] = name + self.c_type_to_ts_type[f'{name}*'] = name + + else: + print(f'type not handled: {name}') + print(node) def visit_FuncDecl(self, node): @@ -64,51 +171,70 @@ def visit_FuncDecl(self, node): if node.args: for p in node.args.params: - args.append(typename(p.type)) + args.append((p.name, typename(p.type))) if name == '__routine': + print(f'function skipped: {name}') return # ?? if 'replacement' in name: + print(f'function skipped: {name}') return # ?? if 'delete_callback' in name: + print(f'function skipped: {name}') + self.types_result += f'export type {name} = (data: pointer) => void;\n' return # ?? if 'duckdb_init_' in name: + print(f'function skipped: {name}') return if 'table_function' in name: + print(f'function skipped: {name}') return # TODO if 'arrow' in name: + print(f'function skipped: {name}') return # TODO if name in deprecated_functions: + print(f'deprecated function skipped: {name}') return - print(f"{name}") + #print(f"{name}") n_args = len(args) - args.append(name) + + fwrap_args = list(map(lambda arg: arg[1], args)) + [name] + is_async = name in async_functions asyncstr = '' - if name in async_functions: + if is_async: asyncstr = 'Async' voidstr = '' if ret == 'void': voidstr = 'Void' else: - args.insert(0, ret) - arg_str = ', '.join(args) + fwrap_args.insert(0, ret) + fwrap_arg_str = ', '.join(fwrap_args) + + ts_args_strs = list(map(lambda arg: f'{arg[0]}: {self.c_type_to_ts_type[arg[1]] if arg[1] in self.c_type_to_ts_type else arg[1]}', args)) + ts_args_str = ', '.join(ts_args_strs) + ts_ret_type = self.c_type_to_ts_type[ret] if ret in self.c_type_to_ts_type else ret + if is_async: + ts_ret_type = f'Promise<{ts_ret_type}>' + + self.cpp_result += f'exports.Set(Napi::String::New(env, "{name}"), Napi::Function::New>(env));\n' + self.types_result += f'export function {name}({ts_args_str}): {ts_ret_type};\n' + - self.result += f'exports.Set(Napi::String::New(env, "{name}"), Napi::Function::New>(env));\n' def create_func_defs(filename): ast = pycparser.parse_file(filename, use_cpp=False) v = DuckDBHeaderVisitor() v.visit(ast) - return v.result + return v.cpp_result, v.types_result if __name__ == "__main__": @@ -126,8 +252,28 @@ def create_func_defs(filename): os.system("sed -i -e 's/#include /#include /' %s" % os.path.join(tmp, "duckdb.h")) # until 0.10.0 has been released os.system("gcc -E -D__builtin_va_list=int %s > %s" % (os.path.join(tmp, "duckdb.h"), os.path.join(tmp, "duckdb-preprocessed.h"))) + cpp_result, types_result = create_func_defs(os.path.join(tmp, "duckdb-preprocessed.h")) + out = open('src/duckdb_node_generated.cpp', 'wb') out.write('// This file is generated by generate-wrapper.py, please do not edit\n\n#include "function_wrappers.hpp"\n#include "duckdb.h"\n\nstatic void RegisterGenerated(Napi::Env env, Napi::Object exports){\n'.encode()) - out.write(create_func_defs(os.path.join(tmp, "duckdb-preprocessed.h")).encode()) + 
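A note on the c_type_to_ts_type table above: int64_t and uint64_t are mapped to `number`, and, as the inline comments there warn, JavaScript numbers are only exact up to 2^53 - 1. A small standalone TypeScript illustration of why `bigint` would be the lossless mapping (not part of the patch):

const big = 2n ** 63n - 1n;                     // largest int64 value, held exactly as a bigint
console.log(big === big - 1n);                  // false: bigint keeps adjacent values distinct
console.log(Number(big) === Number(big - 1n));  // true: both collapse to 9223372036854775808 as doubles
console.log(Number.MAX_SAFE_INTEGER);           // 9007199254740991, i.e. 2^53 - 1
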
out.write(cpp_result.encode()) out.write('}\n\n'.encode()) + types_out = open('lib/duckdb.d.ts', 'wb') + + types_out.write('// placeholder interfaces for pointer types\n'.encode()) + types_out.write('export interface pointer {}\n'.encode()) + types_out.write('export interface uint64_pointer extends pointer {}\n'.encode()) + types_out.write('export interface idx_pointer extends pointer {}\n'.encode()) + + types_out.write('// bindings-defined types\n'.encode()) + types_out.write('export class out_string_wrapper {}\n'.encode()) + + types_out.write('// generated types and functions\n'.encode()) + types_out.write(types_result.encode()) + + types_out.write('// bindings-defined functions\n'.encode()) + types_out.write('export function copy_buffer(buffer: pointer, length: number): Uint8Array | null;\n'.encode()) + types_out.write('export function out_get_string(string_wrapper: out_string_wrapper): string;\n'.encode()) + types_out.write('export function convert_string_vector(vector: duckdb_vector, size: number): (Uint8Array | null)[];\n'.encode()) + diff --git a/lib/duckdb.d.ts b/lib/duckdb.d.ts new file mode 100644 index 00000000..8100c74c --- /dev/null +++ b/lib/duckdb.d.ts @@ -0,0 +1,268 @@ +// placeholder interfaces for pointer types +export interface pointer {} +export interface uint64_pointer extends pointer {} +export interface idx_pointer extends pointer {} +// bindings-defined types +export class out_string_wrapper {} +// generated types and functions +export enum duckdb_type { + DUCKDB_TYPE_INVALID = 0, + DUCKDB_TYPE_BOOLEAN = 1, + DUCKDB_TYPE_TINYINT = 2, + DUCKDB_TYPE_SMALLINT = 3, + DUCKDB_TYPE_INTEGER = 4, + DUCKDB_TYPE_BIGINT = 5, + DUCKDB_TYPE_UTINYINT = 6, + DUCKDB_TYPE_USMALLINT = 7, + DUCKDB_TYPE_UINTEGER = 8, + DUCKDB_TYPE_UBIGINT = 9, + DUCKDB_TYPE_FLOAT = 10, + DUCKDB_TYPE_DOUBLE = 11, + DUCKDB_TYPE_TIMESTAMP = 12, + DUCKDB_TYPE_DATE = 13, + DUCKDB_TYPE_TIME = 14, + DUCKDB_TYPE_INTERVAL = 15, + DUCKDB_TYPE_HUGEINT = 16, + DUCKDB_TYPE_VARCHAR = 17, + DUCKDB_TYPE_BLOB = 18, + DUCKDB_TYPE_DECIMAL = 19, + DUCKDB_TYPE_TIMESTAMP_S = 20, + DUCKDB_TYPE_TIMESTAMP_MS = 21, + DUCKDB_TYPE_TIMESTAMP_NS = 22, + DUCKDB_TYPE_ENUM = 23, + DUCKDB_TYPE_LIST = 24, + DUCKDB_TYPE_STRUCT = 25, + DUCKDB_TYPE_MAP = 26, + DUCKDB_TYPE_UUID = 27, + DUCKDB_TYPE_UNION = 28, + DUCKDB_TYPE_BIT = 29, +} +export class duckdb_date {} +export class duckdb_date_struct {} +export class duckdb_time {} +export class duckdb_time_struct {} +export class duckdb_timestamp {} +export class duckdb_timestamp_struct {} +export class duckdb_interval {} +export class duckdb_hugeint {} +export class duckdb_decimal {} +export class duckdb_string {} +export class duckdb_string_t {} +export class duckdb_blob {} +export class duckdb_list_entry {} +export class duckdb_column {} +export class duckdb_result {} +export class duckdb_database {} +export class duckdb_connection {} +export class duckdb_prepared_statement {} +export class duckdb_extracted_statements {} +export class duckdb_pending_result {} +export class duckdb_appender {} +export class duckdb_arrow {} +export class duckdb_arrow_stream {} +export class duckdb_config {} +export class duckdb_arrow_schema {} +export class duckdb_arrow_array {} +export class duckdb_logical_type {} +export class duckdb_data_chunk {} +export class duckdb_vector {} +export class duckdb_value {} +export enum duckdb_state { + DuckDBSuccess = 0, + DuckDBError = 1, +} +export enum duckdb_pending_state { + DUCKDB_PENDING_RESULT_READY = 0, + DUCKDB_PENDING_RESULT_NOT_READY = 1, + DUCKDB_PENDING_ERROR = 
2, + DUCKDB_PENDING_NO_TASKS_AVAILABLE = 3, +} +export function duckdb_open(path: string, out_database: duckdb_database): Promise; +export function duckdb_open_ext(path: string, out_database: duckdb_database, config: duckdb_config, out_error: out_string_wrapper): Promise; +export function duckdb_close(database: duckdb_database): Promise; +export function duckdb_connect(database: duckdb_database, out_connection: duckdb_connection): Promise; +export function duckdb_interrupt(connection: duckdb_connection): void; +export function duckdb_query_progress(connection: duckdb_connection): number; +export function duckdb_disconnect(connection: duckdb_connection): Promise; +export function duckdb_library_version(): string; +export function duckdb_create_config(out_config: duckdb_config): duckdb_state; +export function duckdb_config_count(): number; +export function duckdb_get_config_flag(index: number, out_name: out_string_wrapper, out_description: out_string_wrapper): duckdb_state; +export function duckdb_set_config(config: duckdb_config, name: string, option: string): duckdb_state; +export function duckdb_destroy_config(config: duckdb_config): void; +export function duckdb_query(connection: duckdb_connection, query: string, out_result: duckdb_result): Promise; +export function duckdb_destroy_result(result: duckdb_result): void; +export function duckdb_column_name(result: duckdb_result, col: number): string; +export function duckdb_column_type(result: duckdb_result, col: number): duckdb_type; +export function duckdb_column_logical_type(result: duckdb_result, col: number): duckdb_logical_type; +export function duckdb_column_count(result: duckdb_result): number; +export function duckdb_row_count(result: duckdb_result): number; +export function duckdb_rows_changed(result: duckdb_result): number; +export function duckdb_result_error(result: duckdb_result): string; +export function duckdb_result_get_chunk(result: duckdb_result, chunk_index: number): duckdb_data_chunk; +export function duckdb_result_is_streaming(result: duckdb_result): boolean; +export function duckdb_result_chunk_count(result: duckdb_result): number; +export function duckdb_malloc(size: number): pointer; +export function duckdb_free(ptr: pointer): void; +export function duckdb_vector_size(): number; +export function duckdb_string_is_inlined(string: duckdb_string_t): boolean; +export function duckdb_from_date(date: duckdb_date): duckdb_date_struct; +export function duckdb_to_date(date: duckdb_date_struct): duckdb_date; +export function duckdb_from_time(time: duckdb_time): duckdb_time_struct; +export function duckdb_to_time(time: duckdb_time_struct): duckdb_time; +export function duckdb_from_timestamp(ts: duckdb_timestamp): duckdb_timestamp_struct; +export function duckdb_to_timestamp(ts: duckdb_timestamp_struct): duckdb_timestamp; +export function duckdb_hugeint_to_double(val: duckdb_hugeint): number; +export function duckdb_double_to_hugeint(val: number): duckdb_hugeint; +export function duckdb_double_to_decimal(val: number, width: number, scale: number): duckdb_decimal; +export function duckdb_decimal_to_double(val: duckdb_decimal): number; +export function duckdb_prepare(connection: duckdb_connection, query: string, out_prepared_statement: duckdb_prepared_statement): Promise; +export function duckdb_destroy_prepare(prepared_statement: duckdb_prepared_statement): void; +export function duckdb_prepare_error(prepared_statement: duckdb_prepared_statement): string; +export function duckdb_nparams(prepared_statement: 
duckdb_prepared_statement): number; +export function duckdb_parameter_name(prepared_statement: duckdb_prepared_statement, index: number): string; +export function duckdb_param_type(prepared_statement: duckdb_prepared_statement, param_idx: number): duckdb_type; +export function duckdb_clear_bindings(prepared_statement: duckdb_prepared_statement): duckdb_state; +export function duckdb_bind_value(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_value): duckdb_state; +export function duckdb_bind_parameter_index(prepared_statement: duckdb_prepared_statement, param_idx_out: idx_pointer, name: string): duckdb_state; +export function duckdb_bind_boolean(prepared_statement: duckdb_prepared_statement, param_idx: number, val: boolean): duckdb_state; +export function duckdb_bind_int8(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_int16(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_int32(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_int64(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_hugeint(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_hugeint): duckdb_state; +export function duckdb_bind_decimal(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_decimal): duckdb_state; +export function duckdb_bind_uint8(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_uint16(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_uint32(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_uint64(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_float(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_double(prepared_statement: duckdb_prepared_statement, param_idx: number, val: number): duckdb_state; +export function duckdb_bind_date(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_date): duckdb_state; +export function duckdb_bind_time(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_time): duckdb_state; +export function duckdb_bind_timestamp(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_timestamp): duckdb_state; +export function duckdb_bind_interval(prepared_statement: duckdb_prepared_statement, param_idx: number, val: duckdb_interval): duckdb_state; +export function duckdb_bind_varchar(prepared_statement: duckdb_prepared_statement, param_idx: number, val: string): duckdb_state; +export function duckdb_bind_varchar_length(prepared_statement: duckdb_prepared_statement, param_idx: number, val: string, length: number): duckdb_state; +export function duckdb_bind_blob(prepared_statement: duckdb_prepared_statement, param_idx: number, data: pointer, length: number): duckdb_state; +export function duckdb_bind_null(prepared_statement: duckdb_prepared_statement, param_idx: number): duckdb_state; +export function duckdb_execute_prepared(prepared_statement: duckdb_prepared_statement, out_result: 
duckdb_result): Promise; +export function duckdb_extract_statements(connection: duckdb_connection, query: string, out_extracted_statements: duckdb_extracted_statements): Promise; +export function duckdb_prepare_extracted_statement(connection: duckdb_connection, extracted_statements: duckdb_extracted_statements, index: number, out_prepared_statement: duckdb_prepared_statement): Promise; +export function duckdb_extract_statements_error(extracted_statements: duckdb_extracted_statements): string; +export function duckdb_destroy_extracted(extracted_statements: duckdb_extracted_statements): void; +export function duckdb_pending_prepared(prepared_statement: duckdb_prepared_statement, out_result: duckdb_pending_result): duckdb_state; +export function duckdb_pending_prepared_streaming(prepared_statement: duckdb_prepared_statement, out_result: duckdb_pending_result): duckdb_state; +export function duckdb_destroy_pending(pending_result: duckdb_pending_result): void; +export function duckdb_pending_error(pending_result: duckdb_pending_result): string; +export function duckdb_pending_execute_task(pending_result: duckdb_pending_result): duckdb_pending_state; +export function duckdb_execute_pending(pending_result: duckdb_pending_result, out_result: duckdb_result): Promise; +export function duckdb_pending_execution_is_finished(pending_state: duckdb_pending_state): boolean; +export function duckdb_destroy_value(value: duckdb_value): void; +export function duckdb_create_varchar(text: string): duckdb_value; +export function duckdb_create_varchar_length(text: string, length: number): duckdb_value; +export function duckdb_create_int64(val: number): duckdb_value; +export function duckdb_get_varchar(value: duckdb_value): string; +export function duckdb_get_int64(value: duckdb_value): number; +export function duckdb_create_logical_type(type: duckdb_type): duckdb_logical_type; +export function duckdb_create_list_type(type: duckdb_logical_type): duckdb_logical_type; +export function duckdb_create_map_type(key_type: duckdb_logical_type, value_type: duckdb_logical_type): duckdb_logical_type; +export function duckdb_create_union_type(member_types: duckdb_logical_type, member_names: out_string_wrapper, member_count: number): duckdb_logical_type; +export function duckdb_create_struct_type(member_types: duckdb_logical_type, member_names: out_string_wrapper, member_count: number): duckdb_logical_type; +export function duckdb_create_decimal_type(width: number, scale: number): duckdb_logical_type; +export function duckdb_get_type_id(type: duckdb_logical_type): duckdb_type; +export function duckdb_decimal_width(type: duckdb_logical_type): number; +export function duckdb_decimal_scale(type: duckdb_logical_type): number; +export function duckdb_decimal_internal_type(type: duckdb_logical_type): duckdb_type; +export function duckdb_enum_internal_type(type: duckdb_logical_type): duckdb_type; +export function duckdb_enum_dictionary_size(type: duckdb_logical_type): number; +export function duckdb_enum_dictionary_value(type: duckdb_logical_type, index: number): string; +export function duckdb_list_type_child_type(type: duckdb_logical_type): duckdb_logical_type; +export function duckdb_map_type_key_type(type: duckdb_logical_type): duckdb_logical_type; +export function duckdb_map_type_value_type(type: duckdb_logical_type): duckdb_logical_type; +export function duckdb_struct_type_child_count(type: duckdb_logical_type): number; +export function duckdb_struct_type_child_name(type: duckdb_logical_type, index: number): string; +export 
function duckdb_struct_type_child_type(type: duckdb_logical_type, index: number): duckdb_logical_type; +export function duckdb_union_type_member_count(type: duckdb_logical_type): number; +export function duckdb_union_type_member_name(type: duckdb_logical_type, index: number): string; +export function duckdb_union_type_member_type(type: duckdb_logical_type, index: number): duckdb_logical_type; +export function duckdb_destroy_logical_type(type: duckdb_logical_type): void; +export function duckdb_create_data_chunk(types: duckdb_logical_type, column_count: number): duckdb_data_chunk; +export function duckdb_destroy_data_chunk(chunk: duckdb_data_chunk): void; +export function duckdb_data_chunk_reset(chunk: duckdb_data_chunk): void; +export function duckdb_data_chunk_get_column_count(chunk: duckdb_data_chunk): number; +export function duckdb_data_chunk_get_vector(chunk: duckdb_data_chunk, col_idx: number): duckdb_vector; +export function duckdb_data_chunk_get_size(chunk: duckdb_data_chunk): number; +export function duckdb_data_chunk_set_size(chunk: duckdb_data_chunk, size: number): void; +export function duckdb_vector_get_column_type(vector: duckdb_vector): duckdb_logical_type; +export function duckdb_vector_get_data(vector: duckdb_vector): pointer; +export function duckdb_vector_get_validity(vector: duckdb_vector): uint64_pointer; +export function duckdb_vector_assign_string_element(vector: duckdb_vector, index: number, str: string): void; +export function duckdb_vector_assign_string_element_len(vector: duckdb_vector, index: number, str: string, str_len: number): void; +export function duckdb_list_vector_get_child(vector: duckdb_vector): duckdb_vector; +export function duckdb_list_vector_get_size(vector: duckdb_vector): number; +export function duckdb_list_vector_set_size(vector: duckdb_vector, size: number): duckdb_state; +export function duckdb_list_vector_reserve(vector: duckdb_vector, required_capacity: number): duckdb_state; +export function duckdb_struct_vector_get_child(vector: duckdb_vector, index: number): duckdb_vector; +export function duckdb_validity_row_is_valid(validity: uint64_pointer, row: number): boolean; +export class duckdb_table_function {} +export class duckdb_bind_info {} +export class duckdb_init_info {} +export class duckdb_function_info {} +export type duckdb_delete_callback_t = (data: pointer) => void; +export function duckdb_bind_get_extra_info(info: duckdb_bind_info): pointer; +export function duckdb_bind_add_result_column(info: duckdb_bind_info, name: string, type: duckdb_logical_type): void; +export function duckdb_bind_get_parameter_count(info: duckdb_bind_info): number; +export function duckdb_bind_get_parameter(info: duckdb_bind_info, index: number): duckdb_value; +export function duckdb_bind_get_named_parameter(info: duckdb_bind_info, name: string): duckdb_value; +export function duckdb_bind_set_bind_data(info: duckdb_bind_info, bind_data: pointer, destroy: duckdb_delete_callback_t): void; +export function duckdb_bind_set_cardinality(info: duckdb_bind_info, cardinality: number, is_exact: boolean): void; +export function duckdb_bind_set_error(info: duckdb_bind_info, error: string): void; +export function duckdb_function_get_extra_info(info: duckdb_function_info): pointer; +export function duckdb_function_get_bind_data(info: duckdb_function_info): pointer; +export function duckdb_function_get_init_data(info: duckdb_function_info): pointer; +export function duckdb_function_get_local_init_data(info: duckdb_function_info): pointer; +export function 
duckdb_function_set_error(info: duckdb_function_info, error: string): void; +export class duckdb_replacement_scan_info {} +export function duckdb_appender_create(connection: duckdb_connection, schema: string, table: string, out_appender: duckdb_appender): Promise; +export function duckdb_appender_error(appender: duckdb_appender): string; +export function duckdb_appender_flush(appender: duckdb_appender): Promise; +export function duckdb_appender_close(appender: duckdb_appender): Promise; +export function duckdb_appender_destroy(appender: duckdb_appender): Promise; +export function duckdb_appender_begin_row(appender: duckdb_appender): duckdb_state; +export function duckdb_appender_end_row(appender: duckdb_appender): duckdb_state; +export function duckdb_append_bool(appender: duckdb_appender, value: boolean): duckdb_state; +export function duckdb_append_int8(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_int16(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_int32(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_int64(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_hugeint(appender: duckdb_appender, value: duckdb_hugeint): duckdb_state; +export function duckdb_append_uint8(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_uint16(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_uint32(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_uint64(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_float(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_double(appender: duckdb_appender, value: number): duckdb_state; +export function duckdb_append_date(appender: duckdb_appender, value: duckdb_date): duckdb_state; +export function duckdb_append_time(appender: duckdb_appender, value: duckdb_time): duckdb_state; +export function duckdb_append_timestamp(appender: duckdb_appender, value: duckdb_timestamp): duckdb_state; +export function duckdb_append_interval(appender: duckdb_appender, value: duckdb_interval): duckdb_state; +export function duckdb_append_varchar(appender: duckdb_appender, val: string): duckdb_state; +export function duckdb_append_varchar_length(appender: duckdb_appender, val: string, length: number): duckdb_state; +export function duckdb_append_blob(appender: duckdb_appender, data: pointer, length: number): duckdb_state; +export function duckdb_append_null(appender: duckdb_appender): duckdb_state; +export function duckdb_append_data_chunk(appender: duckdb_appender, chunk: duckdb_data_chunk): duckdb_state; +export class duckdb_task_state {} +export function duckdb_execute_tasks(database: duckdb_database, max_tasks: number): Promise; +export function duckdb_create_task_state(database: duckdb_database): duckdb_task_state; +export function duckdb_execute_tasks_state(state: duckdb_task_state): void; +export function duckdb_execute_n_tasks_state(state: duckdb_task_state, max_tasks: number): number; +export function duckdb_finish_execution(state: duckdb_task_state): void; +export function duckdb_task_state_is_finished(state: duckdb_task_state): boolean; +export function duckdb_destroy_task_state(state: duckdb_task_state): void; +export function duckdb_execution_is_finished(con: duckdb_connection): boolean; +export function 
duckdb_stream_fetch_chunk(result: duckdb_result): Promise; +// bindings-defined functions +export function copy_buffer(buffer: pointer, length: number): Uint8Array | null; +export function out_get_string(string_wrapper: out_string_wrapper): string; +export function convert_string_vector(vector: duckdb_vector, size: number): (Uint8Array | null)[]; diff --git a/package.json b/package.json index 5958b9c1..b7c2cac7 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ }, "scripts": { "install": "node-pre-gyp install --fallback-to-build", - "pretest": "node test/support/createdb.js", + "xpretest": "node test/support/createdb.js", "test": "mocha -R spec --timeout 480000 --expose-gc", "test-path": "mocha -R spec --timeout 480000 --expose-gc --exclude 'test/*.ts'", "pack": "node-pre-gyp package" diff --git a/src/duckdb_node_generated.cpp b/src/duckdb_node_generated.cpp index 525b6eea..feba87a5 100644 --- a/src/duckdb_node_generated.cpp +++ b/src/duckdb_node_generated.cpp @@ -1,7 +1,7 @@ // This file is generated by generate-wrapper.py, please do not edit -#include "function_wrappers.hpp" #include "duckdb.h" +#include "function_wrappers.hpp" static void RegisterGenerated(Napi::Env env, Napi::Object exports) { auto duckdb_type_enum = Napi::Object::New(env); @@ -547,11 +547,6 @@ static void RegisterGenerated(Napi::Env env, Napi::Object exports) { exports.Set(Napi::String::New(env, "duckdb_vector_get_validity"), Napi::Function::New>(env)); - exports.Set( - Napi::String::New(env, "duckdb_vector_ensure_validity_writable"), - Napi::Function::New< - duckdb_node::FunctionWrappers::FunctionWrapper1Void>( - env)); exports.Set(Napi::String::New(env, "duckdb_vector_assign_string_element"), Napi::Function::New>(env)); @@ -576,6 +571,11 @@ static void RegisterGenerated(Napi::Env env, Napi::Object exports) { Napi::String::New(env, "duckdb_struct_vector_get_child"), Napi::Function::New>(env)); + exports.Set( + Napi::String::New(env, "duckdb_validity_row_is_valid"), + Napi::Function::New< + duckdb_node::FunctionWrappers::FunctionWrapper2>( + env)); exports.Set(Napi::String::New(env, "duckdb_bind_get_extra_info"), Napi::Function::New>(env)); diff --git a/src/value_conversion.hpp b/src/value_conversion.hpp index cf0cd9d3..4af7aba9 100644 --- a/src/value_conversion.hpp +++ b/src/value_conversion.hpp @@ -101,7 +101,7 @@ class ValueConversion { template <> static Napi::Value ToJS(Napi::Env &env, uint64_t *val) { - return PointerHolder::NewAndSet(env, val); + return PointerHolder::NewAndSet(env, val); } template <> @@ -306,6 +306,11 @@ class ValueConversion { return *PointerHolder::FromInfo(info, offset); } + template <> + uint64_t *FromJS(const Napi::CallbackInfo &info, idx_t offset) { + return *PointerHolder::FromInfo(info, offset); + } + template <> idx_t FromJS(const Napi::CallbackInfo &info, idx_t offset) { return GetValue(info, offset).As().Int64Value(); diff --git a/test.js b/test.js index dd98e0f3..51bfa774 100644 --- a/test.js +++ b/test.js @@ -4,7 +4,7 @@ const duckdb_native = require('.'); console.log("DuckDB version:", duckdb_native.duckdb_library_version()); function convert_validity(vector, n) { - const res = new Uint8Array(n).fill(true); + const res = Array.from({ length: n }).fill(true); const validity_buf = duckdb_native.copy_buffer(duckdb_native.duckdb_vector_get_validity(vector), Math.ceil(n / 64) * 8); // this will be null if all rows are valid if (validity_buf == null) { @@ -12,7 +12,7 @@ function convert_validity(vector, n) { } const typed_validity_buf = new 
BigUint64Array(validity_buf.buffer); for (let row_idx = 0; row_idx < n; row_idx++) { - res[row_idx] = (typed_validity_buf[Math.floor(row_idx / 64)] & (1n << BigInt(row_idx % 64))) > 0; + res[row_idx] = (typed_validity_buf[Math.floor(row_idx / 64)] & (BigInt(1) << BigInt(row_idx % 64))) > 0; } return res; } @@ -21,10 +21,10 @@ function convert_primitive_vector(vector, n, array_type) { const validity = convert_validity(vector, n); const data_buf = duckdb_native.copy_buffer(duckdb_native.duckdb_vector_get_data(vector), array_type.BYTES_PER_ELEMENT * n); - const typed_data_arr = new array_type(data_buf.buffer); + const typed_data_arr = data_buf ? new array_type(data_buf.buffer) : null; const vector_data = new Array(n) for (let row_idx = 0; row_idx < n; row_idx++) { - vector_data[row_idx] = validity[row_idx] ? typed_data_arr[row_idx] : null; + vector_data[row_idx] = validity[row_idx] ? (typed_data_arr ? typed_data_arr[row_idx] : undefined) : null; } return vector_data; } @@ -91,12 +91,16 @@ function convert_vector(vector, n) { const list_buf = duckdb_native.copy_buffer(duckdb_native.duckdb_vector_get_data(vector), 128 * n); // two 64 bit numbers - typed_list_buf = new BigUint64Array(list_buf.buffer); + const typed_list_buf = list_buf ? new BigUint64Array(list_buf.buffer) : null; for (let row_idx = 0; row_idx < n; row_idx++) { - const offset = typed_list_buf[2 * row_idx]; - const len = typed_list_buf[2 * row_idx + 1]; - result[row_idx] = validity[row_idx] ? child.slice(Number(offset), Number(offset + len)) : null; + if (typed_list_buf) { + const offset = typed_list_buf[2 * row_idx]; + const len = typed_list_buf[2 * row_idx + 1]; + result[row_idx] = validity[row_idx] ? child.slice(Number(offset), Number(offset + len)) : null; + } else { + result[row_idx] = undefined; + } } return result; } @@ -110,7 +114,7 @@ function convert_vector(vector, n) { const validity = convert_validity(vector, n); // TODO handle whole NULL - const result = new Object(); + const result = {}; for (let child_idx = 0; child_idx < duckdb_native.duckdb_struct_type_child_count(type); child_idx++) { const child_name = duckdb_native.duckdb_struct_type_child_name(type, child_idx); result[child_name] = convert_vector(duckdb_native.duckdb_struct_vector_get_child(vector, child_idx), n); @@ -228,7 +232,7 @@ async function test() { // we want an incremental AND streaming query result const pending_result = new duckdb_native.duckdb_pending_result; - await duckdb_native.duckdb_pending_prepared_streaming(prepared_statement, pending_result); // TODO can this fail? + duckdb_native.duckdb_pending_prepared_streaming(prepared_statement, pending_result); // TODO can this fail? // pending query api, allows abandoning query processing between each call to pending_execute_task() const result = new duckdb_native.duckdb_result; diff --git a/test.ts b/test.ts new file mode 100644 index 00000000..dc7e4165 --- /dev/null +++ b/test.ts @@ -0,0 +1,307 @@ +import * as duckdb_native from '.'; + +// some warmup +console.log("DuckDB version:", duckdb_native.duckdb_library_version()); + +function convert_validity(vector: duckdb_native.duckdb_vector, n: number) { + const res: boolean[] = Array.from({ length: n }).fill(true); + const validity_buf = duckdb_native.copy_buffer(duckdb_native.duckdb_vector_get_validity(vector), + Math.ceil(n / 64) * 8); // this will be null if all rows are valid + if (validity_buf == null) { + return res; // TODO maybe return a singleton so we dont have to allocate? 
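The loop that follows decodes DuckDB's validity mask: one bit per row, packed 64 rows per uint64 word, with bit (row % 64) of word floor(row / 64) set when the row is valid. The same test, written as a standalone helper for reference (a sketch mirroring the code below, not part of the patch):

// Returns true when the given row's validity bit is set in the mask words.
function isRowValid(mask: BigUint64Array, row: number): boolean {
  const word = mask[Math.floor(row / 64)];      // 64 rows per uint64 word
  const bit = BigInt(1) << BigInt(row % 64);    // this row's bit within its word
  return (word & bit) !== BigInt(0);
}
// e.g. row 70 is bit 6 of word 1
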
+ } + const typed_validity_buf = new BigUint64Array(validity_buf.buffer); + for (let row_idx = 0; row_idx < n; row_idx++) { + res[row_idx] = (typed_validity_buf[Math.floor(row_idx / 64)] & (BigInt(1) << BigInt(row_idx % 64))) > 0; + } + return res; +} + +type ArrayType = + | BigInt64ArrayConstructor + | BigUint64ArrayConstructor + | Float32ArrayConstructor + | Float64ArrayConstructor + | Int8ArrayConstructor + | Int16ArrayConstructor + | Int32ArrayConstructor + | Uint8ArrayConstructor + | Uint16ArrayConstructor + | Uint32ArrayConstructor + ; + +function convert_primitive_vector(vector: duckdb_native.duckdb_vector, n: number, array_type: ArrayType) { + const validity = convert_validity(vector, n); + const data_buf = + duckdb_native.copy_buffer(duckdb_native.duckdb_vector_get_data(vector), array_type.BYTES_PER_ELEMENT * n); + const typed_data_arr = data_buf ? new array_type(data_buf.buffer) : null; + const vector_data = new Array(n) + for (let row_idx = 0; row_idx < n; row_idx++) { + vector_data[row_idx] = validity[row_idx] ? (typed_data_arr ? typed_data_arr[row_idx] : undefined) : null; + } + return vector_data; +} + +function convert_vector(vector: duckdb_native.duckdb_vector, n: number) { + const type = duckdb_native.duckdb_vector_get_column_type(vector); + const type_id = duckdb_native.duckdb_get_type_id(type); + + switch (type_id) { + case duckdb_native.duckdb_type.DUCKDB_TYPE_BIGINT: + return convert_primitive_vector(vector, n, BigInt64Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_BIT: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_BOOLEAN: + return convert_primitive_vector(vector, n, Uint8Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_DATE: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_DECIMAL: { + const decimal_type = duckdb_native.duckdb_decimal_internal_type(type); + switch (decimal_type) { + case duckdb_native.duckdb_type.DUCKDB_TYPE_TINYINT: + return convert_primitive_vector(vector, n, Int8Array); + case duckdb_native.duckdb_type.DUCKDB_TYPE_SMALLINT: + return convert_primitive_vector(vector, n, Int16Array); + case duckdb_native.duckdb_type.DUCKDB_TYPE_INTEGER: + return convert_primitive_vector(vector, n, Int32Array); + case duckdb_native.duckdb_type.DUCKDB_TYPE_BIGINT: + return convert_primitive_vector(vector, n, BigInt64Array); + case duckdb_native.duckdb_type.DUCKDB_TYPE_HUGEINT: + console.log('TODO HUGEINT'); + default: + console.log('unkown decimal internal type'); + } + return null; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_DOUBLE: + return convert_primitive_vector(vector, n, Float64Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_ENUM: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_FLOAT: + return convert_primitive_vector(vector, n, Float32Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_HUGEINT: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_INTEGER: + return convert_primitive_vector(vector, n, Int32Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_INTERVAL: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_LIST: { + const validity = convert_validity(vector, n); + + const result = Array(n); + const child = convert_vector(duckdb_native.duckdb_list_vector_get_child(vector), + duckdb_native.duckdb_list_vector_get_size(vector)); + + const list_buf = + duckdb_native.copy_buffer(duckdb_native.duckdb_vector_get_data(vector), 128 * n); // two 64 bit numbers + const typed_list_buf = list_buf ? 
new BigUint64Array(list_buf.buffer) : null; + + for (let row_idx = 0; row_idx < n; row_idx++) { + if (typed_list_buf) { + const offset = typed_list_buf[2 * row_idx]; + const len = typed_list_buf[2 * row_idx + 1]; + result[row_idx] = validity[row_idx] ? child.slice(Number(offset), Number(offset + len)) : null; + } else { + result[row_idx] = undefined; + } + } + return result; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_MAP: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_SMALLINT: + return convert_primitive_vector(vector, n, Int16Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_STRUCT: { + const validity = convert_validity(vector, n); + + // TODO handle whole NULL + const result: any = {}; + for (let child_idx = 0; child_idx < duckdb_native.duckdb_struct_type_child_count(type); child_idx++) { + const child_name = duckdb_native.duckdb_struct_type_child_name(type, child_idx); + result[child_name] = convert_vector(duckdb_native.duckdb_struct_vector_get_child(vector, child_idx), n); + } + result['__struct_validity'] = validity; // TODO this is uuugly + return result; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_TIME: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_TIMESTAMP: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_TIMESTAMP_MS: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_TIMESTAMP_NS: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_TIMESTAMP_S: { + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_TINYINT: + return convert_primitive_vector(vector, n, Int8Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_UBIGINT: + return convert_primitive_vector(vector, n, BigUint64Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_UINTEGER: + return convert_primitive_vector(vector, n, Uint32Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_UNION: + break; + + case duckdb_native.duckdb_type.DUCKDB_TYPE_USMALLINT: + return convert_primitive_vector(vector, n, Uint16Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_UTINYINT: + return convert_primitive_vector(vector, n, Uint8Array); + + case duckdb_native.duckdb_type.DUCKDB_TYPE_UUID: { + + break; + } + case duckdb_native.duckdb_type.DUCKDB_TYPE_BLOB: + return duckdb_native.convert_string_vector(vector, n); + case duckdb_native.duckdb_type.DUCKDB_TYPE_VARCHAR: { + const bytes = duckdb_native.convert_string_vector(vector, n); + const decoder = new TextDecoder('utf-8'); + const ret = new Array(n); + for (let i = 0; i < n; i++) { + ret[i] = bytes[i] == null ? 
null : decoder.decode(bytes[i]); + } + return ret; + } + default: + console.log('Unsupported type :/'); + return null; + } +} + +async function test() { + + const config = new duckdb_native.duckdb_config; + duckdb_native.duckdb_create_config(config); + + /* in case someone would want to list conf options + for (let conf_idx = 0; conf_idx < duckdb_native.duckdb_config_count(); conf_idx++) { + const conf_name = new duckdb_native.out_string_wrapper; + const conf_desc = new duckdb_native.out_string_wrapper; + + const status = duckdb_native.duckdb_get_config_flag(conf_idx, conf_name, conf_desc); + if (status == duckdb_native.duckdb_state.DuckDBSuccess) { + console.log(duckdb_native.out_get_string(conf_name), duckdb_native.out_get_string(conf_desc)); + } + } */ + + duckdb_native.duckdb_set_config(config, "threads", "1"); + + const db = new duckdb_native.duckdb_database; + const open_error = new duckdb_native.out_string_wrapper; + const open_status = await duckdb_native.duckdb_open_ext(":memory:", db, config, open_error); + + if (open_status != duckdb_native.duckdb_state.DuckDBSuccess) { + console.error("Failed to initialize database", duckdb_native.out_get_string(open_error)); + return; + } + + const con = new duckdb_native.duckdb_connection; + await duckdb_native.duckdb_connect(db, con); + + // create a statement and bind a value to it + const prepared_statement = new duckdb_native.duckdb_prepared_statement; + // const prepare_status = await duckdb_native.duckdb_prepare( + // con, + // "SELECT 42.0::DECIMAL, CASE WHEN range % 2 == 0 THEN [1, 2, 3] ELSE NULL END, CASE WHEN range % 2 == 0 + // THEN {'key1': 'string', 'key2': 1, 'key3': 12.345} ELSE NULL END , range::INTEGER, CASE WHEN range % 2 == 0 + // THEN range ELSE NULL END, CASE WHEN range % 2 == 0 THEN range::VARCHAR ELSE NULL END FROM range(?)", + // prepared_statement); + // + + const prepare_status = + await duckdb_native.duckdb_prepare(con, "SELECT range::DECIMAL(10,4) asdf FROM range(?)", prepared_statement); + + if (prepare_status != duckdb_native.duckdb_state.DuckDBSuccess) { + console.error(duckdb_native.duckdb_prepare_error(prepared_statement)); + duckdb_native.duckdb_destroy_prepare(prepared_statement); + return; + } + const bind_state = duckdb_native.duckdb_bind_int64(prepared_statement, 1, 4000); + if (bind_state != duckdb_native.duckdb_state.DuckDBSuccess) { + console.error("Failed to bind parameter"); + return; + } + + // we want an incremental AND streaming query result + const pending_result = new duckdb_native.duckdb_pending_result; + duckdb_native.duckdb_pending_prepared_streaming(prepared_statement, pending_result); // TODO can this fail? 
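Regarding the TODO above: the generated declaration types duckdb_pending_prepared_streaming as returning duckdb_state, so it can report failure. A possible check at that spot, mirroring the error handling already used for prepare and bind in this file (a sketch that reuses the in-scope prepared_statement and pending_result; whether duckdb_pending_error carries a useful message here is an assumption):

const pending_state = duckdb_native.duckdb_pending_prepared_streaming(prepared_statement, pending_result);
if (pending_state != duckdb_native.duckdb_state.DuckDBSuccess) {
  console.error(duckdb_native.duckdb_pending_error(pending_result));
  duckdb_native.duckdb_destroy_pending(pending_result);
  duckdb_native.duckdb_destroy_prepare(prepared_statement);
  return;
}
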
+ + // pending query api, allows abandoning query processing between each call to pending_execute_task() + const result = new duckdb_native.duckdb_result; + var continue_execute = true; + while (continue_execute) { + const pending_status = await duckdb_native.duckdb_pending_execute_task(pending_result); + + switch (pending_status) { + case duckdb_native.duckdb_pending_state.DUCKDB_PENDING_RESULT_NOT_READY: + continue; + case duckdb_native.duckdb_pending_state.DUCKDB_PENDING_RESULT_READY: + await duckdb_native.duckdb_execute_pending(pending_result, result); + continue_execute = false; + break; + case duckdb_native.duckdb_pending_state.DUCKDB_PENDING_ERROR: + console.error(duckdb_native.duckdb_pending_error(pending_result)); // TODO this seems broken + return; + } + } + // can clean this stuff up already + duckdb_native.duckdb_destroy_pending(pending_result); + duckdb_native.duckdb_destroy_prepare(prepared_statement); + + if (!duckdb_native.duckdb_result_is_streaming(result)) { + // TODO: this should also working for materialized result sets! + return; + } + + for (let col_idx = 0; col_idx < duckdb_native.duckdb_column_count(result); col_idx++) { + const colname = duckdb_native.duckdb_column_name(result, col_idx); + console.log(colname, ':', duckdb_native.duckdb_column_type(result, col_idx)); + } + + // now consume result set stream + while (true) { + const chunk = await duckdb_native.duckdb_stream_fetch_chunk(result); + + const n = duckdb_native.duckdb_data_chunk_get_size(chunk); + if (n == 0) { // empty chunk means end of stream + break; + } + + // loop over columns and interpret vector bytes + for (let col_idx = 0; col_idx < duckdb_native.duckdb_data_chunk_get_column_count(chunk); col_idx++) { + console.log(convert_vector(duckdb_native.duckdb_data_chunk_get_vector(chunk, col_idx), n)); + } + + duckdb_native.duckdb_destroy_data_chunk(chunk); + } + + // clean up again + duckdb_native.duckdb_destroy_result(result); + + duckdb_native.duckdb_disconnect(con); + duckdb_native.duckdb_close(db); + duckdb_native.duckdb_destroy_config(config); +} + +test(); diff --git a/test_list.ts b/test_list.ts new file mode 100644 index 00000000..3e94fec3 --- /dev/null +++ b/test_list.ts @@ -0,0 +1,135 @@ +import * as ddb from '.'; + +async function test() { + try { + const db = new ddb.duckdb_database; + const open_state = await ddb.duckdb_open(':memory:', db); + if (open_state != ddb.duckdb_state.DuckDBSuccess) { + throw new Error('Failed to open'); + } + console.log('open successful'); + + const con = new ddb.duckdb_connection; + const connect_state = await ddb.duckdb_connect(db, con); + if (connect_state != ddb.duckdb_state.DuckDBSuccess) { + throw new Error('Failed to connect'); + } + console.log('connect succesful'); + + const sql = 'select * from (values ([[100,101,102],[200,201,202,203,204]], 42), ([[300,301],NULL,[500]], 17), (NULL, NULL), ([NULL,[NULL]], -123)) as t(lst,num)'; + console.log('query:', sql); + + const result = new ddb.duckdb_result; + const query_state = await ddb.duckdb_query(con, sql, result); + if (query_state != ddb.duckdb_state.DuckDBSuccess) { + throw new Error('Failed to query'); + } + console.log('query successful'); + + console.log('column count:', ddb.duckdb_column_count(result)); + console.log('column 0 name:', ddb.duckdb_column_name(result, 0)); + console.log('column 1 name:', ddb.duckdb_column_name(result, 1)); + const rowCount = ddb.duckdb_row_count(result); + console.log('row count:', rowCount); + + const chunk0 = ddb.duckdb_result_get_chunk(result, 0); + 
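The reads below walk a LIST vector by hand: each row owns a pair of uint64 values (offset, length) indexing into a flattened child vector. As a compact reference for what those loops compute, a hypothetical helper (assumes the BigUint64Array of entries and an already-decoded child array; not part of the patch):

// entries[2 * row] is the start offset into the child data, entries[2 * row + 1] the element count.
function listRow<T>(entries: BigUint64Array, child: readonly T[], row: number): T[] {
  const offset = Number(entries[2 * row]);
  const length = Number(entries[2 * row + 1]);
  return child.slice(offset, offset + length);
}
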
+    console.log('chunk column count:', ddb.duckdb_data_chunk_get_column_count(chunk0));
+    console.log('chunk size:', ddb.duckdb_data_chunk_get_size(chunk0)); // chunk size = row count
+
+    const vector0_0 = ddb.duckdb_data_chunk_get_vector(chunk0, 0);
+
+    const logical_type_0_0 = ddb.duckdb_vector_get_column_type(vector0_0);
+    console.log('type id chunk 0, vector 0:', ddb.duckdb_get_type_id(logical_type_0_0)); // 24 = LIST
+    const root_list_child_type = ddb.duckdb_list_type_child_type(logical_type_0_0);
+    console.log('root list child type id:', ddb.duckdb_get_type_id(root_list_child_type)); // 24 = LIST
+    const nested_list_child_type = ddb.duckdb_list_type_child_type(root_list_child_type);
+    console.log('nested list child type id:', ddb.duckdb_get_type_id(nested_list_child_type)); // 4 = INTEGER
+
+    const vector0_1 = ddb.duckdb_data_chunk_get_vector(chunk0, 1);
+
+    const logical_type_0_1 = ddb.duckdb_vector_get_column_type(vector0_1);
+    console.log('type id chunk 0, vector 1:', ddb.duckdb_get_type_id(logical_type_0_1)); // 4 = INTEGER
+
+    const vector0_0_data_pointer = ddb.duckdb_vector_get_data(vector0_0);
+    const vector0_0_data = ddb.copy_buffer(vector0_0_data_pointer, rowCount * 8 * 2); // 16 bytes per list entry: uint64 offset + uint64 length
+    if (!vector0_0_data) {
+      throw new Error('vector0_0_data is null');
+    }
+    const root_list_entry_array = new BigUint64Array(vector0_0_data.buffer, vector0_0_data.byteOffset, rowCount * 2); // (offset, length) pairs into the child vector
+    console.log('root list entry array: ', root_list_entry_array);
+
+    const root_list_validity_pointer = ddb.duckdb_vector_get_validity(vector0_0);
+    const root_list_validity_data = ddb.copy_buffer(root_list_validity_pointer, Math.ceil(rowCount / 64) * 8);
+    if (!root_list_validity_data) {
+      console.log('root list validity is NULL');
+    } else {
+      console.log('root list validity:', root_list_validity_data);
+    }
+    for (let i = 0; i < rowCount; i++) {
+      console.log('root list item valid (duckdb_validity_row_is_valid):', i, ddb.duckdb_validity_row_is_valid(root_list_validity_pointer, i));
+      if (root_list_validity_data) {
+        console.log('root list item valid (js bit math):', i, !!(root_list_validity_data[i >> 3] & (1 << (i % 8))))
+      }
+    }
+
+    const root_list_child_vector = ddb.duckdb_list_vector_get_child(vector0_0);
+    const root_list_child_vector_size = ddb.duckdb_list_vector_get_size(vector0_0);
+    console.log('child vector size:', root_list_child_vector_size);
+
+    const child_vector_data_pointer = ddb.duckdb_vector_get_data(root_list_child_vector);
+    const child_vector_data = ddb.copy_buffer(child_vector_data_pointer, root_list_child_vector_size * 8 * 2); // 16 bytes per nested list entry
+    if (!child_vector_data) {
+      throw new Error('child_vector_data is null');
+    }
+    // console.log('child vector data:', child_vector_data);
+    const child_list_entry_array = new BigUint64Array(child_vector_data.buffer, child_vector_data.byteOffset, root_list_child_vector_size * 2);
+    console.log('child vector entry array:', child_list_entry_array);
+
+    const child_vector_validity_pointer = ddb.duckdb_vector_get_validity(root_list_child_vector);
+    const child_vector_validity_data = ddb.copy_buffer(child_vector_validity_pointer, Math.ceil(root_list_child_vector_size / 64) * 8);
+    if (!child_vector_validity_data) {
+      console.log('child vector validity is NULL');
+    } else {
+      console.log('child vector validity:', child_vector_validity_data);
+    }
+    for (let i = 0; i < root_list_child_vector_size; i++) {
+      console.log('child vector item valid (duckdb_validity_row_is_valid):', i, ddb.duckdb_validity_row_is_valid(child_vector_validity_pointer, i));
+      if (child_vector_validity_data) {
+        console.log('child vector item valid (js bit math):', i, !!(child_vector_validity_data[i >> 3] & (1 << (i % 8))))
+      }
+    }
+
+    const child_vector_child_vector = ddb.duckdb_list_vector_get_child(root_list_child_vector);
+    const child_vector_child_vector_size = ddb.duckdb_list_vector_get_size(root_list_child_vector);
+    console.log('child vector child vector size:', child_vector_child_vector_size);
+
+    const child_vector_child_vector_data_pointer = ddb.duckdb_vector_get_data(child_vector_child_vector);
+    const child_vector_child_vector_data = ddb.copy_buffer(child_vector_child_vector_data_pointer, child_vector_child_vector_size * 4);
+    if (!child_vector_child_vector_data) {
+      throw new Error('child_vector_child_vector_data is null');
+    }
+    // console.log('child vector child vector data:', child_vector_child_vector_data);
+    const child_vector_child_vector_integers = new Int32Array(child_vector_child_vector_data.buffer, child_vector_child_vector_data.byteOffset, child_vector_child_vector_size);
+    console.log('child vector child vector integers:', child_vector_child_vector_integers);
+
+    const child_vector_child_vector_validity_pointer = ddb.duckdb_vector_get_validity(child_vector_child_vector);
+    const child_vector_child_vector_validity_data = ddb.copy_buffer(child_vector_child_vector_validity_pointer, Math.ceil(child_vector_child_vector_size / 64) * 8);
+    if (!child_vector_child_vector_validity_data) {
+      console.log('child vector child vector validity is NULL');
+    } else {
+      console.log('child vector child vector validity:', child_vector_child_vector_validity_data);
+    }
+    for (let i = 0; i < child_vector_child_vector_size; i++) {
+      console.log('child vector child vector item valid (duckdb_validity_row_is_valid):', i, ddb.duckdb_validity_row_is_valid(child_vector_child_vector_validity_pointer, i));
+      if (child_vector_child_vector_validity_data) {
+        console.log('child vector child vector item valid (js bit math):', i, !!(child_vector_child_vector_validity_data[i >> 3] & (1 << (i % 8))))
+      }
+    }
+
+
+  } catch (e) {
+    console.error(e);
+  }
+}
+
+test();
diff --git a/test_union.ts b/test_union.ts
new file mode 100644
index 00000000..f1bb36ee
--- /dev/null
+++ b/test_union.ts
@@ -0,0 +1,78 @@
+import * as ddb from '.';
+
+async function test() {
+  try {
+    const db = new ddb.duckdb_database;
+    const open_state = await ddb.duckdb_open(':memory:', db);
+    if (open_state != ddb.duckdb_state.DuckDBSuccess) {
+      throw new Error('Failed to open');
+    }
+    console.log('open successful');
+
+    const con = new ddb.duckdb_connection;
+    const connect_state = await ddb.duckdb_connect(db, con);
+    if (connect_state != ddb.duckdb_state.DuckDBSuccess) {
+      throw new Error('Failed to connect');
+    }
+    console.log('connect successful');
+
+    const create_result = new ddb.duckdb_result;
+    const create_query_state = await ddb.duckdb_query(con, `create table tbl1(u union(num int, flt float, str varchar))`, create_result);
+    if (create_query_state != ddb.duckdb_state.DuckDBSuccess) {
+      throw new Error('Failed to create: ' + ddb.duckdb_result_error(create_result));
+    }
+    console.log('create successful');
+
+    const insert_result = new ddb.duckdb_result;
+    const insert_query_state = await ddb.duckdb_query(con, `insert into tbl1 values ('abc'), (123), (3.14)`, insert_result);
+    if (insert_query_state != ddb.duckdb_state.DuckDBSuccess) {
+      throw new Error('Failed to insert: ' + ddb.duckdb_result_error(insert_result));
+    }
+    console.log('insert successful');
+
+    const select_result = new ddb.duckdb_result;
+    const select_query_state = await ddb.duckdb_query(con, `select * from tbl1`, select_result);
+    if (select_query_state != ddb.duckdb_state.DuckDBSuccess) {
+      throw new Error('Failed to query: ' + ddb.duckdb_result_error(select_result));
+    }
+    console.log('select successful');
+
+    console.log('column count:', ddb.duckdb_column_count(select_result));
+    console.log('column 0 name:', ddb.duckdb_column_name(select_result, 0));
+    const rowCount = ddb.duckdb_row_count(select_result);
+    console.log('row count:', rowCount);
+
+    const chunk0 = ddb.duckdb_result_get_chunk(select_result, 0);
+    console.log('chunk column count:', ddb.duckdb_data_chunk_get_column_count(chunk0));
+    console.log('chunk size:', ddb.duckdb_data_chunk_get_size(chunk0)); // chunk size = row count
+
+    const vector0_0 = ddb.duckdb_data_chunk_get_vector(chunk0, 0);
+    const logical_type_vector0 = ddb.duckdb_vector_get_column_type(vector0_0);
+    console.log('type id vector 0 0:', ddb.duckdb_get_type_id(logical_type_vector0)); // 28 = UNION
+    const logical_type_vector0_count = ddb.duckdb_struct_type_child_count(logical_type_vector0);
+    console.log('vector 0 0 struct type child count:', logical_type_vector0_count);
+    for (let i = 0; i < logical_type_vector0_count; i++) {
+      const child_name = ddb.duckdb_struct_type_child_name(logical_type_vector0, i);
+      console.log('vector 0 0 struct type child name:', i, child_name);
+      const child_type = ddb.duckdb_struct_type_child_type(logical_type_vector0, i);
+      console.log('vector 0 0 struct type child type id:', i, ddb.duckdb_get_type_id(child_type)); // 0: 6 = UTINYINT
+    }
+
+    for (let i = 0; i < logical_type_vector0_count; i++) {
+      const child = ddb.duckdb_struct_vector_get_child(vector0_0, i);
+      const logical_type_child = ddb.duckdb_vector_get_column_type(child);
+      console.log('type id child:', i, ddb.duckdb_get_type_id(logical_type_child)); // 0: 6 = UTINYINT
+    }
+    const tag_data_pointer = ddb.duckdb_vector_get_data(ddb.duckdb_struct_vector_get_child(vector0_0, 0));
+    const tag_data = ddb.copy_buffer(tag_data_pointer, 3);
+    console.log('tag data:', tag_data);
+    // type id 4 = INTEGER
+    // type id 10 = FLOAT
+    // type id 17 = VARCHAR
+
+  } catch (e) {
+    console.error(e);
+  }
+}
+
+test();
diff --git a/tsconfig.json b/tsconfig.json
index d152d181..f8c37b02 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -8,6 +8,6 @@
     "outDir": "./dist",
     "types": ["node", "mocha"]
   },
-  "include": ["test/**/*"],
+  "include": ["test/**/*", "test.ts"],
   "exclude": ["node_modules"]
 }
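For reference: the validity-bitmask checks repeated inline in test_list.ts and test_union.ts above could be factored into one small helper. A minimal sketch (hypothetical helper, not part of this diff), assuming the mask bytes were copied out with ddb.copy_buffer as in the tests:

// Returns whether row `rowIndex` is valid (non-NULL) given a copied validity mask.
// DuckDB validity masks store one bit per row, least-significant bit first within
// each byte; a missing (null) mask means every row is valid.
function rowIsValid(validityBytes: Uint8Array | null, rowIndex: number): boolean {
  if (!validityBytes) {
    return true;
  }
  return (validityBytes[rowIndex >> 3] & (1 << (rowIndex % 8))) !== 0;
}

// Usage, mirroring the loops above: rowIsValid(root_list_validity_data, i)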