diff --git a/CHANGELOG.md b/CHANGELOG.md index 3325bc6..362190f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ -## [Unreleased] +# Changelog -## [0.1.0] - 2022-02-21 +## [0.2.0] -- Initial release +### Added + +- Complete basic ActiveRecord adapter functionality +- Test structure with unit, integration, and functional tests +- Modernize parameter binding using DuckDB's native parameter binding +- Support for migrations, associations, and validations + +## [0.1.0] + +### Added +- Initial project structure +- Basic gem setup diff --git a/Rakefile b/Rakefile index 022cf27..d238248 100644 --- a/Rakefile +++ b/Rakefile @@ -11,7 +11,12 @@ Rake::TestTask.new(:test) do |t| t.verbose = true end -require "rubocop/rake_task" - -task default: %i[test rubocop] +begin + require "rubocop/rake_task" + RuboCop::RakeTask.new + task default: %i[test rubocop] +rescue LoadError + # RuboCop not available, skip it + task default: %i[test] +end diff --git a/activerecord-duckdb-adapter.gemspec b/activerecord-duckdb-adapter.gemspec index d413286..9646d0f 100644 --- a/activerecord-duckdb-adapter.gemspec +++ b/activerecord-duckdb-adapter.gemspec @@ -5,31 +5,38 @@ require_relative "lib/activerecord_duckdb_adapter/version" Gem::Specification.new do |spec| spec.name = "activerecord-duckdb-adapter" spec.version = ActiveRecordDuckdbAdapter::VERSION - spec.authors = ["okadakk"] - spec.email = ["k.suke.jp1990@gmail.com"] + spec.authors = ["okadakk", "Eddie A Tejeda"] + spec.email = ["k.suke.jp1990@gmail.com", "eddie.tejeda@gmail.com"] - spec.summary = "https://github.com" - spec.description = "https://github.com" - spec.homepage = "https://github.com" + spec.summary = "ActiveRecord adapter for DuckDB database" + spec.description = "A Ruby gem that provides an ActiveRecord adapter for DuckDB, enabling Ruby and Rails applications to use DuckDB as their database backend." + spec.homepage = "https://github.com/red-data-tools/activerecord-duckdb-adapter" spec.license = "MIT" - spec.required_ruby_version = ">= 2.4.0" + spec.required_ruby_version = ">= 3.1.0" - spec.metadata["allowed_push_host"] = "'https://mygemserver.com'" + spec.metadata = { + "bug_tracker_uri" => "https://github.com/red-data-tools/activerecord-duckdb-adapter/issues", + "changelog_uri" => "https://github.com/red-data-tools/activerecord-duckdb-adapter/blob/main/CHANGELOG.md", + "source_code_uri" => "https://github.com/red-data-tools/activerecord-duckdb-adapter", + "rubygems_mfa_required" => "true" + } - spec.metadata["homepage_uri"] = spec.homepage - spec.metadata["source_code_uri"] = "https://github.com" - spec.metadata["changelog_uri"] = "https://github.com" - - # Specify which files should be added to the gem when it is released. - # The `git ls-files -z` loads the files in the RubyGem that have been added into git. - spec.files = Dir.chdir(File.expand_path(__dir__)) do - `git ls-files -z`.split("\x0").reject { |f| f.match(%r{\A(?:test|spec|features)/}) } - end - spec.bindir = "exe" - spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) } + # Specify files to include in the gem + spec.files = Dir[ + "lib/**/*", + "README.md", + "LICENSE.txt", + "CHANGELOG.md" + ].select { |f| File.file?(f) } + spec.require_paths = ["lib"] - # Uncomment to register a new dependency of your gem - spec.add_dependency('activerecord') - spec.add_dependency('duckdb') + # Development dependencies with bounded versions + spec.add_development_dependency "bundler", "~> 2.0" + spec.add_development_dependency "rake", "~> 13.0" + spec.add_development_dependency "rspec", "~> 3.13" + + # Runtime dependencies with bounded versions + spec.add_dependency "activerecord", "~> 7.1" + spec.add_dependency "duckdb", "~> 1.1" end diff --git a/bin/console b/bin/console index 6b7d8b8..34ab8f9 100755 --- a/bin/console +++ b/bin/console @@ -2,11 +2,77 @@ # frozen_string_literal: true require "bundler/setup" -require "activerecord/duckdb/adapter" +require "activerecord-duckdb-adapter" +require "active_support/logger" +require "yaml" # You can add fixtures and/or initialization code here to make experimenting # with your gem easier. You can also use a different console, if you like. +puts "Setting up ActiveRecord DuckDB Adapter console..." + +# Setup database connection +ActiveRecord::Base.logger = ActiveSupport::Logger.new(STDOUT, level: Logger::INFO) +ActiveRecord::Base.configurations = { + 'duckdb' => { adapter: 'duckdb' } +} +ActiveRecord::Base.establish_connection :duckdb + +puts "✓ Database connection established" + +# Load the schema +load File.expand_path("../../test/schema/schema.rb", __FILE__) +puts "✓ Database schema loaded" + +# Load the models +require File.expand_path("../../test/models/author.rb", __FILE__) +require File.expand_path("../../test/models/post.rb", __FILE__) +puts "✓ Models loaded (Author, Post)" + +# Set up base directory for file paths +BASE_DIR = File.expand_path("../../", __FILE__) + +# Helper method to load fixtures +def load_fixtures + # Clear existing data + Post.delete_all + Author.delete_all + + # Load authors from YAML + authors_data = YAML.load_file(File.join(BASE_DIR, "test/fixtures/authors.yml")) + authors_data.each do |key, attrs| + Author.create!(attrs) + end + + # Load posts from YAML + posts_data = YAML.load_file(File.join(BASE_DIR, "test/fixtures/posts.yml")) + posts_data.each do |key, attrs| + # Skip posts with author_id: 0 as they don't have valid authors + next if attrs['author_id'] == 0 + Post.create!(attrs) + end + + puts "✓ Fixtures loaded: #{Author.count} authors, #{Post.count} posts" +end + +# Load the fixtures +load_fixtures + +puts "\n" + "="*60 +puts "ActiveRecord DuckDB Adapter Console Ready!" +puts "="*60 +puts "\nAvailable models:" +puts " - Author (#{Author.count} records)" +puts " - Post (#{Post.count} records)" +puts "\nExample usage:" +puts " Author.all" +puts " Post.includes(:author).all" +puts " Author.first.posts" +puts " Post.where(enabled: true)" +puts "\nHelper methods:" +puts " load_fixtures # Reload all fixture data" +puts "\n" + "="*60 + # (If you use this, don't forget to add pry to your Gemfile!) # require "pry" # Pry.start diff --git a/lib/active_record/connection_adapters/duckdb/database_statements.rb b/lib/active_record/connection_adapters/duckdb/database_statements.rb index be95a13..21228a0 100644 --- a/lib/active_record/connection_adapters/duckdb/database_statements.rb +++ b/lib/active_record/connection_adapters/duckdb/database_statements.rb @@ -3,40 +3,289 @@ module ActiveRecord module ConnectionAdapters module Duckdb - module DatabaseStatements # :nodoc: - def write_query?(sql) # :nodoc: - false + module DatabaseStatements + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] sql SQL to execute + # @param [String, nil] name Query name for logging + # @param [Boolean] allow_retry Whether to allow retry on failure + # @return [Object] Query result + def execute(sql, name = nil, allow_retry: false) + internal_execute(sql, name, allow_retry: allow_retry) + end + + # @note internal execution wrapper for DuckDB + # @param [String] sql SQL to execute + # @param [String] name Query name for logging + # @param [Array] binds Bind parameters + # @param [Boolean] prepare Whether to prepare statement + # @param [Boolean] async Whether to execute asynchronously + # @param [Boolean] allow_retry Whether to allow retry on failure + # @param [Boolean] materialize_transactions Whether to materialize transactions + # @return [Object] Query result + def internal_execute(sql, name = "SQL", binds = [], prepare: false, async: false, allow_retry: false, materialize_transactions: true, &block) + raw_execute(sql, name, binds, prepare: prepare, async: async, allow_retry: allow_retry, materialize_transactions: materialize_transactions, &block) end - def execute(sql, name = nil) # :nodoc: - sql = transform_query(sql) + # @override + # @note Implements AbstractAdapter interface method - These methods need to return integers for update_all and delete_all + # @param [Object] arel Arel object or SQL string + # @param [String, nil] name Query name for logging + # @param [Array] binds Bind parameters + # @return [Integer] Number of affected rows + def update(arel, name = nil, binds = []) + sql, binds = to_sql_and_binds(arel, binds) + result = internal_execute(sql, name, binds) + extract_row_count(result, sql) + end + + # @override + # @note Implements AbstractAdapter interface method - These methods need to return integers for update_all and delete_all + # @param [Object] arel Arel object or SQL string + # @param [String, nil] name Query name for logging + # @param [Array] binds Bind parameters + # @return [Integer] Number of affected rows + def delete(arel, name = nil, binds = []) + sql, binds = to_sql_and_binds(arel, binds) + result = internal_execute(sql, name, binds) + extract_row_count(result, sql) + end - log(sql, name) do - ActiveSupport::Dependencies.interlock.permit_concurrent_loads do - @connection.query(sql) + # @override + # @note Implements AbstractAdapter interface method + # @param [String] sql SQL to execute + # @param [String] name Query name for logging + # @param [Array] binds Bind parameters + # @param [Boolean] prepare Whether to prepare statement + # @param [Boolean] async Whether to execute asynchronously + # @param [Boolean] allow_retry Whether to allow retry on failure + # @param [Boolean] materialize_transactions Whether to materialize transactions + # @return [ActiveRecord::Result] Query result as ActiveRecord::Result + def internal_exec_query(sql, name = "SQL", binds = [], prepare: false, async: false, allow_retry: false, materialize_transactions: true) + result = internal_execute(sql, name, binds, prepare: prepare, async: async, allow_retry: allow_retry, materialize_transactions: materialize_transactions) + + # Convert DuckDB result to ActiveRecord::Result + raw_cols = result.columns || [] + cols = raw_cols.map { |col| col.respond_to?(:name) ? col.name : col.to_s } + rows = result.to_a || [] + + ActiveRecord::Result.new(cols, rows) + end + + # @note raw execution for DuckDB + # @param [String] sql SQL to execute + # @param [String, nil] name Query name for logging + # @param [Array] binds Bind parameters + # @param [Boolean] prepare Whether to prepare statement + # @param [Boolean] async Whether to execute asynchronously + # @param [Boolean] allow_retry Whether to allow retry on failure + # @param [Boolean] materialize_transactions Whether to materialize transactions + # @param [Boolean] batch Whether to execute in batch mode + # @return [Object] Query result + def raw_execute(sql, name = nil, binds = [], prepare: false, async: false, allow_retry: false, materialize_transactions: true, batch: false) + type_casted_binds = type_casted_binds(binds) + log(sql, name, binds, type_casted_binds, async: async) do |notification_payload| + with_raw_connection(allow_retry: allow_retry, materialize_transactions: materialize_transactions) do |conn| + perform_query(conn, sql, binds, type_casted_binds, prepare: prepare, notification_payload: notification_payload, batch: batch) end end end - def exec_query(sql, name = nil, binds = [], prepare: false, async: false) # :nodoc: - result = execute_and_clear(sql, name, binds, prepare: prepare, async: async) + # @note DuckDB-specific query execution + # @param [Object] raw_connection Raw database connection + # @param [String] sql SQL to execute + # @param [Array] binds Bind parameters + # @param [Array] type_casted_binds Type-casted bind parameters + # @param [Boolean] prepare Whether to prepare statement + # @param [Object] notification_payload Notification payload for logging + # @param [Boolean] batch Whether to execute in batch mode + # @return [Object] Query result + def perform_query(raw_connection, sql, binds, type_casted_binds, prepare:, notification_payload:, batch: false) + # Use DuckDB's native parameter binding - clean and secure + bind_values = extract_bind_values(type_casted_binds, binds) + + if bind_values&.any? + @raw_connection.query(sql, *bind_values) + else + @raw_connection.query(sql) + end + end - # TODO: https://github.com/suketa/ruby-duckdb/issues/168 - # build_result(columns: result.columns, rows: result.to_a) - if result.to_a.first&.size == 1 - build_result(columns: ['count'], rows: result.to_a) - elsif result.to_a.first&.size == 2 - build_result(columns: ['id', 'name'], rows: result.to_a) + # @override + # @note Implements AbstractAdapter interface method + # @param [String] sql SQL to execute + # @param [String, nil] name Query name for logging + # @return [Object] Query result + def query(sql, name = nil) + result = internal_execute(sql, name) + result + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] sql SQL to explain + # @return [String] Pretty-printed explanation + def explain(sql) + result = internal_exec_query(sql, "EXPLAIN") + Duckdb::ExplainPrettyPrinter.new.pp(result) + end + + # @override + # @note Implements AbstractAdapter interface method - Executes an INSERT statement and returns the ID of the newly inserted record + # @param [String] sql INSERT SQL to execute + # @param [String, nil] name Query name for logging + # @param [Array] binds Bind parameters + # @param [String, nil] pk Primary key column name + # @param [String, nil] sequence_name Sequence name for auto-increment + # @param [String, nil] returning RETURNING clause + # @return [ActiveRecord::Result] Result containing inserted ID + def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil, returning: nil) + if pk && supports_insert_returning? + # Use INSERT...RETURNING to get the inserted ID + returning_sql = sql.sub(/\bINSERT\b/i, "INSERT").concat(" RETURNING #{quote_column_name(pk)}") + internal_exec_query(returning_sql, name, binds) else - build_result(columns: ['id', 'author', 'title', 'body', 'count'], rows: result.to_a) + # Regular insert - return result from internal_execute + internal_execute(sql, name, binds) + # Return an empty result since we don't have the ID + ActiveRecord::Result.new([], []) end end - def exec_delete(sql, name = nil, binds = []) # :nodoc: - result = execute_and_clear(sql, name, binds) - result.rows_changed + private + + # @note extract row count from DuckDB result + # @param [Object] result Query result + # @param [String] sql Original SQL query + # @return [Integer] Number of affected rows + def extract_row_count(result, sql) + if result.respond_to?(:to_a) + rows = result.to_a + if rows.length == 1 && rows[0].length == 1 + count = rows[0][0] + return count.is_a?(Integer) ? count : count.to_i + end + end + 0 end - alias :exec_update :exec_delete + + # @note convert Arel to SQL string + # @param [Object] arel Arel object or SQL string + # @param [Array] binds Bind parameters (unused) + # @return [String] SQL string + def to_sql(arel, binds = []) + if arel.respond_to?(:to_sql) + arel.to_sql + else + arel.to_s + end + end + + # @note Convert Arel to SQL and extract bind parameters + # @param [Object] arel_or_sql_string Arel object or SQL string + # @param [Array] binds Bind parameters + # @param [Array] args Additional arguments + # @return [Array] Array containing SQL string and bind parameters + def to_sql_and_binds(arel_or_sql_string, binds = [], *args) + # For simple cases, delegate to parent implementation if it exists + if defined?(super) + begin + return super(arel_or_sql_string, binds, *args) + rescue NoMethodError + # Fall through to our implementation + end + end + + # Our simplified implementation for basic cases + if arel_or_sql_string.respond_to?(:ast) + # For Arel objects, visit the AST to get SQL and collect binds + visitor = arel_visitor + collector = Arel::Collectors::SQLString.new + visitor.accept(arel_or_sql_string.ast, collector) + sql = collector.value + + # Extract binds from the visitor if it collected them + visitor_binds = if visitor.respond_to?(:binds) + visitor.binds + else + [] + end + + result = [sql, binds + visitor_binds] + # Add any additional args back to maintain signature compatibility + args.each { |arg| result << arg } + result + elsif arel_or_sql_string.respond_to?(:to_sql) + # For objects with to_sql method, use it directly + result = [arel_or_sql_string.to_sql, binds] + args.each { |arg| result << arg } + result + else + # For plain strings, return as-is + result = [arel_or_sql_string.to_s, binds] + args.each { |arg| result << arg } + result + end + end + + # @note get Arel visitor for SQL generation + # @return [Object] Arel visitor instance + def arel_visitor + connection_pool.get_schema_cache(connection).arel_visitor + rescue + # Fallback for older ActiveRecord versions or if schema cache is not available + Arel::Visitors::ToSql.new(self) + end + + # @override + # @note Implements AbstractAdapter interface method - ActiveRecord calls this method to get properly type-cast bind parameters + # @param [Array] binds Array of bind parameters + # @return [Array] Array of type-cast values + def type_casted_binds(binds) + if binds.empty? + [] + else + binds.map do |attr| + if attr.respond_to?(:value_for_database) + value = attr.value_for_database + # Handle ActiveRecord timestamp value objects that DuckDB doesn't understand + if value.class.name == 'ActiveRecord::Type::Time::Value' + # Convert to a proper Time object that DuckDB can handle + Time.parse(value.to_s) + else + value + end + else + type_cast(attr) + end + end + end + end + + # @note extract bind values for DuckDB parameter binding + # @param [Array] type_casted_binds Type-casted bind parameters + # @param [Array] binds Original bind parameters + # @return [Array, nil] Array of bind values or nil if none + def extract_bind_values(type_casted_binds, binds) + # Prefer type_casted_binds as they are pre-processed by ActiveRecord + return type_casted_binds if type_casted_binds&.any? + + # Extract values from bind objects if no type_casted_binds + return nil unless binds&.any? + + binds.map do |bind| + case bind + when Array + # [column, value] format + bind[1] + else + # Extract value from attribute objects or use direct value + bind.respond_to?(:value) ? bind.value : bind + end + end + end + end end end diff --git a/lib/active_record/connection_adapters/duckdb/explain_pretty_printer.rb b/lib/active_record/connection_adapters/duckdb/explain_pretty_printer.rb new file mode 100644 index 0000000..da0c043 --- /dev/null +++ b/lib/active_record/connection_adapters/duckdb/explain_pretty_printer.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module Duckdb + class ExplainPrettyPrinter # :nodoc: + # @note Pretty prints the result of an EXPLAIN QUERY PLAN in a way that resembles the output of the SQLite shell + # @example Output format + # 0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows) + # 0|1|1|SCAN TABLE posts (~100000 rows) + # @param [ActiveRecord::Result] result Query result containing explain output + # @return [String] Pretty-printed explanation with newlines + def pp(result) + result.rows.map do |row| + row.join("|") + end.join("\n") + "\n" + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/duckdb/quoting.rb b/lib/active_record/connection_adapters/duckdb/quoting.rb new file mode 100644 index 0000000..01b6023 --- /dev/null +++ b/lib/active_record/connection_adapters/duckdb/quoting.rb @@ -0,0 +1,166 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module Duckdb + module Quoting # :nodoc: + extend ActiveSupport::Concern + + QUOTED_COLUMN_NAMES = Concurrent::Map.new # :nodoc: + QUOTED_TABLE_NAMES = Concurrent::Map.new # :nodoc: + + module ClassMethods # :nodoc: + # @note regex pattern for column name matching + # @return [Regexp] Regular expression for matching column names + def column_name_matcher + / + \A + ( + (?: + # "table_name"."column_name" | function(one or no argument) + ((?:\w+\.|"\w+"\.)?(?:\w+|"\w+") | \w+\((?:|\g<2>)\)) + ) + (?:(?:\s+AS)?\s+(?:\w+|"\w+"))? + ) + (?:\s*,\s*\g<1>)* + \z + /ix + end + + # @note regex pattern for column name with order matching + # @return [Regexp] Regular expression for matching column names with order + def column_name_with_order_matcher + / + \A + ( + (?: + # "table_name"."column_name" | function(one or no argument) + ((?:\w+\.|"\w+"\.)?(?:\w+|"\w+") | \w+\((?:|\g<2>)\)) + ) + (?:\s+COLLATE\s+(?:\w+|"\w+"))? + (?:\s+ASC|\s+DESC)? + ) + (?:\s*,\s*\g<1>)* + \z + /ix + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String, Symbol] name Column name to quote + # @return [String] Quoted column name + def quote_column_name(name) + QUOTED_COLUMN_NAMES[name] ||= %Q("#{name.to_s.gsub('"', '""')}").freeze + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String, Symbol] name Table name to quote + # @return [String] Quoted table name + def quote_table_name(name) + QUOTED_TABLE_NAMES[name] ||= %Q("#{name.to_s.gsub('"', '""').gsub(".", "\".\"")}").freeze + end + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] s String to quote + # @return [String] Quoted string with escaped single quotes + def quote_string(s) + s.gsub("'", "''") # Escape single quotes by doubling them + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] table Table name (unused) + # @param [String] attr Attribute name + # @return [String] Quoted column name + def quote_table_name_for_assignment(table, attr) + quote_column_name(attr) + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [Time] value Time value to quote + # @return [String] Quoted time string + def quoted_time(value) + value = value.change(year: 2000, month: 1, day: 1) + quoted_date(value).sub(/\A\d\d\d\d-\d\d-\d\d /, "2000-01-01 ") + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] value Binary value to quote + # @return [String] Quoted binary string in hex format + def quoted_binary(value) + "x'#{value.hex}'" + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [String] Quoted true value for DuckDB + def quoted_true + "1" + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Integer] Unquoted true value for DuckDB + def unquoted_true + 1 + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [String] Quoted false value for DuckDB + def quoted_false + "0" + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Integer] Unquoted false value for DuckDB + def unquoted_false + 0 + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [Object] value Default value to quote + # @param [ActiveRecord::ConnectionAdapters::Column] column Column object + # @return [String] Quoted default expression + def quote_default_expression(value, column) # :nodoc: + if value.is_a?(Proc) + value = value.call + # Don't wrap nextval() calls in extra parentheses + value + elsif value.is_a?(String) && value.match?(/\Anextval\(/i) + # Handle nextval function calls for sequences - don't quote them + value + else + super + end + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [Object] value Value to type cast + # @return [Object] Type-cast value + def type_cast(value) # :nodoc: + case value + when BigDecimal, Rational + value.to_f + when String + if value.encoding == Encoding::ASCII_8BIT + super(value.encode(Encoding::UTF_8)) + else + super + end + else + super + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/duckdb/schema_statements.rb b/lib/active_record/connection_adapters/duckdb/schema_statements.rb index e694b8f..4d5a90f 100644 --- a/lib/active_record/connection_adapters/duckdb/schema_statements.rb +++ b/lib/active_record/connection_adapters/duckdb/schema_statements.rb @@ -1,37 +1,194 @@ # frozen_string_literal: true -require 'debug' module ActiveRecord module ConnectionAdapters module Duckdb module SchemaStatements # :nodoc: - private - def new_column_from_field(table_name, field) - _cid, name, type, notnull, _dflt_value, _pk = field - - Column.new( - name, - nil, # default value - fetch_type_metadata(type), - !notnull, - nil, # default function - ) + + # @override + # @note Implements AbstractAdapter interface method - Returns an array of indexes for the given table + # @param [String] table_name Name of the table + # @return [Array] Array of index objects (currently empty) + def indexes(table_name) + # DuckDB uses duckdb_indexes() function for index information + # Since we may not have access to the duckdb_indexes() function in all contexts, + # we'll return an empty array for now + # TODO: Implement proper index querying when DuckDB Ruby driver supports it + [] + end + + # @override + # @note Implements AbstractAdapter interface method - Checks to see if the data source +name+ exists on the database + # @example + # data_source_exists?(:ebooks) + # @param [String, Symbol] name Name of the data source + # @return [Boolean] true if data source exists, false otherwise + def data_source_exists?(name) + return false unless name.present? + data_sources.include?(name.to_s) + end + + # @note generates SQL for data source queries + # @param [String, nil] name Data source name + # @param [String, nil] type Data source type + # @return [String] SQL query string + def data_source_sql(name = nil, type: nil) + scope = quoted_scope(name, type: type) + + sql = +"SELECT table_name FROM information_schema.tables" + sql << " WHERE table_schema = #{scope[:schema]}" + if scope[:type] || scope[:name] + conditions = [] + conditions << "table_type = #{scope[:type]}" if scope[:type] + conditions << "table_name = #{scope[:name]}" if scope[:name] + sql << " AND #{conditions.join(" AND ")}" end + sql + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] table_name Name of the table + # @return [Boolean] true if table exists, false otherwise + def table_exists?(table_name) + return false unless table_name.present? + + sql = "SELECT COUNT(*) FROM information_schema.tables WHERE table_name = #{quote(table_name.to_s)} AND table_schema = 'main'" + query_value(sql, "SCHEMA") > 0 + end - def data_source_sql(name = nil, type: nil) - scope = quoted_scope(name, type: type) + # @override + # @note Implements AbstractAdapter interface method + # @param [String] table_name Name of the table to create + # @param [Symbol, String, Boolean] id Primary key configuration + # @param [String, nil] primary_key Primary key column name + # @param [Boolean, nil] force Whether to drop existing table + # @param [Hash] options Additional table options + # @return [ActiveRecord::ConnectionAdapters::TableDefinition] Table definition + def create_table(table_name, id: :primary_key, primary_key: nil, force: nil, **options) + if force + drop_table(table_name, if_exists: true, **options) + end - sql = +"SELECT table_name FROM information_schema.tables" - sql << " WHERE table_schema = #{scope[:schema]}" - if scope[:type] || scope[:name] - conditions = [] - conditions << "table_type = #{scope[:type]}" if scope[:type] - conditions << "table_name = #{scope[:name]}" if scope[:name] - sql << " AND #{conditions.join(" AND ")}" + td = create_table_definition(table_name, **options) + + # Add primary key unless explicitly disabled + if id != false + case id + when :primary_key, true + # DuckDB native auto-increment: create sequence then use it as column default + pk_name = primary_key || default_primary_key_name + + # Add primary key column with auto-increment via sequence + # This follows DuckDB's documented pattern for auto-increment primary keys + add_auto_increment_primary_key(td, table_name, pk_name) + when Symbol, String + # For other primary key types, delegate to parent behavior + td.primary_key id, primary_key, **options end - sql end + yield td if block_given? + + if supports_comments? && !supports_comments_in_create? + change_table_comment(table_name, options[:comment]) if options[:comment].present? + end + + execute schema_creation.accept(td) + td + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Array] Array of data source names + def data_sources + sql = "SELECT table_name FROM information_schema.tables WHERE table_schema = 'main'" + execute(sql).map { |row| row[0] } + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] table_name Name of the table to drop + # @param [Boolean] if_exists Whether to use IF EXISTS clause + # @param [Hash] options Additional drop options + # @return [void] + def drop_table(table_name, if_exists: false, **options) + sql = +"DROP TABLE" + sql << " IF EXISTS" if if_exists + sql << " #{quote_table_name(table_name)}" + execute sql + end + + private + # @note creates new column from DuckDB field information + # @param [String] table_name Name of the table + # @param [Array] field Field information array + # @param [Object, nil] type_metadata Type metadata object + # @return [ActiveRecord::ConnectionAdapters::Column] Column object + def new_column_from_field(table_name, field, type_metadata = nil) + # DuckDB information_schema returns: column_name, data_type, is_nullable, column_default + column_name, data_type, is_nullable, column_default = field + + # For auto-increment columns, DuckDB might return internal default expressions + # that we don't want to expose as ActiveRecord column defaults + if column_default && (column_default.match?(/\Anextval\(/i) || column_default.match?(/\Aautoincrement/i)) + column_default = nil + end + + # Convert DuckDB data types to ActiveRecord types + sql_type_metadata = type_metadata || fetch_type_metadata(data_type) + + ActiveRecord::ConnectionAdapters::Column.new( + column_name, + column_default, + sql_type_metadata, + is_nullable == 'YES' + ) + end + + # @note converts DuckDB data types to ActiveRecord type metadata + # @param [String] sql_type DuckDB SQL type string + # @return [ActiveRecord::ConnectionAdapters::SqlTypeMetadata] Type metadata object + def fetch_type_metadata(sql_type) + # Convert DuckDB data types to ActiveRecord types + cast_type = case sql_type.downcase + when /^integer/i + :integer + when /^bigint/i + :bigint + when /^varchar/i, /^text/i + :string + when /^decimal/i, /^numeric/i + :decimal + when /^real/i, /^double/i, /^float/i + :float + when /^boolean/i + :boolean + when /^date$/i + :date + when /^time/i + :time + when /^timestamp/i + :datetime + when /^blob/i + :binary + when /^uuid/i + :string # DuckDB UUID as string for now + else + :string # fallback + end + + # Create type metadata + ActiveRecord::ConnectionAdapters::SqlTypeMetadata.new( + sql_type: sql_type, + type: cast_type + ) + end + + # @note creates quoted scope for SQL queries + # @param [String, nil] name Table or data source name + # @param [String, nil] type Data source type + # @return [Hash] Hash containing quoted scope elements def quoted_scope(name = nil, type: nil) schema, name = extract_schema_qualified_name(name) scope = {} @@ -41,13 +198,69 @@ def quoted_scope(name = nil, type: nil) scope end + # @note extracts schema and name from qualified name string + # @param [String, Symbol] string Qualified name string + # @return [Array] Array containing schema and name def extract_schema_qualified_name(string) schema, name = string.to_s.scan(/[^`.\s]+|`[^`]*`/) schema, name = nil, schema unless name [schema, name] end + + # @note creates table definition for create_table + # @param [String] table_name Name of the table + # @param [Hash] options Table creation options + # @return [ActiveRecord::ConnectionAdapters::TableDefinition] Table definition object + def create_table_definition(table_name, **options) + ActiveRecord::ConnectionAdapters::TableDefinition.new( + self, + table_name, + **options + ) + end + + # @note returns default primary key name + # @return [String] Default primary key column name + def default_primary_key_name + "id" + end + + # @note DuckDB doesn't support table comments yet + # @return [Boolean] false, as DuckDB doesn't support table comments + def supports_comments? + false + end + + # @note DuckDB doesn't support comments in CREATE statements + # @return [Boolean] false, as DuckDB doesn't support comments in CREATE + def supports_comments_in_create? + false + end + + # @note returns schema creation helper + # @return [ActiveRecord::ConnectionAdapters::SchemaCreation] Schema creation helper + def schema_creation + ActiveRecord::ConnectionAdapters::SchemaCreation.new(self) + end + + # @note adds auto-increment primary key using DuckDB's native sequence approach + # @param [ActiveRecord::ConnectionAdapters::TableDefinition] td Table definition + # @param [String] table_name Name of the table + # @param [String] pk_name Primary key column name + # @return [void] + def add_auto_increment_primary_key(td, table_name, pk_name) + sequence_name = "#{table_name}_#{pk_name}_seq" + + # Use DuckDB's native sequence approach - this is the official DuckDB pattern + # Create sequence first, then reference it in the column default + execute "CREATE SEQUENCE IF NOT EXISTS #{quote_table_name(sequence_name)}" + + # Add the column with nextval() as default - DuckDB's standard auto-increment pattern + td.column pk_name, :bigint, primary_key: true, default: -> { "nextval('#{sequence_name}')" } + end + + end end end end - \ No newline at end of file diff --git a/lib/active_record/connection_adapters/duckdb/tasks.rb b/lib/active_record/connection_adapters/duckdb/tasks.rb new file mode 100644 index 0000000..b1f23f8 --- /dev/null +++ b/lib/active_record/connection_adapters/duckdb/tasks.rb @@ -0,0 +1,242 @@ +# frozen_string_literal: true + +require 'fileutils' + +module ActiveRecord + module Tasks # :nodoc: + class DuckdbDatabaseTasks # :nodoc: + # @override + # @note Implements ActiveRecord::Tasks interface method + # @return [Boolean] true if using database configurations + def self.using_database_configurations? + true + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @param [Object] db_config Database configuration object + # @param [String, nil] root Root directory path + # @return [DuckdbDatabaseTasks] New database tasks instance + def initialize(db_config, root = nil) + @db_config = db_config + @root = root || determine_root_directory + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @raise [ArgumentError] if no database file specified + # @raise [ActiveRecord::DatabaseAlreadyExists] if database already exists + # @raise [ActiveRecord::DatabaseConnectionError] if connection fails + # @return [void] + def create + database_path = db_config.respond_to?(:database) ? db_config.database : db_config[:database] + + # Handle in-memory databases + if database_path == ":memory:" + # In-memory databases are created when connected to + establish_connection + return + end + + # Handle file-based databases + unless database_path.present? + raise ArgumentError, "No database file specified. Missing argument: database" + end + + # Convert relative paths to absolute paths + db_file_path = if File.absolute_path?(database_path) + database_path + else + File.expand_path(database_path, root) + end + + # Check if database already exists + if File.exist?(db_file_path) + raise ActiveRecord::DatabaseAlreadyExists + end + + # Create directory if it doesn't exist + dir = File.dirname(db_file_path) + FileUtils.mkdir_p(dir) unless File.directory?(dir) + + # Create the database by establishing a connection + # DuckDB will create the file when we connect to it + begin + establish_connection + puts "Created database '#{database_path}'" + rescue => e + raise ActiveRecord::DatabaseConnectionError.new(e.message) + end + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @raise [ArgumentError] if no database file specified + # @raise [ActiveRecord::NoDatabaseError] if database file doesn't exist + # @raise [ActiveRecord::DatabaseConnectionError] if operation fails + # @return [void] + def drop + database_path = db_config.respond_to?(:database) ? db_config.database : db_config[:database] + + # Handle in-memory databases + if database_path == ":memory:" + # In-memory databases can't be "dropped" in the traditional sense + # Just disconnect + begin + connection.disconnect! if connection&.active? + rescue + # Ignore errors during disconnect for in-memory databases + end + return + end + + # Handle file-based databases + unless database_path.present? + raise ArgumentError, "No database file specified. Missing argument: database" + end + + # Convert relative paths to absolute paths + db_file_path = if File.absolute_path?(database_path) + database_path + else + File.expand_path(database_path, root) + end + + # Disconnect from database first + begin + connection.disconnect! if connection&.active? + rescue + # Continue even if disconnect fails + end + + # Remove the database file + begin + if File.exist?(db_file_path) + FileUtils.rm(db_file_path) + puts "Dropped database '#{database_path}'" + else + puts "Database '#{database_path}' does not exist" + end + + # Also remove any WAL files that might exist + wal_file = "#{db_file_path}.wal" + FileUtils.rm(wal_file) if File.exist?(wal_file) + + rescue Errno::ENOENT => error + raise ActiveRecord::NoDatabaseError.new(error.message) + rescue => error + raise ActiveRecord::DatabaseConnectionError.new(error.message) + end + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @return [void] + def purge + drop + create + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @return [String] Database character set encoding + def charset + connection.encoding rescue 'UTF-8' + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @param [String] filename Output filename for structure dump + # @param [Array] extra_flags Additional command line flags + # @return [void] + def structure_dump(filename, extra_flags) + args = [] + args.concat(Array(extra_flags)) if extra_flags + args << (db_config.respond_to?(:database) ? db_config.database : db_config[:database]) + + ignore_tables = ActiveRecord::SchemaDumper.ignore_tables + if ignore_tables.any? + ignore_tables = connection.data_sources.select { |table| ignore_tables.any? { |pattern| pattern === table } } + condition = ignore_tables.map { |table| connection.quote(table) }.join(", ") + # DuckDB provides sqlite_master for SQLite compatibility + args << "SELECT sql FROM sqlite_master WHERE tbl_name NOT IN (#{condition}) ORDER BY tbl_name, type DESC, name" + else + args << ".schema" + end + run_cmd("duckdb", args, filename) + end + + # @override + # @note Implements ActiveRecord::Tasks interface method + # @param [String] filename Input filename for structure load + # @param [Array] extra_flags Additional command line flags + # @return [void] + def structure_load(filename, extra_flags) + database_path = db_config.respond_to?(:database) ? db_config.database : db_config[:database] + flags = extra_flags.join(" ") if extra_flags + `duckdb #{flags} #{database_path} < "#{filename}"` + end + + private + attr_reader :db_config, :root + + # @note get database connection for DuckDB + # @return [ActiveRecord::ConnectionAdapters::DuckdbAdapter] Database connection + def connection + # Connection pooling is less critical for DuckDB since it's an embedded database + # with lightweight connections (no network overhead), but we maintain ActiveRecord + # compatibility by using lease_connection when available for thread safety + if ActiveRecord::Base.respond_to?(:lease_connection) + ActiveRecord::Base.lease_connection + else + ActiveRecord::Base.connection + end + end + + # @note establish connection to DuckDB database + # @param [Object] config Database configuration (defaults to db_config) + # @return [ActiveRecord::ConnectionAdapters::DuckdbAdapter] Database connection + def establish_connection(config = db_config) + ActiveRecord::Base.establish_connection(config) + connection + end + + # @note run shell command for DuckDB operations + # @param [String] cmd Command to run + # @param [Array] args Command arguments + # @param [String] out Output file path + # @return [void] + # @raise [RuntimeError] if command fails + def run_cmd(cmd, args, out) + fail run_cmd_error(cmd, args) unless Kernel.system(cmd, *args, out: out) + end + + # @note generate error message for failed shell commands + # @param [String] cmd Command that failed + # @param [Array] args Command arguments + # @return [String] Error message + def run_cmd_error(cmd, args) + msg = +"failed to execute:\n" + msg << "#{cmd} #{args.join(' ')}\n\n" + msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n" + msg + end + + # @note determine root directory for database files + # @return [String] Root directory path + def determine_root_directory + # Try different ways to determine the root directory + if defined?(Rails) && Rails.respond_to?(:root) && Rails.root + Rails.root.to_s + elsif defined?(Rails) && Rails.respond_to?(:application) && Rails.application&.config&.root + Rails.application.config.root.to_s + elsif ENV['RAILS_ROOT'] + ENV['RAILS_ROOT'] + else + # Fall back to current working directory + Dir.pwd + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/duckdb_adapter.rb b/lib/active_record/connection_adapters/duckdb_adapter.rb index 97e240a..d271596 100644 --- a/lib/active_record/connection_adapters/duckdb_adapter.rb +++ b/lib/active_record/connection_adapters/duckdb_adapter.rb @@ -1,87 +1,302 @@ # frozen_string_literal: true +require 'duckdb' require 'active_record' require 'active_record/base' require 'active_record/connection_adapters/abstract_adapter' +require 'fileutils' +require 'active_record/connection_adapters/duckdb/quoting' require 'active_record/connection_adapters/duckdb/database_statements' require 'active_record/connection_adapters/duckdb/schema_statements' - -begin - require 'duckdb' -rescue LoadError => e - raise e -end +require 'active_record/connection_adapters/duckdb/explain_pretty_printer' +require 'active_record/connection_adapters/duckdb/tasks' module ActiveRecord - module ConnectionHandling # :nodoc: - def duckdb_connection(config) - config = config.symbolize_keys - connection = ::DuckDB::Database.open.connect - ConnectionAdapters::DuckdbAdapter.new(connection, logger, config) - end - end module ConnectionAdapters # :nodoc: class DuckdbAdapter < AbstractAdapter - ADAPTER_NAME = "DuckDB" + # = Active Record DuckDB Adapter + # + # The DuckDB adapter works with https://github.com/suketa/ruby-duckdb driver. + # + # Options: + # + # * :database - Path to the database file. Defaults to 'db/duckdb.db'. + # Use ':memory:' for in-memory database. + class << self + ADAPTER_NAME = "DuckDB".freeze + + # @note DuckDB-specific client creation + # @param [Hash, nil] config Configuration hash containing database path + # @return [DuckDB::Connection] A new DuckDB connection + def new_client(config = nil) + database_path = config&.dig(:database) || 'db/duckdb.db' + + if database_path == ':memory:' + DuckDB::Database.open.connect # in-memory database + else + # Ensure directory exists for file-based database + dir = File.dirname(database_path) + FileUtils.mkdir_p(dir) unless File.directory?(dir) + DuckDB::Database.open(database_path).connect + end + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [Hash] config Database configuration + # @param [Hash] options Console options + # @return [void] + def dbconsole(config, options = {}) + end + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [Array] args Arguments passed to superclass + # @return [DuckdbAdapter] New adapter instance + def initialize(...) + super + @max_identifier_length = nil + @type_map = nil + @raw_connection = self.connect + @notice_receiver_sql_warnings = [] + + # Determine if we're using a memory database + database_path = @config[:database] || 'db/duckdb.db' + @memory_database = database_path == ':memory:' + + # Set up file path for file-based databases + unless @memory_database + case database_path + when "" + raise ArgumentError, "No database file specified. Missing argument: database" + when /\Afile:/ + # Handle file:// URLs by extracting the path + @config[:database] = database_path.sub(/\Afile:/, '') + else + # Handle relative paths - make them relative to Rails.root if in Rails + if defined?(Rails.root) && !File.absolute_path?(database_path) + @config[:database] = File.expand_path(database_path, Rails.root) + else + @config[:database] = File.expand_path(database_path) + end + + # Ensure the directory exists + dirname = File.dirname(@config[:database]) + unless File.directory?(dirname) + begin + FileUtils.mkdir_p(dirname) + rescue SystemCallError + raise ActiveRecord::NoDatabaseError.new(connection_pool: @pool) + end + end + end + end + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Boolean] true if database exists, false otherwise + def database_exists? + if @memory_database + true # Memory databases always "exist" once created + else + File.exist?(@config[:database].to_s) + end + end + + # @override + # @note Implements AbstractAdapter interface method + # @note Connects to a DuckDB database and sets up the adapter depending on the connected database's characteristics + # @return [DuckDB::Connection] Raw database connection + def connect + @raw_connection = self.class.new_client(@config) + rescue ConnectionNotEstablished => ex + raise ex + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [DuckDB::Connection] Raw database connection + def reconnect + @raw_connection + end include Duckdb::DatabaseStatements include Duckdb::SchemaStatements + include Duckdb::Quoting + + # @override + # @note Implements AbstractAdapter interface method + # @return [Hash] Hash of native database types + def native_database_types # :nodoc: + { + primary_key: "BIGINT PRIMARY KEY", + string: { name: "VARCHAR" }, + text: { name: "TEXT" }, + integer: { name: "INTEGER" }, + bigint: { name: "BIGINT" }, + float: { name: "REAL" }, + decimal: { name: "DECIMAL" }, + datetime: { name: "TIMESTAMP" }, + time: { name: "TIME" }, + date: { name: "DATE" }, + binary: { name: "BLOB" }, + boolean: { name: "BOOLEAN" }, + json: { name: "JSON" } + } + end - NATIVE_DATABASE_TYPES = { - primary_key: "INTEGER PRIMARY KEY", - string: { name: "VARCHAR" }, - integer: { name: "INTEGER" }, - float: { name: "REAL" }, - decimal: { name: "DECIMAL" }, - datetime: { name: "TIMESTAMP" }, - time: { name: "TIME" }, - date: { name: "DATE" }, - bigint: { name: "BIGINT" }, - binary: { name: "BLOB" }, - boolean: { name: "BOOLEAN" }, - uuid: { name: "UUID" }, - } - - def native_database_types - NATIVE_DATABASE_TYPES + # @override + # @note Implements AbstractAdapter interface method + # @return [String] The adapter name + def adapter_name # :nodoc: + "DuckDB" end + # Capability flags - tell ActiveRecord what features DuckDB supports + # These are used internally by ActiveRecord to decide how to handle various operations + + # @override + # @note Implements AbstractAdapter interface method + # @return [Boolean] true if DuckDB supports savepoints + def supports_savepoints? # :nodoc: + true # DuckDB can create savepoints within transactions (SAVEPOINT sp1, ROLLBACK TO sp1) + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Boolean] true if DuckDB supports transaction isolation + def supports_transaction_isolation? # :nodoc: + true # DuckDB supports transaction isolation using Snapshot Isolation (full ACID compliance) + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Boolean] true if DuckDB supports index sort order + def supports_index_sort_order? # :nodoc: + true # DuckDB can create indexes with sort order (CREATE INDEX idx ON table (col ASC/DESC)) + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Boolean] true if DuckDB supports partial indexes + def supports_partial_index? # :nodoc: + true # DuckDB supports advanced indexing including zone maps and selective indexing + end + + # @override + # @note Implements AbstractAdapter interface method + # @return [Boolean] true if adapter needs periodic reloading + def requires_reloading? # :nodoc: + true # Adapter needs to reload connection info periodically due to DuckDB's file-based nature + end + + # @override + # @note Implements AbstractAdapter interface method + # @param [String] table_name Name of the table + # @return [Array] Array of primary key column names def primary_keys(table_name) # :nodoc: raise ArgumentError unless table_name.present? - results = query("PRAGMA table_info(#{table_name})", "SCHEMA") - results.each_with_object([]) do |result, keys| - _cid, name, _type, _notnull, _dflt_value, pk = result - keys << name if pk - end + # Query DuckDB's information_schema for primary key columns using parameterized query + # Use constraint_type = 'PRIMARY KEY' for reliable identification + sql = <<~SQL + SELECT kcu.column_name + FROM information_schema.key_column_usage kcu + JOIN information_schema.table_constraints tc + ON kcu.constraint_name = tc.constraint_name + AND kcu.table_name = tc.table_name + WHERE kcu.table_name = ? + AND tc.constraint_type = 'PRIMARY KEY' + ORDER BY kcu.ordinal_position + SQL + + # Create bind parameter for the parameterized query + binds = [ + ActiveRecord::Relation::QueryAttribute.new("table_name", table_name, ActiveRecord::Type::String.new) + ] + + results = internal_exec_query(sql, "SCHEMA", binds) + results.rows.map { |row| row[0] } end + # @override + # @note Implements AbstractAdapter interface method + # @param [Symbol, nil] isolation Transaction isolation level + # @param [Boolean] joinable Whether transaction is joinable + # @param [Boolean] _lazy Whether transaction is lazy + # @return [void] def begin_transaction(isolation: nil, joinable: true, _lazy: true); end + # @override + # @note Implements AbstractAdapter interface method + # @param [String] table_name Name of the table + # @return [Array] Array of column objects + def columns(table_name) # :nodoc: + column_definitions(table_name).map do |field| + new_column_from_field(table_name, field) + end + end + + # @note Support for getting the next sequence value for auto-increment + # @param [String] sequence_name Name of the sequence + # @return [String] SQL expression for next sequence value + def next_sequence_value(sequence_name) + "nextval('#{sequence_name}')" + end + + # @override + # @note Implements AbstractAdapter interface method - ActiveRecord needs this to know we support INSERT...RETURNING + # @return [Boolean] true if INSERT...RETURNING is supported + def supports_insert_returning? + true + end + + # @override + # @note Implements AbstractAdapter interface method - Tell ActiveRecord to return the primary key value after insert + # @param [ActiveRecord::ConnectionAdapters::Column] column The column to check + # @return [Boolean] true if should return value after insert + def return_value_after_insert?(column) + (column.type == :integer || column.type == :bigint) && column.name == 'id' + end + private + # @note Simple implementation for now - just execute the SQL + # @param [String] sql SQL to execute + # @param [String] name Query name for logging + # @param [Array] binds Bind parameters + # @param [Boolean] prepare Whether to prepare statement + # @param [Boolean] async Whether to execute asynchronously + # @return [Object] Query result def execute_and_clear(sql, name, binds, prepare: false, async: false) - sql = transform_query(sql) - check_if_write_query(sql) - type_casted_binds = type_casted_binds(binds) - - log(sql, name, binds, type_casted_binds, async: async) do - ActiveSupport::Dependencies.interlock.permit_concurrent_loads do - # TODO: prepare の有無でcacheするっぽい? - if without_prepared_statement?(binds) - @connection.query(sql) - else - @connection.query(sql, *type_casted_binds) - end - end + log(sql, name, binds, async: async) do + @raw_connection.query(sql) end end - def column_definitions(table_name) # :nodoc: - execute("PRAGMA table_info('#{quote_table_name(table_name)}')", "SCHEMA") do |result| - each_hash(result) - end - end + # @note used by columns() method + # @param [String] table_name Name of the table + # @return [Array] Array of column definition arrays + def column_definitions(table_name) # :nodoc: + sql = <<~SQL + SELECT column_name, data_type, is_nullable, column_default + FROM information_schema.columns + WHERE table_name = ? + ORDER BY ordinal_position + SQL + + # Create bind parameter for the parameterized query + binds = [ + ActiveRecord::Relation::QueryAttribute.new("table_name", table_name, ActiveRecord::Type::String.new) + ] + + result = internal_exec_query(sql, "SCHEMA", binds) + + # Convert DuckDB result to array format expected by new_column_from_field + result.rows.map { |row| [row[0], row[1], row[2], row[3]] } + end end end -end \ No newline at end of file +end diff --git a/lib/activerecord-duckdb-adapter.rb b/lib/activerecord-duckdb-adapter.rb new file mode 100644 index 0000000..e5fb927 --- /dev/null +++ b/lib/activerecord-duckdb-adapter.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'active_record' +require "activerecord_duckdb_adapter/version" +require "active_record/connection_adapters/duckdb_adapter" + +# Register the adapter with ActiveRecord +if ActiveRecord::ConnectionAdapters.respond_to?(:register) + ActiveRecord::ConnectionAdapters.register("duckdb", "ActiveRecord::ConnectionAdapters::DuckdbAdapter", "active_record/connection_adapters/duckdb_adapter") +else + # For older ActiveRecord versions, define the connection method manually + module ActiveRecord + module ConnectionHandling # :nodoc: + def duckdb_connection(config) + ActiveRecord::ConnectionAdapters::DuckdbAdapter.new(config) + end + end + end +end + +# Register database tasks (this might not be needed in newer versions) +begin + ActiveRecord::Tasks::DatabaseTasks.register_task(/duckdb/, "ActiveRecord::Tasks::DuckdbDatabaseTasks") +rescue NoMethodError + # Ignore if the method doesn't exist in this ActiveRecord version +end diff --git a/lib/activerecord_duckdb_adapter.rb b/lib/activerecord_duckdb_adapter.rb deleted file mode 100644 index 21e532e..0000000 --- a/lib/activerecord_duckdb_adapter.rb +++ /dev/null @@ -1,15 +0,0 @@ -# frozen_string_literal: true - -require "activerecord_duckdb_adapter/version" - -if defined?(Rails) - module ActiveRecord - module ConnectionAdapters - class DuckdbRailtie < ::Rails::Railtie - ActiveSupport.on_load :active_record do - require "active_record/connection_adapters/duckdb_adapter" - end - end - end - end -end \ No newline at end of file diff --git a/lib/activerecord_duckdb_adapter/version.rb b/lib/activerecord_duckdb_adapter/version.rb index 6112302..69cc85b 100644 --- a/lib/activerecord_duckdb_adapter/version.rb +++ b/lib/activerecord_duckdb_adapter/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module ActiveRecordDuckdbAdapter - VERSION = "0.1.0".freeze -end \ No newline at end of file + VERSION = "0.2.0".freeze +end diff --git a/test/.DS_Store b/test/.DS_Store new file mode 100644 index 0000000..6ec6efc Binary files /dev/null and b/test/.DS_Store differ diff --git a/test/cases/finder_test.rb b/test/cases/finder_test.rb deleted file mode 100644 index 99a7ee7..0000000 --- a/test/cases/finder_test.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -require "cases/helper" -require "models/author" -require "models/post" - -class FinderTest < TestCase - fixtures :posts - - def test_find - assert_equal(posts(:first).title, Post.find(1).title) - end - - def skip_test_bigint - # TODO: 多分だけど、DuckDBで返ってきたIDの値が、見た目上はInteger型だけど、実際は違う??どっかでCastしないといけない?? - # Primary KeyをBigIntにすると、in_order_ofで、うまく結果が返ってこない。これは、多分IntとBigIntが違うからだと思う。 - - records = Post.where(enabled: true).where(id: [1, 2]).records - p h = records.index_by(&:id) - p h.keys - p Post.find(h.keys) - p [1, 2] - p Post.find([1, 2]) - p h.keys.equal? [1, 2] - p h.keys.map { |v| v.object_id } - p [1, 2].map { |v| v.object_id } - p h.keys.map { |v| v.class.ancestors } - p [1, 2].map { |v| v.class.ancestors } - end - - def test_find_where - records = Post.where(enabled: true).find([2, 1, 3]) - assert_equal 3, records.size - assert_equal posts(:second).title, records[0].title - assert_equal posts(:first).title, records[1].title - assert_equal posts(:third).title, records[2].title - end - - def test_exists - assert_equal true, Post.exists?(1) - assert_equal true, Post.exists?("1") - assert_equal true, Post.exists?(title: Post.find(1).title) - assert_equal true, Post.exists?(id: [1, 9999]) - - assert_equal false, Post.exists?(45) - assert_equal false, Post.exists?(9999999999999999999999999999999) - assert_equal false, Post.exists?(Post.new.id) - end -end diff --git a/test/cases/helper.rb b/test/cases/helper.rb deleted file mode 100644 index 527b682..0000000 --- a/test/cases/helper.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -require "config" - -require "stringio" - -require "active_record" -require "active_record/fixtures" -require "active_support/testing/autorun" -require "active_support/logger" - -def connect - ActiveRecord::Base.logger = ActiveSupport::Logger.new("log/debug.log", 0, 100 * 1024 * 1024) - ActiveRecord::Base.configurations = { - 'duckdb' => { adapter: 'duckdb' } - } - ActiveRecord::Base.establish_connection :duckdb -end - -connect - -def load_schema - # silence verbose schema loading - original_stdout = $stdout - $stdout = StringIO.new - - load SCHEMA_ROOT + "/schema.rb" - - ActiveRecord::FixtureSet.reset_cache -ensure - $stdout = original_stdout -end - -load_schema - -class TestCase < ActiveSupport::TestCase - include ActiveRecord::TestFixtures - self.fixture_path = ::FIXTURE_ROOT -end diff --git a/test/functional/finder_test.rb b/test/functional/finder_test.rb new file mode 100644 index 0000000..0f26502 --- /dev/null +++ b/test/functional/finder_test.rb @@ -0,0 +1,97 @@ +# # frozen_string_literal: true + +require "test_helper" + +require "models/author" +require "models/post" + +class FinderTest < TestCase + fixtures :posts + + def setup + # Debug fixture loading + puts "=== FinderTest Setup Debug ===" + puts "Ruby version: #{RUBY_VERSION}" + puts "ActiveRecord version: #{ActiveRecord.version}" + puts "Posts table exists: #{ActiveRecord::Base.connection.table_exists?('posts')}" + puts "Posts count: #{Post.count}" + + if Post.count > 0 + puts "First post: #{Post.first&.attributes}" + puts "All posts: #{Post.all.map(&:attributes)}" + else + puts "No posts found - checking fixture loading..." + puts "Fixture paths: #{self.class.fixture_paths}" + puts "Loaded fixtures: #{loaded_fixtures.keys}" if respond_to?(:loaded_fixtures) + + # Try to manually check table contents + begin + result = ActiveRecord::Base.connection.execute("SELECT COUNT(*) as count FROM posts") + puts "Direct SQL count: #{result.first['count'] rescue 'error'}" + + result = ActiveRecord::Base.connection.execute("SELECT * FROM posts LIMIT 3") + puts "Direct SQL results: #{result.to_a rescue 'error'}" + rescue => e + puts "SQL error: #{e.message}" + end + + # Try to reload fixtures manually + begin + puts "Attempting to reload fixtures..." + self.class.fixture_paths.each do |fixture_path| + puts "Fixture path: #{fixture_path}" + puts "Posts fixture exists: #{File.exist?(File.join(fixture_path, 'posts.yml'))}" + end + + # Force fixture reload + ActiveRecord::FixtureSet.reset_cache + setup_fixtures + puts "After manual setup - Posts count: #{Post.count}" + rescue => e + puts "Fixture reload error: #{e.message}" + puts "Backtrace: #{e.backtrace.first(3).join(', ')}" + end + end + puts "==========================" + end + + def test_find + assert_equal(posts(:first).title, Post.find(1).title) + end + + def skip_test_bigint + # TODO: 多分だけど、DuckDBで返ってきたIDの値が、見た目上はInteger型だけど、実際は違う??どっかでCastしないといけない?? + # Primary KeyをBigIntにすると、in_order_ofで、うまく結果が返ってこない。これは、多分IntとBigIntが違うからだと思う。 + + records = Post.where(enabled: true).where(id: [1, 2]).records + p h = records.index_by(&:id) + p h.keys + p Post.find(h.keys) + p [1, 2] + p Post.find([1, 2]) + p h.keys.equal? [1, 2] + p h.keys.map { |v| v.object_id } + p [1, 2].map { |v| v.object_id } + p h.keys.map { |v| v.class.ancestors } + p [1, 2].map { |v| v.class.ancestors } + end + + def test_find_where + records = Post.where(enabled: true).find([2, 1, 3]) + assert_equal 3, records.size + assert_equal posts(:second).title, records[0].title + assert_equal posts(:first).title, records[1].title + assert_equal posts(:third).title, records[2].title + end + + def test_exists + assert_equal true, Post.exists?(1) + assert_equal true, Post.exists?("1") + assert_equal true, Post.exists?(title: Post.find(1).title) + assert_equal true, Post.exists?(id: [1, 9999]) + + assert_equal false, Post.exists?(45) + assert_equal false, Post.exists?(9999999999999999999999999999999) + assert_equal false, Post.exists?(Post.new.id) + end +end diff --git a/test/cases/models_test.rb b/test/functional/models_test.rb similarity index 82% rename from test/cases/models_test.rb rename to test/functional/models_test.rb index 861501d..a8f8f6f 100644 --- a/test/cases/models_test.rb +++ b/test/functional/models_test.rb @@ -1,6 +1,6 @@ -# frozen_string_literal: true +# # frozen_string_literal: true -require "cases/helper" +require "test_helper" require "models/author" require "models/post" diff --git a/test/cases/persistence_test.rb b/test/functional/persistence_test.rb similarity index 96% rename from test/cases/persistence_test.rb rename to test/functional/persistence_test.rb index 11beb2f..a2cca14 100644 --- a/test/cases/persistence_test.rb +++ b/test/functional/persistence_test.rb @@ -1,6 +1,6 @@ -# frozen_string_literal: true +# # frozen_string_literal: true -require "cases/helper" +require "test_helper" require "models/author" require "models/post" diff --git a/test/integration/associations_test.rb b/test/integration/associations_test.rb new file mode 100644 index 0000000..c151bcd --- /dev/null +++ b/test/integration/associations_test.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +require "test_helper" +require "models/author" +require "models/post" + +class AssociationsTest < TestCase + fixtures :authors, :posts + + def test_has_many_association + # TODO: Test author.posts returns collection of posts + skip "Association tests not implemented yet" + end + + def test_belongs_to_association + # TODO: Test post.author returns the associated author + skip "Association tests not implemented yet" + end + + def test_association_create + # TODO: Test author.posts.create(...) creates associated record + skip "Association tests not implemented yet" + end + + def test_association_build + # TODO: Test author.posts.build(...) builds associated record + skip "Association tests not implemented yet" + end + + def test_association_destroy + # TODO: Test destroying associated records + skip "Association tests not implemented yet" + end + + def test_association_dependent_destroy + # TODO: Test has_many :posts, dependent: :destroy + skip "Association tests not implemented yet" + end + + def test_association_counter_cache + # TODO: Test counter_cache functionality + skip "Association tests not implemented yet" + end + + def test_association_includes + # TODO: Test Post.includes(:author) to prevent N+1 queries + skip "Association tests not implemented yet" + end + + def test_association_joins + # TODO: Test Post.joins(:author) for inner joins + skip "Association tests not implemented yet" + end + + def test_association_conditions + # TODO: Test association with where conditions + skip "Association tests not implemented yet" + end +end \ No newline at end of file diff --git a/test/integration/migrations_test.rb b/test/integration/migrations_test.rb new file mode 100644 index 0000000..b242a04 --- /dev/null +++ b/test/integration/migrations_test.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +require "test_helper" + +class MigrationsTest < TestCase + def setup + @migration_dir = 'test/tmp/migrations' + FileUtils.mkdir_p(@migration_dir) + end + + def teardown + FileUtils.rm_rf(@migration_dir) if Dir.exist?(@migration_dir) + end + + def test_create_table_migration + # TODO: Test creating tables through migrations + skip "Migration tests not implemented yet" + end + + def test_add_column_migration + # TODO: Test adding columns to existing tables + skip "Migration tests not implemented yet" + end + + def test_remove_column_migration + # TODO: Test removing columns from existing tables + skip "Migration tests not implemented yet" + end + + def test_migration_rollback + # TODO: Test rolling back migrations + skip "Migration tests not implemented yet" + end + + def test_migration_versioning + # TODO: Test schema_migrations table and version tracking + skip "Migration tests not implemented yet" + end + + def test_change_column_migration + # TODO: Test changing column types/properties + skip "Migration tests not implemented yet" + end + + def test_add_index_migration + # TODO: Test adding indexes through migrations + skip "Migration tests not implemented yet" + end + + def test_remove_index_migration + # TODO: Test removing indexes through migrations + skip "Migration tests not implemented yet" + end +end \ No newline at end of file diff --git a/test/integration/validations_test.rb b/test/integration/validations_test.rb new file mode 100644 index 0000000..3c44269 --- /dev/null +++ b/test/integration/validations_test.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require "test_helper" +require "models/author" +require "models/post" + +class ValidationsTest < TestCase + fixtures :authors, :posts + + def test_presence_validation + # TODO: Test validates :name, presence: true + skip "Validation tests not implemented yet" + end + + def test_uniqueness_validation + # TODO: Test validates :email, uniqueness: true + skip "Validation tests not implemented yet" + end + + def test_length_validation + # TODO: Test validates :name, length: { maximum: 50 } + skip "Validation tests not implemented yet" + end + + def test_numericality_validation + # TODO: Test validates :count, numericality: { greater_than: 0 } + skip "Validation tests not implemented yet" + end + + def test_format_validation + # TODO: Test validates :email, format: { with: email_regex } + skip "Validation tests not implemented yet" + end + + def test_inclusion_validation + # TODO: Test validates :status, inclusion: { in: %w[active inactive] } + skip "Validation tests not implemented yet" + end + + def test_custom_validation + # TODO: Test custom validation methods + skip "Validation tests not implemented yet" + end + + def test_validation_callbacks + # TODO: Test before_validation, after_validation callbacks + skip "Validation tests not implemented yet" + end +end \ No newline at end of file diff --git a/test/test.duckdb b/test/test.duckdb new file mode 100644 index 0000000..ad7b9f6 Binary files /dev/null and b/test/test.duckdb differ diff --git a/test/test_helper.rb b/test/test_helper.rb index 4debea0..774497a 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -1,5 +1,73 @@ # frozen_string_literal: true +begin + require 'debug' +rescue LoadError + # Debug gem not available, continue without it +end + +require_relative "config" +require "stringio" +require "active_record" +require "active_record/fixtures" +require "active_support/testing/autorun" +require "active_support/logger" + +# Add the lib directory to load path $LOAD_PATH.unshift File.expand_path("../lib", __dir__) -require "activerecord-duckdb-adapter" +require 'activerecord-duckdb-adapter' + +def connect + ActiveRecord::Base.logger = ActiveSupport::Logger.new("log/debug.log", 0, 100 * 1024 * 1024) + ActiveRecord::Base.configurations = { + 'duckdb' => { + adapter: 'duckdb', + database: 'test/test.duckdb', + min_messages: 'warning' + } + } + ActiveRecord::Base.establish_connection :duckdb +end + +connect() + +def load_schema + # silence verbose schema loading + original_stdout = $stdout + $stdout = StringIO.new + + load SCHEMA_ROOT + "/schema.rb" + + ActiveRecord::FixtureSet.reset_cache +ensure + $stdout = original_stdout +end + +load_schema() +class TestCase < ActiveSupport::TestCase + include ActiveRecord::TestFixtures + self.fixture_paths = [::FIXTURE_ROOT] + self.use_transactional_tests = true + self.use_instantiated_fixtures = false + + # Add debugging for fixture loading issues + def setup_fixtures(config = ActiveRecord::Base) + puts "DEBUG: Setting up fixtures in #{self.class.name}" if ENV['DEBUG_FIXTURES'] + puts "DEBUG: Fixture paths: #{self.class.fixture_paths}" if ENV['DEBUG_FIXTURES'] + puts "DEBUG: Ruby version: #{RUBY_VERSION}" if ENV['DEBUG_FIXTURES'] + + super + + if ENV['DEBUG_FIXTURES'] + puts "DEBUG: Fixtures loaded. Available fixture methods:" + if respond_to?(:loaded_fixtures) + puts "DEBUG: Loaded fixtures: #{loaded_fixtures.keys}" + end + end + rescue => e + puts "ERROR: Fixture setup failed: #{e.message}" + puts "ERROR: Backtrace: #{e.backtrace.first(5).join(', ')}" + raise + end +end diff --git a/test/unit/columns_test.rb b/test/unit/columns_test.rb new file mode 100644 index 0000000..1cf865d --- /dev/null +++ b/test/unit/columns_test.rb @@ -0,0 +1,243 @@ +# frozen_string_literal: true + +require "test_helper" + +class ColumnsTest < TestCase + def setup + @connection = ActiveRecord::Base.connection + end + + def test_columns_basic_functionality + # Test with existing 'authors' table + columns = @connection.columns('authors') + + assert_instance_of Array, columns + assert columns.length > 0, "Should have at least one column" + + # Check that we get Column objects + columns.each do |column| + assert_instance_of ActiveRecord::ConnectionAdapters::Column, column + assert_respond_to column, :name + assert_respond_to column, :type + assert_respond_to column, :sql_type + end + end + + def test_columns_authors_table + columns = @connection.columns('authors') + column_names = columns.map(&:name) + + # Authors table should have 'id' and 'name' columns + assert_includes column_names, 'id' + assert_includes column_names, 'name' + + # Find the ID column and check its properties + id_column = columns.find { |col| col.name == 'id' } + assert_not_nil id_column, "Should have id column" + assert_equal :bigint, id_column.type + end + + def test_columns_posts_table + columns = @connection.columns('posts') + column_names = columns.map(&:name) + + # Posts table should have expected columns + expected_columns = ['id', 'author_id', 'title', 'body', 'count', 'enabled'] + expected_columns.each do |col_name| + assert_includes column_names, col_name, "Should have #{col_name} column" + end + end + + def test_column_definitions_basic_functionality + # Test the private column_definitions method through columns + columns = @connection.columns('authors') + + # Should return proper column information + assert columns.length >= 2, "Should have at least id and name columns" + + # Check column types are detected correctly + id_column = columns.find { |col| col.name == 'id' } + name_column = columns.find { |col| col.name == 'name' } + + assert_equal :bigint, id_column.type + assert_equal :string, name_column.type + end + + def test_columns_nonexistent_table + # Test with a table that doesn't exist + columns = @connection.columns('nonexistent_table') + + # Should return empty array for non-existent tables + assert_equal [], columns + assert_instance_of Array, columns + end + + def test_columns_sql_injection_protection + # Test that SQL injection is prevented in column_definitions + malicious_table_name = "authors'; DROP TABLE authors; --" + + # Should not raise exception and should return empty array (table doesn't exist) + columns = @connection.columns(malicious_table_name) + assert_equal [], columns + + # Verify authors table still exists + assert @connection.table_exists?('authors'), "Authors table should still exist after SQL injection attempt" + end + + def test_columns_with_custom_table + # Create a test table with various column types + @connection.execute(<<~SQL) + CREATE TABLE test_columns_table ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + age INTEGER, + salary DECIMAL(10,2), + is_active BOOLEAN DEFAULT true, + created_at TIMESTAMP, + notes TEXT + ) + SQL + + columns = @connection.columns('test_columns_table') + column_names = columns.map(&:name) + + # Check all columns are detected + expected_columns = ['id', 'name', 'age', 'salary', 'is_active', 'created_at', 'notes'] + expected_columns.each do |col_name| + assert_includes column_names, col_name, "Should have #{col_name} column" + end + + # Check column types are correctly detected + columns_by_name = columns.index_by(&:name) + + assert_equal :integer, columns_by_name['id'].type + assert_equal :string, columns_by_name['name'].type + assert_equal :integer, columns_by_name['age'].type + assert_equal :decimal, columns_by_name['salary'].type + assert_equal :boolean, columns_by_name['is_active'].type + assert_equal :time, columns_by_name['created_at'].type # DuckDB TIMESTAMP maps to :time + assert_equal :string, columns_by_name['notes'].type # TEXT maps to string + + ensure + @connection.execute("DROP TABLE IF EXISTS test_columns_table") + end + + def test_columns_null_and_default_values + # Create a test table with various null and default constraints + @connection.execute(<<~SQL) + CREATE TABLE test_null_defaults ( + id INTEGER PRIMARY KEY, + required_field VARCHAR(50) NOT NULL, + optional_field VARCHAR(50), + default_field VARCHAR(50) DEFAULT 'default_value', + default_number INTEGER DEFAULT 42 + ) + SQL + + columns = @connection.columns('test_null_defaults') + columns_by_name = columns.index_by(&:name) + + # Check null constraints + assert_equal false, columns_by_name['required_field'].null, "required_field should not allow null" + assert_equal true, columns_by_name['optional_field'].null, "optional_field should allow null" + + # Check default values (DuckDB returns defaults as strings) + assert_equal "'default_value'", columns_by_name['default_field'].default + assert_equal "42", columns_by_name['default_number'].default + + ensure + @connection.execute("DROP TABLE IF EXISTS test_null_defaults") + end + + def test_columns_ordering + # Create a table with specific column order + @connection.execute(<<~SQL) + CREATE TABLE test_column_order ( + third_column INTEGER, + first_column VARCHAR(50), + second_column BOOLEAN + ) + SQL + + columns = @connection.columns('test_column_order') + column_names = columns.map(&:name) + + # Columns should be returned in the order they were defined + expected_order = ['third_column', 'first_column', 'second_column'] + assert_equal expected_order, column_names, "Columns should be in definition order" + + ensure + @connection.execute("DROP TABLE IF EXISTS test_column_order") + end + + def test_columns_integration_with_activerecord + # Test that ActiveRecord models can use the columns method + require "models/author" + require "models/post" + + # ActiveRecord should be able to get column information + author_columns = Author.columns + post_columns = Post.columns + + assert author_columns.length > 0, "Author should have columns" + assert post_columns.length > 0, "Post should have columns" + + # Check that ActiveRecord can find specific columns + assert Author.column_names.include?('id') + assert Author.column_names.include?('name') + assert Post.column_names.include?('id') + assert Post.column_names.include?('title') + end + + def test_columns_performance + # Basic performance test - should complete quickly + start_time = Time.now + + 50.times do + @connection.columns('authors') + end + + end_time = Time.now + duration = end_time - start_time + + # Should complete 50 calls in less than 1 second + assert duration < 1.0, "columns method is too slow: #{duration} seconds for 50 calls" + end + + def test_columns_case_sensitivity + # DuckDB table names are case-sensitive + columns_lower = @connection.columns('authors') + columns_upper = @connection.columns('AUTHORS') + + # Lowercase should work (table exists) + assert columns_lower.length > 0, "Should find columns for 'authors'" + # Uppercase should return empty (table doesn't exist) + assert_equal [], columns_upper, "Should not find columns for 'AUTHORS'" + end + + def test_columns_with_schema_changes + # Test that columns method works correctly after schema changes + @connection.execute(<<~SQL) + CREATE TABLE test_schema_evolution ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) + ) + SQL + + # Initial columns + columns = @connection.columns('test_schema_evolution') + assert_equal 2, columns.length + assert_equal ['id', 'name'], columns.map(&:name) + + # Add a column + @connection.execute("ALTER TABLE test_schema_evolution ADD COLUMN email VARCHAR(255)") + + # Should reflect new column + columns = @connection.columns('test_schema_evolution') + assert_equal 3, columns.length + assert_equal ['id', 'name', 'email'], columns.map(&:name) + + ensure + @connection.execute("DROP TABLE IF EXISTS test_schema_evolution") + end +end \ No newline at end of file diff --git a/test/unit/connection_test.rb b/test/unit/connection_test.rb new file mode 100644 index 0000000..ef69c3e --- /dev/null +++ b/test/unit/connection_test.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require "test_helper" + +class ConnectionTest < TestCase + def test_establish_connection + # TODO: Test basic connection establishment + skip "Connection tests not implemented yet" + end + + def test_connection_pooling + # TODO: Test connection pool management + skip "Connection tests not implemented yet" + end + + def test_thread_safety + # TODO: Test multiple threads using connections safely + skip "Connection tests not implemented yet" + end + + def test_connection_leasing + # TODO: Test lease_connection vs connection methods + skip "Connection tests not implemented yet" + end + + def test_connection_timeout + # TODO: Test connection pool timeout handling + skip "Connection tests not implemented yet" + end + + def test_connection_recovery + # TODO: Test connection recovery after disconnect + skip "Connection tests not implemented yet" + end + + def test_multiple_databases + # TODO: Test connecting to multiple DuckDB databases + skip "Connection tests not implemented yet" + end + + def test_in_memory_database + # TODO: Test in-memory database connections + skip "Connection tests not implemented yet" + end + + def test_connection_configuration + # TODO: Test various connection configuration options + skip "Connection tests not implemented yet" + end + + def test_connection_verification + # TODO: Test connection verification and active? checks + skip "Connection tests not implemented yet" + end + + def test_connection_closing + # TODO: Test proper connection closing and cleanup + skip "Connection tests not implemented yet" + end + + def test_concurrent_access + # TODO: Test concurrent read/write access patterns + skip "Connection tests not implemented yet" + end +end \ No newline at end of file diff --git a/test/unit/database_tasks_test.rb b/test/unit/database_tasks_test.rb new file mode 100644 index 0000000..fe3b39f --- /dev/null +++ b/test/unit/database_tasks_test.rb @@ -0,0 +1,204 @@ +# frozen_string_literal: true + +require "test_helper" + +class DatabaseTasksTest < TestCase + def setup + @test_db_path = 'test/test_database_tasks.duckdb' + @test_config = { + adapter: 'duckdb', + database: @test_db_path + } + @tasks = ActiveRecord::Tasks::DuckdbDatabaseTasks.new(@test_config) + + # Store original connection config to restore later + @original_config = ActiveRecord::Base.connection_db_config.configuration_hash + + # Clean up any existing test databases and directories + cleanup_all_test_files + end + + def teardown + # Restore original connection to main test database + ActiveRecord::Base.establish_connection(@original_config) + cleanup_all_test_files + end + + def test_create_file_database + refute File.exist?(@test_db_path), "Test database should not exist initially" + + @tasks.create + + assert File.exist?(@test_db_path), "Database file should be created" + assert File.size(@test_db_path) > 0, "Database file should not be empty" + end + + def test_create_database_already_exists + # Create database first + @tasks.create + assert File.exist?(@test_db_path), "Database should be created" + + # Try to create again - should raise exception + assert_raises ActiveRecord::DatabaseAlreadyExists do + @tasks.create + end + end + + def test_drop_existing_database + # Create database first + @tasks.create + assert File.exist?(@test_db_path), "Database should be created" + + # Drop it + @tasks.drop + + refute File.exist?(@test_db_path), "Database file should be removed" + end + + def test_drop_nonexistent_database + refute File.exist?(@test_db_path), "Test database should not exist" + + # Should not raise exception when dropping non-existent database + assert_nothing_raised do + @tasks.drop + end + end + + def test_purge_database + # Store original connection config + original_config = ActiveRecord::Base.connection_db_config.configuration_hash + + # Create database first + @tasks.create + assert File.exist?(@test_db_path), "Database should be created" + + # Add some data to make sure purge recreates fresh database + ActiveRecord::Base.establish_connection(@test_config) + ActiveRecord::Base.connection.execute('CREATE TABLE test_purge (id INTEGER)') + ActiveRecord::Base.connection.execute('INSERT INTO test_purge VALUES (1)') + + # Purge should drop and recreate + @tasks.purge + + assert File.exist?(@test_db_path), "Database should exist after purge" + + # Reconnect and verify table is gone + ActiveRecord::Base.establish_connection(@test_config) + tables = ActiveRecord::Base.connection.execute("SELECT name FROM sqlite_master WHERE type='table'").map { |row| row[0] } + refute_includes tables, 'test_purge', "Table should not exist after purge" + + ensure + # Restore original connection for other tests + ActiveRecord::Base.establish_connection(original_config) if original_config + end + + def test_create_in_memory_database + # Store original connection config + original_config = ActiveRecord::Base.connection_db_config.configuration_hash + + in_memory_config = { + adapter: 'duckdb', + database: ':memory:' + } + tasks = ActiveRecord::Tasks::DuckdbDatabaseTasks.new(in_memory_config) + + # Should not raise exception + assert_nothing_raised do + tasks.create + end + + # Should be able to establish connection + ActiveRecord::Base.establish_connection(in_memory_config) + # Connection test - in-memory databases work differently + assert_nothing_raised { ActiveRecord::Base.connection.execute("SELECT 1") } + + ensure + # Restore original connection for other tests + ActiveRecord::Base.establish_connection(original_config) if original_config + end + + def test_drop_in_memory_database + # Store original connection config + original_config = ActiveRecord::Base.connection_db_config.configuration_hash + + in_memory_config = { + adapter: 'duckdb', + database: ':memory:' + } + tasks = ActiveRecord::Tasks::DuckdbDatabaseTasks.new(in_memory_config) + + # Create and connect + tasks.create + ActiveRecord::Base.establish_connection(in_memory_config) + + # Drop should not raise exception + assert_nothing_raised do + tasks.drop + end + + ensure + # Restore original connection for other tests + ActiveRecord::Base.establish_connection(original_config) if original_config + end + + def test_create_with_directory_creation + nested_path = 'test/deep/nested/directory/test.duckdb' + nested_config = { + adapter: 'duckdb', + database: nested_path + } + tasks = ActiveRecord::Tasks::DuckdbDatabaseTasks.new(nested_config) + + # Ensure clean state + FileUtils.rm_rf('test/deep') if Dir.exist?('test/deep') + + begin + refute File.exist?(nested_path), "Nested database should not exist initially" + refute Dir.exist?('test/deep'), "Nested directory should not exist initially" + + tasks.create + + assert File.exist?(nested_path), "Nested database should be created" + assert Dir.exist?('test/deep/nested/directory'), "Nested directories should be created" + + ensure + # Cleanup + FileUtils.rm_rf('test/deep') if Dir.exist?('test/deep') + end + end + + def test_database_tasks_with_relative_paths + relative_config = { + adapter: 'duckdb', + database: './test/relative_test.duckdb' + } + tasks = ActiveRecord::Tasks::DuckdbDatabaseTasks.new(relative_config) + + # Ensure clean state + FileUtils.rm_f('test/relative_test.duckdb') + refute File.exist?('test/relative_test.duckdb'), "Relative path database should not exist initially" + + begin + tasks.create + assert File.exist?('test/relative_test.duckdb'), "Relative path database should be created" + + tasks.drop + refute File.exist?('test/relative_test.duckdb'), "Relative path database should be dropped" + + ensure + FileUtils.rm_f('test/relative_test.duckdb') + end + end + + private + + def cleanup_all_test_files + # Clean up main test database + FileUtils.rm_f(@test_db_path) if File.exist?(@test_db_path) + FileUtils.rm_f("#{@test_db_path}.wal") if File.exist?("#{@test_db_path}.wal") + + # Clean up other test files + FileUtils.rm_f('test/relative_test.duckdb') + FileUtils.rm_rf('test/deep') if Dir.exist?('test/deep') + end +end diff --git a/test/unit/primary_keys_test.rb b/test/unit/primary_keys_test.rb new file mode 100644 index 0000000..173243b --- /dev/null +++ b/test/unit/primary_keys_test.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require "test_helper" + +class PrimaryKeysTest < TestCase + def setup + @connection = ActiveRecord::Base.connection + end + + def test_primary_keys_single_column + # Test with the existing 'authors' table which has a single primary key + primary_keys = @connection.primary_keys('authors') + + assert_equal ['id'], primary_keys + assert_instance_of Array, primary_keys + assert_equal 1, primary_keys.length + end + + def test_auto_increment_primary_key_basic + # Test that the basic auto-increment functionality works (fixes the original error) + @connection.create_table :test_auto_increment, force: true do |t| + t.string :name + end + + # Reset sequence to ensure clean start + begin + @connection.execute("SELECT setval('test_auto_increment_id_seq', 1, false)") + rescue + # Ignore if sequence doesn't exist or setval doesn't work + end + + # This should not raise "NOT NULL constraint failed: users.id" + @connection.execute("INSERT INTO test_auto_increment (name) VALUES ('First')") + @connection.execute("INSERT INTO test_auto_increment (name) VALUES ('Second')") + @connection.execute("INSERT INTO test_auto_increment (name) VALUES ('Third')") + + # Verify the auto-increment worked + results = @connection.execute("SELECT id, name FROM test_auto_increment ORDER BY id") + + assert_equal 3, results.to_a.length + # Don't test exact IDs, just that they're sequential + ids = results.to_a.map { |row| row[0] } + assert_equal ids.sort, ids, "IDs should be in sequential order" + assert_equal ids.uniq, ids, "IDs should be unique" + + ensure + @connection.drop_table :test_auto_increment, if_exists: true + end +end \ No newline at end of file diff --git a/test/unit/schema_test.rb b/test/unit/schema_test.rb new file mode 100644 index 0000000..83cc836 --- /dev/null +++ b/test/unit/schema_test.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require "test_helper" + +class SchemaTest < TestCase + def setup + @schema_file = 'test/tmp/schema.rb' + @structure_file = 'test/tmp/structure.sql' + FileUtils.mkdir_p(File.dirname(@schema_file)) + end + + def teardown + FileUtils.rm_f(@schema_file) + FileUtils.rm_f(@structure_file) + end + + def test_schema_dump + # TODO: Test ActiveRecord::SchemaDumper.dump generates valid schema.rb + skip "Schema tests not implemented yet" + end + + def test_schema_load + # TODO: Test loading schema.rb recreates database structure + skip "Schema tests not implemented yet" + end + + def test_structure_dump + # TODO: Test structure dump generates valid SQL file + skip "Schema tests not implemented yet" + end + + def test_structure_load + # TODO: Test loading structure.sql recreates database structure + skip "Schema tests not implemented yet" + end + + def test_schema_versioning + # TODO: Test schema_migrations table creation and management + skip "Schema tests not implemented yet" + end + + def test_schema_statements_create_table + # TODO: Test connection.create_table functionality + skip "Schema tests not implemented yet" + end + + def test_schema_statements_add_column + # TODO: Test connection.add_column functionality + skip "Schema tests not implemented yet" + end + + def test_schema_statements_remove_column + # TODO: Test connection.remove_column functionality + skip "Schema tests not implemented yet" + end + + def test_schema_statements_change_column + # TODO: Test connection.change_column functionality + skip "Schema tests not implemented yet" + end + + def test_schema_statements_add_index + # TODO: Test connection.add_index functionality + skip "Schema tests not implemented yet" + end + + def test_schema_statements_remove_index + # TODO: Test connection.remove_index functionality + skip "Schema tests not implemented yet" + end +end \ No newline at end of file diff --git a/test/unit/sql_compatibility_test.rb b/test/unit/sql_compatibility_test.rb new file mode 100644 index 0000000..bf4c42a --- /dev/null +++ b/test/unit/sql_compatibility_test.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +require "test_helper" +require "models/author" +require "models/post" + +class SqlCompatibilityTest < TestCase + fixtures :authors, :posts + + def test_order_by_queries + # TODO: Test ORDER BY name ASC/DESC functionality + skip "SQL compatibility tests not implemented yet" + end + + def test_limit_offset_queries + # TODO: Test LIMIT and OFFSET for pagination + skip "SQL compatibility tests not implemented yet" + end + + def test_group_by_queries + # TODO: Test GROUP BY and aggregate functions + skip "SQL compatibility tests not implemented yet" + end + + def test_join_queries + # TODO: Test INNER JOIN, LEFT JOIN functionality + skip "SQL compatibility tests not implemented yet" + end + + def test_where_conditions + # TODO: Test various WHERE clause conditions (=, !=, IN, LIKE, etc.) + skip "SQL compatibility tests not implemented yet" + end + + def test_having_conditions + # TODO: Test HAVING clause with GROUP BY + skip "SQL compatibility tests not implemented yet" + end + + def test_count_queries + # TODO: Test COUNT, COUNT(DISTINCT), etc. + skip "SQL compatibility tests not implemented yet" + end + + def test_aggregate_functions + # TODO: Test SUM, AVG, MIN, MAX functions + skip "SQL compatibility tests not implemented yet" + end + + def test_subqueries + # TODO: Test subqueries in WHERE and SELECT + skip "SQL compatibility tests not implemented yet" + end + + def test_case_statements + # TODO: Test CASE WHEN statements + skip "SQL compatibility tests not implemented yet" + end + + def test_boolean_operations + # TODO: Test AND, OR, NOT operations + skip "SQL compatibility tests not implemented yet" + end + + def test_date_time_functions + # TODO: Test date/time functions and operations + skip "SQL compatibility tests not implemented yet" + end + + def test_string_functions + # TODO: Test string functions (LIKE, CONCAT, etc.) + skip "SQL compatibility tests not implemented yet" + end + + def test_null_handling + # TODO: Test IS NULL, IS NOT NULL, COALESCE + skip "SQL compatibility tests not implemented yet" + end +end \ No newline at end of file