require 'arjdbc/tasks/jdbc_database_tasks'

module ArJdbc
  module Tasks
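    # Database (rake) tasks for the DB2 adapter, built on the generic JDBC
    # tasks: create is not supported, purge re-creates the database, and the
    # structure dump/load work with plain DDL and JDBC meta-data.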
    class DB2DatabaseTasks < JdbcDatabaseTasks

      def create
        raise "AR-JDBC adapter 'DB2' does not support create_database"
      end

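      # Re-creates the current database using the adapter's recreate_database.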
      def purge
        establish_connection(config)
        connection.recreate_database
      end

      # NOTE: does not work correctly (on non AS400) due to a driver meta-data issue
      #
      # also try db2move e.g. `db2move SAMPLE EXPORT -sn db2inst`
      # - where SAMPLE is the database name
      # - and -sn specifies the schema name
      #

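      # Dumps a plain-DDL structure file: a CREATE TABLE statement per table
      # (columns reconstructed from JDBC meta-data) followed by
      # ALTER TABLE ... ADD CONSTRAINT statements for the primary keys.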
      def structure_dump(filename)
        establish_connection(config)
        dump = File.open(filename, "w:utf-8")

        schema_name = connection.schema.upcase if connection.schema
        meta_data = connection.jdbc_connection.meta_data
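        # all TABLE-type objects visible in the schema (java.sql.DatabaseMetaData#getTables)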
        tables_rs = meta_data.getTables(nil, schema_name, nil, ["TABLE"].to_java(:String))

        have_scale = ArJdbc::DB2::HAVE_SCALE
        have_precision = ArJdbc::DB2::HAVE_LIMIT + ArJdbc::DB2::HAVE_PRECISION

        while tables_rs.next
          table_name = tables_rs.getString('TABLE_NAME')
          dump << "CREATE TABLE #{connection.quote_table_name(table_name)} (\n"

          cols_rs = meta_data.getColumns(nil, schema_name, table_name, nil)
          begin
            first_col = true
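            # getColumns result set indexes used below (java.sql.DatabaseMetaData):
            # 4 = COLUMN_NAME, 6 = TYPE_NAME, 7 = COLUMN_SIZE, 9 = DECIMAL_DIGITS,
            # 13 = COLUMN_DEF, 18 = IS_NULLABLE, 23 = IS_AUTOINCREMENT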
            while cols_rs.next
              column_name = cols_rs.getString(4)
              default = cols_rs.getString(13)
              default = default.empty? ? "" : " DEFAULT #{default}" if default
              type = cols_rs.getString(6)
              precision, scale = cols_rs.getString(7), cols_rs.getString(9)
              column_size = ""
              if scale && have_scale.include?(type)
                column_size = "(#{precision},#{scale})"
              elsif precision && have_precision.include?(type)
                column_size = "(#{precision})"
              end
              nulling = (cols_rs.getString(18) == 'NO' ? " NOT NULL" : nil)
              autoinc = (cols_rs.getString(23) == 'YES' ? " GENERATED ALWAYS AS IDENTITY" : nil)

              create_column = connection.quote_column_name(column_name)
              create_column << " #{type}"
              create_column << column_size
              create_column << nulling.to_s
              create_column << default.to_s
              create_column << autoinc.to_s

              create_column = first_col ? " #{create_column}" : ",\n #{create_column}"
              dump << create_column

              first_col = false
            end
          ensure
            cols_rs.close
          end

          dump << "\n);\n\n"

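          # primary key columns grouped by constraint name
          # (getPrimaryKeys: 4 = COLUMN_NAME, 6 = PK_NAME)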
          pk_rs = meta_data.getPrimaryKeys(nil, schema_name, table_name)
          primary_keys = {}
          begin
            while pk_rs.next
              name = pk_rs.getString(6)
              primary_keys[name] ||= []
              primary_keys[name] << pk_rs.getString(4)
            end
          ensure
            pk_rs.close
          end
          primary_keys.each do |name, cols|
            dump << "ALTER TABLE #{connection.quote_table_name(table_name)}\n"
            dump << " ADD CONSTRAINT #{name}\n"
            dump << " PRIMARY KEY (#{cols.join(', ')});\n\n"
          end
        end

        dump.close
      end

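      # Loads a structure file by splitting on `;` statement separators and
      # executing each DDL statement on the current connection.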
      def structure_load(filename)
        establish_connection(config)
        IO.read(filename).split(/;\n*/m).each do |ddl|
          connection.execute ddl.sub(/;$/, '')
        end
      end

    end
  end
end