In an Oracle SQL script, execution fails with an exception when a field value exceeds 4,000 characters, while the same statement still runs normally in PostgreSQL. The product supports multiple databases, such as Oracle and PostgreSQL, and when the basic data is large the only practical way to upgrade a deployment is to export DMP or backup files of the tables that hold that basic data. Since Oracle is used as the development library, the basic data has to be turned into a PostgreSQL backup, so a small program reads the Oracle tables directly and writes them into PostgreSQL.
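To illustrate the limitation described above, the sketch below is a minimal, hypothetical example: the table demo_clob with a single CLOB column body and the class name LiteralLimitDemo are assumptions, and the connection string reuses the Oracle address from the program further down. A string literal longer than 4,000 characters embedded in the SQL text typically fails on Oracle (ORA-01704: string literal too long) but is accepted by a PostgreSQL text column; binding the value through a PreparedStatement avoids the literal limit on both databases, which is why the migration program below transfers CLOB data with setString instead of generated SQL scripts.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;

public class LiteralLimitDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical table: create table demo_clob (body clob) on Oracle,
        //                     create table demo_clob (body text) on PostgreSQL.
        String big = new String(new char[5000]).replace('\0', 'x');   // 5,000-character value

        try (Connection con = DriverManager.getConnection(
                "jdbc:oracle:thin:@192.168.1.12:1521:orcl", "h2do", "h2do")) {
            // As a literal inside the SQL text this typically raises ORA-01704 on Oracle;
            // the same statement runs normally against a PostgreSQL text column.
            try (Statement st = con.createStatement()) {
                st.execute("insert into demo_clob (body) values ('" + big + "')");
            } catch (Exception e) {
                System.out.println("Literal insert failed: " + e.getMessage());
            }
            // Binding the value instead of inlining it avoids the 4,000-character literal limit.
            try (PreparedStatement ps = con.prepareStatement(
                    "insert into demo_clob (body) values (?)")) {
                ps.setString(1, big);
                ps.executeUpdate();
            }
        }
    }
}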
Steps:
1. Modify the Oracle and PostgreSQL connection IPs, user names, and passwords;
2. Modify the list of tables to copy;
3. Run the program.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.sql.Types;

public class OracleToPostgresCopy {

    public static void main(String[] args) throws Exception {
        long t0 = System.currentTimeMillis();
        Class.forName("oracle.jdbc.driver.OracleDriver");
        Class.forName("org.postgresql.Driver");
        Connection srcCon = null, dstCon = null;
        Statement srcStmt = null, dstStmt = null;
        PreparedStatement ps = null;
        try {
            /* Create the connections */
            srcCon = DriverManager.getConnection("jdbc:oracle:thin:@192.168.1.12:1521:orcl", "h2do", "h2do");
            dstCon = DriverManager.getConnection("jdbc:postgresql://192.168.1.23:5432/h2do", "postgres", "postgres");
            dstCon.setAutoCommit(false);   // needed so the explicit commit every 1000 rows below takes effect
            srcStmt = srcCon.createStatement();
            dstStmt = dstCon.createStatement();

            /* List of tables to copy */
            String[] tables = new String[]{ "h2do", "e2say" };

            /* Process the tables one by one */
            for (String table : tables) {
                /* 1. Clean up the target table */
                dstStmt.execute("truncate table " + table);

                /* 2. Query the source table and splice its columns into a prepared INSERT statement */
                ResultSet rs = srcStmt.executeQuery("select * from " + table);
                StringBuilder sql1 = new StringBuilder("insert into " + table + " (");
                StringBuilder sql2 = new StringBuilder(") values (");
                ResultSetMetaData rsmd = rs.getMetaData();
                for (int col = 1; col <= rsmd.getColumnCount(); col++) {
                    if (col > 1) {
                        sql1.append(",");
                        sql2.append(",");
                    }
                    sql1.append(rsmd.getColumnName(col).toLowerCase());
                    sql2.append("?");
                }
                String sql = sql1.toString() + sql2.toString() + ")";
                System.out.println(sql);

                /* 3. Read the source rows into the target table, committing every 1000 rows */
                int rows = 0;
                ps = dstCon.prepareStatement(sql);
                while (rs.next()) {
                    for (int col = 1; col <= rsmd.getColumnCount(); col++) {
                        if (rsmd.getColumnType(col) == Types.CLOB) {
                            ps.setString(col, rs.getString(col));   // read the CLOB as a string, bind it as text
                        } else {
                            ps.setObject(col, rs.getObject(col));
                        }
                    }
                    ps.addBatch();
                    rows++;
                    if (rows % 1000 == 0) {
                        ps.executeBatch();
                        dstCon.commit();
                        ps.clearBatch();
                        rows = 0;
                    }
                }
                if (rows > 0) {
                    ps.executeBatch();
                    dstCon.commit();
                }
                rs.close();
                ps.close();
                System.out.println("Time consumed: " + (System.currentTimeMillis() - t0) + " milliseconds (" + table + ").");
            }
        } finally {
            try { if (null != srcStmt) srcStmt.close(); } catch (Exception e) {}
            try { if (null != srcCon)  srcCon.close();  } catch (Exception e) {}
            try { if (null != dstStmt) dstStmt.close(); } catch (Exception e) {}
            try { if (null != dstCon)  dstCon.close();  } catch (Exception e) {}
        }
        System.out.println("Total time: " + (System.currentTimeMillis() - t0) + " milliseconds.");
    }
}
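After the copy finishes, a quick sanity check is to compare row counts between the two databases. The sketch below is only an illustration under the same connection settings as the program above; the class name MigrationRowCountCheck and the helper countRows are hypothetical.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MigrationRowCountCheck {
    public static void main(String[] args) throws Exception {
        String[] tables = new String[]{ "h2do", "e2say" };
        try (Connection srcCon = DriverManager.getConnection(
                 "jdbc:oracle:thin:@192.168.1.12:1521:orcl", "h2do", "h2do");
             Connection dstCon = DriverManager.getConnection(
                 "jdbc:postgresql://192.168.1.23:5432/h2do", "postgres", "postgres")) {
            for (String table : tables) {
                long src = countRows(srcCon, table);
                long dst = countRows(dstCon, table);
                System.out.println(table + ": oracle=" + src + ", postgresql=" + dst
                        + (src == dst ? " (OK)" : " (MISMATCH)"));
            }
        }
    }

    // Hypothetical helper: counts the rows of one table over the given connection.
    private static long countRows(Connection con, String table) throws Exception {
        try (Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("select count(*) from " + table)) {
            rs.next();
            return rs.getLong(1);
        }
    }
}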
Migrating table data with CLOB fields from Oracle to PostgreSQL