package com.hadoop.recommend;

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MDriverConfig;
import org.apache.sqoop.model.MFromConfig;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MLinkConfig;
import org.apache.sqoop.model.MSubmission;
import org.apache.sqoop.model.MToConfig;
import org.apache.sqoop.submission.counter.Counter;
import org.apache.sqoop.submission.counter.CounterGroup;
import org.apache.sqoop.submission.counter.Counters;
import org.apache.sqoop.validation.Status;

public class MysqlToHDFS {
    public static void main(String[] args) {
        sqoopTransfer();
    }

    public static void sqoopTransfer() {
        // Initialize the client against the Sqoop server's REST endpoint
        String url = "http://master:12000/sqoop/";
        SqoopClient client = new SqoopClient(url);

        // Create the source link (JDBC)
        long fromConnectorId = 2;
        MLink fromLink = client.createLink(fromConnectorId);
        fromLink.setName("JDBC connector");
        fromLink.setCreationUser("hadoop");
        MLinkConfig fromLinkConfig = fromLink.getConnectorLinkConfig();
        fromLinkConfig.getStringInput("linkConfig.connectionString").setValue("jdbc:mysql://master:3306/hive");
        fromLinkConfig.getStringInput("linkConfig.jdbcDriver").setValue("com.mysql.jdbc.Driver");
        fromLinkConfig.getStringInput("linkConfig.username").setValue("root");
        fromLinkConfig.getStringInput("linkConfig.password").setValue("");
        Status fromStatus = client.saveLink(fromLink);
        if (fromStatus.canProceed()) {
            System.out.println("JDBC link created successfully, ID: " + fromLink.getPersistenceId());
        } else {
            System.out.println("JDBC link creation failed");
        }

        // Create the destination link (HDFS)
        long toConnectorId = 1;
        MLink toLink = client.createLink(toConnectorId);
        toLink.setName("HDFS connector");
        toLink.setCreationUser("hadoop");
        MLinkConfig toLinkConfig = toLink.getConnectorLinkConfig();
        toLinkConfig.getStringInput("linkConfig.uri").setValue("hdfs://master:9000/");
        Status toStatus = client.saveLink(toLink);
        if (toStatus.canProceed()) {
            System.out.println("HDFS link created successfully, ID: " + toLink.getPersistenceId());
        } else {
            System.out.println("HDFS link creation failed");
        }

        // Create the job from the two links
        long fromLinkId = fromLink.getPersistenceId();
        long toLinkId = toLink.getPersistenceId();
        MJob job = client.createJob(fromLinkId, toLinkId);
        job.setName("MySQL to HDFS job");
        job.setCreationUser("hadoop");

        // Configure the FROM side: source schema, table, and partition column
        MFromConfig fromJobConfig = job.getFromJobConfig();
        fromJobConfig.getStringInput("fromJobConfig.schemaName").setValue("sqoop");
        fromJobConfig.getStringInput("fromJobConfig.tableName").setValue("sqoop");
        fromJobConfig.getStringInput("fromJobConfig.partitionColumn").setValue("id");

        // Configure the TO side: HDFS output directory
        MToConfig toJobConfig = job.getToJobConfig();
        toJobConfig.getStringInput("toJobConfig.outputDirectory").setValue("/user/hdfs/recommend");

        // Driver config: number of parallel extractors
        MDriverConfig driverConfig = job.getDriverConfig();
        driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(3);
        Status status = client.saveJob(job);
        if (status.canProceed()) {
            System.out.println("Job created successfully, ID: " + job.getPersistenceId());
        } else {
            System.out.println("Job creation failed");
        }

        // Start the job and poll its progress
        long jobId = job.getPersistenceId();
        MSubmission submission = client.startJob(jobId);
        System.out.println("Job submission status: " + submission.getStatus());
        while (submission.getStatus().isRunning() && submission.getProgress() != -1) {
            System.out.println("Progress: " + String.format("%.2f %%", submission.getProgress() * 100));
            // Report progress every three seconds
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            // Re-poll the server; the submission object is not refreshed automatically
            submission = client.getJobStatus(jobId);
        }
        System.out.println("Job execution finished...");
        System.out.println("Hadoop task ID: " + submission.getExternalId());
        Counters counters = submission.getCounters();
        if (counters != null) {
            System.out.println("Counters:");
            for (CounterGroup group : counters) {
                System.out.print("\t");
                System.out.println(group.getName());
                for (Counter counter : group) {
                    System.out.print("\t\t");
                    System.out.print(counter.getName());
                    System.out.print(": ");
                    System.out.println(counter.getValue());
                }
            }
        }
        if (submission.getExceptionInfo() != null) {
            System.out.println("Job execution exception, exception info: " + submission.getExceptionInfo());
        }
        System.out.println("MySQL-to-HDFS transfer via Sqoop finished");
    }
}
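For reference, the connector IDs hardcoded above (2 for the JDBC connector, 1 for the HDFS connector) are specific to one installation and can differ between Sqoop server deployments. A minimal sketch to list the IDs actually registered on the server, assuming the same Sqoop 1.99.x client API and the server URL from the code above:

import java.util.Collection;

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MConnector;

public class ListConnectors {
    public static void main(String[] args) {
        SqoopClient client = new SqoopClient("http://master:12000/sqoop/");
        // Print each registered connector's persistence ID and unique name,
        // so the IDs passed to createLink() can be verified before running the job
        Collection<MConnector> connectors = client.getConnectors();
        for (MConnector connector : connectors) {
            System.out.println(connector.getPersistenceId() + " : " + connector.getUniqueName());
        }
    }
}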
Why does running this code report an error??