{"id":4261,"date":"2021-03-02T14:06:47","date_gmt":"2021-03-02T06:06:47","guid":{"rendered":"https:\/\/nj.transwarp.cn:8180\/?p=4261"},"modified":"2021-03-02T14:06:37","modified_gmt":"2021-03-02T06:06:37","slug":"sqoop%e5%90%91hdfs%e7%9a%84%e5%a2%9e%e9%87%8f%e5%af%bc%e5%85%a5","status":"publish","type":"post","link":"https:\/\/kbwp.transwarp.cn\/?p=4261","title":{"rendered":"sqoop\u5411hdfs\u7684\u589e\u91cf\u5bfc\u5165"},"content":{"rendered":"<h3>\u6982\u8981\u63cf\u8ff0<\/h3>\n<hr \/>\n<p><strong>\u5f53\u5728\u751f\u4ea7\u73af\u5883\u4e2d\uff0c\u6211\u4eec\u53ef\u80fd\u4f1a\u5b9a\u671f\u4ece\u4e0e\u4e1a\u52a1\u76f8\u5173\u7684\u5173\u7cfb\u578b\u6570\u636e\u5e93\u5411Hadoop\u5bfc\u5165\u6570\u636e\uff0c\u5bfc\u5165\u6570\u4ed3\u540e\u8fdb\u884c\u540e\u7eed\u79bb\u7ebf\u5206\u6790\u3002\u8fd9\u79cd\u60c5\u51b5\u4e0b\u6211\u4eec\u4e0d\u53ef\u80fd\u5c06\u6240\u6709\u6570\u636e\u91cd\u65b0\u518d\u5bfc\u5165\u4e00\u904d\uff0c\u6240\u4ee5\u6b64\u65f6\u9700\u8981\u6570\u636e\u589e\u91cf\u5bfc\u5165\u3002<\/strong><br \/>\n<strong>\u589e\u91cf\u5bfc\u5165\u6570\u636e\u5206\u4e3a\u4e24\u79cd\u65b9\u5f0f\uff1a<\/strong><\/p>\n<ul>\n<li><strong>\u4e00\u662f\u57fa\u4e8e\u9012\u589e\u5217\u7684\u589e\u91cf\u6570\u636e\u5bfc\u5165\uff08Append\u65b9\u5f0f\uff09<\/strong><\/li>\n<li><strong>\u4e8c\u662f\u57fa\u4e8e\u65f6\u95f4\u5217\u7684\u6570\u636e\u589e\u91cf\u5bfc\u5165\uff08LastModified\u65b9\u5f0f\uff09<\/strong><\/li>\n<\/ul>\n<h3>\u8be6\u7ec6\u8bf4\u660e<\/h3>\n<hr \/>\n<h4>1. \u65b9\u5f0f\u4e00\uff1aAppend\u65b9\u5f0f<\/h4>\n<p>\u6bd4\u5982:\u6709\u4e00\u4e2a\u8ba2\u5355\u8868\uff0c\u91cc\u9762\u6bcf\u4e2a\u8ba2\u5355\u6709\u4e00\u4e2a\u552f\u4e00\u6807\u8bc6\u81ea\u589e\u5217ID\uff0c\u5728\u5173\u7cfb\u578b\u6570\u636e\u5e93\u4e2d\u4ee5\u4e3b\u952e\u5f62\u5f0f\u5b58\u5728\uff0c\u4e4b\u524d\u5df2\u7ecf\u5c06id\u57281-3\u7684\u7f16\u53f7\u7684\u8ba2\u5355\u5bfc\u5165\u5230\u4e86Hive\u4e2d\uff0c\u73b0\u5728\u4e00\u6bb5\u65f6\u95f4\u540e\u6211\u4eec\u9700\u8981\u5c06\u8fd1\u671f\u4ea7\u751f\u7684\u65b0\u7684\u8ba2\u5355\u6570\u636e(id\u4e3a4\u30015\u7684\u4e24\u6761\u6570\u636e)\u5bfc\u5165Hdfs\uff0c\u4f9b\u540e\u7eed\u6570\u4ed3\u8fdb\u884c\u5206\u6790\u3002\u6b64\u65f6\u6211\u4eec\u53ea\u9700\u8981\u6307\u5b9a-incremental\u53c2\u6570\u4e3aappend\uff0c-last-value\u53c2\u6570\u4e3a3\u5373\u53ef\u3002\u8868\u793a\u53ea\u4ece\u5927\u4e8e3\u540e\u5f00\u59cb\u5bfc\u5165\u3002<\/p>\n<p><strong>\u91cd\u8981\u53c2\u6570\u8bf4\u660e\uff1a<\/strong><\/p>\n<table>\n<thead>\n<tr>\n<th style=\"text-align: left;\">\u53c2\u6570<\/th>\n<th style=\"text-align: left;\">\u8bf4\u660e<\/th>\n<\/tr>\n<\/thead>\n<tbody>\n<tr>\n<td style=\"text-align: left;\">&#8211;incremental append<\/td>\n<td style=\"text-align: left;\">\u57fa\u4e8e\u9012\u589e\u5217\u7684\u589e\u91cf\u5bfc\u5165\uff08\u5c06\u9012\u589e\u5217\u503c\u5927\u4e8e\u9608\u503c\u7684\u6240\u6709\u6570\u636e\u589e\u91cf\u5bfc\u5165hdfs\uff09<\/td>\n<\/tr>\n<tr>\n<td style=\"text-align: left;\">&#8211;check-column<\/td>\n<td style=\"text-align: left;\">\u9012\u589e\u5217\uff08int\uff09<\/td>\n<\/tr>\n<tr>\n<td style=\"text-align: left;\">&#8211;last-value<\/td>\n<td style=\"text-align: left;\">\u9608\u503c\uff08int\uff09<\/td>\n<\/tr>\n<\/tbody>\n<\/table>\n<h5>1.1 Oracle\u5efa\u8868<\/h5>\n<pre><code class=\"language-sql\">CREATE TABLE appendTest (\n  id int,\n  name varchar2(255)\n) ;<\/code><\/pre>\n<h5>1.2 \u5bfc\u5165\u6570\u636e<\/h5>\n<pre><code class=\"language-sql\">insert into appendTest(id,name) values(1,'name1');\ninsert into appendTest(id,name) 
##### 1.2 Insert data

```sql
insert into appendTest(id,name) values(1,'name1');
insert into appendTest(id,name) values(2,'name2');
insert into appendTest(id,name) values(3,'name3');
```

![file](/wp-content/uploads/2020/10/image-1602816368086.png)

##### 1.3 Incremental import

```shell
# Import the three rows with id > 0
sqoop import \
--connect jdbc:oracle:thin:@172.22.23.9:1521/helowin \
--username lkw \
--password 123456 \
--query "select * from lkw.appendtest where \$CONDITIONS" \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1 \
--target-dir /tmp/tb01 \
--incremental append \
--check-column id \
--last-value 0
```
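As the execution log below shows, Sqoop first asks the source for the current maximum of the check column, then bounds the import between the previous and the new high-water mark. Conceptually, the map task runs something like the following (a sketch of the effective predicate inferred from the log, not Sqoop's literal generated SQL):

```sql
-- Upper bound taken from Sqoop's own probe:
--   SELECT MAX(id) FROM (select * from lkw.appendtest where (1 = 1)) sqoop_import_query_alias
SELECT *
  FROM lkw.appendtest
 WHERE id > 0    -- the --last-value (exclusive)
   AND id <= 3;  -- the current MAX(id) (inclusive)
```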
**Execution log:**

```shell
[root@tdh70001 ~]# sqoop import --connect jdbc:oracle:thin:@172.22.23.9:1521/helowin --username lkw --password 123456  --query "select * from lkw.appendtest where \$CONDITIONS"  --hive-drop-import-delims --null-string '\\N' --null-non-string '\\N'  -m 1 --target-dir /tmp/tb01 --incremental append --check-column id   --last-value 0
Warning: /root/TDH-Client/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /root/TDH-Client/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /root/TDH-Client/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
2020-10-16 11:05:23,178 INFO sqoop.Sqoop: Running Sqoop version: 1.4.5-transwarp-6.0.0
2020-10-16 11:05:23,212 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
2020-10-16 11:05:23,492 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
2020-10-16 11:05:23,510 INFO manager.SqlManager: Using default fetchSize of 1000
2020-10-16 11:05:23,510 INFO tool.CodeGenTool: Beginning code generation
2020-10-16 11:05:24,896 INFO manager.OracleManager: Time zone has been set to GMT
2020-10-16 11:05:25,024 INFO manager.SqlManager: Executing SQL statement: select * from lkw.appendtest where  (1 = 0)
2020-10-16 11:05:25,036 INFO manager.SqlManager: Executing SQL statement: select * from lkw.appendtest where  (1 = 0)
2020-10-16 11:05:25,063 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /root/TDH-Client/hadoop/hadoop-mapreduce
Note: /tmp/sqoop-root/compile/d3b276c99c456cdef55c926825eea1b7/QueryResult.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
2020-10-16 11:05:26,512 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/d3b276c99c456cdef55c926825eea1b7/QueryResult.jar
2020-10-16 11:05:26,756 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
2020-10-16 11:05:27,783 INFO tool.ImportTool: Maximal id query for free form incremental import: SELECT MAX(id) FROM (select * from lkw.appendtest where (1 = 1)) sqoop_import_query_alias
2020-10-16 11:05:27,785 INFO tool.ImportTool: Incremental import based on column id
2020-10-16 11:05:27,785 INFO tool.ImportTool: Lower bound value: 0
2020-10-16 11:05:27,785 INFO tool.ImportTool: Upper bound value: 3
2020-10-16 11:05:27,787 INFO mapreduce.ImportJobBase: Beginning query import.
2020-10-16 11:05:27,801 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
2020-10-16 11:05:27,814 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
2020-10-16 11:05:28,465 INFO hdfs.DFSClient: Created HDFS_DELEGATION_TOKEN token 272 for hdfs on ha-hdfs:nameservice1
2020-10-16 11:05:28,494 INFO security.TokenCache: Got dt for hdfs://nameservice1; Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 272 for hdfs)
2020-10-16 11:05:28,551 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
2020-10-16 11:05:30,616 INFO db.DBInputFormat: Using read commited transaction isolation
2020-10-16 11:05:30,797 INFO mapreduce.JobSubmitter: number of splits:1
2020-10-16 11:05:31,041 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1600851944038_0014
2020-10-16 11:05:31,041 INFO mapreduce.JobSubmitter: Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 272 for hdfs)
2020-10-16 11:05:31,662 INFO impl.YarnClientImpl: Submitted application application_1600851944038_0014
2020-10-16 11:05:31,721 INFO mapreduce.Job: The url to track the job: http://tdh70002:8088/proxy/application_1600851944038_0014/
2020-10-16 11:05:31,722 INFO mapreduce.Job: Running job: job_1600851944038_0014
2020-10-16 11:05:41,987 INFO mapreduce.Job: Job job_1600851944038_0014 running in uber mode : false
2020-10-16 11:05:41,989 INFO mapreduce.Job:  map 0% reduce 0%
2020-10-16 11:05:50,114 INFO mapreduce.Job:  map 100% reduce 0%
2020-10-16 11:05:51,130 INFO mapreduce.Job: Job job_1600851944038_0014 completed successfully
2020-10-16 11:05:51,310 INFO mapreduce.Job: Counters: 30
    File System Counters
        FILE: Number of bytes read=0
        FILE: Number of bytes written=144786
        FILE: Number of read operations=0
        FILE: Number of large read operations=0
        FILE: Number of write operations=0
        HDFS: Number of bytes read=87
        HDFS: Number of bytes written=24
        HDFS: Number of read operations=4
        HDFS: Number of large read operations=0
        HDFS: Number of write operations=2
    Job Counters
        Launched map tasks=1
        Other local map tasks=1
        Total time spent by all maps in occupied slots (ms)=5692
        Total time spent by all reduces in occupied slots (ms)=0
        Total time spent by all map tasks (ms)=5692
        Total vcore-milliseconds taken by all map tasks=5692
        Total megabyte-milliseconds taken by all map tasks=5828608
    Map-Reduce Framework
        Map input records=3
        Map output records=3
        Input split bytes=87
        Spilled Records=0
        Failed Shuffles=0
        Merged Map outputs=0
        GC time elapsed (ms)=48
        CPU time spent (ms)=1470
        Physical memory (bytes) snapshot=279994368
        Virtual memory (bytes) snapshot=4757753856
        Total committed heap usage (bytes)=635437056
    File Input Format Counters
        Bytes Read=0
    File Output Format Counters
        Bytes Written=24
2020-10-16 11:05:51,319 INFO mapreduce.ImportJobBase: Transferred 24 bytes in 23.4942 seconds (1.0215 bytes/sec)
2020-10-16 11:05:51,324 INFO mapreduce.ImportJobBase: Retrieved 3 records.
2020-10-16 11:05:51,345 INFO util.AppendUtils: Creating missing output directory - tb01
2020-10-16 11:05:51,374 INFO tool.ImportTool: Incremental import complete! To run another incremental import of all data following this import, supply the following arguments:
2020-10-16 11:05:51,374 INFO tool.ImportTool:  --incremental append
2020-10-16 11:05:51,374 INFO tool.ImportTool:   --check-column id
2020-10-16 11:05:51,374 INFO tool.ImportTool:   --last-value 3
2020-10-16 11:05:51,374 INFO tool.ImportTool: (Consider saving this with 'sqoop job --create')
```

**Result**

```shell
[root@tdh70001 ~]# hadoop fs -cat /tmp/tb01/part-m-00000
2020-10-16 11:06:27,085 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
1,name1
2,name2
3,name3
```

Now insert two more rows into appendTest:

```sql
insert into appendTest(id,name) values(4,'name4');
insert into appendTest(id,name) values(5,'name5');
```

Run the incremental import again:

```shell
# The previous run set --last-value to 0 and imported the three rows with id > 0;
# this run therefore sets --last-value to 3
sqoop import \
--connect jdbc:oracle:thin:@172.22.23.9:1521/helowin \
--username lkw \
--password 123456 \
--query "select * from lkw.appendtest where \$CONDITIONS" \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1 \
--target-dir /tmp/tb01 \
--incremental append \
--check-column id \
--last-value 3
```

**Result**

```shell
[root@tdh70001 ~]# hadoop fs -ls /tmp/tb01
2020-10-16 11:13:53,730 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
Found 2 items
-rw-r--r--   3 hdfs hadoop         24 2020-10-16 11:05 /tmp/tb01/part-m-00000
-rw-r--r--   3 hdfs hadoop         16 2020-10-16 11:12 /tmp/tb01/part-m-00001
[root@tdh70001 ~]# hadoop fs -cat /tmp/tb01/part*
2020-10-16 11:14:04,005 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
1,name1
2,name2
3,name3
4,name4
5,name5
```
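Notice that every run ends by printing the --last-value to supply next time. If you script these imports yourself instead of using sqoop job (introduced in section 3 below), you have to persist that high-water mark between runs yourself. A minimal sketch of that bookkeeping, assuming a hypothetical local state file seeded with 0:

```shell
#!/bin/bash
# Manual last-value bookkeeping between append runs (illustrative sketch only).
# STATE is a hypothetical local file that holds the current high-water mark.
STATE=/var/lib/sqoop-state/appendtest.last-value
LAST=$(cat "$STATE")

sqoop import \
--connect jdbc:oracle:thin:@172.22.23.9:1521/helowin \
--username lkw \
--password 123456 \
--query "select * from lkw.appendtest where \$CONDITIONS" \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1 \
--target-dir /tmp/tb01 \
--incremental append \
--check-column id \
--last-value "$LAST"

# Recompute the high-water mark from what now sits in HDFS and save it for the next run.
hadoop fs -cat /tmp/tb01/part-* | awk -F, '$1 > m { m = $1 } END { print m }' > "$STATE"
```

This works, but it is exactly the kind of fragile glue that sqoop job (section 3) was designed to replace.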
#### 2. Method 2: LastModified mode

The lastmodified mode requires the source table to have a time column. You hand Sqoop a timestamp and it imports every row whose time value falls after that timestamp into Hive. Because an order's status may change later, and a status change also updates the time column, Sqoop will import the modified versions of already-imported orders again. To avoid duplicates we can set --merge-key to id, telling Sqoop to merge the new records with the existing ones.

**Key parameters:**

| Parameter | Description |
| :--- | :--- |
| --incremental lastmodified | Incremental import driven by a time column (imports into HDFS every row whose time value exceeds the threshold) |
| --check-column | The time column (timestamp) |
| --last-value | The threshold (timestamp) |
| --merge-key | The merge column (the primary key; records sharing a key are merged) |

##### 2.1 Create the Oracle table

```sql
CREATE TABLE lastModifyTest (
  id INT,
  name VARCHAR2(20),
  last_mod TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
```

##### 2.2 Insert data

```sql
insert into lastModifyTest(id,name,last_mod) values(1,'enzo',to_timestamp('20190514151716','yyyymmddhh24miss'));
insert into lastModifyTest(id,name,last_mod) values(2,'din',to_timestamp('20190514151723','yyyymmddhh24miss'));
insert into lastModifyTest(id,name,last_mod) values(3,'fz',to_timestamp('20190514151729','yyyymmddhh24miss'));
insert into lastModifyTest(id,name,last_mod) values(4,'dx',to_timestamp('20190514151734','yyyymmddhh24miss'));
insert into lastModifyTest(id,name,last_mod) values(5,'ef',to_timestamp('20190514151740','yyyymmddhh24miss'));
```

![file](/wp-content/uploads/2020/10/image-1602818118589.png)

##### 2.3 Incremental import

```shell
# Import the rows whose last_mod is 2019-05-14 15:17:23 or later
sqoop import \
--connect jdbc:oracle:thin:@172.22.23.9:1521/helowin \
--username lkw \
--password 123456 \
--query "select * from lkw.lastModifyTest where \$CONDITIONS" \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1 \
--target-dir /tmp/tb02 \
--incremental lastmodified \
--check-column LAST_MOD \
--last-value "2019-05-14 15:17:23"
```
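The execution log below shows the two bounds Sqoop derives for this run: the lower bound is the --last-value, which is inclusive (that is why the id 2 row, whose last_mod is exactly 15:17:23, shows up in the result), and the upper bound is the moment the import starts. Conceptually (a sketch of the effective predicate inferred from the log, not Sqoop's literal generated SQL):

```sql
SELECT *
  FROM lkw.lastModifyTest
 WHERE last_mod >= TO_TIMESTAMP('2019-05-14 15:17:23', 'YYYY-MM-DD HH24:MI:SS.FF')  -- the --last-value
   AND last_mod <  TO_TIMESTAMP('2020-10-16 11:35:06', 'YYYY-MM-DD HH24:MI:SS.FF'); -- the import start time
```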
class=\"language-shell\">[root@tdh70001 ~]# sqoop import --connect jdbc:oracle:thin:@172.22.23.9:1521\/helowin --username lkw --password 123456  --query \"select * from lkw.lastModifyTest where \\$CONDITIONS\"  --hive-drop-import-delims --null-string '\\\\N' --null-non-string '\\\\N'  -m 1 --target-dir \/tmp\/tb02 --incremental lastmodified --check-column LAST_MOD --last-value \"2019-05-14 15:17:23\"\nWarning: \/root\/TDH-Client\/sqoop\/..\/hbase does not exist! HBase imports will fail.\nPlease set $HBASE_HOME to the root of your HBase installation.\nWarning: \/root\/TDH-Client\/sqoop\/..\/hcatalog does not exist! HCatalog jobs will fail.\nPlease set $HCAT_HOME to the root of your HCatalog installation.\nWarning: \/root\/TDH-Client\/sqoop\/..\/accumulo does not exist! Accumulo imports will fail.\nPlease set $ACCUMULO_HOME to the root of your Accumulo installation.\n2020-10-16 11:35:01,134 INFO sqoop.Sqoop: Running Sqoop version: 1.4.5-transwarp-6.0.0\n2020-10-16 11:35:01,169 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.\n2020-10-16 11:35:01,453 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.\n2020-10-16 11:35:01,471 INFO manager.SqlManager: Using default fetchSize of 1000\n2020-10-16 11:35:01,472 INFO tool.CodeGenTool: Beginning code generation\n2020-10-16 11:35:03,138 INFO manager.OracleManager: Time zone has been set to GMT\n2020-10-16 11:35:03,280 INFO manager.SqlManager: Executing SQL statement: select * from lkw.lastModifyTest where  (1 = 0) \n2020-10-16 11:35:03,294 INFO manager.SqlManager: Executing SQL statement: select * from lkw.lastModifyTest where  (1 = 0) \n2020-10-16 11:35:03,321 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is \/root\/TDH-Client\/hadoop\/hadoop-mapreduce\n\u6ce8: \/tmp\/sqoop-root\/compile\/a0940f58ee2cb5824a4d4dffea926fba\/QueryResult.java\u4f7f\u7528\u6216\u8986\u76d6\u4e86\u5df2\u8fc7\u65f6\u7684 API\u3002\n\u6ce8: \u6709\u5173\u8be6\u7ec6\u4fe1\u606f, \u8bf7\u4f7f\u7528 -Xlint:deprecation \u91cd\u65b0\u7f16\u8bd1\u3002\n2020-10-16 11:35:04,844 INFO orm.CompilationManager: Writing jar file: \/tmp\/sqoop-root\/compile\/a0940f58ee2cb5824a4d4dffea926fba\/QueryResult.jar\n2020-10-16 11:35:05,118 INFO util.KerberosUtil: Using principal pattern: HTTP\/_HOST\n2020-10-16 11:35:06,456 INFO manager.SqlManager: Executing SQL statement: select * from lkw.lastModifyTest where  (1 = 0) \n2020-10-16 11:35:06,461 INFO tool.ImportTool: Incremental import based on column LAST_MOD\n2020-10-16 11:35:06,461 INFO tool.ImportTool: Lower bound value: TO_TIMESTAMP('2019-05-14 15:17:23', 'YYYY-MM-DD HH24:MI:SS.FF')\n2020-10-16 11:35:06,461 INFO tool.ImportTool: Upper bound value: TO_TIMESTAMP('2020-10-16 11:35:06.0', 'YYYY-MM-DD HH24:MI:SS.FF')\n2020-10-16 11:35:06,464 INFO mapreduce.ImportJobBase: Beginning query import.\n2020-10-16 11:35:06,483 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar\n2020-10-16 11:35:06,500 INFO Configuration.deprecation: mapred.map.tasks is deprecated. 
2020-10-16 11:35:07,094 INFO hdfs.DFSClient: Created HDFS_DELEGATION_TOKEN token 274 for hdfs on ha-hdfs:nameservice1
2020-10-16 11:35:07,144 INFO security.TokenCache: Got dt for hdfs://nameservice1; Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 274 for hdfs)
2020-10-16 11:35:07,178 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
2020-10-16 11:35:09,124 INFO db.DBInputFormat: Using read commited transaction isolation
2020-10-16 11:35:09,301 INFO mapreduce.JobSubmitter: number of splits:1
2020-10-16 11:35:09,895 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1600851944038_0016
2020-10-16 11:35:09,895 INFO mapreduce.JobSubmitter: Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 274 for hdfs)
2020-10-16 11:35:10,529 INFO impl.YarnClientImpl: Submitted application application_1600851944038_0016
2020-10-16 11:35:10,621 INFO mapreduce.Job: The url to track the job: http://tdh70002:8088/proxy/application_1600851944038_0016/
2020-10-16 11:35:10,623 INFO mapreduce.Job: Running job: job_1600851944038_0016
2020-10-16 11:35:19,882 INFO mapreduce.Job: Job job_1600851944038_0016 running in uber mode : false
2020-10-16 11:35:19,884 INFO mapreduce.Job:  map 0% reduce 0%
2020-10-16 11:35:28,125 INFO mapreduce.Job:  map 100% reduce 0%
2020-10-16 11:35:29,148 INFO mapreduce.Job: Job job_1600851944038_0016 completed successfully
2020-10-16 11:35:29,410 INFO mapreduce.Job: Counters: 30
    File System Counters
        FILE: Number of bytes read=0
        FILE: Number of bytes written=144928
        FILE: Number of read operations=0
        FILE: Number of large read operations=0
        FILE: Number of write operations=0
        HDFS: Number of bytes read=87
        HDFS: Number of bytes written=109
        HDFS: Number of read operations=4
        HDFS: Number of large read operations=0
        HDFS: Number of write operations=2
    Job Counters
        Launched map tasks=1
        Other local map tasks=1
        Total time spent by all maps in occupied slots (ms)=5760
        Total time spent by all reduces in occupied slots (ms)=0
        Total time spent by all map tasks (ms)=5760
        Total vcore-milliseconds taken by all map tasks=5760
        Total megabyte-milliseconds taken by all map tasks=5898240
    Map-Reduce Framework
        Map input records=4
        Map output records=4
        Input split bytes=87
        Spilled Records=0
        Failed Shuffles=0
        Merged Map outputs=0
        GC time elapsed (ms)=56
        CPU time spent (ms)=1540
        Physical memory (bytes) snapshot=274501632
        Virtual memory (bytes) snapshot=4757753856
        Total committed heap usage (bytes)=635437056
    File Input Format Counters
        Bytes Read=0
    File Output Format Counters
        Bytes Written=109
2020-10-16 11:35:29,420 INFO mapreduce.ImportJobBase: Transferred 109 bytes in 22.9066 seconds (4.7585 bytes/sec)
2020-10-16 11:35:29,425 INFO mapreduce.ImportJobBase: Retrieved 4 records.
2020-10-16 11:35:29,452 INFO tool.ImportTool: Incremental import complete! To run another incremental import of all data following this import, supply the following arguments:
2020-10-16 11:35:29,452 INFO tool.ImportTool:  --incremental lastmodified
2020-10-16 11:35:29,452 INFO tool.ImportTool:   --check-column LAST_MOD
2020-10-16 11:35:29,452 INFO tool.ImportTool:   --last-value 2020-10-16 11:35:06.0
2020-10-16 11:35:29,452 INFO tool.ImportTool: (Consider saving this with 'sqoop job --create')
```

**Result:**

```shell
[root@tdh70001 ~]# hadoop fs -ls /tmp/tb02
2020-10-16 11:35:58,731 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
Found 2 items
-rw-r--r--   3 hdfs hadoop          0 2020-10-16 11:35 /tmp/tb02/_SUCCESS
-rw-r--r--   3 hdfs hadoop        109 2020-10-16 11:35 /tmp/tb02/part-m-00000
[root@tdh70001 ~]# hadoop fs -cat /tmp/tb02/part*
2020-10-16 11:36:11,114 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
2,din,2019-05-14 15:17:23.0
3,fz,2019-05-14 15:17:29.0
4,dx,2019-05-14 15:17:34.0
5,ef,2019-05-14 15:17:40.0
```

#### 3. Combining with sqoop job

The previous sections showed how Append and LastModified imports work against an explicitly supplied --last-value. As attentive readers will have noticed, an incremental import is typically a standing background task, scheduled with a [crontab job](https://blog.csdn.net/weixin_42003671/article/details/90019323) or with a scheduler such as Azkaban or Oozie. **Surely we don't have to fetch the --last-value by hand after every run and pass it to the next import?** This is where the sqoop job command comes in!

**The key conclusion up front:**

**Executing a job updates the incremental.last.value recorded in the job; this value is the time at which the job ran, not the latest value of the table's --check-column field!
In other words, each execution of a sqoop job imports the data in the window from the time stored in incremental.last.value up to the current system time.**

**Common commands:**

```shell
> sqoop job --list            # list all jobs
> sqoop job --show jobname    # show the details of jobname (including incremental.last.value and the job's other parameters)
> sqoop job --delete jobname  # delete jobname
> sqoop job --exec jobname    # execute jobname
```
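Because the job stores and refreshes incremental.last.value on its own, scheduling reduces to invoking sqoop job --exec periodically. One wrinkle: -P prompts for the password interactively (you can see the "Enter password:" prompt in the logs below), which cron cannot answer, so an unattended job should be created with --password-file instead. A minimal sketch, in which the password-file path and the schedule are assumptions for illustration:

```shell
# One-time setup: store the password in a restricted file on HDFS
# (echo -n avoids a trailing newline becoming part of the password).
echo -n '123456' | hadoop fs -put - /user/lkw/.oracle.password
hadoop fs -chmod 400 /user/lkw/.oracle.password

# When creating the job, replace "-P" with:
#   --password-file /user/lkw/.oracle.password

# Then schedule the job, e.g. hourly, in crontab:
#   0 * * * * sqoop job --exec job01 >> /var/log/sqoop-job01.log 2>&1
```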
##### 3.1 Using sqoop job with LastModified

###### 3.1.1 Insert new data

On top of the five rows already in lastModifyTest, insert two more:

```sql
insert into lastModifyTest(id,name,last_mod) values(6,'gh',to_timestamp('20190514151751','yyyymmddhh24miss'));
insert into lastModifyTest(id,name,last_mod) values(7,'hh',to_timestamp('20190514151752','yyyymmddhh24miss'));
```

The table now looks like this:

![file](/wp-content/uploads/2020/10/image-1602828915997.png)

###### 3.1.2 Create the sqoop job

```shell
# 1. When creating a sqoop job, the password must be entered interactively with -P or supplied
#    via --password-file; --password is not allowed.
#    Otherwise: WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.
# 2. If -m is given, then when using --incremental lastmodified with an output directory that
#    already exists, you must also pass --merge-key or --append.
#    Otherwise: ERROR tool.ImportTool: Error during import: --merge-key or --append is required when using --incremental lastmodified and the output directory exists.
# 3. Column names must be uppercase; otherwise the import fails with: Imported Failed: column not found: id
# 4. Here we set --last-value "2019-05-14 15:17:40"
sqoop job \
--create job01 \
-- import \
--connect jdbc:oracle:thin:@172.22.23.9:1521/helowin \
--username lkw \
-P \
--query "select * from lkw.lastModifyTest where \$CONDITIONS" \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1 \
--target-dir /tmp/tb02 \
--merge-key ID \
--incremental lastmodified \
--check-column LAST_MOD \
--last-value "2019-05-14 15:17:40"
```

###### 3.1.3 Execute the sqoop job

```shell
sqoop job --exec job01
```

**Execution log:**
```shell
[root@tdh70001 ~]# sqoop job --exec job01
# Unlike a plain import, this command launches two MapReduce jobs; that is how the
# incremental data gets merge-imported into HDFS.
Warning: /root/TDH-Client/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /root/TDH-Client/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /root/TDH-Client/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
2020-10-16 14:09:44,582 INFO sqoop.Sqoop: Running Sqoop version: 1.4.5-transwarp-6.0.0
Enter password: 
2020-10-16 14:10:48,949 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
2020-10-16 14:10:48,965 INFO manager.SqlManager: Using default fetchSize of 1000
2020-10-16 14:10:48,965 INFO tool.CodeGenTool: Beginning code generation
2020-10-16 14:10:50,432 INFO manager.OracleManager: Time zone has been set to GMT
2020-10-16 14:10:50,523 INFO manager.SqlManager: Executing SQL statement: select * from lkw.lastModifyTest where  (1 = 0)
2020-10-16 14:10:50,533 INFO manager.SqlManager: Executing SQL statement: select * from lkw.lastModifyTest where  (1 = 0)
2020-10-16 14:10:50,556 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /root/TDH-Client/hadoop/hadoop-mapreduce
Note: /tmp/sqoop-root/compile/4256f4d8cccf30b96f0bca7714fde250/QueryResult.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
2020-10-16 14:10:52,066 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/4256f4d8cccf30b96f0bca7714fde250/QueryResult.jar
2020-10-16 14:10:52,339 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
2020-10-16 14:10:53,563 INFO manager.SqlManager: Executing SQL statement: select * from lkw.lastModifyTest where  (1 = 0)
2020-10-16 14:10:53,567 INFO tool.ImportTool: Incremental import based on column LAST_MOD
# Here you can see that the lower bound is the --last-value we defined,
# and the upper bound is the current execution time.
2020-10-16 14:10:53,567 INFO tool.ImportTool: Lower bound value: TO_TIMESTAMP('2019-05-14 15:17:40', 'YYYY-MM-DD HH24:MI:SS.FF')
2020-10-16 14:10:53,567 INFO tool.ImportTool: Upper bound value: TO_TIMESTAMP('2020-10-16 14:10:53.0', 'YYYY-MM-DD HH24:MI:SS.FF')
2020-10-16 14:10:53,570 INFO mapreduce.ImportJobBase: Beginning query import.
2020-10-16 14:10:53,589 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
2020-10-16 14:10:53,605 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
2020-10-16 14:10:54,240 INFO hdfs.DFSClient: Created HDFS_DELEGATION_TOKEN token 279 for hdfs on ha-hdfs:nameservice1
2020-10-16 14:10:54,268 INFO security.TokenCache: Got dt for hdfs://nameservice1; Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 279 for hdfs)
2020-10-16 14:10:54,319 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
2020-10-16 14:10:56,324 INFO db.DBInputFormat: Using read commited transaction isolation
2020-10-16 14:10:56,498 INFO mapreduce.JobSubmitter: number of splits:1
2020-10-16 14:10:56,774 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1600851944038_0021
2020-10-16 14:10:56,774 INFO mapreduce.JobSubmitter: Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 279 for hdfs)
2020-10-16 14:10:57,360 INFO impl.YarnClientImpl: Submitted application application_1600851944038_0021
2020-10-16 14:10:57,416 INFO mapreduce.Job: The url to track the job: http://tdh70002:8088/proxy/application_1600851944038_0021/
2020-10-16 14:10:57,416 INFO mapreduce.Job: Running job: job_1600851944038_0021
2020-10-16 14:11:07,681 INFO mapreduce.Job: Job job_1600851944038_0021 running in uber mode : false
2020-10-16 14:11:07,683 INFO mapreduce.Job:  map 0% reduce 0%
2020-10-16 14:11:15,824 INFO mapreduce.Job:  map 100% reduce 0%
2020-10-16 14:11:15,836 INFO mapreduce.Job: Job job_1600851944038_0021 completed successfully
2020-10-16 14:11:16,019 INFO mapreduce.Job: Counters: 30
    File System Counters
        FILE: Number of bytes read=0
        FILE: Number of bytes written=145177
        FILE: Number of read operations=0
        FILE: Number of large read operations=0
        FILE: Number of write operations=0
        HDFS: Number of bytes read=87
        HDFS: Number of bytes written=81
        HDFS: Number of read operations=4
        HDFS: Number of large read operations=0
        HDFS: Number of write operations=2
    Job Counters
        Launched map tasks=1
        Other local map tasks=1
        Total time spent by all maps in occupied slots (ms)=5666
        Total time spent by all reduces in occupied slots (ms)=0
        Total time spent by all map tasks (ms)=5666
        Total vcore-milliseconds taken by all map tasks=5666
        Total megabyte-milliseconds taken by all map tasks=5801984
    Map-Reduce Framework
        Map input records=3
        Map output records=3
        Input split bytes=87
        Spilled Records=0
        Failed Shuffles=0
        Merged Map outputs=0
        GC time elapsed (ms)=44
        CPU time spent (ms)=1600
        Physical memory (bytes) snapshot=278683648
        Virtual memory (bytes) snapshot=4757753856
        Total committed heap usage (bytes)=635437056
    File Input Format Counters
        Bytes Read=0
    File Output Format Counters
        Bytes Written=81
2020-10-16 14:11:16,029 INFO mapreduce.ImportJobBase: Transferred 81 bytes in 22.4118 seconds (3.6142 bytes/sec)
2020-10-16 14:11:16,034 INFO mapreduce.ImportJobBase: Retrieved 3 records.
2020-10-16 14:11:16,200 INFO Configuration.deprecation: mapred.output.key.class is deprecated. Instead, use mapreduce.job.output.key.class
2020-10-16 14:11:16,256 INFO hdfs.DFSClient: Created HDFS_DELEGATION_TOKEN token 280 for hdfs on ha-hdfs:nameservice1
2020-10-16 14:11:16,256 INFO security.TokenCache: Got dt for hdfs://nameservice1; Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 280 for hdfs)
2020-10-16 14:11:16,263 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
2020-10-16 14:11:17,842 INFO input.FileInputFormat: Total input paths to process : 2
2020-10-16 14:11:18,046 INFO mapreduce.JobSubmitter: number of splits:2
2020-10-16 14:11:18,165 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1600851944038_0022
2020-10-16 14:11:18,165 INFO mapreduce.JobSubmitter: Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 280 for hdfs)
2020-10-16 14:11:18,509 INFO impl.YarnClientImpl: Submitted application application_1600851944038_0022
2020-10-16 14:11:18,513 INFO mapreduce.Job: The url to track the job: http://tdh70002:8088/proxy/application_1600851944038_0022/
2020-10-16 14:11:18,514 INFO mapreduce.Job: Running job: job_1600851944038_0022
2020-10-16 14:11:28,710 INFO mapreduce.Job: Job job_1600851944038_0022 running in uber mode : false
2020-10-16 14:11:28,711 INFO mapreduce.Job:  map 0% reduce 0%
2020-10-16 14:11:35,784 INFO mapreduce.Job:  map 100% reduce 0%
2020-10-16 14:11:42,847 INFO mapreduce.Job:  map 100% reduce 100%
2020-10-16 14:11:42,860 INFO mapreduce.Job: Job job_1600851944038_0022 completed successfully
2020-10-16 14:11:42,947 INFO mapreduce.Job: Counters: 49
    File System Counters
        FILE: Number of bytes read=343
        FILE: Number of bytes written=437065
        FILE: Number of read operations=0
        FILE: Number of large read operations=0
        FILE: Number of write operations=0
        HDFS: Number of bytes read=452
        HDFS: Number of bytes written=163
        HDFS: Number of read operations=9
        HDFS: Number of large read operations=0
        HDFS: Number of write operations=2
    Job Counters
        Launched map tasks=2
        Launched reduce tasks=1
        Data-local map tasks=2
        Total time spent by all maps in occupied slots (ms)=8345
        Total time spent by all reduces in occupied slots (ms)=4589
        Total time spent by all map tasks (ms)=8345
        Total time spent by all reduce tasks (ms)=4589
        Total vcore-milliseconds taken by all map tasks=8345
        Total vcore-milliseconds taken by all reduce tasks=4589
        Total megabyte-milliseconds taken by all map tasks=8545280
        Total megabyte-milliseconds taken by all reduce tasks=4699136
    Map-Reduce Framework
        Map input records=7
        Map output records=7
        Map output bytes=323
        Map output materialized bytes=349
        Input split bytes=262
        Combine input records=0
        Combine output records=0
        Reduce input groups=6
        Reduce shuffle bytes=349
        Reduce input records=7
        Reduce output records=6
        Spilled Records=14
        Shuffled Maps =2
        Failed Shuffles=0
        Merged Map outputs=2
        GC time elapsed (ms)=101
        CPU time spent (ms)=2300
        Physical memory (bytes) snapshot=991227904
        Virtual memory (bytes) snapshot=14205108224
        Total committed heap usage (bytes)=1642070016
    Shuffle Errors
        BAD_ID=0
        CONNECTION=0
        IO_ERROR=0
        WRONG_LENGTH=0
        WRONG_MAP=0
        WRONG_REDUCE=0
    File Input Format Counters
        Bytes Read=190
    File Output Format Counters
        Bytes Written=163
2020-10-16 14:11:42,994 INFO tool.ImportTool: Saving incremental import state to the metastore
2020-10-16 14:11:43,134 INFO tool.ImportTool: Updated data for job: job01
```

**Result:**

```shell
[root@tdh70001 ~]# hadoop fs -ls /tmp/tb02
2020-10-16 14:11:51,829 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
Found 2 items
-rw-r--r--   3 hdfs hadoop          0 2020-10-16 14:11 /tmp/tb02/_SUCCESS
-rw-r--r--   3 hdfs hadoop        163 2020-10-16 14:11 /tmp/tb02/part-r-00000
[root@tdh70001 ~]# hadoop fs -cat /tmp/tb02/part-r-00000
2020-10-16 14:12:00,833 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
2,din,2019-05-14 15:17:23.0
3,fz,2019-05-14 15:17:29.0
4,dx,2019-05-14 15:17:34.0
5,ef,2019-05-14 15:17:40.0
6,gh,2019-05-14 15:17:51.0
7,hh,2019-05-14 15:17:52.0
```

**Key log lines:**

```shell
2020-10-16 14:10:53,567 INFO tool.ImportTool: Lower bound value: TO_TIMESTAMP('2019-05-14 15:17:40', 'YYYY-MM-DD HH24:MI:SS.FF')
2020-10-16 14:10:53,567 INFO tool.ImportTool: Upper bound value: TO_TIMESTAMP('2020-10-16 14:10:53.0', 'YYYY-MM-DD HH24:MI:SS.FF')
```

**Checking the sqoop job again, incremental.last.value has become 2020-10-16 14:10:53.0; the next time this sqoop job runs, that time will be used as the --last-value starting point!**
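You can confirm the stored value without running the job again (a sketch; the exact property listing printed by --show may vary across Sqoop versions):

```shell
sqoop job --show job01 | grep incremental.last.value
# expected, per the run above: incremental.last.value = 2020-10-16 14:10:53.0
```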
##### 3.2 Using sqoop job with Append

Same idea as with LastModified; here we excerpt the job definition and the key logs:

```shell
sqoop job \
--create job02 \
-- import \
--connect jdbc:oracle:thin:@172.22.23.9:1521/helowin \
--username lkw \
-P \
--query "select * from lkw.appendtest where \$CONDITIONS" \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N' \
-m 1 \
--target-dir /tmp/tb01 \
--merge-key ID \
--incremental append \
--check-column ID \
--last-value 3
```

**Execution log:**

```shell
[root@tdh70001 ~]# sqoop job --exec job02
Warning: /root/TDH-Client/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /root/TDH-Client/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /root/TDH-Client/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
2020-10-16 15:15:42,704 INFO sqoop.Sqoop: Running Sqoop version: 1.4.5-transwarp-6.0.0
Enter password: 
2020-10-16 15:15:45,840 INFO oracle.OraOopManagerFactory: Data Connector for Oracle and Hadoop is disabled.
2020-10-16 15:15:45,856 INFO manager.SqlManager: Using default fetchSize of 1000
2020-10-16 15:15:45,856 INFO tool.CodeGenTool: Beginning code generation
2020-10-16 15:15:47,225 INFO manager.OracleManager: Time zone has been set to GMT
2020-10-16 15:15:47,344 INFO manager.SqlManager: Executing SQL statement: select * from lkw.appendtest where  (1 = 0)
2020-10-16 15:15:47,357 INFO manager.SqlManager: Executing SQL statement: select * from lkw.appendtest where  (1 = 0)
2020-10-16 15:15:47,381 INFO orm.CompilationManager: HADOOP_MAPRED_HOME is /root/TDH-Client/hadoop/hadoop-mapreduce
Note: /tmp/sqoop-root/compile/a3b2b1e0eb058201be32e9def39efaf8/QueryResult.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
2020-10-16 15:15:48,837 INFO orm.CompilationManager: Writing jar file: /tmp/sqoop-root/compile/a3b2b1e0eb058201be32e9def39efaf8/QueryResult.jar
2020-10-16 15:15:49,128 INFO util.KerberosUtil: Using principal pattern: HTTP/_HOST
2020-10-16 15:15:50,380 INFO tool.ImportTool: Maximal id query for free form incremental import: SELECT MAX(ID) FROM (select * from lkw.appendtest where (1 = 1)) sqoop_import_query_alias
2020-10-16 15:15:50,384 INFO tool.ImportTool: Incremental import based on column ID
2020-10-16 15:15:50,384 INFO tool.ImportTool: Lower bound value: 3
2020-10-16 15:15:50,385 INFO tool.ImportTool: Upper bound value: 5
2020-10-16 15:15:50,387 INFO mapreduce.ImportJobBase: Beginning query import.
2020-10-16 15:15:50,402 INFO Configuration.deprecation: mapred.jar is deprecated. Instead, use mapreduce.job.jar
2020-10-16 15:15:50,415 INFO Configuration.deprecation: mapred.map.tasks is deprecated. Instead, use mapreduce.job.maps
2020-10-16 15:15:51,010 INFO hdfs.DFSClient: Created HDFS_DELEGATION_TOKEN token 283 for hdfs on ha-hdfs:nameservice1
2020-10-16 15:15:51,038 INFO security.TokenCache: Got dt for hdfs://nameservice1; Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 283 for hdfs)
2020-10-16 15:15:51,087 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
2020-10-16 15:15:53,052 INFO db.DBInputFormat: Using read commited transaction isolation
2020-10-16 15:15:53,231 INFO mapreduce.JobSubmitter: number of splits:1
2020-10-16 15:15:53,500 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1600851944038_0025
2020-10-16 15:15:53,500 INFO mapreduce.JobSubmitter: Kind: HDFS_DELEGATION_TOKEN, Service: ha-hdfs:nameservice1, Ident: (HDFS_DELEGATION_TOKEN token 283 for hdfs)
2020-10-16 15:15:54,100 INFO impl.YarnClientImpl: Submitted application application_1600851944038_0025
2020-10-16 15:15:54,159 INFO mapreduce.Job: The url to track the job: http://tdh70002:8088/proxy/application_1600851944038_0025/
2020-10-16 15:15:54,160 INFO mapreduce.Job: Running job: job_1600851944038_0025
2020-10-16 15:16:03,384 INFO mapreduce.Job: Job job_1600851944038_0025 running in uber mode : false
2020-10-16 15:16:03,386 INFO mapreduce.Job:  map 0% reduce 0%
2020-10-16 15:16:21,729 INFO mapreduce.Job:  map 100% reduce 0%
2020-10-16 15:16:22,750 INFO mapreduce.Job: Job job_1600851944038_0025 completed successfully
2020-10-16 15:16:22,927 INFO mapreduce.Job: Counters: 30
    File System Counters
        FILE: Number of bytes read=0
        FILE: Number of bytes written=145035
        FILE: Number of read operations=0
        FILE: Number of large read operations=0
        FILE: Number of write operations=0
        HDFS: Number of bytes read=87
        HDFS: Number of bytes written=24
        HDFS: Number of read operations=4
        HDFS: Number of large read operations=0
        HDFS: Number of write operations=2
    Job Counters
        Launched map tasks=1
        Other local map tasks=1
        Total time spent by all maps in occupied slots (ms)=15807
        Total time spent by all reduces in occupied slots (ms)=0
        Total time spent by all map tasks (ms)=15807
        Total vcore-milliseconds taken by all map tasks=15807
        Total megabyte-milliseconds taken by all map tasks=16186368
    Map-Reduce Framework
        Map input records=3
        Map output records=3
        Input split bytes=87
        Spilled Records=0
        Failed Shuffles=0
        Merged Map outputs=0
        GC time elapsed (ms)=75
        CPU time spent (ms)=1720
        Physical memory (bytes) snapshot=267665408
        Virtual memory (bytes) snapshot=4757753856
        Total committed heap usage (bytes)=494927872
    File Input Format Counters
        Bytes Read=0
    File Output Format Counters
        Bytes Written=24
2020-10-16 15:16:22,936 INFO mapreduce.ImportJobBase: Transferred 24 bytes in 32.5106 seconds (0.7382 bytes/sec)
2020-10-16 15:16:22,941 INFO mapreduce.ImportJobBase: Retrieved 3 records.
2020-10-16 15:16:22,964 INFO util.AppendUtils: Appending to directory tb01
2020-10-16 15:16:22,975 INFO util.AppendUtils: Using found partition 2
2020-10-16 15:16:22,996 INFO tool.ImportTool: Saving incremental import state to the metastore
2020-10-16 15:16:23,133 INFO tool.ImportTool: Updated data for job: job02
```

**Key log lines:**

```shell
2020-10-16 15:15:50,380 INFO tool.ImportTool: Maximal id query for free form incremental import: SELECT MAX(ID) FROM (select * from lkw.appendtest where (1 = 1)) sqoop_import_query_alias
2020-10-16 15:15:50,384 INFO tool.ImportTool: Incremental import based on column ID
2020-10-16 15:15:50,384 INFO tool.ImportTool: Lower bound value: 3
2020-10-16 15:15:50,385 INFO tool.ImportTool: Upper bound value: 5
```

**After the run, sqoop job --show job02 shows incremental.last.value=5; the next execution of the sqoop job will import the rows with ID > 5.**