{"id":1055,"date":"2019-09-27T10:24:14","date_gmt":"2019-09-27T02:24:14","guid":{"rendered":"https:\/\/nj.transwarp.cn:8180\/?p=1055"},"modified":"2026-03-25T14:00:46","modified_gmt":"2026-03-25T06:00:46","slug":"datax%e5%af%b9%e6%8e%a5hdfs","status":"publish","type":"post","link":"https:\/\/kbwp.transwarp.cn\/?p=1055","title":{"rendered":"datax \u5bf9\u63a5 hdfs\uff08hdfswriter\uff09"},"content":{"rendered":"<h3>\u6982\u8981\u63cf\u8ff0<\/h3>\n<hr \/>\n<p>DataX \u662f\u4e00\u4e2a\u5f02\u6784\u6570\u636e\u6e90\u79bb\u7ebf\u540c\u6b65\u5de5\u5177\uff0c\u81f4\u529b\u4e8e\u5b9e\u73b0\u5305\u62ec\u5173\u7cfb\u578b\u6570\u636e\u5e93(MySQL\u3001Oracle\u7b49)\u3001HDFS\u3001Hive\u3001ODPS\u3001HBase\u3001FTP \u7b49\u5404\u79cd\u5f02\u6784\u6570\u636e\u6e90\u4e4b\u95f4\u7a33\u5b9a\u9ad8\u6548\u7684\u6570\u636e\u540c\u6b65\u529f\u80fd<\/p>\n<h3>\u8be6\u7ec6\u8bf4\u660e<\/h3>\n<p>Java\u7248\u672c\u8981\u6c42\uff1ajdk1.8\u53ca\u4ee5\u4e0a\uff0cPython\u7248\u672c\u8981\u6c42\uff1a&gt;=2.7.X\uff0cDataX \u672a\u66f4\u65b0\u81f3 Python3<\/p>\n<hr \/>\n<h4>\u4e0b\u8f7d\u5b89\u88c5 DataX<\/h4>\n<p>\u4e0b\u8f7d\u5730\u5740\u4ee5\u53ca\u5b89\u88c5\u6559\u7a0b\u53c2\u8003 DataX \u5f00\u6e90\u624b\u518c\uff1a<a href=\"http:\/\/datax-opensource.oss-cn-hangzhou.aliyuncs.com\/datax.tar.gz\">http:\/\/datax-opensource.oss-cn-hangzhou.aliyuncs.com\/datax.tar.gz<\/a><\/p>\n<h4>\u7f16\u8f91 json<\/h4>\n<p>\u6839\u636e\u6570\u636e\u6e90\u7f16\u8f91 json \u6587\u4ef6\uff0c<strong>\u6570\u636e\u6e90\u53c2\u8003\u6307\u5357\uff1a<a href=\"https:\/\/github.com\/alibaba\/DataX\">https:\/\/github.com\/alibaba\/DataX<\/a><\/strong><\/p>\n<h4>\u6267\u884c\u6570\u636e\u8fc1\u79fb\u811a\u672c<\/h4>\n<p>\u8fdb\u5165datax \u7684 bin \u76ee\u5f55\u4e2d\uff0c\u91cc\u9762\u6709 datax.py \u6587\u4ef6<\/p>\n<pre><code class=\"language-shell\"># python2 datax.py ..\/conf\/oracle_inceptor_kerberos_test.json<\/code><\/pre>\n<h4>\u5c06 oracle \u6570\u636e\u5199\u5165\u65e0\u8ba4\u8bc1\u7684 hdfs \u4e2d\u7684 json 
\u914d\u7f6e\u6587\u4ef6<\/h4>\n<pre><code class=\"language-json\">{\n\"job\": {\n    \"setting\": {\n    \"speed\": {\n        \"channel\": 3\n        },\n        \"errorLimit\": {\n            \"record\": 0,\n            \"percentage\": 0.02\n            }\n        },\n        \"content\": [\n        {\n            \"reader\": {\n                \"name\": \"oraclereader\",\n                \"parameter\": {\n                    \"username\": \"sys\",\n                    \"password\": \"123456\",\n                    \"connection\": [\n                        {\n                            \"querySql\":[\"select empno,ename from emp\"],\n                            \"jdbcUrl\": [\"jdbc:oracle:thin:@172.22.44.1:1521\/orcl\"]\n                        }\n                    ]\n                }\n            },\n            \"writer\": {\n                \"name\": \"hdfswriter\",\n                \"parameter\": {\n                    \"column\": [{\"name\":\"empno\",\"type\":\"int\"},{\"name\":\"ename\",\"type\":\"string\"}],\n                    \"compress\": \"\",\n                    \"\/\/\": \"\u8fd9\u91cc\u8981\u586b\u5199 active \u7684 namenode \u7684ip\u5730\u5740\",\n                    \"defaultFS\": \"hdfs:\/\/172.22.23.2:8020\",\n                    \"fieldDelimiter\": \"\\t\",\n                    \"fileName\": \"emp\",\n                    \"fileType\": \"text\",\n                    \"path\": \"\/tmp\/\",\n                    \"writeMode\": \"append\"\n                    }\n                }\n            }\n        ]\n    }\n}<\/code><\/pre>\n<h4>\u5c06 oracle \u6570\u636e\u5199\u5165 kerberos \u8ba4\u8bc1\u65b9\u5f0f\u7684 hdfs \u4e2d\u7684 json \u914d\u7f6e\u6587\u4ef6<\/h4>\n<pre><code class=\"language-json\">{\n\"job\": {\n    \"setting\": {\n        \"speed\": {\n            \"channel\": 3\n        },\n        \"errorLimit\": {\n            \"record\": 0,\n            \"percentage\": 0.02\n            }\n        },\n    \"content\": [\n    {\n    
    \"reader\": {\n            \"name\": \"oraclereader\",\n            \"parameter\": {\n                \"username\": \"lkw\",\n                \"password\": \"q6408912.\",\n                \"connection\": [{\"querySql\":[\"select empno,ename from emp\"],\n                \"jdbcUrl\": [\"jdbc:oracle:thin:@172.22.44.1:1521\/orcl\"]\n                    }\n                ],\n            }\n        },\n        \"writer\": {\n            \"name\": \"hdfswriter\",\n            \"parameter\": {\n                \"column\": [{\"name\":\"empno\",\"type\":\"int\"},{\"name\":\"ename\",\"type\":\"string\"}],\n                \"compress\": \"\",\n                \"\/\/\": \"\u8fd9\u91cc\u8981\u586b\u5199 active \u7684 namenode \u7684ip\u5730\u5740\",\n                \"defaultFS\": \"hdfs:\/\/172.22.23.2:8020\",\n                \"fieldDelimiter\": \"\\t\",\n                \"fileName\": \"emp\",\n                \"fileType\": \"text\",\n                \"path\": \"\/tmp\/\",\n                \"writeMode\": \"append\",\n                \"\/\/\": \"\u5f00\u542fkerberos\u6a21\u5f0f\u4e0b\uff0c\u9700\u8981\u6dfb\u52a0\u4e0b\u9762\u56db\u884c\uff01\uff01\uff01\",\n                \"haveKerberos\": true,\n                \"\/\/\": \"\u53ef\u4ee5\u4eceTDH\u96c6\u7fa4\u4e0a\u83b7\u53d6keytab\u6587\u4ef6\",\n                \"kerberosKeytabFilePath\": \"\/etc\/hdfs1\/conf\/hdfs.keytab\",\n                \"\/\/\": \"\u53ef\u4ee5\u901a\u8fc7klist -ket \/etc\/hdfs1\/conf\/hdfs.keytab \u67e5\u770b\u5bf9\u5e94\u7684principal\",\n                \"kerberosPrincipal\": \"hdfs\/tdh60202@TDH\",\n                \"\/\/\": \"\u8fd9\u4e2a\u53c2\u6570\uff0c\u53c2\u8003TDH\u9875\u9762 HDFS\u7ec4\u4ef6\u7684\u5b9e\u9645\u914d\u7f6e\u7684\u503c\uff01\",\n                \"hadoopConfig\": { \"dfs.data.transfer.protection\": \"integrity\" }\n                    }\n                }\n            }\n        ]\n    }\n}<\/code><\/pre>\n<h3>FAQ<\/h3>\n<hr 
\/>\n<h4>1\u3001\u5f00\u542fkerberos\u6a21\u5f0f\u4e0b\uff0c\u62a5\u9519 Connection reset by peer<\/h4>\n<p><strong>\u5ba2\u6237\u7aef\u62a5\u9519\u4e00\u822c\u662f\u8fd9\u6837\u7684\uff1a<\/strong><\/p>\n<pre><code class=\"language-java\">org.apache.hadoop.ipc.RemoteException(java.io.IOException): File \/tmp\/emp__1b5429c4_671d_4fbc_b57a_3af843c1333d could only be replicated to 0 nodes instead of minReplication (=1).  There are 3 datanode(s) running and 3 node(s) are excluded in this operation.<\/code><\/pre>\n<p><strong>\u68c0\u67e5datanode\u65e5\u5fd7<\/strong><\/p>\n<pre><code class=\"language-java\">2019-09-27 10:00:29,684 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Failed to read expected SASL data transfer protection handshake from client at \/172.22\n.23.1:42541. Perhaps the client is running an older version of Hadoop which does not support SASL data transfer protection<\/code><\/pre>\n<p>Hadoop 2.6.0 \u7248\u672c\u8d77\uff0cHDFS\u5ba2\u6237\u7aef\u4e0e datanode \u901a\u4fe1\u7684\u8eab\u4efd\u8ba4\u8bc1\u6709\u591a\u79cd\u6a21\u5f0f\uff0c\u5ba2\u6237\u7aef\u7684\u8ba4\u8bc1\u65b9\u5f0f\u8981\u4e0e datanode \u7aef\u4fdd\u6301\u4e00\u81f4\u3002\u53ef\u4ee5\u901a\u8fc7\u547d\u4ee4\u67e5\u8be2 datanode \u7aef\u914d\u7f6e\u7684\u8ba4\u8bc1\u6a21\u5f0f\uff1a<\/p>\n<pre><code class=\"language-shell\"># grep -C 2 'dfs.data.transfer.protection' \/etc\/hdfs1\/conf\/hdfs-site.xml<\/code><\/pre>\n<p>\u7136\u540e\u5728json \u4e2d\u6dfb\u52a0\u5bf9\u5e94\u7684\u5c5e\u6027\uff0c\u6dfb\u52a0\u5230 parameter \u5c5e\u6027\u4e2d\uff0c\u5982\u4e0b\u6240\u793a\uff1a<\/p>\n<pre><code class=\"language-json\">\"hadoopConfig\":{\n    \"dfs.data.transfer.protection\":\"integrity\",\n    \"hadoop.rpc.protection\":\"authentication\"\n}<\/code><\/pre>\n<h4>2\u3001\u62a5\u9519 No common protection layer between client and 
server<\/h4>\n<ul>\n<li>\n<p><code>dfs.data.transfer.protection<\/code>\uff1a\u4ee5TDH\u96c6\u7fa4hdfs\u7ec4\u4ef6\u7684\u5b9e\u9645\u914d\u7f6e\u503c\u4e3a\u51c6\uff08\u53ef\u4ee5\u5728\u53c2\u6570\u9875\u9762\u67e5\u770b\uff0c\u6216\u8005<code>hdfs getconf -confKey dfs.data.transfer.protection<\/code> \u6765\u68c0\u67e5\u914d\u7f6e\uff09\u3002<\/p>\n<\/li>\n<li>\n<p><code>hadoop.rpc.protection<\/code>\uff1a \u901a\u8fc7<code>hdfs getconf -confKey &quot;hadoop.rpc.protection&quot;<\/code> \u662f\u5426\u4e3a authentication \uff08\u5982\u679c\u6ca1\u6709\u7684\u8bdd\u53ef\u4ee5\u5728TDH\u4e0a\u65b0\u589e\u81ea\u5b9a\u4e49\u53c2\u6570\uff0c\u914d\u7f6e\u670d\u52a1\u518d\u91cd\u542fhdfs\uff09\uff0c\u7136\u540e\u5728 json \u4e2d\u6dfb\u52a0\u5bf9\u5e94\u7684\u5c5e\u6027\uff0c\u6dfb\u52a0\u5230 parameter \u5c5e\u6027\u4e2d\uff0c\u5982\u4e0b\u6240\u793a\uff1a<\/p>\n<\/li>\n<\/ul>\n<pre><code class=\"language-json\">\"hadoopConfig\":{\n    \"dfs.data.transfer.protection\":\"integrity\",\n    \"hadoop.rpc.protection\":\"authentication\"\n}<\/code><\/pre>\n<h4>3\u3001\u62a5\u9519 Operation category READ is not supported in state standby<\/h4>\n<p>json \u914d\u7f6e\u4e2d &quot;defaultFS&quot;  \u7684\u503c\u8981\u9009\u62e9 active \u7684 namenode\uff0c\u5982\u9700\u914d\u7f6enamenode HA\uff0c\u53c2\u8003\u4e0b\u9762part4\u90e8\u5206&lt;\u5982\u4f55\u914d\u7f6enamenode HA\u9ad8\u53ef\u7528&gt;<\/p>\n<pre><code class=\"language-java\">2019-09-27 16:28:48.367 [job-0] INFO  JobContainer - DataX Reader.Job [oraclereader] do prepare work .\n2019-09-27 16:28:48.368 [job-0] INFO  JobContainer - DataX Writer.Job [hdfswriter] do prepare work .\n2019-09-27 16:28:48.476 [job-0] ERROR HdfsWriter$Job - \u5224\u65ad\u6587\u4ef6\u8def\u5f84[message:filePath =\/tmp\/]\u662f\u5426\u5b58\u5728\u65f6\u53d1\u751f\u7f51\u7edcIO\u5f02\u5e38,\u8bf7\u68c0\u67e5\u60a8\u7684\u7f51\u7edc\u662f\u5426\u6b63\u5e38\uff01\n2019-09-27 16:28:48.482 [job-0] ERROR JobContainer - Exception when job 
run\ncom.alibaba.datax.common.exception.DataXException: Code:[HdfsWriter-06], Description:[\u4e0eHDFS\u5efa\u7acb\u8fde\u63a5\u65f6\u51fa\u73b0IO\u5f02\u5e38.]. - org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.ipc.StandbyException): Operation category READ is not supported in state standby<\/code><\/pre>\n<h4>4\u3001\u5982\u4f55\u914d\u7f6enamenode HA\u9ad8\u53ef\u7528<\/h4>\n<p>active namenode \u6709\u65f6\u4f1a\u53d1\u751f\u6545\u969c\u5207\u6362\u7684\u95ee\u9898\uff0c\u53ef\u4ee5\u53c2\u8003\u5982\u4e0b\u65b9\u6cd5\u914d\u7f6eha\u9ad8\u53ef\u7528<\/p>\n<pre><code class=\"language-json\">\"writer\": {\n    \"name\": \"hdfswriter\",\n    \"parameter\": {\n        \"column\": [{\"name\":\"empno\",\"type\":\"int\"},{\"name\":\"ename\",\"type\":\"string\"}],\n        \"compress\": \"\",\n        \"\/\/\": \"\u53c2\u8003TDH\u96c6\u7fa4\/etc\/hdfsX\/conf\/hdfs-site.xml\u4e2d\u7684dfs.nameservices\",\n        \"defaultFS\": \"hdfs:\/\/nameservice1\",\n        \"fieldDelimiter\": \"\\t\",\n        \"fileName\": \"emp\",\n        \"fileType\": \"text\",\n        \"path\": \"\/tmp\/\",\n        \"writeMode\": \"append\",\n        \"haveKerberos\": true,\n        \"kerberosKeytabFilePath\": \"\/mnt\/disk2\/datax\/bin\/hdfs.keytab\",\n        \"kerberosPrincipal\": \"hdfs\/tdh083@TDH\",\n        \"hadoopConfig\":{\n            \"\/\/\": \"\u53c2\u8003TDH\u96c6\u7fa4\/etc\/hdfsX\/conf\/hdfs-site.xml\u4e2d\u7684dfs.nameservices\",\n            \"dfs.nameservices\": \"nameservice1\",\n            \"\/\/\": \"\u53c2\u8003TDH\u96c6\u7fa4\/etc\/hdfsX\/conf\/hdfs-site.xml\u4e2d\u7684dfs.ha.namenodes.nameservice1\",\n            \"dfs.ha.namenodes.nameservice1\": \"nn1,nn2\",\n            \"\/\/\": \"\u53c2\u8003TDH\u96c6\u7fa4\/etc\/hdfsX\/conf\/hdfs-site.xml\u4e2d\u7684dfs.namenode.rpc-address.nameservice1.nn1\",\n            \"dfs.namenode.rpc-address.nameservice1.nn1\": \"tdh082:8020\",\n            \"\/\/\": 
\"\u53c2\u8003TDH\u96c6\u7fa4\/etc\/hdfsX\/conf\/hdfs-site.xml\u4e2d\u7684dfs.namenode.rpc-address.nameservice1.nn2\",\n            \"dfs.namenode.rpc-address.nameservice1.nn2\": \"tdh083:8020\",\n            \"\/\/\": \"\u53c2\u8003TDH\u96c6\u7fa4\/etc\/hdfsX\/conf\/hdfs-site.xml\u4e2d\u7684dfs.client.failover.proxy.provider.nameservice1\",\n            \"dfs.client.failover.proxy.provider.nameservice1\": \"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\"\n            }\n        }\n    }<\/code><\/pre>\n<h4>5\u3001\u62a5\u9519 javax.security.auth.login.LoginException: Receive timed out<\/h4>\n<p>\u4fee\u6539\u8be5\u8282\u70b9\uff08\u975eTDH\u96c6\u7fa4\uff09\u7684\/etc\/krb5.conf\u6587\u4ef6\uff0c\u5c06 <code>udp_preference_limit<\/code> \u7684\u503c\u4fee\u6539\u4e3a1\uff0c\u4f18\u5148\u8d70TCP\u8fde\u63a5\u3002<\/p>\n<p><img decoding=\"async\" src=\"\/wp-content\/uploads\/2019\/09\/image-1642757971634.png\" alt=\"file\" \/><\/p>\n<h4>6\u3001\u62a5\u9519 javax.security.auth.login.LoginException: \u6ca1\u6709\u5230\u4e3b\u673a\u7684\u8def\u7531<\/h4>\n<p>a) \u68c0\u67e5\u8be5\u8282\u70b9\uff08\u975eTDH\u96c6\u7fa4\uff09\u7684\/etc\/hosts\u6587\u4ef6\uff0c\u662f\u5426\u6dfb\u52a0\u4e86TDH\u96c6\u7fa4\u7684\u6620\u5c04\u5173\u7cfb\uff08\u53ef\u53c2\u8003TDH\u96c6\u7fa4\u7684\/etc\/hosts\u6587\u4ef6\uff09<br \/>\nb) \u68c0\u67e5\u8be5\u8282\u70b9\uff08\u975eTDH\u96c6\u7fa4\uff09\u7684\/etc\/krb5.conf\u6587\u4ef6\uff0ckdc\u4e2d\u7684hostname\u662f\u5426\u662f\u6b63\u786e\u7684\uff08\u53ef\u53c2\u8003TDH\u96c6\u7fa4\u7684\/etc\/krb5.conf\u6587\u4ef6\uff09<\/p>\n<p><img decoding=\"async\" src=\"\/wp-content\/uploads\/2019\/09\/image-1642758228776.png\" alt=\"file\" \/><\/p>\n<h4>7\u3001\u62a5\u9519 Missing parentheses in call to &#8216;print&#8217;. 
Did you mean print(readerRef)?<\/h4>\n<pre><code class=\"language-shell\">[root@single01\/mnt\/disk2\/datax\/bin]$ python3 datax.py .\/oracle_ha.json \n  File \"datax.py\", line 114\n    print readerRef\n                  ^\nSyntaxError: Missing parentheses in call to 'print'. Did you mean print(readerRef)?<\/code><\/pre>\n<p>\u5f53\u524ddatax\u6682\u4e0d\u652f\u6301python3\u7248\u672c\uff0c\u8bf7\u5207\u6362python2\u8fdb\u884c\u4f7f\u7528\u3002<\/p>\n<h4>8\u3001\u53cc\u7f51\u5361\u73af\u5883\u4e0b\uff0cdatax\u5f80hdfs\u5199\u6570\u636e\uff0c\u8fde\u63a5\u5230hdfs\u96c6\u7fa4\u7684\u5185\u7f51IP<\/h4>\n<p>\u53c2\u8003\u622a\u56fe\uff1a\u9664\u4e86\u96c6\u7fa4\u8981\u53c2\u8003 <strong><a href=\"https:\/\/kb.transwarp.cn\/posts\/455\" title=\"TDH\u96c6\u7fa4\u914d\u7f6e\u5185\u5916\u7f51\u53cc\u7f51\u5361\u4e0b\u5916\u7f51\u670d\u52a1\u5668\u5982\u4f55\u8bbf\u95ee\u5185\u7f51\u7684HDFS\">TDH\u96c6\u7fa4\u914d\u7f6e\u5185\u5916\u7f51\u53cc\u7f51\u5361\u4e0b\u5916\u7f51\u670d\u52a1\u5668\u5982\u4f55\u8bbf\u95ee\u5185\u7f51\u7684HDFS<\/a><\/strong>  \u505a\u8c03\u6574\u5916\uff0cjson\u6587\u4ef6\u8981\u989d\u5916\u52a0\u4e0a <code>&quot;hadoopConfig&quot;: { &quot;dfs.client.use.datanode.hostname&quot;: true }<\/code> \u7684\u914d\u7f6e\uff0c\u786e\u4fdd dfsclient \u4fa7\u6709\u505a\u8be5\u914d\u7f6e\u3002<\/p>\n<p><img decoding=\"async\" src=\"https:\/\/nj.transwarp.cn:8180\/wp-content\/uploads\/2019\/09\/image-1729504314588.png\" alt=\"file\" \/><\/p>\n<blockquote>\n<p>\u53c2\u8003\uff1a<br \/>\n<a href=\"https:\/\/blog.csdn.net\/u011510825\/article\/details\/117394086?utm_medium=distribute.pc_relevant.none-task-blog-2~default~baidujs_baidulandingword~default-1-117394086-blog-122330987.235^v43^pc_blog_bottom_relevance_base7&spm=1001.2101.3001.4242.2&utm_relevant_index=4\" title=\"\u8bb0\u4e00\u6b21datax hdfswriter\u7684\u8e29\u5751\u8bb0\uff08\u4e0a\u4f20\u6587\u4ef6\u5230hdfs\u7684\u5751\uff09\">\u8bb0\u4e00\u6b21datax 
hdfswriter\u7684\u8e29\u5751\u8bb0\uff08\u4e0a\u4f20\u6587\u4ef6\u5230hdfs\u7684\u5751\uff09<\/a><br \/>\n<a href=\"https:\/\/blog.csdn.net\/liuxiao723846\/article\/details\/122330987\" title=\"\u5ba2\u6237\u7aef\u8bbf\u95ee\u53cc\u7f51\u5361hadoop\u96c6\u7fa4\u7684HDFS\">\u5ba2\u6237\u7aef\u8bbf\u95ee\u53cc\u7f51\u5361hadoop\u96c6\u7fa4\u7684HDFS<\/a><\/p>\n<\/blockquote>\n","protected":false},"excerpt":{"rendered":"<p>\u6982\u8981\u63cf\u8ff0 DataX \u662f\u4e00\u4e2a\u5f02\u6784\u6570\u636e\u6e90\u79bb\u7ebf\u540c\u6b65\u5de5\u5177\uff0c\u81f4\u529b\u4e8e\u5b9e\u73b0\u5305\u62ec\u5173\u7cfb\u578b\u6570\u636e\u5e93(MySQL\u3001Oracle\u7b49)\u3001 ..<\/p>\n<div class=\"clear-fix\"><\/div>\n<p><a href=\"https:\/\/kbwp.transwarp.cn\/?p=1055\" title=\"read more...\">Read more<\/a><\/p>\n","protected":false},"author":12,"featured_media":0,"comment_status":"closed","ping_status":"closed","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[47],"tags":[],"class_list":["post-1055","post","type-post","status-publish","format-standard","hentry","category-third_part"],"acf":[],"_links":{"self":[{"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=\/wp\/v2\/posts\/1055","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=\/wp\/v2\/users\/12"}],"replies":[{"embeddable":true,"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=1055"}],"version-history":[{"count":5,"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=\/wp\/v2\/posts\/1055\/revisions"}],"predecessor-version":[{"id":18201,"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=\/wp\/v2\/posts\/1055\/revisions\/18201"}],"wp:attachment":[{"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_r
oute=%2Fwp%2Fv2%2Fmedia&parent=1055"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=1055"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/kbwp.transwarp.cn\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=1055"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}