当前位置:   article > 正文

Unrecognized Hadoop major version number: 3.0.0-cdh6.3.2

unrecognized hadoop major version number: 3.0.0-cdh6.3.2

 一.环境描述

spark提交job到yarn报错,业务代码比较简单,通过接口调用获取数据,将数据通过sparksql将数据写入hive中,尝试各种替换hadoop版本,最后拿下

1.hadoop环境

2.项目 pom.xml

  1. spark-submit \
  2. --name GridCorrelationMain \
  3. --master yarn \
  4. --deploy-mode cluster \
  5. --executor-cores 2 \
  6. --executor-memory 4G \
  7. --num-executors 5 \
  8. --driver-memory 2G \
  9. --class cn.zd.maincode.wangge.GridCorrelationMain \
  10. /home/boeadm/zwj/iot/cp-etl-spark-data/target/cp_zhengda_spark_utils-1.0-SNAPSHOT.jar
  11. eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2OTI0MzU5NjgsImlhdCI6MTY5MjM0OTU2Mywic3ViIjo1MjB9.rCmnhF2EhdzH62T7lP3nmxQSxh17PotscxEcZkjL5hk
  12. <dependencies>
  13. <dependency>
  14. <groupId>org.apache.commons</groupId>
  15. <artifactId>commons-configuration2</artifactId>
  16. <version>2.9.0</version>
  17. </dependency>
  18. <dependency>
  19. <groupId>org.apache.spark</groupId>
  20. <artifactId>spark-core_2.11</artifactId>
  21. <version>2.3.3</version>
  22. <exclusions>
  23. <exclusion>
  24. <artifactId>hadoop-client</artifactId>
  25. <groupId>org.apache.hadoop</groupId>
  26. </exclusion>
  27. <exclusion>
  28. <artifactId>slf4j-log4j12</artifactId>
  29. <groupId>org.slf4j</groupId>
  30. </exclusion>
  31. </exclusions>
  32. </dependency>
  33. <dependency>
  34. <groupId>org.apache.spark</groupId>
  35. <artifactId>spark-sql_2.11</artifactId>
  36. <version>2.3.3</version>
  37. <!--<scope>provided</scope>-->
  38. <!-- <exclusions>
  39. <exclusion>
  40. <groupId>com.google.guava</groupId>
  41. <artifactId>guava</artifactId>
  42. </exclusion>
  43. </exclusions>-->
  44. </dependency>
  45. <!--
  46. <dependency>
  47. <groupId>com.google.guava</groupId>
  48. <artifactId>guava</artifactId>
  49. <version>15.0</version>
  50. </dependency>
  51. -->
  52. <dependency>
  53. <groupId>org.apache.hadoop</groupId>
  54. <artifactId>hadoop-common</artifactId>
  55. <version>${hadoop.version}</version>
  56. <exclusions>
  57. <exclusion>
  58. <groupId>commons-codec</groupId>
  59. <artifactId>commons-codec</artifactId>
  60. </exclusion>
  61. <exclusion>
  62. <groupId>commons-httpclient</groupId>
  63. <artifactId>commons-httpclient</artifactId>
  64. </exclusion>
  65. <!-- <exclusion>
  66. <groupId>com.google.guava</groupId>
  67. <artifactId>guava</artifactId>
  68. </exclusion>-->
  69. </exclusions>
  70. <!--<scope>provided</scope>-->
  71. </dependency>
  72. <dependency>
  73. <groupId>org.apache.hadoop</groupId>
  74. <artifactId>hadoop-client</artifactId>
  75. <version>${hadoop.version}</version>
  76. <exclusions>
  77. <exclusion>
  78. <artifactId>hadoop-common</artifactId>
  79. <groupId>org.apache.hadoop</groupId>
  80. </exclusion>
  81. </exclusions>
  82. </dependency>
  83. <dependency>
  84. <groupId>org.apache.hadoop</groupId>
  85. <artifactId>hadoop-hdfs</artifactId>
  86. <version>${hadoop.version}</version>
  87. </dependency>
  88. <dependency>
  89. <groupId>org.apache.spark</groupId>
  90. <artifactId>spark-hive_2.11</artifactId>
  91. <version>2.3.2</version>
  92. <exclusions>
  93. <exclusion>
  94. <artifactId>hive-exec</artifactId>
  95. <groupId>org.spark-project.hive</groupId>
  96. </exclusion>
  97. <exclusion>
  98. <artifactId>hive-metastore</artifactId>
  99. <groupId>org.spark-project.hive</groupId>
  100. </exclusion>
  101. </exclusions>
  102. </dependency>
  103. <dependency>
  104. <groupId>org.apache.hadoop</groupId>
  105. <artifactId>hadoop-mapreduce-client-core</artifactId>
  106. <version>${hadoop.version}</version>
  107. </dependency>
  108. <dependency>
  109. <groupId>org.apache.hive</groupId>
  110. <artifactId>hive-jdbc</artifactId>
  111. <exclusions>
  112. <exclusion>
  113. <groupId>org.eclipse.jetty.aggregate</groupId>
  114. <artifactId>jetty-all</artifactId>
  115. </exclusion>
  116. <exclusion>
  117. <groupId>org.apache.hive</groupId>
  118. <artifactId>hive-shims</artifactId>
  119. </exclusion>
  120. <exclusion>
  121. <artifactId>hbase-mapreduce</artifactId>
  122. <groupId>org.apache.hbase</groupId>
  123. </exclusion>
  124. <exclusion>
  125. <artifactId>hbase-server</artifactId>
  126. <groupId>org.apache.hbase</groupId>
  127. </exclusion>
  128. <exclusion>
  129. <artifactId>log4j-slf4j-impl</artifactId>
  130. <groupId>org.apache.logging.log4j</groupId>
  131. </exclusion>
  132. <exclusion>
  133. <artifactId>slf4j-log4j12</artifactId>
  134. <groupId>org.slf4j</groupId>
  135. </exclusion>
  136. </exclusions>
  137. <version>2.1.1</version>
  138. </dependency>
  139. <!--服务验证相关依赖-->
  140. <dependency>
  141. <groupId>org.apache.httpcomponents</groupId>
  142. <artifactId>httpclient</artifactId>
  143. <version>4.5.13</version>
  144. <exclusions>
  145. <exclusion>
  146. <groupId>commons-codec</groupId>
  147. <artifactId>commons-codec</artifactId>
  148. </exclusion>
  149. </exclusions>
  150. <!--<scope>provided</scope>-->
  151. </dependency>
  152. <!--本地跑的话 需要这个jar-->
  153. <dependency>
  154. <groupId>commons-codec</groupId>
  155. <artifactId>commons-codec</artifactId>
  156. <version>1.15</version>
  157. <!--<scope>provided</scope>-->
  158. </dependency>
  159. <dependency>
  160. <groupId>com.typesafe</groupId>
  161. <artifactId>config</artifactId>
  162. <version>1.3.1</version>
  163. </dependency>
  164. <!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
  165. <dependency>
  166. <groupId>com.alibaba</groupId>
  167. <artifactId>fastjson</artifactId>
  168. <version>1.2.62</version>
  169. </dependency>
  170. <dependency>
  171. <groupId>com.alibaba</groupId>
  172. <artifactId>fastjson</artifactId>
  173. <version>${fastjson.version}</version>
  174. </dependency>
  175. <!-- https://mvnrepository.com/artifact/org.json/json -->
  176. <dependency>
  177. <groupId>org.json</groupId>
  178. <artifactId>json</artifactId>
  179. <version>20160810</version>
  180. </dependency>
  181. <dependency>
  182. <groupId>com.github.qlone</groupId>
  183. <artifactId>retrofit-crawler</artifactId>
  184. <version>1.0.0</version>
  185. </dependency>
  186. <dependency>
  187. <groupId>com.oracle.database.jdbc</groupId>
  188. <artifactId>ojdbc8</artifactId>
  189. <version>12.2.0.1</version>
  190. </dependency>
  191. <!--mysql连接-->
  192. <dependency>
  193. <groupId>mysql</groupId>
  194. <artifactId>mysql-connector-java</artifactId>
  195. <version>5.1.40</version>
  196. </dependency>
  197. <dependency>
  198. <groupId>javax.mail</groupId>
  199. <artifactId>javax.mail-api</artifactId>
  200. <version>1.5.6</version>
  201. </dependency>
  202. <dependency>
  203. <groupId>org.apache.commons</groupId>
  204. <artifactId>commons-email</artifactId>
  205. <version>1.4</version>
  206. </dependency>
  207. </dependencies>

3.项目集群提交报错


        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.lookupRelation(SessionCatalog.scala:696)
        at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations

lookupTableFromCatalog(Analyzer.scala:730)atorg.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.resolveRelation(Analyzer.scala:685)atorg.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations
anonfun$apply$8.applyOrElse(Analyzer.scala:715)
        at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations
anonfun$apply$8.applyOrElse(Analyzer.scala:708)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1
anonfun$apply$1.apply(AnalysisHelper.scala:90)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1
anonfun$apply$1.apply(AnalysisHelper.scala:90)atorg.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:89)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:86)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsUp(AnalysisHelper.scala:86)atorg.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:29)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1
anonfun$1.apply(AnalysisHelper.scala:87)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1
anonfun$1.apply(AnalysisHelper.scala:87)atorg.apache.spark.sql.catalyst.trees.TreeNode
anonfun$4.apply(TreeNode.scala:326)
        at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
        at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:324)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:87)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:86)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsUp(AnalysisHelper.scala:86)
        at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:29)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1
anonfun$1.apply(AnalysisHelper.scala:87)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1
anonfun$1.apply(AnalysisHelper.scala:87)
        at org.apache.spark.sql.catalyst.trees.TreeNode
anonfun$4.apply(TreeNode.scala:326)atorg.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)atorg.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:324)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:87)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper
anonfun$resolveOperatorsUp$1.apply(AnalysisHelper.scala:86)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)atorg.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.resolveOperatorsUp(AnalysisHelper.scala:86)atorg.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsUp(LogicalPlan.scala:29)atorg.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:708)atorg.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:654)atorg.apache.spark.sql.catalyst.rules.RuleExecutor
anonfun$execute$1
anonfun$apply$1.apply(RuleExecutor.scala:87)atorg.apache.spark.sql.catalyst.rules.RuleExecutor
anonfun$execute$1
anonfun$apply$1.apply(RuleExecutor.scala:84)atscala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)atscala.collection.immutable.List.foldLeft(List.scala:84)atorg.apache.spark.sql.catalyst.rules.RuleExecutor
anonfun$execute$1.apply(RuleExecutor.scala:84)
        at org.apache.spark.sql.catalyst.rules.RuleExecutor
anonfun$execute$1.apply(RuleExecutor.scala:76)atscala.collection.immutable.List.foreach(List.scala:392)atorg.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)atorg.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer
executeSameContext(Analyzer.scala:127)
        at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
        at org.apache.spark.sql.catalyst.analysis.Analyzer
anonfun$executeAndCheck$1.apply(Analyzer.scala:106)atorg.apache.spark.sql.catalyst.analysis.Analyzer
anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
        at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
        at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
        at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
        at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
        at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:651)
        at cn.zd.maincode.wangge.GridCorrelationMain$.createDataFrameAndTempView(GridCorrelationMain.scala:264)
        at cn.zd.maincode.wangge.GridCorrelationMain$.horecaGridInfo(GridCorrelationMain.scala:148)
        at cn.zd.maincode.wangge.GridCorrelationMain$.main(GridCorrelationMain.scala:110)
        at cn.zd.maincode.wangge.GridCorrelationMain.main(GridCorrelationMain.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.yarn.ApplicationMaster
anon$2.run(ApplicationMaster.scala:673)Causedby:java.lang.ExceptionInInitializerErroratorg.apache.hadoop.hive.conf.HiveConf.<clinit>(HiveConf.java:105)atorg.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:153)atorg.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:118)atsun.reflect.NativeConstructorAccessorImpl.newInstance0(NativeMethod)atsun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)atsun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)atjava.lang.reflect.Constructor.newInstance(Constructor.java:423)atorg.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:292)atorg.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:395)atorg.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:284)atorg.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:68)atorg.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:67)atorg.apache.spark.sql.hive.HiveExternalCatalog
anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:217)
        at org.apache.spark.sql.hive.HiveExternalCatalog
anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:217)atorg.apache.spark.sql.hive.HiveExternalCatalog
anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:217)
        at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
        ... 72 more
Caused by: java.lang.IllegalArgumentException: Unrecognized Hadoop major version number: 3.0.0-cdh6.3.2
        at org.apache.hadoop.hive.shims.ShimLoader.getMajorVersion(ShimLoader.java:169)
        at org.apache.hadoop.hive.shims.ShimLoader.loadShims(ShimLoader.java:134)
        at org.apache.hadoop.hive.shims.ShimLoader.getHadoopShims(ShimLoader.java:95)
        at org.apache.hadoop.hive.conf.HiveConf$ConfVars.<clinit>(HiveConf.java:354)
        ... 88 more

End of LogType:stderr

4.最终解决方式

 将相关依赖不打进包中

  1. <dependency>
  2. <groupId>org.apache.hive</groupId>
  3. <artifactId>hive-jdbc</artifactId>
  4. <exclusions>
  5. <exclusion>
  6. <groupId>org.eclipse.jetty.aggregate</groupId>
  7. <artifactId>jetty-all</artifactId>
  8. </exclusion>
  9. <exclusion>
  10. <groupId>org.apache.hive</groupId>
  11. <artifactId>hive-shims</artifactId>
  12. </exclusion>
  13. <exclusion>
  14. <artifactId>hbase-mapreduce</artifactId>
  15. <groupId>org.apache.hbase</groupId>
  16. </exclusion>
  17. <exclusion>
  18. <artifactId>hbase-server</artifactId>
  19. <groupId>org.apache.hbase</groupId>
  20. </exclusion>
  21. <exclusion>
  22. <artifactId>log4j-slf4j-impl</artifactId>
  23. <groupId>org.apache.logging.log4j</groupId>
  24. </exclusion>
  25. <exclusion>
  26. <artifactId>slf4j-log4j12</artifactId>
  27. <groupId>org.slf4j</groupId>
  28. </exclusion>
  29. </exclusions>
  30. <version>2.1.1</version>
  31. </dependency>
  32. <!--服务验证相关依赖-->
  33. <dependency>
  34. <groupId>org.apache.httpcomponents</groupId>
  35. <artifactId>httpclient</artifactId>
  36. <version>4.5.13</version>
  37. <exclusions>
  38. <exclusion>
  39. <groupId>commons-codec</groupId>
  40. <artifactId>commons-codec</artifactId>
  41. </exclusion>
  42. </exclusions>
  43. <!--<scope>provided</scope>-->
  44. </dependency>
  45. <!--本地跑的话 需要这个jar-->
  46. <dependency>
  47. <groupId>commons-codec</groupId>
  48. <artifactId>commons-codec</artifactId>
  49. <version>1.15</version>
  50. <!--<scope>provided</scope>-->
  51. </dependency>
  52. <dependency>
  53. <groupId>com.typesafe</groupId>
  54. <artifactId>config</artifactId>
  55. <version>1.3.1</version>
  56. </dependency>
  57. <!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
  58. <dependency>
  59. <groupId>com.alibaba</groupId>
  60. <artifactId>fastjson</artifactId>
  61. <version>1.2.62</version>
  62. </dependency>
  63. <dependency>
  64. <groupId>com.alibaba</groupId>
  65. <artifactId>fastjson</artifactId>
  66. <version>${fastjson.version}</version>
  67. </dependency>
  68. <!-- https://mvnrepository.com/artifact/org.json/json -->
  69. <dependency>
  70. <groupId>org.json</groupId>
  71. <artifactId>json</artifactId>
  72. <version>20160810</version>
  73. </dependency>
  74. <dependency>
  75. <groupId>com.github.qlone</groupId>
  76. <artifactId>retrofit-crawler</artifactId>
  77. <version>1.0.0</version>
  78. </dependency>
  79. <dependency>
  80. <groupId>com.oracle.database.jdbc</groupId>
  81. <artifactId>ojdbc8</artifactId>
  82. <version>12.2.0.1</version>
  83. </dependency>
  84. <!--mysql连接-->
  85. <dependency>
  86. <groupId>mysql</groupId>
  87. <artifactId>mysql-connector-java</artifactId>
  88. <version>5.1.40</version>
  89. </dependency>
  90. <!--1031日 新取消-->
  91. <!-- <dependency>
  92. <groupId>com.google.guava</groupId>
  93. <artifactId>guava</artifactId>
  94. <version>28.0-jre</version>
  95. </dependency>-->
  96. <!-- https://mvnrepository.com/artifact/org.apache.directory.studio/org.apache.commons.codec -->
  97. <!-- https://mvnrepository.com/artifact/org.apache.commons/org.apache.commons.codec -->
  98. <!--邮件发送依赖-->
  99. <dependency>
  100. <groupId>javax.mail</groupId>
  101. <artifactId>javax.mail-api</artifactId>
  102. <version>1.5.6</version>
  103. </dependency>
  104. <dependency>
  105. <groupId>org.apache.commons</groupId>
  106. <artifactId>commons-email</artifactId>
  107. <version>1.4</version>
  108. </dependency>
  109. <!--
  110. <dependency>
  111. <groupId>org.scala-lang</groupId>
  112. <artifactId>scala-library</artifactId>
  113. <version>2.11.2</version>
  114. </dependency>
  115. <dependency>
  116. <groupId>org.scala-lang</groupId>
  117. <artifactId>scala-reflect</artifactId>
  118. <version>2.11.2</version>
  119. </dependency>
  120. <dependency>
  121. <groupId>org.scala-lang</groupId>
  122. <artifactId>scala-compiler</artifactId>
  123. <version>2.11.2</version>
  124. </dependency>-->
  125. <!-- <dependency>-->
  126. <!-- <groupId>com.starrocks</groupId>-->
  127. <!-- <artifactId>starrocks-spark2_2.11</artifactId>-->
  128. <!-- <version>1.0.1</version>-->
  129. <!-- </dependency>-->
  130. </dependencies>

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/菜鸟追梦旅行/article/detail/360359
推荐阅读
相关标签
  

闽ICP备14008679号