当前位置:   article > 正文

spark 提交任务到yarn 时报错 Caused by: java.sql.SQLException: No suitable driver

Caused by: java.sql.SQLException: No suitable driver
  1. org.apache.spark.SparkException: Exception thrown in awaitResult:
  2. at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:227)
  3. at org.apache.spark.deploy.yarn.ApplicationMaster.runDriver(ApplicationMaster.scala:471)
  4. at org.apache.spark.deploy.yarn.ApplicationMaster.org$apache$spark$deploy$yarn$ApplicationMaster$$runImpl(ApplicationMaster.scala:307)
  5. at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply$mcV$sp(ApplicationMaster.scala:247)
  6. at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply(ApplicationMaster.scala:247)
  7. at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$run$1.apply(ApplicationMaster.scala:247)
  8. at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$3.run(ApplicationMaster.scala:802)
  9. at java.security.AccessController.doPrivileged(Native Method)
  10. at javax.security.auth.Subject.doAs(Subject.java:422)
  11. at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
  12. at org.apache.spark.deploy.yarn.ApplicationMaster.doAsUser(ApplicationMaster.scala:801)
  13. at org.apache.spark.deploy.yarn.ApplicationMaster.run(ApplicationMaster.scala:246)
  14. at org.apache.spark.deploy.yarn.ApplicationMaster$.main(ApplicationMaster.scala:828)
  15. at org.apache.spark.deploy.yarn.ApplicationMaster.main(ApplicationMaster.scala)
  16. Caused by: java.util.concurrent.ExecutionException: Boxed Error
  17. at scala.concurrent.impl.Promise$.resolver(Promise.scala:59)
  18. at scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:51)
  19. at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
  20. at scala.concurrent.Promise$class.tryFailure(Promise.scala:112)
  21. at scala.concurrent.impl.Promise$DefaultPromise.tryFailure(Promise.scala:157)
  22. at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:713)
  23. Caused by: java.lang.ExceptionInInitializerError
  24. at com.ck.data.batch.customer.Test.main(Test.scala)
  25. at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  26. at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  27. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  28. at java.lang.reflect.Method.invoke(Method.java:498)
  29. at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:694)
  30. Caused by: java.lang.RuntimeException: Failed to get driver instance for jdbcUrl=jdbc:mysql://10.255.11.118:3306/scrm?characterEncoding=utf8&useSSL=false&tinyInt1isBit=false
  31. at com.zaxxer.hikari.util.DriverDataSource.<init>(DriverDataSource.java:88)
  32. at com.zaxxer.hikari.pool.PoolBase.initializeDataSource(PoolBase.java:323)
  33. at com.zaxxer.hikari.pool.PoolBase.<init>(PoolBase.java:114)
  34. at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java:105)
  35. at com.zaxxer.hikari.HikariDataSource.<init>(HikariDataSource.java:72)
  36. at com.cds.common.java.jdbc.DbDataSource.newInstance(DbDataSource.java:52)
  37. at com.cds.common.java.jdbc.DbDataSource.newInstance(DbDataSource.java:18)
  38. at com.cds.common.java.connection.AbstractLazyInitClosableConnection.getInstance(AbstractLazyInitClosableConnection.java:69)
  39. at com.ck.data.common.instance.mysql.DbInstance.getDataSource(DbInstance.java:26)
  40. at com.ck.data.batch.customer.Test$.<init>(Test.scala:30)
  41. at com.ck.data.batch.customer.Test$.<clinit>(Test.scala)
  42. ... 6 more
  43. Caused by: java.sql.SQLException: No suitable driver
  44. at java.sql.DriverManager.getDriver(DriverManager.java:315)
  45. at com.zaxxer.hikari.util.DriverDataSource.<init>(DriverDataSource.java:81)
  46. ... 16 more

报错信息如上:

  1. import java.util.Properties
  2. import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
  3. import javax.sql.DataSource
  4. import org.apache.commons.dbutils.QueryRunner
  5. import org.apache.spark.SparkContext
  6. import org.apache.spark.sql.SparkSession
  7. object Test {
  8. val instance:DataSource = getDataSource
  9. def main(args: Array[String]): Unit = {
  10. val spark: SparkSession = SparkSession.builder().appName("test").getOrCreate()
  11. val sc = spark.sparkContext
  12. process(sc)
  13. sc.stop()
  14. }
  15. def process(sc: SparkContext) = {
  16. sc.makeRDD(List(1,2,3,4,5,6,7,8,9,10))
  17. .map(x => "test" + x)
  18. .foreachPartition(par => {
  19. val params: Array[Array[AnyRef]] = par.map(d => Array(d.asInstanceOf[AnyRef])).toArray
  20. saveData(params)
  21. })
  22. }
  23. def saveData(params: Array[Array[AnyRef]]) = {
  24. val sql = " INSERT INTO test (text) values (?) "
  25. val qr: QueryRunner = new QueryRunner(instance)
  26. qr.batch(sql, params)
  27. }
  28. def getDataSource(): HikariDataSource = {
  29. val properties = new Properties
  30. properties.put("jdbcUrl", "jdbc:mysql://localhost:3306/db?characterEncoding=utf8&useSSL=false&tinyInt1isBit=false")
  31. properties.put("username", "root")
  32. properties.put("password", "root!2022")
  33. val config = new HikariConfig(properties)
  34. // config.setDriverClassName("com.mysql.jdbc.Driver")
  35. new HikariDataSource(config)
  36. }
  37. }

代码如上:没有使用 Spark SQL,但也有写 MySQL 的操作。

1、查找原因

网上有很多解决方法,但基本都不太符合我的情况。以下罗列其他常见的解决方法:

  1. Spark SQL 需要手动添加 .option("driver", "com.mysql.jdbc.Driver")
  2. 驱动类名写错了(多了逗号、分号等)
  3. 驱动缺失,去spark集群添加mysql的驱动,或者提交任务的时候手动指定驱动
    1. --conf spark.executor.extraClassPath=/opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
    2. --conf spark.driver.extraClassPath=/opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
  4. 驱动jar冲突,spark-submit 的时候手动指定驱动 --jars   --driver-class-path
    1. spark-submit \
    2. --master yarn \
    3. --deploy-mode cluster \
    4. --name "test" \
    5. --num-executors 3 \
    6. --executor-cores 1 \
    7. --executor-memory 2G \
    8. --driver-memory 1G \
    9. --jars /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
    10. --driver-class-path /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
    11. --conf spark.executor.extraClassPath=/opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
    12. --class com.data.Test \
    13. test-1.0-SNAPSHOT-jar-with-dependencies.jar

我的问题解决:

  1. --jars /opt/client/Spark2x/spark/jars/mysql-connector-java-5.1.48.jar \
  2. --conf spark.yarn.user.classpath.first=true \

 

2、分析总结

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/凡人多烦事01/article/detail/593426
推荐阅读
相关标签
  

闽ICP备14008679号