当前位置:   article > 正文

解决java中joda DateTime导致的空指针异常

date空指针异常

异常消息如下

用户类引发异常(User class threw exception): Job aborted due to stage failure: Task 0 in stage 1.0 failed 4 times, most recent failure: Lost task 0.3 in stage 1.0 (TID 11, 10.215.155.82): java.lang.NullPointerException
    at org.joda.time.tz.CachedDateTimeZone.getInfo(CachedDateTimeZone.java:143)
    at org.joda.time.tz.CachedDateTimeZone.getOffset(CachedDateTimeZone.java:103)
    at org.joda.time.format.DateTimeFormatter.printTo(DateTimeFormatter.java:676)
    at org.joda.time.format.DateTimeFormatter.printTo(DateTimeFormatter.java:521)
    at org.joda.time.format.DateTimeFormatter.print(DateTimeFormatter.java:625)
    at org.joda.time.base.AbstractDateTime.toString(AbstractDateTime.java:328)
    at com.xxx.ieg.face.demo.DateTimeNullReferenceReappear$$anonfun$3$$anonfun$apply$1.apply(DateTimeNullReferenceReappear.scala:41)
    at com.xxx.ieg.face.demo.DateTimeNullReferenceReappear$$anonfun$3$$anonfun$apply$1.apply(DateTimeNullReferenceReappear.scala:41)
    at scala.collection.TraversableLike$$anonfun$groupBy$1.apply(TraversableLike.scala:328)
    at scala.collection.TraversableLike$$anonfun$groupBy$1.apply(TraversableLike.scala:327)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at org.apache.spark.util.collection.CompactBuffer$$anon$1.foreach(CompactBuffer.scala:113)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at org.apache.spark.util.collection.CompactBuffer.foreach(CompactBuffer.scala:28)
    at scala.collection.TraversableLike$class.groupBy(TraversableLike.scala:327)
    at org.apache.spark.util.collection.CompactBuffer.groupBy(CompactBuffer.scala:28)
    at com.xxx.ieg.face.demo.DateTimeNullReferenceReappear$$anonfun$3.apply(DateTimeNullReferenceReappear.scala:41)
    at com.xxx.ieg.face.demo.DateTimeNullReferenceReappear$$anonfun$3.apply(DateTimeNullReferenceReappear.scala:40)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
    at scala.collection.Iterator$$anon$10.next(Iterator.scala:312)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
    at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
    at scala.collection.AbstractIterator.to(Iterator.scala:1157)
    at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
    at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
    at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
    at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
    at org.apache.spark.rdd.RDD$$anonfun$26.apply(RDD.scala:1081)
    at org.apache.spark.rdd.RDD$$anonfun$26.apply(RDD.scala:1081)
    at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
    at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
    at org.apache.spark.scheduler.Task.run(Task.scala:56)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:744)

我的代码如下:

import org.apache.hadoop.conf.Configuration

import org.apache.spark.rdd.RDD

import org.apache.spark.SparkContext._

import org.apache.spark.{ SparkConf, SparkContext }

import org.joda.time.DateTime

import org.joda.time.format.{ DateTimeFormat, DateTimeFormatter }

// Minimal reproduction of an NPE thrown when a Joda-Time DateTime is formatted
// inside a Spark closure that runs AFTER a shuffle (groupByKey): the DateTime
// instances are serialized to executors, and on deserialization Joda's
// DateTimeZone internals are not fully restored, so toString(pattern) fails
// in CachedDateTimeZone.getInfo with a NullPointerException.
// NOTE(review): `extends App` is discouraged for Spark drivers — its
// delayed-initialization fields interact badly with closure serialization;
// an explicit `def main(args: Array[String]): Unit` is safer.
object DateTimeNullReferenceReappear extends App {

// One parsed warehouse row. `date: DateTime = null` is non-idiomatic Scala
// (Option[DateTime] would be safer) but is kept to mirror the failing case.
case class Record(uin: String = "", date: DateTime = null, value: Double = 0.0)

// Hadoop configuration — created but not used in this snippet.
val cfg = new Configuration

val sparkConf = new SparkConf()

sparkConf.setAppName("bourne_exception_reappear")

val sc = new SparkContext(sparkConf)

// TDWSparkContext / FaceConf are project-internal helpers: this call just
// reads the listed partitions of a data-warehouse table as an RDD of rows.
val data = TDWSparkContext.tdwTable( // this function just reads data from a data warehouse

sc,

tdwuser = FaceConf.TDW_USER,

tdwpasswd = FaceConf.TDW_PASSWORD,

dbName = "my_db",

tblName = "my_table",

parts = Array("p_20150323", "p_20150324", "p_20150325", "p_20150326", "p_20150327", "p_20150328", "p_20150329"))

// Parse each row on the executors; the DateTime built here is fine locally.
.map(row => {

Record(uin = row(2),

date = DateTimeFormat.forPattern("yyyyMMdd").parseDateTime(row(0)),

value = row(4).toDouble)

// Keying by uin keeps the raw DateTime in the shuffled value — this is what
// forces the DateTime through serialization/deserialization.
}).map(x => (x.uin, (x.date, x.value)))

.groupByKey

.map(x => {

// NPE here: x._1 of each pair is a DateTime that was deserialized on the
// executor, and formatting it hits the broken time-zone cache.
x._2.groupBy(_._1.toString("yyyyMMdd")).mapValues(_.map(_._2).sum) // throws the exception

})

// Working variant: format the DateTime to a String BEFORE the shuffle, so
// only plain Strings cross the serialization boundary.
// val data = TDWSparkContext.tdwTable( // It works, as I don't use datetime toString in the groupBy

// sc,

// tdwuser = FaceConf.TDW_USER,

// tdwpasswd = FaceConf.TDW_PASSWORD,

// dbName = "hy",

// tblName = "t_dw_cf_oss_tblogin",

// parts = Array("p_20150323", "p_20150324", "p_20150325", "p_20150326", "p_20150327", "p_20150328", "p_20150329"))

// .map(row => {

// Record(uin = row(2),

// date = DateTimeFormat.forPattern("yyyyMMdd").parseDateTime(row(0)),

// value = row(4).toDouble)

// }).map(x => (x.uin, (x.date.toString("yyyyMMdd"), x.value)))

// .groupByKey

// .map(x => {

// x._2.groupBy(_._1).mapValues(_.map(_._2).sum)

// })

// Trigger the job; take(10) collects to the driver, running the closures above.
data.take(10).map(println)

}

所以,似乎在 groupBy 中调用 toString 会导致异常,所以有人可以解释一下吗?

谢谢

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/凡人多烦事01/article/detail/320816
推荐阅读
相关标签
  

闽ICP备14008679号