Spark date parsing
I'm parsing some dates in this format: 2009-01-23 18:15:05
using the following function:
import java.sql.Timestamp
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
import org.apache.spark.sql.types.{StructField, StructType, TimestampType}
import org.joda.time.DateTime

def loadTransactions(sqlContext: SQLContext, path: String): DataFrame = {
  val rowRdd = sqlContext.sparkContext.textFile(path).map { line =>
    val tokens = line.split(',')
    val dt = new DateTime(tokens(0))
    Row(new Timestamp(dt.getMillis))
  }
  val fields = Seq(
    StructField("timestamp", TimestampType, true)
  )
  val schema = StructType(fields)
  sqlContext.createDataFrame(rowRdd, schema)
}
Spark throws this error:
java.lang.IllegalArgumentException: Invalid format: "2009-01-23 18:15:05" is malformed at " 18:15:05" at org.joda.time.format.DateTimeParserBucket.doParseMillis
I'm guessing this is because the milliseconds are missing.
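If it's actually the space before the time that Joda's default ISO parser rejects (the one-argument DateTime(String) constructor goes through that parser), an explicit formatter should handle this pattern; a minimal sketch, assuming Joda-Time's DateTimeFormat, with parseToTimestamp being just an illustrative helper name:

import java.sql.Timestamp
import org.joda.time.format.DateTimeFormat

// Parse "yyyy-MM-dd HH:mm:ss" explicitly instead of relying on the ISO parser
val formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")

def parseToTimestamp(s: String): Timestamp =
  new Timestamp(formatter.parseDateTime(s).getMillis)

// parseToTimestamp("2009-01-23 18:15:05") should yield the expected java.sql.Timestamp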
Without Joda-Time, you can use the approach below:
def loadTransactions(sqlContext: SQLContext, path: String): DataFrame = {
  val rowRdd = sqlContext.sparkContext.textFile(path).map { line =>
    val tokens = line.split(',')
    Row(getTimestamp(tokens(0)))
  }
  val fields = Seq(
    StructField("timestamp", TimestampType, true)
  )
  val schema = StructType(fields)
  sqlContext.createDataFrame(rowRdd, schema)
}
together with the following function to do the conversion to a timestamp:
import java.sql.Timestamp
import java.text.SimpleDateFormat

def getTimestamp(x: String): java.sql.Timestamp = {
  // "HH" is the 24-hour field; "hh" (1-12) would misread times such as 18:15:05
  val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
  if (x == "")
    null
  else {
    val d = format.parse(x)
    new Timestamp(d.getTime)
  }
}
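A quick usage sketch (the file path below is just a placeholder, not from the original post):

val ts = getTimestamp("2009-01-23 18:15:05")   // the sample value from the question

val df = loadTransactions(sqlContext, "/data/transactions.csv")   // placeholder path
df.printSchema()
// root
//  |-- timestamp: timestamp (nullable = true)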
How about something like this?
import org.apache.spark.sql.functions.regexp_extract

def loadTransactions(sqlContext: SQLContext, path: String): DataFrame = {
  import sqlContext.implicits._   // needed for toDF and the $"..." column syntax
  sqlContext.sparkContext.textFile(path).toDF("text").select(
    regexp_extract($"text", "^(.*?),", 1).cast("timestamp").alias("timestamp"))
}
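If the format ever needs to be stated explicitly instead of relying on the default string-to-timestamp cast, a variant built on unix_timestamp could look like the sketch below (assuming Spark 1.5+; loadTransactionsExplicit is just an illustrative name):

import org.apache.spark.sql.functions.{regexp_extract, unix_timestamp}

def loadTransactionsExplicit(sqlContext: SQLContext, path: String): DataFrame = {
  import sqlContext.implicits._   // for toDF and the $"..." column syntax
  sqlContext.sparkContext.textFile(path).toDF("text").select(
    unix_timestamp(regexp_extract($"text", "^(.*?),", 1), "yyyy-MM-dd HH:mm:ss")
      .cast("timestamp").alias("timestamp"))
}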