In [1]:
%%classpath add mvn
org.apache.spark spark-core_2.11 2.3.1
org.apache.spark spark-sql_2.11 2.3.1
In [2]:
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

// Configure a local SparkSession (2 worker threads) for the rounding examples.
val sessionBuilder = SparkSession
  .builder()
  .appName("Spark Rounding Examples")
  .master("local[2]")
  .config("spark.some.config.option", "some-value")

// Reuse an existing session if one is already running, otherwise start one.
val spark = sessionBuilder.getOrCreate()

// Enables $"col" syntax and Seq(...).toDF in the cells below.
import spark.implicits._
Out[2]:
In [7]:
// Sample doubles, including the 2.5 / 3.5 ties that distinguish the
// rounding functions; "value" is the default name toDF would pick anyway.
val df = Seq(1.12, 2.34, 9.87, 2.5, 3.5).toDF("value")
df.show()
Out[7]:
round¶
Round to the nearest integer, rounding up at 0.5.
In [10]:
// round: HALF_UP rounding to 0 decimal places.
df.withColumn("round", round(col("value"))).show()
The function round
accepts an optional argument (via method overloading)
specifying the number of digits to keep.
In [11]:
// round with an explicit scale: keep one digit after the decimal point.
df.withColumn("round", round(col("value"), 1)).show()
bround¶
Round to the nearest integer and round to the even number at 0.5.
In [13]:
// bround: HALF_EVEN ("banker's") rounding to 0 decimal places.
df.withColumn("round", bround(col("value"))).show()
In [15]:
// bround with an explicit scale: banker's rounding to one decimal place.
df.withColumn("round", bround(col("value"), 1)).show()
rint¶
The function rint
is similar to the function bround.
In [16]:
// rint: round to the nearest whole number (ties to even, like bround).
df.withColumn("round", rint(col("value"))).show()
floor¶
In [17]:
// floor: largest integer not greater than the value.
df.withColumn("round", floor(col("value"))).show()
ceil¶
In [18]:
// ceil: smallest integer not less than the value.
df.withColumn("round", ceil(col("value"))).show()
In [ ]: