Fixed concatenation issue with print statements

This commit is contained in:
muassif 2021-08-16 22:20:17 +04:00
parent c004d5fc10
commit 1848e4df7a

View File

@ -12,13 +12,13 @@ data = [5, 4, 6, 3, 2, 8, 9, 2, 8, 7,
8, 4, 4, 8, 2, 7, 8, 9, 6, 9] 8, 4, 4, 8, 2, 7, 8, 9, 6, 9]
rdd1 = spark.sparkContext.parallelize(data) rdd1 = spark.sparkContext.parallelize(data)
print("RDD contents with partitions: "+rdd1.glom().collect()) print("RDD contents with partitions:" + str(rdd1.glom().collect()))
print("Count by values: "+rdd1.countByValue()) print("Count by values: " +str(rdd1.countByValue()))
print("reduce function"+rdd1.reduce(lambda a,b: a+b)) print("reduce function: " + str(rdd1.glom().collect()))
print("Sum of RDD contents"+rdd1.sum()) print("Sum of RDD contents:"+str(rdd1.sum()))
print(""+rdd1.top(5)) print("top: " + str(rdd1.top(5)))
print(rdd1.count()) print("count: " + str(rdd1.count()))
print(rdd1.max()) print("max: "+ str(rdd1.max()))
print(rdd1.min()) print("min" + str(rdd1.min()))
time.sleep(60) time.sleep(60)