# Make the local Spark installation importable before creating a session.
import findspark
findspark.init()
from pyspark.sql import SparkSession

# Create (or reuse) a SparkSession for this window-functions walkthrough.
spark = SparkSession \
    .builder \
    .appName("Window functions") \
    .getOrCreate()
# Read the products CSV with a header row. inferSchema makes `price`
# numeric; without it every column is a string, and string comparison
# would order "9.99" above "10.99" in the windows below.
products = spark.read\
                .format("csv")\
                .option("header", "true")\
                .option("inferSchema", "true")\
                .load('../datasets/products.csv')
products.show()
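# A quick sanity check, as a sketch: confirm that inferSchema assigned
# `price` a numeric type before building windows over it.
products.printSchema()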
from pyspark.sql.window import Window
import pyspark.sql.functions as func
# Rank products within each category from most to least expensive.
windowSpec1 = Window \
    .partitionBy(products['category']) \
    .orderBy(products['price'].desc())
price_rank = (func.rank().over(windowSpec1))
product_rank = products.select(
        products['product'],
        products['category'],
        products['price']
).withColumn('rank', price_rank)
product_rank.show()
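# A minimal sketch of the other ranking functions over the same window
# spec: rank() leaves gaps after ties, dense_rank() does not, and
# row_number() numbers rows consecutively even through ties. The aliases
# are illustrative, not part of the original dataset.
products.select(
    products['product'],
    products['category'],
    products['price'],
    func.rank().over(windowSpec1).alias('rank'),
    func.dense_rank().over(windowSpec1).alias('dense_rank'),
    func.row_number().over(windowSpec1).alias('row_number')
).show()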
# A sliding frame of the previous row and the current row. With prices
# sorted descending, the max over this frame is the next-higher price in
# the category (or the row's own price for the most expensive product).
windowSpec2 = Window \
    .partitionBy(products['category']) \
    .orderBy(products['price'].desc()) \
    .rowsBetween(-1, 0)
price_max = (func.max(products['price']).over(windowSpec2))
products.select(
    products['product'],
    products['category'],
    products['price'],
    price_max.alias("price_max")).show()
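# Sketch of an alternative: func.lag() fetches the previous row's value
# directly, which reads more clearly than a max over a two-row frame when
# the goal is "the next-higher price". The `prev_price` name is ours.
prev_price = func.lag(products['price'], 1).over(
    Window.partitionBy(products['category'])
          .orderBy(products['price'].desc()))
products.select(
    products['product'],
    products['category'],
    products['price'],
    prev_price.alias("prev_price")).show()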
# An unbounded range frame spans the whole partition, so the max is the
# category's top price; the difference below is how far each product sits
# below the most expensive product in its category.
windowSpec3 = Window \
    .partitionBy(products['category']) \
    .orderBy(products['price'].desc()) \
    .rangeBetween(Window.unboundedPreceding, Window.unboundedFollowing)
price_difference = \
  (func.max(products['price']).over(windowSpec3) - products['price'])
products.select(
    products['product'],
    products['category'],
    products['price'],
    price_difference.alias("price_difference")).show()
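# The same computation expressed in Spark SQL, as a sketch; the temp view
# name "products" is our own choice. With no ORDER BY or frame clause,
# the OVER window defaults to the whole partition.
products.createOrReplaceTempView("products")
spark.sql("""
    SELECT product, category, price,
           MAX(price) OVER (PARTITION BY category) - price AS price_difference
    FROM products
""").show()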
# Ascending order with a range frame from the current price upward: the
# frame holds every row priced at or above the current one, so the max is
# again the category's top price.
windowSpec4 = Window \
    .partitionBy(products['category']) \
    .orderBy(products['price'].asc()) \
    .rangeBetween(Window.currentRow, Window.unboundedFollowing)
price_max = (func.max(products['price']).over(windowSpec4))
products.select(
    products['product'],
    products['category'],
    products['price'],
    price_max.alias("price_max")).show()
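# Design note, as a sketch: when the frame is effectively the whole
# partition, a window with only partitionBy (no orderBy, no frame clause)
# yields the same category-wide max more directly. `category_max` is an
# illustrative name.
category_max = func.max(products['price']).over(
    Window.partitionBy(products['category']))
products.select(
    products['product'],
    products['category'],
    products['price'],
    category_max.alias("category_max")).show()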