We will also set basic environment variables via conf/spark-env.sh as follows (edit the values as per your environment):
> cp conf/spark-env.sh.template conf/spark-env.sh
> vi conf/spark-env.sh

Add the following lines to conf/spark-env.sh:

export SPARK_LOCAL_IP=192.168.56.10
export SPARK_LOCAL_DIRS=/data/spark/local/data
export SPARK_MASTER_HOST=192.168.56.10
export SPARK_WORKER_DIR=/data/spark/local/worker
export SPARK_CONF_DIR=/opt/spark-2.3.2-bin-hadoop2.7/conf
export SPARK_LOG_DIR=/data/spark/local/logs
export SPARK_PID_DIR=/data/spark/local/pid