From 6808de36724533e983da7a3f7a15b5e979e0cce4 Mon Sep 17 00:00:00 2001
From: sophwats
Date: Mon, 30 Jul 2018 11:49:09 -0400
Subject: [PATCH] decreased memory requirements of cluster

---
 app.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/app.py b/app.py
index 1972852..09de71b 100755
--- a/app.py
+++ b/app.py
@@ -87,9 +87,9 @@ def main(arguments):
     # set up the spark configuration
     loggers.debug("Connecting to Spark")
     conf = (pyspark.SparkConf().setAppName("JiminyModeler")
-            .set('spark.executor.memory', '4G')
-            .set('spark.driver.memory', '45G')
-            .set('spark.driver.maxResultSize', '10G'))
+            .set('spark.executor.memory', '1G')
+            .set('spark.driver.memory', '1G')
+            .set('spark.driver.maxResultSize', '1G'))
     # get the spark context
     spark = pyspark.sql.SparkSession.builder.config(conf=conf).getOrCreate()
     sc = spark.sparkContext