From c0f82700ee25f4151bfbc787416cde9098b3ad40 Mon Sep 17 00:00:00 2001
From: Matthew Bauer <mjbauer95@gmail.com>
Date: Tue, 29 Jan 2019 15:53:31 -0500
Subject: [PATCH] pyspark: 2.3.2 -> 2.4.0, fix version bounds

pyspark 2.4.0 pins py4j to exactly 0.10.7 in its setup.py, but the py4j
0.10.8.1 currently in nixpkgs works fine with it, so the pin is relaxed
to py4j>=0.10.7,<0.11.
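
A quick way to sanity-check the relaxed bound (a sketch using the
packaging library; not part of this patch):

    from packaging.requirements import Requirement

    req = Requirement("py4j>=0.10.7,<0.11")
    assert "0.10.7" in req.specifier      # old exact pin still accepted
    assert "0.10.8.1" in req.specifier    # py4j currently in nixpkgs
    assert "0.11.0" not in req.specifier  # outside the new upper bound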
---
 pkgs/development/python-modules/pyspark/default.nix | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/pkgs/development/python-modules/pyspark/default.nix b/pkgs/development/python-modules/pyspark/default.nix
index 29dd344a34cb..0eca6c5ddf05 100644
--- a/pkgs/development/python-modules/pyspark/default.nix
+++ b/pkgs/development/python-modules/pyspark/default.nix
@@ -2,16 +2,19 @@
 
 buildPythonPackage rec {
   pname = "pyspark";
-  version = "2.3.2";
+  version = "2.4.0";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "7fb3b4fe47edb0fb78cecec37e0f2a728590f17ef6a49eae55141a7a374c07c8";
+    sha256 = "1p7z5f1a20l7xkjkh88q9cvjw2x8jbrlydkycn5lh4qvx72vgmy9";
   };
 
   # pypandoc is broken with pandoc2, so we just lose docs.
   postPatch = ''
     sed -i "s/'pypandoc'//" setup.py
+
+    # pyspark 2.4.0 works fine with py4j 0.10.8.1, so relax the exact pin
+    substituteInPlace setup.py --replace py4j==0.10.7 'py4j>=0.10.7,<0.11'
   '';
 
   propagatedBuildInputs = [ py4j ];