{ lib, jdk, buildPythonPackage, fetchPypi, six, py4j }:

buildPythonPackage rec {
  pname = "databricks-connect";
  version = "7.3.7";

  src = fetchPypi {
    inherit pname version;
    sha256 = "35ead50a0550e65a7d6fd78e2c8e54095b53514fba85180768a2dbcdd3f2cf0b";
  };

  sourceRoot = ".";

  propagatedBuildInputs = [ py4j six jdk ];

  # requires network access
  doCheck = false;

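  # Loosen setup.py's exact py4j pin so the py4j packaged in nixpkgs can satisfy it.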
  prePatch = ''
    substituteInPlace setup.py \
      --replace "py4j==0.10.9" "py4j"
  '';

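  # find-spark-home references find_spark_home.py, which the Python wrapper hook
  # renames to .find_spark_home.py-wrapped, so point the script at the wrapped file.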
  preFixup = ''
    substituteInPlace "$out/bin/find-spark-home" \
      --replace find_spark_home.py .find_spark_home.py-wrapped
  '';

  pythonImportsCheck = [ "pyspark" "six" "py4j" ];

  meta = with lib; {
    description = "Client for connecting to remote Databricks clusters";
    homepage = "https://pypi.org/project/databricks-connect";
    license = licenses.databricks;
    maintainers = with maintainers; [ kfollesdal ];
  };
}