freebsd-ports/devel/spark/Makefile
Stefan Eßer 6d342acaa5 devel/spark: fix multiple issues
The devel/spark port had been resurrected by Neel Chauhan based on
PR 266484, but that PR had become stale and did not follow current
rules and conventions (and had some issues that needed to be fixed).

Neel ran out of time to fix these issues, so I have taken over and
finished the patches he had been working on.

This updated port has been tested with poudriere testport, but I do
not have a suitable test environment to run functional tests on.

Please report any run-time issues you find to both the maintainer of
the port and to me.
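
For reference, a minimal run-time smoke test might look like the
following (untested here; it assumes the default PREFIX of /usr/local
and the standard rc.conf knobs created for the rc.d scripts):

    sysrc spark_master_enable=YES spark_worker_enable=YES
    service spark_master start
    service spark_worker start
    /usr/local/share/spark/bin/run-example SparkPi 10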

Approved by:	portmgr (blanket)
2022-09-29 15:06:06 +02:00

PORTNAME=	spark
PORTVERSION=	3.3.0
CATEGORIES=	devel java
MASTER_SITES=	https://archive.apache.org/dist/${PORTNAME}/${PORTNAME}-${PORTVERSION}/ \
		LOCAL/se/distfiles/:deps \
		https://people.freebsd.org/~se/distfiles/:deps
PKGNAMEPREFIX=	apache-
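# The :deps distfile provides a pre-populated local maven repository
# so that the build can run offline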
DISTFILES=	${PORTNAME}-${PORTVERSION}.tgz \
		${PORTNAME}-${PORTVERSION}-deps.tgz:deps

MAINTAINER=	freebsd@sysctl.cz
COMMENT=	Fast big data processing engine
WWW=		https://spark.apache.org/

LICENSE=	APACHE20
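
# libsnappyjava.so is needed to graft a native FreeBSD library into
# the snappy-java jar during the build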
BUILD_DEPENDS=	${LOCALBASE}/lib/libsnappyjava.so:archivers/snappy-java \
		bash:shells/bash \
		mvn:devel/maven
RUN_DEPENDS=	bash:shells/bash

USES=		cpe python shebangfix
CPE_VENDOR=	apache
USE_JAVA=	yes
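# rc.d scripts to run the Spark master and worker as daemons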
USE_RC_SUBR=	spark_master spark_worker
SHEBANG_FILES=	bin/sparkR
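# Maven needs a large heap and JIT code cache to build Spark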
MAKE_ENV+=	JAVA_HOME=${JAVA_HOME} \
		MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=2g"
NO_ARCH=	yes
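# The daemons run as a dedicated user/group; SUB_LIST passes the
# names into the rc.d scripts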
SUB_LIST=	SPARK_GROUP=spark \
		SPARK_USER=spark
USERS=		spark
GROUPS=		spark
PLIST_SUB=	VER=${PORTVERSION}

.include <bsd.port.pre.mk>
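
# Map the FreeBSD architecture name to the one used by snappy-java
# for its bundled native libraries (org/xerial/snappy/native/...)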
.if ${ARCH} == "amd64"
JAVA_ARCH=	x86_64
.elif ${ARCH} == "i386"
JAVA_ARCH=	x86
.else
JAVA_ARCH=	${ARCH}
.endif

# Update spark-*-deps.tgz if the hadoop version is changed!
HADOOP_VERSION=	3.3.4
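# Run maven in offline mode (-o), using only the pre-fetched
# dependencies; set OFFLINE to empty to allow downloads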
OFFLINE=	-o
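
# Build with maven, then add the native FreeBSD libsnappyjava.so to
# the snappy-java jar at the path where its loader expects it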
do-build:
	${MKDIR} ${WRKDIR}/snappy/org/xerial/snappy/native/${OPSYS}/${JAVA_ARCH}
	${CP} ${LOCALBASE}/lib/libsnappyjava.so \
		${WRKDIR}/snappy/org/xerial/snappy/native/${OPSYS}/${JAVA_ARCH}
	cd ${WRKSRC} && ${SETENV} ${MAKE_ENV} \
		${LOCALBASE}/bin/mvn ${OFFLINE} \
		-Duser.home=${WRKDIR} \
		-Dmaven.repo.local=${WRKDIR}/m2 \
		-Dhadoop.version=${HADOOP_VERSION} \
		-Pyarn -Phive -Phive-thriftserver -DskipTests \
		clean package
	${JAR} uvf ${WRKSRC}/assembly/target/scala*/jars/snappy-java-*.jar \
		-C ${WRKDIR}/snappy org
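
# Remove Windows batch files and patch backups before installation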
post-build:
	${RM} ${WRKSRC}/bin/*.cmd ${WRKSRC}/sbin/spark-daemon.sh.orig
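
# Install the assembled jars, examples, bin/sbin scripts, python
# support files, and config templates under ${DATADIR}; the rc.d
# scripts use /var/run/spark and /var/log/spark at run time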
do-install:
	${MKDIR} ${STAGEDIR}${DATADIR}/lib ${STAGEDIR}${DATADIR}/examples/jars \
		${STAGEDIR}${DATADIR}/bin ${STAGEDIR}${DATADIR}/sbin \
		${STAGEDIR}${DATADIR}/conf
	${ECHO_CMD} "Spark ${PORTVERSION} built for Hadoop ${HADOOP_VERSION}" > ${STAGEDIR}${DATADIR}/RELEASE
	(cd ${WRKSRC}/assembly/target/scala* && ${COPYTREE_SHARE} jars ${STAGEDIR}${DATADIR})
	${INSTALL_DATA} ${WRKSRC}/examples/target/spark-examples*.jar \
		${STAGEDIR}${DATADIR}/examples/jars
	cd ${WRKSRC}/examples && ${COPYTREE_SHARE} src ${STAGEDIR}${DATADIR}/examples
	cd ${WRKSRC}/bin && ${INSTALL_SCRIPT} * ${STAGEDIR}${DATADIR}/bin/
	cd ${WRKSRC}/sbin && ${INSTALL_SCRIPT} * ${STAGEDIR}${DATADIR}/sbin/
	cd ${WRKSRC} && ${COPYTREE_SHARE} "python" ${STAGEDIR}${DATADIR}/
	${INSTALL_DATA} ${WRKSRC}/conf/*.template ${STAGEDIR}${DATADIR}/conf/
	${MKDIR} ${STAGEDIR}/var/run/spark
	${MKDIR} ${STAGEDIR}/var/log/spark

.include <bsd.port.post.mk>