[97746] trunk/dports/java/hadoop
hum at macports.org
Thu Sep 13 23:54:20 PDT 2012
Revision: 97746
http://trac.macports.org//changeset/97746
Author: hum at macports.org
Date: 2012-09-13 23:54:20 -0700 (Thu, 13 Sep 2012)
Log Message:
-----------
hadoop: fix the library builds; rename patchfiles; support universal build; see #35902 comment:6.
Modified Paths:
--------------
trunk/dports/java/hadoop/Portfile
Added Paths:
-----------
trunk/dports/java/hadoop/files/patch-conf-hadoop-env.sh.diff
trunk/dports/java/hadoop/files/patch-pseudo.diff
trunk/dports/java/hadoop/files/patch-src-c++.diff
trunk/dports/java/hadoop/files/patch-src-contrib-fusedfs.diff
trunk/dports/java/hadoop/files/patch-src-native.diff
Removed Paths:
-------------
trunk/dports/java/hadoop/files/patch-c++.diff
trunk/dports/java/hadoop/files/patch-conf.diff
trunk/dports/java/hadoop/files/patch-fusedfs.diff
trunk/dports/java/hadoop/files/patch-hadoop-env.sh.diff
trunk/dports/java/hadoop/files/patch-native.diff
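In short, this revision threads the port's compiler, preprocessor, and linker settings (including [get_canonical_archflags], which is what makes the universal variant possible) through placeholder substitutions into build.xml, so the autoconf runs that ant spawns inherit them. A quick way to confirm a universal result after installation is to inspect the installed library; a minimal sketch, assuming the default /opt/local prefix and that libhdfs.dylib lands in ${prefix}/lib (paths are assumptions, not taken from this changeset):

  # Hypothetical post-install check: list the architectures baked into the dylib.
  lipo -info /opt/local/lib/libhdfs.dylib
  # Expected output for a universal build, e.g.:
  #   Architectures in the fat file: ... are: x86_64 i386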
Modified: trunk/dports/java/hadoop/Portfile
===================================================================
--- trunk/dports/java/hadoop/Portfile 2012-09-14 06:24:23 UTC (rev 97745)
+++ trunk/dports/java/hadoop/Portfile 2012-09-14 06:54:20 UTC (rev 97746)
@@ -5,7 +5,7 @@
name hadoop
version 1.0.3
-revision 3
+revision 4
categories java devel science
maintainers hum openmaintainer
@@ -22,9 +22,9 @@
checksums rmd160 e41421483156fd0fa65d608b206a17cd2a73a989 \
sha256 716ab51f75ffb70343c3cca02f7ba4722f42376edb67eecbd42a426a054e6423
-patchfiles patch-hadoop-env.sh.diff \
- patch-native.diff \
- patch-c++.diff
+patchfiles patch-conf-hadoop-env.sh.diff \
+ patch-src-native.diff \
+ patch-src-c++.diff
depends_build bin:ant:apache-ant
depends_lib port:zlib \
@@ -50,11 +50,30 @@
file attributes ${worksrcpath}/src/c++/libhdfs/install-sh -permissions 0755
}
-universal_variant no
+variant universal {}
use_configure no
-# Build native and c++ libraries.
+set java_include -I/System/Library/Frameworks/JavaVM.framework/Headers
+set cflags "${configure.cflags} [get_canonical_archflags]"
+set cxxflags "${configure.cxxflags} [get_canonical_archflags cxx]"
+set ldflags "${configure.ldflags} [get_canonical_archflags] -framework JavaVM"
+set cppflags "${configure.cppflags} ${java_include}"
+
+# Set configure args to build native and c++-libhdfs.
+post-patch {
+ set libs "-ldl -lz -lsnappy"
+ reinplace "s|@cc@|${configure.cc}|g" ${worksrcpath}/build.xml
+ reinplace "s|@cflags@|${cflags}|g" ${worksrcpath}/build.xml
+ reinplace "s|@cxx@|${configure.cxx}|g" ${worksrcpath}/build.xml
+ reinplace "s|@cxxflags@|${cxxflags}|g" ${worksrcpath}/build.xml
+ reinplace "s|@ldflags@|${ldflags}|g" ${worksrcpath}/build.xml
+ reinplace "s|@libs@|${libs}|g" ${worksrcpath}/build.xml
+ reinplace "s|@cppflags@|${cppflags}|g" ${worksrcpath}/build.xml
+ reinplace "s|@args@|--prefix=${prefix}|g" ${worksrcpath}/build.xml
+}
+
+# Build native and c++-libhdfs.
build.cmd ant
build.args -Dcompile.native=true \
-Dsnappy.prefix=${prefix} \
@@ -63,7 +82,7 @@
build.target compile-native compile-c++-libhdfs
# Fix install_name of dylib.
-post-build {
+pre-destroot {
foreach file [glob ${worksrcpath}/build/native/**/lib/*.dylib \
${worksrcpath}/build/c++/**/lib/*.dylib] {
if {[file isfile ${file}]} {
@@ -97,16 +116,22 @@
variant fusedfs description {Add Fuse-DFS} {
depends_lib-append port:fuse4x
- patchfiles-append patch-fusedfs.diff
+ patchfiles-append patch-src-contrib-fusedfs.diff
- post-patch {
- set cppflags "-I${prefix}/include -I/System/Library/Frameworks/JavaVM.framework/Headers"
- set ldflags "-L${prefix}/lib -L$@"
- reinplace "s|configure|configure CPPFLAGS=\"${cppflags}\" LDFLAGS=\"${ldflags}\"|" \
- ${worksrcpath}/src/contrib/fuse-dfs/bootstrap.sh
- }
-
+ # libhdfs.dylib must be built before configuring fuse-dfs.
post-build {
+ set libs "-lfuse -lhdfs"
+ # "$@" is replaced with "${hadoop.root}/build/c++/${build.platform}/lib".
+ # See files/patch-src-contrib-fusedfs.diff.
+ set args "--prefix=${prefix} \
+ CC=${configure.cc} \
+ CFLAGS=\"${cflags}\" \
+ LDFLAGS=\"${ldflags} -L$@\" \
+ LIBS=\"${libs}\" \
+ CPPFLAGS=\"${cppflags}\""
+ set sh ${worksrcpath}/src/contrib/fuse-dfs/bootstrap.sh
+ reinplace "s|\./configure|\./configure ${args}|" ${sh}
+ # Build fusedfs.
system -W ${worksrcpath} "ant compile-contrib -Dlibhdfs=1 -Dfusedfs=1"
}
@@ -114,6 +139,10 @@
xinstall -m 755 \
${worksrcpath}/build/contrib/fuse-dfs/fuse_dfs \
${destroot}${prefix}/bin
+ # Fix install_name in fuse_dfs.
+ set bin ${destroot}${prefix}/bin/fuse_dfs
+ regexp {(\S+\/libhdfs\S+dylib)} [exec otool -L ${bin}] path
+ system "install_name_tool -change ${path} ${prefix}/lib/libhdfs.dylib ${bin}"
# Install fuse_dfs_wrapper.sh.
xinstall -m 755 ${filespath}/fuse_dfs_wrapper.sh ${destroot}${hadoop_home}/bin
set sh ${destroot}${hadoop_home}/bin/fuse_dfs_wrapper.sh
@@ -193,7 +222,7 @@
set hadoop_tmp_dir ${hadoop_var_dir}/cache
variant pseudo description {Run on a single-node in a pseudo-distributed mode} {
- patchfiles-append patch-conf.diff
+ patchfiles-append patch-pseudo.diff
post-destroot {
# Set conf directory for a pseudo-distributed mode.
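For context on the pre-destroot block above: the ant build records the build directory as each dylib's install_name, so anything linking against the library at runtime would look in the (long gone) work tree. install_name_tool rewrites that ID before destroot. A minimal sketch of the same technique, with an illustrative build-platform directory name (the real one is derived from ${nonspace.os}-${os.arch}-${sun.arch.data.model}):

  # Show the install_name currently recorded in the freshly built library.
  otool -D build/c++/Mac_OS_X-x86_64-64/lib/libhdfs.dylib
  # Rewrite it to the final installed path so dependents resolve it there.
  install_name_tool -id /opt/local/lib/libhdfs.dylib \
      build/c++/Mac_OS_X-x86_64-64/lib/libhdfs.dylib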
Deleted: trunk/dports/java/hadoop/files/patch-c++.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-c++.diff 2012-09-14 06:24:23 UTC (rev 97745)
+++ trunk/dports/java/hadoop/files/patch-c++.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -1,11 +0,0 @@
---- src/c++/libhdfs/hdfsJniHelper.c.orig 2012-09-08 17:04:50.000000000 +0900
-+++ src/c++/libhdfs/hdfsJniHelper.c 2012-09-08 01:37:44.000000000 +0900
-@@ -15,7 +15,7 @@
- */
-
- #include <string.h>
--#include <error.h>
-+//#include <error.h>
- #include "hdfsJniHelper.h"
-
- static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER;
Added: trunk/dports/java/hadoop/files/patch-conf-hadoop-env.sh.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-conf-hadoop-env.sh.diff (rev 0)
+++ trunk/dports/java/hadoop/files/patch-conf-hadoop-env.sh.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -0,0 +1,39 @@
+--- conf/hadoop-env.sh.orig 2011-05-05 00:00:30.000000000 +0900
++++ conf/hadoop-env.sh 2011-05-16 23:40:40.000000000 +0900
+@@ -6,13 +6,16 @@
+ # remote nodes.
+
+ # The java implementation to use. Required.
+-# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
++export JAVA_HOME=@java_home@
++
++# Extra Java options.
++export JAVA_OPTS="-Dfile.encoding=UTF-8 $JAVA_OPTS"
+
+ # Extra Java CLASSPATH elements. Optional.
+ # export HADOOP_CLASSPATH=
+
+ # The maximum amount of heap to use, in MB. Default is 1000.
+-# export HADOOP_HEAPSIZE=2000
++export HADOOP_HEAPSIZE=2000
+
+ # Extra Java runtime options. Empty by default.
+ # export HADOOP_OPTS=-server
+@@ -31,7 +34,7 @@
+ # export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
+
+ # Where log files are stored. $HADOOP_HOME/logs by default.
+-# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
++export HADOOP_LOG_DIR=@hadoop_log_dir@
+
+ # File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.
+ # export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+@@ -45,7 +48,7 @@
+ # export HADOOP_SLAVE_SLEEP=0.1
+
+ # The directory where pid files are stored. /tmp by default.
+-# export HADOOP_PID_DIR=/var/hadoop/pids
++export HADOOP_PID_DIR=@hadoop_pid_dir@
+
+ # A string representing this instance of hadoop. $USER by default.
+ # export HADOOP_IDENT_STRING=$USER
Deleted: trunk/dports/java/hadoop/files/patch-conf.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-conf.diff 2012-09-14 06:24:23 UTC (rev 97745)
+++ trunk/dports/java/hadoop/files/patch-conf.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -1,52 +0,0 @@
---- conf/core-site.xml.orig 2011-05-05 00:00:30.000000000 +0900
-+++ conf/core-site.xml 2011-05-16 22:42:36.000000000 +0900
-@@ -4,5 +4,12 @@
- <!-- Put site-specific property overrides in this file. -->
-
- <configuration>
--
-+ <property>
-+ <name>fs.default.name</name>
-+ <value>hdfs://localhost:9000</value>
-+ </property>
-+ <property>
-+ <name>hadoop.tmp.dir</name>
-+ <value>@hadoop_tmp_dir@</value>
-+ </property>
- </configuration>
---- conf/hdfs-site.xml.orig 2011-05-05 00:00:30.000000000 +0900
-+++ conf/hdfs-site.xml 2011-05-16 21:37:13.000000000 +0900
-@@ -4,5 +4,12 @@
- <!-- Put site-specific property overrides in this file. -->
-
- <configuration>
--
-+ <property>
-+ <name>dfs.replication</name>
-+ <value>1</value>
-+ </property>
-+ <property>
-+ <name>dfs.permissions</name>
-+ <value>false</value>
-+ </property>
- </configuration>
---- conf/mapred-site.xml.orig 2011-05-05 00:00:30.000000000 +0900
-+++ conf/mapred-site.xml 2011-05-16 23:00:56.000000000 +0900
-@@ -4,5 +4,16 @@
- <!-- Put site-specific property overrides in this file. -->
-
- <configuration>
--
-+ <property>
-+ <name>mapred.job.tracker</name>
-+ <value>localhost:9001</value>
-+ </property>
-+ <property>
-+ <name>mapred.tasktracker.map.tasks.maximum</name>
-+ <value>@tasks_max@</value>
-+ </property>
-+ <property>
-+ <name>mapred.tasktracker.reduce.tasks.maximum</name>
-+ <value>@tasks_max@</value>
-+ </property>
- </configuration>
Deleted: trunk/dports/java/hadoop/files/patch-fusedfs.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-fusedfs.diff 2012-09-14 06:24:23 UTC (rev 97745)
+++ trunk/dports/java/hadoop/files/patch-fusedfs.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -1,31 +0,0 @@
---- src/contrib/fuse-dfs/build.xml.orig 2012-09-08 22:38:18.000000000 +0900
-+++ src/contrib/fuse-dfs/build.xml 2012-09-08 02:02:14.000000000 +0900
-@@ -39,9 +39,9 @@
- </exec>
- <property name="build.platform"
- value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
-- <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.so"/>
-+ <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.dylib"/>
- <available file="${libhdfs.lib}" property="libhdfs-exists"/>
-- <fail message="libhdfs.so does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
-+ <fail message="libhdfs.dylib does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
- <condition>
- <not><isset property="libhdfs-exists"/></not>
- </condition>
-@@ -60,6 +60,7 @@
-
- <exec executable="/bin/sh" failonerror="true">
- <arg value="${root}/bootstrap.sh"/>
-+ <arg value="${hadoop.root}/build/c++/${build.platform}/lib"/>
- </exec>
- <exec executable="make" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
---- src/contrib/fuse-dfs/src/Makefile.am.orig 2012-05-09 05:34:53.000000000 +0900
-+++ src/contrib/fuse-dfs/src/Makefile.am 2012-09-08 16:33:28.000000000 +0900
-@@ -16,5 +16,5 @@
- bin_PROGRAMS = fuse_dfs
- fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c
- AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_HOME)/src/c++/libhdfs/ -I$(JAVA_HOME)/include/linux/ -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
--AM_LDFLAGS= -L$(HADOOP_HOME)/build/libhdfs -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
-+AM_LDFLAGS= -L$(HADOOP_HOME)/build/libhdfs -lhdfs -L$(FUSE_HOME)/lib -lfuse
-
Deleted: trunk/dports/java/hadoop/files/patch-hadoop-env.sh.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-hadoop-env.sh.diff 2012-09-14 06:24:23 UTC (rev 97745)
+++ trunk/dports/java/hadoop/files/patch-hadoop-env.sh.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -1,39 +0,0 @@
---- conf/hadoop-env.sh.orig 2011-05-05 00:00:30.000000000 +0900
-+++ conf/hadoop-env.sh 2011-05-16 23:40:40.000000000 +0900
-@@ -6,13 +6,16 @@
- # remote nodes.
-
- # The java implementation to use. Required.
--# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-+export JAVA_HOME=@java_home@
-+
-+# Extra Java options.
-+export JAVA_OPTS="-Dfile.encoding=UTF-8 $JAVA_OPTS"
-
- # Extra Java CLASSPATH elements. Optional.
- # export HADOOP_CLASSPATH=
-
- # The maximum amount of heap to use, in MB. Default is 1000.
--# export HADOOP_HEAPSIZE=2000
-+export HADOOP_HEAPSIZE=2000
-
- # Extra Java runtime options. Empty by default.
- # export HADOOP_OPTS=-server
-@@ -31,7 +34,7 @@
- # export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
-
- # Where log files are stored. $HADOOP_HOME/logs by default.
--# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
-+export HADOOP_LOG_DIR=@hadoop_log_dir@
-
- # File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.
- # export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
-@@ -45,7 +48,7 @@
- # export HADOOP_SLAVE_SLEEP=0.1
-
- # The directory where pid files are stored. /tmp by default.
--# export HADOOP_PID_DIR=/var/hadoop/pids
-+export HADOOP_PID_DIR=@hadoop_pid_dir@
-
- # A string representing this instance of hadoop. $USER by default.
- # export HADOOP_IDENT_STRING=$USER
Deleted: trunk/dports/java/hadoop/files/patch-native.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-native.diff 2012-09-14 06:24:23 UTC (rev 97745)
+++ trunk/dports/java/hadoop/files/patch-native.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -1,68 +0,0 @@
---- src/native/Makefile.am.orig 2012-05-09 05:34:52.000000000 +0900
-+++ src/native/Makefile.am 2012-09-02 12:56:57.000000000 +0900
-@@ -51,7 +51,7 @@
- src/org/apache/hadoop/io/nativeio/errno_enum.c \
- src/org/apache/hadoop/io/nativeio/NativeIO.c
- libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
--libhadoop_la_LIBADD = -ldl -ljvm
-+libhadoop_la_LIBADD = -ldl
-
- #
- #vim: sw=4: ts=4: noet
---- src/native/acinclude.m4.orig 2012-05-09 05:34:52.000000000 +0900
-+++ src/native/acinclude.m4 2012-09-01 23:29:18.000000000 +0900
-@@ -13,8 +13,10 @@
- ac_cv_libname_$1="`objdump -p conftest | grep NEEDED | grep $1 | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
- elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
- ac_cv_libname_$1="`ldd conftest | grep $1 | sed 's/^[[[^A-Za-z0-9]]]*\([[[A-Za-z0-9\.]]]*\)[[[^A-Za-z0-9]]]*=>.*$/\"\1\"/'`"
-+ elif test ! -z "`which otool | grep -v 'no otool'`"; then
-+ ac_cv_libname_$1=\"`otool -L conftest | grep $1 | sed -e 's/^[ ]*//' -e 's/ .*//' -e 's/.*\/\(.*\)$/\1/'`\";
- else
-- AC_MSG_ERROR(Can't find either 'objdump' or 'ldd' to compute the dynamic library for '-l$1')
-+ AC_MSG_ERROR(Can't find either 'objdump', 'ldd' or 'otool' to compute the dynamic library for '-l$1')
- fi
- else
- ac_cv_libname_$1=libnotfound.so
---- src/native/configure.ac.orig 2012-05-09 05:34:53.000000000 +0900
-+++ src/native/configure.ac 2012-09-01 22:50:02.000000000 +0900
-@@ -54,9 +54,7 @@
- JNI_LDFLAGS=""
- if test $JAVA_HOME != ""
- then
-- JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
-- JVMSOPATH=`find $JAVA_HOME/jre/ -name libjvm.so | head -n 1`
-- JNI_LDFLAGS="$JNI_LDFLAGS -L`dirname $JVMSOPATH`"
-+ JNI_LDFLAGS="-L$JAVA_HOME/bundle/Libraries"
- fi
- ldflags_bak=$LDFLAGS
- LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
-@@ -73,13 +71,13 @@
-
- dnl Check for JNI headers
- JNI_CPPFLAGS=""
--if test $JAVA_HOME != ""
--then
-- for dir in `find $JAVA_HOME/include -follow -type d`
-+#if test $JAVA_HOME != ""
-+#then
-+ for dir in `find /System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers -follow -type d`
- do
- JNI_CPPFLAGS="$JNI_CPPFLAGS -I$dir"
- done
--fi
-+#fi
- cppflags_bak=$CPPFLAGS
- CPPFLAGS="$CPPFLAGS $JNI_CPPFLAGS"
- AC_CHECK_HEADERS([jni.h], [], AC_MSG_ERROR([Native java headers not found. Is \$JAVA_HOME set correctly?]))
---- src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.orig 2012-05-09 05:34:52.000000000 +0900
-+++ src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c 2012-09-01 23:46:54.000000000 +0900
-@@ -73,7 +73,8 @@
- // was successfull or not (as long as it was called we need to call
- // endnetgrent)
- setnetgrentCalledFlag = 1;
-- if(setnetgrent(cgroup) == 1) {
-+ setnetgrent(cgroup);
-+ if (true) {
- UserList *current = NULL;
- // three pointers are for host, user, domain, we only care
- // about user now
Added: trunk/dports/java/hadoop/files/patch-pseudo.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-pseudo.diff (rev 0)
+++ trunk/dports/java/hadoop/files/patch-pseudo.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -0,0 +1,52 @@
+--- conf/core-site.xml.orig 2011-05-05 00:00:30.000000000 +0900
++++ conf/core-site.xml 2011-05-16 22:42:36.000000000 +0900
+@@ -4,5 +4,12 @@
+ <!-- Put site-specific property overrides in this file. -->
+
+ <configuration>
+-
++ <property>
++ <name>fs.default.name</name>
++ <value>hdfs://localhost:9000</value>
++ </property>
++ <property>
++ <name>hadoop.tmp.dir</name>
++ <value>@hadoop_tmp_dir@</value>
++ </property>
+ </configuration>
+--- conf/hdfs-site.xml.orig 2011-05-05 00:00:30.000000000 +0900
++++ conf/hdfs-site.xml 2011-05-16 21:37:13.000000000 +0900
+@@ -4,5 +4,12 @@
+ <!-- Put site-specific property overrides in this file. -->
+
+ <configuration>
+-
++ <property>
++ <name>dfs.replication</name>
++ <value>1</value>
++ </property>
++ <property>
++ <name>dfs.permissions</name>
++ <value>false</value>
++ </property>
+ </configuration>
+--- conf/mapred-site.xml.orig 2011-05-05 00:00:30.000000000 +0900
++++ conf/mapred-site.xml 2011-05-16 23:00:56.000000000 +0900
+@@ -4,5 +4,16 @@
+ <!-- Put site-specific property overrides in this file. -->
+
+ <configuration>
+-
++ <property>
++ <name>mapred.job.tracker</name>
++ <value>localhost:9001</value>
++ </property>
++ <property>
++ <name>mapred.tasktracker.map.tasks.maximum</name>
++ <value>@tasks_max@</value>
++ </property>
++ <property>
++ <name>mapred.tasktracker.reduce.tasks.maximum</name>
++ <value>@tasks_max@</value>
++ </property>
+ </configuration>
Added: trunk/dports/java/hadoop/files/patch-src-c++.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-src-c++.diff (rev 0)
+++ trunk/dports/java/hadoop/files/patch-src-c++.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -0,0 +1,26 @@
+--- build.xml.orig 2012-05-09 05:35:00.000000000 +0900
++++ build.xml 2012-09-14 00:09:18.000000000 +0900
+@@ -2116,6 +2116,12 @@
+ failonerror="yes">
+ <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
+ <env key="JVM_ARCH" value="${jvm.arch}"/>
++ <env key="CC" value="@cc@"/>
++ <env key="CFLAGS" value="@cflags@"/>
++ <env key="CXX" value="@cxx@"/>
++ <env key="CXXFLAGS" value="@cxxflags@"/>
++ <env key="LDFLAGS" value="@ldflags@"/>
++ <env key="CPPFLAGS" value="@cppflags@"/>
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+--- src/c++/libhdfs/hdfsJniHelper.c.orig 2012-09-08 17:04:50.000000000 +0900
++++ src/c++/libhdfs/hdfsJniHelper.c 2012-09-08 01:37:44.000000000 +0900
+@@ -15,7 +15,7 @@
+ */
+
+ #include <string.h>
+-#include <error.h>
++//#include <error.h>
+ #include "hdfsJniHelper.h"
+
+ static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER;
Added: trunk/dports/java/hadoop/files/patch-src-contrib-fusedfs.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-src-contrib-fusedfs.diff (rev 0)
+++ trunk/dports/java/hadoop/files/patch-src-contrib-fusedfs.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -0,0 +1,79 @@
+--- src/contrib/fuse-dfs/build.xml.orig 2012-09-08 22:38:18.000000000 +0900
++++ src/contrib/fuse-dfs/build.xml 2012-09-08 02:02:14.000000000 +0900
+@@ -39,9 +39,9 @@
+ </exec>
+ <property name="build.platform"
+ value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
+- <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.so"/>
++ <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.dylib"/>
+ <available file="${libhdfs.lib}" property="libhdfs-exists"/>
+- <fail message="libhdfs.so does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
++ <fail message="libhdfs.dylib does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
+ <condition>
+ <not><isset property="libhdfs-exists"/></not>
+ </condition>
+@@ -60,6 +60,7 @@
+
+ <exec executable="/bin/sh" failonerror="true">
+ <arg value="${root}/bootstrap.sh"/>
++ <arg value="${hadoop.root}/build/c++/${build.platform}/lib"/>
+ </exec>
+ <exec executable="make" failonerror="true">
+ <env key="OS_NAME" value="${os.name}"/>
+--- src/contrib/fuse-dfs/src/Makefile.am.orig 2012-05-09 05:34:53.000000000 +0900
++++ src/contrib/fuse-dfs/src/Makefile.am 2012-09-08 16:33:28.000000000 +0900
+@@ -16,5 +16,5 @@
+ bin_PROGRAMS = fuse_dfs
+ fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c
+ AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_HOME)/src/c++/libhdfs/ -I$(JAVA_HOME)/include/linux/ -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
+-AM_LDFLAGS= -L$(HADOOP_HOME)/build/libhdfs -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
++AM_LDFLAGS=
+
+--- src/contrib/fuse-dfs/acinclude.m4.orig 2012-05-09 05:34:52.000000000 +0900
++++ src/contrib/fuse-dfs/acinclude.m4 2012-09-14 15:09:41.000000000 +0900
+@@ -89,14 +89,14 @@
+ ENABLED_OPT="yes"
+ ]
+ )
+-if test "$ENABLED_OPT" = "yes"
+-then
+- CFLAGS="-Wall -O3"
+- CXXFLAGS="-Wall -O3"
+-else
+- CFLAGS="-Wall -g"
+- CXXFLAGS="-Wall -g"
+-fi
++#if test "$ENABLED_OPT" = "yes"
++#then
++# CFLAGS="-Wall -O3"
++# CXXFLAGS="-Wall -O3"
++#else
++# CFLAGS="-Wall -g"
++# CXXFLAGS="-Wall -g"
++#fi
+ AC_MSG_RESULT($ENABLED_OPT)
+ AM_CONDITIONAL([OPT], [test "$ENABLED_OPT" = yes])
+ AM_CONDITIONAL([DEBUG], [test "$ENABLED_OPT" = no])
+@@ -115,14 +115,14 @@
+ ENABLED_DEBUG="yes"
+ ]
+ )
+-if test "$ENABLED_DEBUG" = "yes"
+-then
+- CFLAGS="-Wall -g"
+- CXXFLAGS="-Wall -g"
+-else
+- CFLAGS="-Wall -O3"
+- CXXFLAGS="-Wall -O3"
+-fi
++#if test "$ENABLED_DEBUG" = "yes"
++#then
++# CFLAGS="-Wall -g"
++# CXXFLAGS="-Wall -g"
++#else
++# CFLAGS="-Wall -O3"
++# CXXFLAGS="-Wall -O3"
++#fi
+ AC_MSG_RESULT($ENABLED_DEBUG)
+ AM_CONDITIONAL([DEBUG], [test "$ENABLED_DEBUG" = yes])
+ AM_CONDITIONAL([OPT], [test "$ENABLED_DEBUG" = no])
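A note on the bootstrap.sh hunk above: the extra <arg> passes the libhdfs lib directory into bootstrap.sh as its first positional argument, and the Portfile's reinplace splices a literal -L$@ into the configure line, which the shell then expands to that argument. Roughly, under those assumptions (the path below is illustrative):

  # Illustrative invocation, mirroring what the ant <exec> does:
  sh bootstrap.sh /path/to/hadoop/build/c++/Mac_OS_X-x86_64-64/lib
  # Inside the patched script, "$@" expands to that argument, so
  #   ./configure ... LDFLAGS="... -L$@" ...
  # becomes
  #   ./configure ... LDFLAGS="... -L/path/to/hadoop/build/c++/Mac_OS_X-x86_64-64/lib"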
Added: trunk/dports/java/hadoop/files/patch-src-native.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-src-native.diff (rev 0)
+++ trunk/dports/java/hadoop/files/patch-src-native.diff 2012-09-14 06:54:20 UTC (rev 97746)
@@ -0,0 +1,75 @@
+--- build.xml.orig 2012-05-09 05:35:00.000000000 +0900
++++ build.xml 2012-09-12 21:29:55.000000000 +0900
+@@ -680,7 +680,12 @@
+ <env key="OS_ARCH" value="${os.arch}"/>
+ <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
+ <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
+- <arg line="${native.src.dir}/configure"/>
++ <env key="CC" value="@cc@"/>
++ <env key="CFLAGS" value="@cflags@"/>
++ <env key="LDFLAGS" value="@ldflags@"/>
++ <env key="LIBS" value="@libs@"/>
++ <env key="CPPFLAGS" value="@cppflags@"/>
++ <arg line="${native.src.dir}/configure @args@"/>
+ </exec>
+
+ <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
+--- src/native/Makefile.am.orig 2012-05-09 05:34:52.000000000 +0900
++++ src/native/Makefile.am 2012-09-02 12:56:57.000000000 +0900
+@@ -51,7 +51,7 @@
+ src/org/apache/hadoop/io/nativeio/errno_enum.c \
+ src/org/apache/hadoop/io/nativeio/NativeIO.c
+ libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
+-libhadoop_la_LIBADD = -ldl -ljvm
++libhadoop_la_LIBADD =
+
+ #
+ #vim: sw=4: ts=4: noet
+--- src/native/acinclude.m4.orig 2012-05-09 05:34:52.000000000 +0900
++++ src/native/acinclude.m4 2012-09-01 23:29:18.000000000 +0900
+@@ -13,8 +13,10 @@
+ ac_cv_libname_$1="`objdump -p conftest | grep NEEDED | grep $1 | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
+ elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
+ ac_cv_libname_$1="`ldd conftest | grep $1 | sed 's/^[[[^A-Za-z0-9]]]*\([[[A-Za-z0-9\.]]]*\)[[[^A-Za-z0-9]]]*=>.*$/\"\1\"/'`"
++ elif test ! -z "`which otool | grep -v 'no otool'`"; then
++ ac_cv_libname_$1=\"`otool -L conftest | grep $1 | sed -e 's/^[ ]*//' -e 's/ .*//' -e 's/.*\/\(.*\)$/\1/'`\";
+ else
+- AC_MSG_ERROR(Can't find either 'objdump' or 'ldd' to compute the dynamic library for '-l$1')
++ AC_MSG_ERROR(Can't find either 'objdump' or 'ldd' or 'otool' to compute the dynamic library for '-l$1')
+ fi
+ else
+ ac_cv_libname_$1=libnotfound.so
+--- src/native/configure.ac.orig 2012-05-09 05:34:53.000000000 +0900
++++ src/native/configure.ac 2012-09-12 22:41:48.000000000 +0900
+@@ -54,9 +54,7 @@
+ JNI_LDFLAGS=""
+ if test $JAVA_HOME != ""
+ then
+- JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
+- JVMSOPATH=`find $JAVA_HOME/jre/ -name libjvm.so | head -n 1`
+- JNI_LDFLAGS="$JNI_LDFLAGS -L`dirname $JVMSOPATH`"
++ JNI_LDFLAGS=
+ fi
+ ldflags_bak=$LDFLAGS
+ LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
+@@ -72,7 +70,7 @@
+ AC_CHECK_HEADERS([stdio.h stddef.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
+
+ dnl Check for JNI headers
+-JNI_CPPFLAGS=""
++JNI_CPPFLAGS="-I/System/Library/Frameworks/JavaVM.framework/Headers"
+ if test $JAVA_HOME != ""
+ then
+ for dir in `find $JAVA_HOME/include -follow -type d`
+--- src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.orig 2012-05-09 05:34:52.000000000 +0900
++++ src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c 2012-09-01 23:46:54.000000000 +0900
+@@ -73,7 +73,8 @@
+ // was successfull or not (as long as it was called we need to call
+ // endnetgrent)
+ setnetgrentCalledFlag = 1;
+- if(setnetgrent(cgroup) == 1) {
++ setnetgrent(cgroup);
++ if (true) {
+ UserList *current = NULL;
+ // three pointers are for host, user, domain, we only care
+ // about user now
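Finally, the renamed patch-pseudo.diff only seeds conf/core-site.xml, hdfs-site.xml, and mapred-site.xml for single-node use; bringing the daemons up still follows the stock Hadoop 1.x procedure. A hedged sketch, assuming the port puts the hadoop scripts on PATH:

  hadoop namenode -format   # one-time HDFS initialization
  start-all.sh              # NameNode, DataNode, JobTracker, TaskTracker
  hadoop fs -ls /           # smoke test against hdfs://localhost:9000
  stop-all.sh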