[97554] trunk/dports/java/hadoop

hum at macports.org hum at macports.org
Sat Sep 8 20:24:50 PDT 2012


Revision: 97554
          https://trac.macports.org/changeset/97554
Author:   hum at macports.org
Date:     2012-09-08 20:24:48 -0700 (Sat, 08 Sep 2012)
Log Message:
-----------
hadoop: build c++-libhdfs with patch-c++.diff and add fusedfs variant with patch-fusedfs.diff (see #35902[comment:4]); use add_users instead of adduser and delete add_hadoop_user_and_group.
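
For readers following along, the new FUSE support would presumably be selected at install time through the variant introduced below; a minimal sketch, assuming the port and variant names shown in this Portfile and a default MacPorts setup:

    # hedged example, not part of this commit: rebuild hadoop with the new Fuse-DFS variant
    sudo port install hadoop +fusedfs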

Modified Paths:
--------------
    trunk/dports/java/hadoop/Portfile
    trunk/dports/java/hadoop/files/patch-native.diff

Added Paths:
-----------
    trunk/dports/java/hadoop/files/fuse_dfs_wrapper.sh
    trunk/dports/java/hadoop/files/patch-c++.diff
    trunk/dports/java/hadoop/files/patch-fusedfs.diff

Modified: trunk/dports/java/hadoop/Portfile
===================================================================
--- trunk/dports/java/hadoop/Portfile	2012-09-08 23:39:30 UTC (rev 97553)
+++ trunk/dports/java/hadoop/Portfile	2012-09-09 03:24:48 UTC (rev 97554)
@@ -5,7 +5,7 @@
 
 name                hadoop
 version             1.0.3
-revision            2
+revision            3
 categories          java devel science
 maintainers         hum openmaintainer
 
@@ -23,7 +23,8 @@
                     sha256  716ab51f75ffb70343c3cca02f7ba4722f42376edb67eecbd42a426a054e6423
 
 patchfiles          patch-hadoop-env.sh.diff \
-                    patch-native.diff
+                    patch-native.diff \
+                    patch-c++.diff
 
 depends_build       bin:ant:apache-ant
 depends_lib         port:zlib \
@@ -44,16 +45,34 @@
     }
 }
 
+# Fix a file permission to build c++-libhdfs.
+post-extract {
+    file attributes ${worksrcpath}/src/c++/libhdfs/install-sh -permissions 0755
+}
+
 universal_variant   no
 
 use_configure       no
 
+# Build native and c++ libraries.
 build.cmd           ant
 build.args          -Dcompile.native=true \
                     -Dsnappy.prefix=${prefix} \
-                    -Dinstall.native=${prefix}
-build.target        compile
+                    -Dcompile.c++=true \
+                    -Dlibhdfs=true
+build.target        compile-native compile-c++-libhdfs
 
+# Fix install_name of dylib.
+post-build {
+    foreach file [glob ${worksrcpath}/build/native/**/lib/*.dylib \
+                       ${worksrcpath}/build/c++/**/lib/*.dylib] {
+        if {[file isfile ${file}]} {
+            set libname [file tail ${file}]
+            system "install_name_tool -id ${prefix}/lib/${libname} ${file}"
+        }
+    }
+}
+
 # Hadoop home and conf directories.
 set hadoop_basedir  ${prefix}/share/java
 set hadoop_home     ${hadoop_basedir}/${distname}
@@ -69,6 +88,41 @@
 
 set hadoopuser      hadoop
 
+add_users ${hadoopuser} \
+    group=${hadoopuser} \
+    realname=Hadoop\ Server \
+    home=${hadoop_var_dir} \
+    shell=/bin/bash
+
+variant fusedfs description {Add Fuse-DFS} {
+    depends_lib-append port:fuse4x
+
+    patchfiles-append  patch-fusedfs.diff
+
+    post-patch {
+        set cppflags "-I${prefix}/include -I/System/Library/Frameworks/JavaVM.framework/Headers"
+        set ldflags  "-L${prefix}/lib -L$@"
+        reinplace "s|configure|configure CPPFLAGS=\"${cppflags}\" LDFLAGS=\"${ldflags}\"|" \
+            ${worksrcpath}/src/contrib/fuse-dfs/bootstrap.sh
+    }
+
+    post-build {
+        system -W ${worksrcpath} "ant compile-contrib -Dlibhdfs=1 -Dfusedfs=1"
+    }
+
+    post-destroot {
+        xinstall -m 755 \
+            ${worksrcpath}/build/contrib/fuse-dfs/fuse_dfs \
+            ${destroot}${prefix}/bin
+        # Install fuse_dfs_wrapper.sh.
+        xinstall -m 755 ${filespath}/fuse_dfs_wrapper.sh ${destroot}${hadoop_home}/bin
+        set sh ${destroot}${hadoop_home}/bin/fuse_dfs_wrapper.sh
+        reinplace "s|@hadoop_home@|${hadoop_home}|" ${sh}
+        reinplace "s|@java_home@|${java_home}|"     ${sh}
+        reinplace "s|@prefix@|${prefix}|"           ${sh}
+    }
+}
+
 destroot {
     # Copy the distribution to Hadoop home directory.
     xinstall -m 755 -d  ${destroot}${hadoop_home}
@@ -86,8 +140,9 @@
     xinstall -m 755 -d ${webinf}
     destroot.keepdirs-append ${webinf}
 
-    # Install native libraries.
-    foreach file [glob ${worksrcpath}/build/native/**/lib/*] {
+    # Install native and c++ libraries.
+    foreach file [glob ${worksrcpath}/build/native/**/lib/* \
+                       ${worksrcpath}/build/c++/**/lib/*] {
         xinstall -m 644 ${file} ${destroot}${prefix}/lib
     }
     
@@ -111,8 +166,6 @@
     reinplace "s|@hadoop_log_dir@|${hadoop_log_dir}|g" ${env_sh}
     reinplace "s|@hadoop_pid_dir@|${hadoop_pid_dir}|g" ${env_sh}
 
-    add_hadoop_user_and_group
-
     # Create working directories.
     xinstall -m 755 -o ${hadoopuser} -g ${hadoopuser} -d \
         ${destroot}${hadoop_var_dir} \
@@ -124,22 +177,6 @@
         ${destroot}${hadoop_pid_dir}
 }
 
-pre-activate {
-    add_hadoop_user_and_group
-}
-
-proc add_hadoop_user_and_group {} {
-    global hadoopuser hadoop_var_dir
-    if {![existsgroup ${hadoopuser}]} {
-        addgroup ${hadoopuser}
-        adduser ${hadoopuser} \
-            gid=[existsgroup ${hadoopuser}] \
-            realname=Hadoop\ Server \
-            home=${hadoop_var_dir} \
-            shell=/bin/bash
-    }
-}
-
 post-deactivate {
     ui_msg "********************************************************"
     ui_msg "* To revert the system after uninstalling the port:"

Added: trunk/dports/java/hadoop/files/fuse_dfs_wrapper.sh
===================================================================
--- trunk/dports/java/hadoop/files/fuse_dfs_wrapper.sh	                        (rev 0)
+++ trunk/dports/java/hadoop/files/fuse_dfs_wrapper.sh	2012-09-09 03:24:48 UTC (rev 97554)
@@ -0,0 +1,29 @@
+#
+# Copyright 2005 The Apache Software Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ "$HADOOP_HOME" = "" ]; then
+export HADOOP_HOME=@hadoop_home@
+fi
+
+for f in ls $HADOOP_HOME/lib/*.jar $HADOOP_HOME/*.jar ; do
+export  CLASSPATH=$CLASSPATH:$f
+done
+
+if [ "$JAVA_HOME" = "" ]; then
+export  JAVA_HOME=@java_home@
+fi
+
+@prefix@/bin/fuse_dfs $@
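
For reference, the wrapper above is normally invoked with an HDFS URI and a local mount point; a hedged usage sketch (the namenode host, port, and mount point are placeholders, and the install path assumes the default prefix and the hadoop_home set in the Portfile):

    # not part of this commit: mount HDFS through fuse_dfs via the wrapper
    /opt/local/share/java/hadoop-1.0.3/bin/fuse_dfs_wrapper.sh dfs://namenode.example.com:8020 /mnt/hdfs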

Added: trunk/dports/java/hadoop/files/patch-c++.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-c++.diff	                        (rev 0)
+++ trunk/dports/java/hadoop/files/patch-c++.diff	2012-09-09 03:24:48 UTC (rev 97554)
@@ -0,0 +1,11 @@
+--- src/c++/libhdfs/hdfsJniHelper.c.orig	2012-09-08 17:04:50.000000000 +0900
++++ src/c++/libhdfs/hdfsJniHelper.c	2012-09-08 01:37:44.000000000 +0900
+@@ -15,7 +15,7 @@
+  */
+ 
+ #include <string.h> 
+-#include <error.h>
++//#include <error.h>
+ #include "hdfsJniHelper.h"
+ 
+ static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER;

Added: trunk/dports/java/hadoop/files/patch-fusedfs.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-fusedfs.diff	                        (rev 0)
+++ trunk/dports/java/hadoop/files/patch-fusedfs.diff	2012-09-09 03:24:48 UTC (rev 97554)
@@ -0,0 +1,31 @@
+--- src/contrib/fuse-dfs/build.xml.orig	2012-09-08 22:38:18.000000000 +0900
++++ src/contrib/fuse-dfs/build.xml	2012-09-08 02:02:14.000000000 +0900
+@@ -39,9 +39,9 @@
+         </exec>
+     <property name="build.platform" 
+             value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
+-    <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.so"/>
++    <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.dylib"/>
+         <available file="${libhdfs.lib}" property="libhdfs-exists"/>
+-    <fail message="libhdfs.so does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
++    <fail message="libhdfs.dylib does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
+          <condition>
+             <not><isset property="libhdfs-exists"/></not>
+           </condition>
+@@ -60,6 +60,7 @@
+ 
+     <exec executable="/bin/sh" failonerror="true">
+       <arg value="${root}/bootstrap.sh"/>
++      <arg value="${hadoop.root}/build/c++/${build.platform}/lib"/>
+     </exec>
+     <exec executable="make" failonerror="true">
+       <env key="OS_NAME" value="${os.name}"/>
+--- src/contrib/fuse-dfs/src/Makefile.am.orig	2012-05-09 05:34:53.000000000 +0900
++++ src/contrib/fuse-dfs/src/Makefile.am	2012-09-08 16:33:28.000000000 +0900
+@@ -16,5 +16,5 @@
+ bin_PROGRAMS = fuse_dfs
+ fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c  fuse_impls_chown.c  fuse_impls_create.c  fuse_impls_flush.c fuse_impls_getattr.c  fuse_impls_mkdir.c  fuse_impls_mknod.c  fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c  fuse_impls_unlink.c fuse_impls_write.c
+ AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_HOME)/src/c++/libhdfs/ -I$(JAVA_HOME)/include/linux/ -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
+-AM_LDFLAGS= -L$(HADOOP_HOME)/build/libhdfs -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
++AM_LDFLAGS= -L$(HADOOP_HOME)/build/libhdfs -lhdfs -L$(FUSE_HOME)/lib -lfuse
+ 

Modified: trunk/dports/java/hadoop/files/patch-native.diff
===================================================================
--- trunk/dports/java/hadoop/files/patch-native.diff	2012-09-08 23:39:30 UTC (rev 97553)
+++ trunk/dports/java/hadoop/files/patch-native.diff	2012-09-09 03:24:48 UTC (rev 97554)
@@ -66,14 +66,3 @@
      UserList *current = NULL;
      // three pointers are for host, user, domain, we only care
      // about user now
---- build.xml.orig	2012-05-09 05:35:00.000000000 +0900
-+++ build.xml	2012-09-05 23:10:45.000000000 +0900
-@@ -680,7 +680,7 @@
- 	  <env key="OS_ARCH" value="${os.arch}"/>
- 	  <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- 	  <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
--      <arg line="${native.src.dir}/configure"/>
-+      <arg line="${native.src.dir}/configure --prefix=${install.native}"/>
-     </exec>
- 
-     <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">