<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="Generator" content="Apache Forrest">
<meta name="Forrest-version" content="0.8">
<meta name="Forrest-skin-name" content="pelt">
<title>Native Hadoop Libraries</title>
<link type="text/css" href="skin/basic.css" rel="stylesheet">
<link media="screen" type="text/css" href="skin/screen.css" rel="stylesheet">
<link media="print" type="text/css" href="skin/print.css" rel="stylesheet">
<link type="text/css" href="skin/profile.css" rel="stylesheet">
<link rel="shortcut icon" href="images/favicon.ico">
</head>
<body>
- <div id="top">
- <!--+
- |breadtrail
- +-->
- <div class="breadtrail">
- <a href="http://www.apache.org/">Apache</a> > <a href="http://hadoop.apache.org/">Hadoop</a> > <a href="http://hadoop.apache.org/core/">Core</a><script src="skin/breadcrumbs.js" language="JavaScript" type="text/javascript"></script>
- </div>
- <!--+
- |header
- +-->
- <div class="header">
- <!--+
- |start group logo
- +-->
- <div class="grouplogo">
- <a href="http://hadoop.apache.org/"><img class="logoImage" alt="Hadoop" src="images/hadoop-logo.jpg" title="Apache Hadoop"></a>
- </div>
- <!--+
- |end group logo
- +-->
- <!--+
- |start Project Logo
- +-->
- <div class="projectlogo">
- <a href="http://hadoop.apache.org/core/"><img class="logoImage" alt="Hadoop" src="images/core-logo.gif" title="Scalable Computing Platform"></a>
- </div>
- <!--+
- |end Project Logo
- +-->
- <!--+
- |start Search
- +-->
- <div class="searchbox">
- <form action="http://www.google.com/search" method="get" class="roundtopsmall">
- <input value="hadoop.apache.org" name="sitesearch" type="hidden"><input onFocus="getBlank (this, 'Search the site with google');" size="25" name="q" id="query" type="text" value="Search the site with google">
- <input name="Search" value="Search" type="submit">
- </form>
- </div>
- <!--+
- |end search
- +-->
- <!--+
- |start Tabs
- +-->
- <ul id="tabs">
- <li>
- <a class="unselected" href="http://hadoop.apache.org/core/">Project</a>
- </li>
- <li>
- <a class="unselected" href="http://wiki.apache.org/hadoop">Wiki</a>
- </li>
- <li class="current">
- <a class="selected" href="index.html">Hadoop 0.18 Documentation</a>
- </li>
- </ul>
- <!--+
- |end Tabs
- +-->
- </div>
- </div>
- <div id="main">
- <div id="publishedStrip">
- <!--+
- |start Subtabs
- +-->
- <div id="level2tabs"></div>
- <!--+
- |end Endtabs
- +-->
- <script type="text/javascript"><!--
- document.write("Last Published: " + document.lastModified);
- // --></script>
- </div>
- <!--+
- |breadtrail
- +-->
- <div class="breadtrail">
-
- </div>
- <!--+
- |start Menu, mainarea
- +-->
- <!--+
- |start Menu
- +-->
- <div id="menu">
- <div onclick="SwitchMenu('menu_selected_1.1', 'skin/')" id="menu_selected_1.1Title" class="menutitle" style="background-image: url('skin/images/chapter_open.gif');">Documentation</div>
- <div id="menu_selected_1.1" class="selectedmenuitemgroup" style="display: block;">
- <div class="menuitem">
- <a href="index.html">Overview</a>
- </div>
- <div class="menuitem">
- <a href="quickstart.html">Quickstart</a>
- </div>
- <div class="menuitem">
- <a href="cluster_setup.html">Cluster Setup</a>
- </div>
- <div class="menuitem">
- <a href="hdfs_design.html">HDFS Architecture</a>
- </div>
- <div class="menuitem">
- <a href="hdfs_user_guide.html">HDFS User Guide</a>
- </div>
- <div class="menuitem">
- <a href="hdfs_permissions_guide.html">HDFS Permissions Guide</a>
- </div>
- <div class="menuitem">
- <a href="hdfs_quota_admin_guide.html">HDFS Quotas Administrator Guide</a>
- </div>
- <div class="menuitem">
- <a href="commands_manual.html">Commands Manual</a>
- </div>
- <div class="menuitem">
- <a href="hdfs_shell.html">FS Shell Guide</a>
- </div>
- <div class="menuitem">
- <a href="distcp.html">DistCp Guide</a>
- </div>
- <div class="menuitem">
- <a href="mapred_tutorial.html">Map-Reduce Tutorial</a>
- </div>
- <div class="menupage">
- <div class="menupagetitle">Native Hadoop Libraries</div>
- </div>
- <div class="menuitem">
- <a href="streaming.html">Streaming</a>
- </div>
- <div class="menuitem">
- <a href="hadoop_archives.html">Hadoop Archives</a>
- </div>
- <div class="menuitem">
- <a href="hod.html">Hadoop On Demand</a>
- </div>
- <div class="menuitem">
- <a href="api/index.html">API Docs</a>
- </div>
- <div class="menuitem">
- <a href="jdiff/changes.html">API Changes</a>
- </div>
- <div class="menuitem">
- <a href="http://wiki.apache.org/hadoop/">Wiki</a>
- </div>
- <div class="menuitem">
- <a href="http://wiki.apache.org/hadoop/FAQ">FAQ</a>
- </div>
- <div class="menuitem">
- <a href="http://hadoop.apache.org/core/mailing_lists.html">Mailing Lists</a>
- </div>
- <div class="menuitem">
- <a href="releasenotes.html">Release Notes</a>
- </div>
- <div class="menuitem">
- <a href="changes.html">All Changes</a>
- </div>
- </div>
- <div id="credit"></div>
- <div id="roundbottom">
- <img style="display: none" class="corner" height="15" width="15" alt="" src="skin/images/rc-b-l-15-1body-2menu-3menu.png"></div>
- <!--+
- |alternative credits
- +-->
- <div id="credit2"></div>
- </div>
- <!--+
- |end Menu
- +-->
- <!--+
- |start content
- +-->
- <div id="content">
- <div title="Portable Document Format" class="pdflink">
- <a class="dida" href="native_libraries.pdf"><img alt="PDF -icon" src="skin/images/pdfdoc.gif" class="skin"><br>
- PDF</a>
- </div>
<h1>Native Hadoop Libraries</h1>
<div id="minitoc-area">
<ul class="minitoc">
<li>
<a href="#Purpose">Purpose</a>
</li>
<li>
<a href="#Components">Components</a>
</li>
<li>
<a href="#Usage">Usage</a>
</li>
<li>
<a href="#Supported+Platforms">Supported Platforms</a>
</li>
<li>
<a href="#Building+Native+Hadoop+Libraries">Building Native Hadoop Libraries</a>
<ul class="minitoc">
<li>
<a href="#Notes">Notes</a>
</li>
</ul>
</li>
<li>
<a href="#Loading+native+libraries+through+DistributedCache">Loading native libraries through DistributedCache</a>
</li>
</ul>
</div>

<a name="N1000D"></a><a name="Purpose"></a>
<h2 class="h3">Purpose</h2>
<div class="section">
<p>Hadoop has native implementations of certain components, both for
performance and because Java implementations are not available. These
components are provided in a single, dynamically linked native library;
on *nix platforms it is <em>libhadoop.so</em>. This document describes
how to use the native libraries and how to build them.</p>
</div>

<a name="N1001A"></a><a name="Components"></a>
<h2 class="h3">Components</h2>
<div class="section">
<p>Hadoop currently provides native implementations of the following
<a href="api/org/apache/hadoop/io/compress/CompressionCodec.html">
compression codecs</a>:</p>
<ul>

<li>
<a href="http://www.zlib.net/">zlib</a>
</li>

<li>
<a href="http://www.gzip.org/">gzip</a>
</li>

<li>
<a href="http://www.oberhumer.com/opensource/lzo/">lzo</a>
</li>

</ul>
<p>Of the above, the native hadoop library must be available for the
lzo and gzip compression codecs to work.</p>
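<p>For illustration only (this snippet is not part of the original document), a client can obtain one of these codecs through the <span class="codefrag">CompressionCodecFactory</span> and write a compressed file; with the native hadoop library loaded, the gzip codec is backed by the native zlib implementation. The output path below is just an example:</p>
<pre>
// Illustrative sketch: write a gzip-compressed file through the CompressionCodec API.
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class GzipWriteExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path out = new Path("/tmp/example.gz");

    // Pick the codec from the file extension; ".gz" resolves to the gzip codec.
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(out);

    // createOutputStream wraps the raw stream with the codec's compressor
    // (native when libhadoop.so has been loaded).
    OutputStream stream = codec.createOutputStream(fs.create(out));
    stream.write("hello, native compression".getBytes("UTF-8"));
    stream.close();
  }
}
</pre>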
</div>

<a name="N1003D"></a><a name="Usage"></a>
<h2 class="h3">Usage</h2>
<div class="section">
<p>It is fairly simple to use the native hadoop libraries:</p>
<ul>

<li>
Take a look at the
<a href="#Supported+Platforms">supported platforms</a>.
</li>

<li>
Either <a href="http://hadoop.apache.org/core/releases.html#Download">download</a> the pre-built
32-bit i386-Linux native hadoop library (available as part of the hadoop
distribution in the <span class="codefrag">lib/native</span> directory) or
<a href="#Building+Native+Hadoop+Libraries">build</a> it yourself.
</li>

<li>
Make sure you have either or both of the <strong>zlib-1.2</strong> and
<strong>lzo2.0</strong> packages installed on your platform,
depending on your needs.
</li>

</ul>
<p>The <span class="codefrag">bin/hadoop</span> script ensures that the native hadoop
library is on the library path via the system property
<em>-Djava.library.path=&lt;path&gt;</em>.</p>
<p>To check that everything went all right, check the hadoop log files for:</p>
<p>

<span class="codefrag">
DEBUG util.NativeCodeLoader - Trying to load the custom-built
native-hadoop library...
</span>
<br>

<span class="codefrag">
INFO util.NativeCodeLoader - Loaded the native-hadoop library
</span>

</p>
<p>If something goes wrong, then:</p>
<p>

<span class="codefrag">
INFO util.NativeCodeLoader - Unable to load native-hadoop library for
your platform... using builtin-java classes where applicable
</span>

</p>
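<p>You can also check programmatically instead of scanning the logs. A minimal sketch (illustrative, not part of the original document) that queries the <span class="codefrag">NativeCodeLoader</span> utility class shipped with hadoop:</p>
<pre>
// Illustrative check: report whether the native hadoop library was found on
// java.library.path and loaded.
import org.apache.hadoop.util.NativeCodeLoader;

public class NativeCheck {
  public static void main(String[] args) {
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      System.out.println("native-hadoop library loaded");
    } else {
      System.out.println("falling back to builtin-java classes");
    }
  }
}
</pre>
<p>Launch such a class with the <span class="codefrag">bin/hadoop</span> script so that the library path is set up as described above.</p>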
</div>

<a name="N10087"></a><a name="Supported+Platforms"></a>
<h2 class="h3">Supported Platforms</h2>
<div class="section">
<p>The native hadoop library is supported on *nix platforms only.
Unfortunately it is known not to work on <a href="http://www.cygwin.com/">Cygwin</a>
or <a href="http://www.apple.com/macosx">Mac OS X</a>, and it has mainly been used on the
GNU/Linux platform.</p>
<p>It has been tested on the following GNU/Linux distributions:</p>
<ul>

<li>
<a href="http://www.redhat.com/rhel/">RHEL4</a>/<a href="http://fedora.redhat.com/">Fedora</a>
</li>

<li>
<a href="http://www.ubuntu.com/">Ubuntu</a>
</li>

<li>
<a href="http://www.gentoo.org/">Gentoo</a>
</li>

</ul>
<p>On all of the above distributions, a 32/64-bit native hadoop library will work
with a respective 32/64-bit JVM.</p>
</div>

<a name="N100B7"></a><a name="Building+Native+Hadoop+Libraries"></a>
<h2 class="h3">Building Native Hadoop Libraries</h2>
<div class="section">
<p>The native hadoop library is written in
<a href="http://en.wikipedia.org/wiki/ANSI_C">ANSI C</a> and built using
the GNU autotools chain (autoconf, autoheader, automake, autoscan, libtool).
This means it should be straightforward to build it on any platform with
a standards-compliant C compiler and the GNU autotools chain.
See the <a href="#Supported+Platforms">supported platforms</a>.</p>
<p>In particular, you need the following packages on the target
platform:</p>
<ul>

<li>
C compiler (e.g. <a href="http://gcc.gnu.org/">GNU C Compiler</a>)
</li>

<li>
GNU autotools chain:
<a href="http://www.gnu.org/software/autoconf/">autoconf</a>,
<a href="http://www.gnu.org/software/automake/">automake</a>,
<a href="http://www.gnu.org/software/libtool/">libtool</a>
</li>

<li>
zlib-development package (stable version >= 1.2.0)
</li>

<li>
lzo-development package (stable version >= 2.0)
</li>

</ul>
<p>Once you have the prerequisites, use the standard <span class="codefrag">build.xml</span>
and pass along the <span class="codefrag">compile.native</span> flag (set to
<span class="codefrag">true</span>) to build the native hadoop library:</p>
<p>
<span class="codefrag">$ ant -Dcompile.native=true &lt;target&gt;</span>
</p>
<p>The native hadoop library is not built by default, since not everyone is
interested in building it.</p>
<p>You should see the newly-built native hadoop library in:</p>
<p>
<span class="codefrag">build/native/&lt;platform&gt;/lib</span>
</p>
<p>where &lt;platform&gt; is a combination of the system properties
<span class="codefrag">${os.name}-${os.arch}-${sun.arch.data.model}</span>, e.g.
Linux-i386-32.</p>
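<p>The same platform string can be derived from these system properties inside any JVM; a small sketch (the class name is purely illustrative):</p>
<pre>
// Illustrative sketch: print the platform string used for the build output
// directory, composed from the system properties named above.
public class PrintPlatform {
  public static void main(String[] args) {
    String platform = System.getProperty("os.name") + "-"
        + System.getProperty("os.arch") + "-"
        + System.getProperty("sun.arch.data.model");
    System.out.println(platform);  // e.g. Linux-i386-32 on a 32-bit i386 JVM
  }
}
</pre>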
- <a name="N1010A"></a><a name="Notes"></a>
- <h3 class="h4">Notes</h3>
- <ul>
-
<li>
It is <strong>mandatory</strong> to have both the zlib and lzo
development packages on the target platform in order to build the
native hadoop library; however, for deployment it is sufficient to
install only zlib or only lzo if you wish to use just one of them.
</li>

<li>
You need the correct 32/64-bit builds of zlib and lzo, matching the
32/64-bit JVM on the target platform, both when building and when
deploying the native hadoop library.
</li>

</ul>
</div>

<a name="N1011E"></a><a name="Loading+native+libraries+through+DistributedCache"></a>
<h2 class="h3">Loading native libraries through DistributedCache</h2>
<div class="section">
<p>Users can load native shared libraries through the
<a href="mapred_tutorial.html#DistributedCache">DistributedCache</a>,
which <em>distributes</em> and <em>symlinks</em> the library files.</p>
<p>Here is an example describing how to distribute a library and
load it from a map/reduce task; a fuller sketch follows the steps below.</p>
<ol>

<li> First copy the library to HDFS: <br>

<span class="codefrag">bin/hadoop fs -copyFromLocal mylib.so.1 /libraries/mylib.so.1</span>

</li>

<li> The job launching program should contain the following: <br>

<span class="codefrag"> DistributedCache.createSymlink(conf); </span>
<br>

<span class="codefrag"> DistributedCache.addCacheFile(new URI("hdfs://host:port/libraries/mylib.so.1#mylib.so"), conf);
</span>

</li>

<li> The map/reduce task can contain: <br>

<span class="codefrag"> System.loadLibrary("mylib.so"); </span>

</li>

</ol>
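<p>Putting step 2 together in context, here is a minimal driver-side sketch using the <span class="codefrag">org.apache.hadoop.mapred</span> API; the class name <span class="codefrag">MyJob</span>, the input/output arguments, and the <span class="codefrag">host:port</span> placeholder are illustrative, not part of the original example:</p>
<pre>
// Illustrative driver sketch: ship the library via the DistributedCache and
// symlink it into each task's working directory as "mylib.so".
import java.net.URI;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class MyJob {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(MyJob.class);
    conf.setJobName("native-lib-example");

    // Step 2: create the symlink and register the cached library file.
    DistributedCache.createSymlink(conf);
    DistributedCache.addCacheFile(
        new URI("hdfs://host:port/libraries/mylib.so.1#mylib.so"), conf);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    // Step 3 happens inside the map/reduce tasks, which load the symlinked
    // library as shown above once it is in their working directory.
    JobClient.runJob(conf);
  }
}
</pre>
<p>A real job would also set its own mapper and reducer classes; they are omitted here to keep the sketch focused on the DistributedCache calls.</p>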
</div>

</div>
</div>
- <div id="footer">
- <div class="copyright">
- Copyright ©
- 2007 <a href="http://www.apache.org/licenses/">The Apache Software Foundation.</a>
- </div>
</div>
</body>
</html>