
AMBARI-4585. Ganglia metrics not available for the third host when installing 3 node cluster. (Dmytro Shkvyra via dlysnichenko)

Lisnichenko Dmitro 11 years ago
parent
commit
ecf4ab543f

+ 30 - 14
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/package/scripts/ganglia_monitor.py

@@ -83,39 +83,55 @@ class GangliaMonitor(Script):
                       owner = "root",
                       group = params.user_group)
 
-    if params.is_rmnode_master:
+    if params.is_hsnode_master:
       generate_daemon("gmond",
-                      name = "HDPResourceManager",
+                      name = "HDPHistoryServer",
                       role = "monitor",
                       owner = "root",
                       group = params.user_group)
 
-    if params.is_hsnode_master:
+    if params.is_hbase_master:
       generate_daemon("gmond",
-                      name = "HDPHistoryServer",
+                      name = "HDPHBaseMaster",
                       role = "monitor",
                       owner = "root",
                       group = params.user_group)
 
-    if params.is_hbase_master:
+    if params.is_slave:
       generate_daemon("gmond",
-                      name = "HDPHBaseMaster",
+                      name = "HDPDataNode",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+
+    if params.is_tasktracker:
+      generate_daemon("gmond",
+                      name = "HDPTaskTracker",
                       role = "monitor",
                       owner = "root",
                       group = params.user_group)
 
-    pure_slave = not (params.is_namenode_master and
-                      params.is_jtnode_master and
-                      params.is_rmnode_master and
-                      params.is_hsnode_master and
-                      params.is_hbase_master) and params.is_slave
-    if pure_slave:
+    if params.is_hbase_rs:
       generate_daemon("gmond",
-                      name = "HDPSlaves",
+                      name = "HDPHBaseRegionServer",
                       role = "monitor",
                       owner = "root",
                       group = params.user_group)
 
+    if params.is_flume:
+      generate_daemon("gmond",
+                      name = "HDPFlumeServer",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+ 
+                      
+    if params.pure_slave:
+      generate_daemon("gmond",
+                    name = "HDPSlaves",
+                    role = "monitor",
+                    owner = "root",
+                    group = params.user_group)                  
 
     Directory(path.join(params.ganglia_dir, "conf.d"),
               owner="root",
@@ -137,4 +153,4 @@ class GangliaMonitor(Script):
 
 
 if __name__ == "__main__":
-  GangliaMonitor().execute()
+  GangliaMonitor().execute()
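
The monitor script now generates a dedicated gmond configuration for every role present on the host (DataNode, TaskTracker, HBase RegionServer, Flume) and falls back to the generic HDPSlaves cluster only when params.pure_slave says the host carries none of the known roles. Below is a minimal, self-contained sketch of that dispatch; the stub and the hard-coded flags are hypothetical stand-ins for Ambari's generate_daemon helper and the params module, not the real implementation.

    # Stand-in for the helper in ganglia_monitor.py; the real one shells out
    # to setupGanglia.sh through resource_management resources.
    def generate_daemon(binary, name, role, owner, group):
        print("generate %s config: cluster=%s role=%s owner=%s group=%s"
              % (binary, name, role, owner, group))

    # Hypothetical per-host flags; in the real script these come from params.
    is_slave = True          # params.is_slave        -> HDPDataNode
    is_tasktracker = True    # params.is_tasktracker  -> HDPTaskTracker
    is_hbase_rs = False      # params.is_hbase_rs     -> HDPHBaseRegionServer
    is_flume = False         # params.is_flume        -> HDPFlumeServer
    pure_slave = False       # params.pure_slave, derived in params.py (see the sketch further down)

    for flag, cluster in [(is_slave, "HDPDataNode"),
                          (is_tasktracker, "HDPTaskTracker"),
                          (is_hbase_rs, "HDPHBaseRegionServer"),
                          (is_flume, "HDPFlumeServer"),
                          (pure_slave, "HDPSlaves")]:
        if flag:
            generate_daemon("gmond", name=cluster, role="monitor",
                            owner="root", group="hadoop")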

+ 13 - 11
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/package/scripts/ganglia_server.py

@@ -79,12 +79,6 @@ class GangliaServer(Script):
                       owner = "root",
                       group = params.user_group)
 
-    if params.has_resourcemanager:
-      generate_daemon("gmond",
-                      name = "HDPResourceManager",
-                      role = "server",
-                      owner = "root",
-                      group = params.user_group)
     if params.has_historyserver:
       generate_daemon("gmond",
                       name = "HDPHistoryServer",
@@ -119,11 +113,19 @@ class GangliaServer(Script):
                       role = "server",
                       owner = "root",
                       group = params.user_group)
-    generate_daemon("gmetad",
-                    name = "gmetad",
-                    role = "server",
-                    owner = "root",
-                    group = params.user_group)
+  
+    if params.ganglia_server_host == params.hostname:
+      generate_daemon("gmetad",
+                      name = "gmetad",
+                      role = "server",
+                      owner = "root",
+                      group = params.user_group)
+                      
+      generate_daemon("gmond",
+          name = "HDPSlaves",
+          role = "server",
+          owner = "root",
+          group = params.user_group)
 
     change_permission()
     server_files()
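
With this change, gmetad and the server-side HDPSlaves gmond are configured only on the host that actually runs the Ganglia Server, guarded by comparing params.ganglia_server_host with params.hostname. A minimal sketch of that guard, using hypothetical host names and a stub in place of generate_daemon:

    # Hypothetical values; in params.py both come from the command JSON
    # (clusterHostInfo/ganglia_server_host and the agent's own hostname).
    ganglia_server_host = "c6401.ambari.apache.org"
    hostname = "c6401.ambari.apache.org"

    def generate_daemon(binary, name, role, owner, group):
        # Stand-in for the Ambari helper that writes the daemon config.
        print("%s -> %s (%s)" % (binary, name, role))

    if ganglia_server_host == hostname:
        # Only the Ganglia Server host gets the collector and the
        # server-side HDPSlaves gmond; monitor-only hosts skip this block.
        generate_daemon("gmetad", name="gmetad", role="server",
                        owner="root", group="hadoop")
        generate_daemon("gmond", name="HDPSlaves", role="server",
                        owner="root", group="hadoop")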

+ 10 - 11
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/package/scripts/params.py

@@ -18,6 +18,7 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.core.system import System
+from sets import Set
 
 config = Script.get_config()
 
@@ -37,20 +38,19 @@ rrdcached_base_dir = config['configurations']['global']["rrdcached_base_dir"]
 ganglia_server_host = config["clusterHostInfo"]["ganglia_server_host"][0]
 
 hostname = config["hostname"]
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-jtnode_host = default("/clusterHostInfo/jtnode_host", [])
-rm_host = default("/clusterHostInfo/rm_host", [])
-hs_host = default("/clusterHostInfo/hs_host", [])
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+namenode_host = Set(default("/clusterHostInfo/namenode_host", []))
+jtnode_host = Set(default("/clusterHostInfo/jtnode_host", []))
+hs_host = Set(default("/clusterHostInfo/hs_host", []))
+hbase_master_hosts = Set(default("/clusterHostInfo/hbase_master_hosts", []))
 # datanodes are marked as slave_hosts
-slave_hosts = default("/clusterHostInfo/slave_hosts", [])
-tt_hosts = default("/clusterHostInfo/mapred_tt_hosts", [])
-hbase_rs_hosts = default("/clusterHostInfo/hbase_rs_hosts", [])
-flume_hosts = default("/clusterHostInfo/flume_hosts", [])
+slave_hosts = Set(default("/clusterHostInfo/slave_hosts", []))
+tt_hosts = Set(default("/clusterHostInfo/mapred_tt_hosts", []))
+hbase_rs_hosts = Set(default("/clusterHostInfo/hbase_rs_hosts", []))
+flume_hosts = Set(default("/clusterHostInfo/flume_hosts", []))
 
+pure_slave = not hostname in (namenode_host | jtnode_host | hs_host | hbase_master_hosts | slave_hosts | tt_hosts | hbase_rs_hosts | flume_hosts)
 is_namenode_master = hostname in namenode_host
 is_jtnode_master = hostname in jtnode_host
-is_rmnode_master = hostname in rm_host
 is_hsnode_master = hostname in hs_host
 is_hbase_master = hostname in hbase_master_hosts
 is_slave = hostname in slave_hosts
@@ -60,7 +60,6 @@ is_flume = hostname in flume_hosts
 
 has_namenodes = not len(namenode_host) == 0
 has_jobtracker = not len(jtnode_host) == 0
-has_resourcemanager = not len(rm_host) == 0
 has_historyserver = not len(hs_host) == 0
 has_hbase_masters = not len(hbase_master_hosts) == 0
 has_slaves = not len(slave_hosts) == 0
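
params.py now wraps each clusterHostInfo list in a Set (the Python 2 sets module these stack scripts target) so that pure_slave becomes a single membership test against the union of every known role's hosts. A sketch of the same computation with the built-in set type and hypothetical host names; the real lists come from config["clusterHostInfo"] via default(...).

    # Hypothetical 3-node cluster layout.
    namenode_host      = set(["c6401.ambari.apache.org"])
    jtnode_host        = set(["c6401.ambari.apache.org"])
    hs_host            = set(["c6402.ambari.apache.org"])
    hbase_master_hosts = set(["c6402.ambari.apache.org"])
    slave_hosts        = set(["c6401.ambari.apache.org", "c6402.ambari.apache.org"])
    tt_hosts           = set(["c6401.ambari.apache.org", "c6402.ambari.apache.org"])
    hbase_rs_hosts     = set()
    flume_hosts        = set()

    hostname = "c6403.ambari.apache.org"   # the "third host" with no assigned role

    known_role_hosts = (namenode_host | jtnode_host | hs_host | hbase_master_hosts |
                        slave_hosts | tt_hosts | hbase_rs_hosts | flume_hosts)

    # A host outside every role group still gets the generic HDPSlaves gmond,
    # so its metrics reach the Ganglia server.
    pure_slave = hostname not in known_role_hosts
    print(pure_slave)   # True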

+ 37 - 6
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_monitor.py

@@ -117,18 +117,49 @@ class GangliaMonitor(Script):
                       owner = "root",
                       group = params.user_group)
 
-    pure_slave = not (params.is_namenode_master and
-                      params.is_jtnode_master and
-                      params.is_rmnode_master and
-                      params.is_hsnode_master and
-                      params.is_hbase_master) and params.is_slave
-    if pure_slave:
+    if params.pure_slave:
       generate_daemon("gmond",
                     name = "HDPSlaves",
                     role = "monitor",
                     owner = "root",
                     group = params.user_group)
 
+    if params.is_slave:
+      generate_daemon("gmond",
+                      name = "HDPDataNode",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+
+    if params.is_tasktracker:
+      generate_daemon("gmond",
+                      name = "HDPTaskTracker",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+
+    if params.is_hbase_rs:
+      generate_daemon("gmond",
+                      name = "HDPHBaseRegionServer",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+
+    if params.is_flume:
+      generate_daemon("gmond",
+                      name = "HDPFlumeServer",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+
+    if params.is_jn_host:
+      generate_daemon("gmond",
+                      name = "HDPJournalNode",
+                      role = "monitor",
+                      owner = "root",
+                      group = params.user_group)
+
+
     Directory(path.join(params.ganglia_dir, "conf.d"),
               owner="root",
               group=params.user_group

+ 13 - 6
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/ganglia_server.py

@@ -148,12 +148,19 @@ class GangliaServer(Script):
                       role = "server",
                       owner = "root",
                       group = params.user_group)
-
-    generate_daemon("gmetad",
-                    name = "gmetad",
-                    role = "server",
-                    owner = "root",
-                    group = params.user_group)
+                                
+    if params.ganglia_server_host == params.hostname:
+      generate_daemon("gmetad",
+                      name = "gmetad",
+                      role = "server",
+                      owner = "root",
+                      group = params.user_group)
+                      
+      generate_daemon("gmond",
+          name = "HDPSlaves",
+          role = "server",
+          owner = "root",
+          group = params.user_group)
 
     change_permission()
     server_files()

+ 18 - 13
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/scripts/params.py

@@ -18,6 +18,7 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.core.system import System
+from sets import Set
 
 config = Script.get_config()
 
@@ -37,21 +38,25 @@ rrdcached_base_dir = config['configurations']['global']["rrdcached_base_dir"]
 ganglia_server_host = config["clusterHostInfo"]["ganglia_server_host"][0]
 
 hostname = config["hostname"]
-namenode_host = default("/clusterHostInfo/namenode_host", [])
-jtnode_host = default("/clusterHostInfo/jtnode_host", [])
-rm_host = default("/clusterHostInfo/rm_host", [])
-hs_host = default("/clusterHostInfo/hs_host", [])
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+namenode_host = Set(default("/clusterHostInfo/namenode_host", []))
+jtnode_host = Set(default("/clusterHostInfo/jtnode_host", []))
+rm_host = Set(default("/clusterHostInfo/rm_host", []))
+hs_host = Set(default("/clusterHostInfo/hs_host", []))
+hbase_master_hosts = Set(default("/clusterHostInfo/hbase_master_hosts", []))
 # datanodes are marked as slave_hosts
-slave_hosts = default("/clusterHostInfo/slave_hosts", [])
-tt_hosts = default("/clusterHostInfo/mapred_tt_hosts", [])
-nm_hosts = default("/clusterHostInfo/nm_hosts", [])
-hbase_rs_hosts = default("/clusterHostInfo/hbase_rs_hosts", [])
-flume_hosts = default("/clusterHostInfo/flume_hosts", [])
-jn_hosts = default("/clusterHostInfo/journalnode_hosts", [])
-nimbus_server_hosts = default("/clusterHostInfo/nimbus_hosts", [])
-supervisor_server_hosts = default("/clusterHostInfo/supervisor_hosts", [])
+slave_hosts = Set(default("/clusterHostInfo/slave_hosts", []))
+tt_hosts = Set(default("/clusterHostInfo/mapred_tt_hosts", []))
+nm_hosts = Set(default("/clusterHostInfo/nm_hosts", []))
+hbase_rs_hosts = Set(default("/clusterHostInfo/hbase_rs_hosts", []))
+flume_hosts = Set(default("/clusterHostInfo/flume_hosts", []))
+jn_hosts = Set(default("/clusterHostInfo/journalnode_hosts", []))
+nimbus_server_hosts = Set(default("/clusterHostInfo/nimbus_hosts", []))
+supervisor_server_hosts = Set(default("/clusterHostInfo/supervisor_hosts", []))
 
+pure_slave = not hostname in (namenode_host | jtnode_host | rm_host | hs_host |\
+                              hbase_master_hosts |slave_hosts | tt_hosts | hbase_rs_hosts |\
+                              flume_hosts | nm_hosts | jn_hosts | nimbus_server_hosts |\
+                              supervisor_server_hosts)
 is_namenode_master = hostname in namenode_host
 is_jtnode_master = hostname in jtnode_host
 is_rmnode_master = hostname in rm_host

+ 15 - 1
ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_monitor.py

@@ -143,7 +143,21 @@ class TestGangliaMonitor(RMFTestCase):
                                       '/bin',
                                       '/usr/bin'],
                               )
-    self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -o root -g hadoop',
+    self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPDataNode -o root -g hadoop',
+                              path = ['/usr/libexec/hdp/ganglia',
+                                      '/usr/sbin',
+                                      '/sbin:/usr/local/bin',
+                                      '/bin',
+                                      '/usr/bin'],
+                              )
+    self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPTaskTracker -o root -g hadoop',
+                              path = ['/usr/libexec/hdp/ganglia',
+                                      '/usr/sbin',
+                                      '/sbin:/usr/local/bin',
+                                      '/bin',
+                                      '/usr/bin'],
+                              )
+    self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseRegionServer -o root -g hadoop',
                               path = ['/usr/libexec/hdp/ganglia',
                                       '/usr/sbin',
                                       '/sbin:/usr/local/bin',

+ 7 - 0
ambari-server/src/test/python/stacks/1.3.2/GANGLIA/test_ganglia_server.py

@@ -180,6 +180,13 @@ class TestGangliaServer(RMFTestCase):
                                       '/bin',
                                       '/usr/bin'],
                               )
+    self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPSlaves -m -o root -g hadoop',
+                              path = ['/usr/libexec/hdp/ganglia',
+                                      '/usr/sbin',
+                                      '/sbin:/usr/local/bin',
+                                      '/bin',
+                                      '/usr/bin'],
+                              )
     self.assertResourceCalled('Directory', '/var/lib/ganglia/dwoo',
                               owner = 'nobody',
                               recursive = True,

+ 103 - 111
ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py

@@ -25,135 +25,135 @@ class TestGangliaServer(RMFTestCase):
 
   def test_configure_default(self):
     self.executeScript("2.0.6/services/GANGLIA/package/scripts/ganglia_server.py",
-                       classname="GangliaServer",
-                       command="configure",
-                       config_file="default.json"
+                     classname="GangliaServer",
+                     command="configure",
+                     config_file="default.json"
     )
     self.assertResourceCalled('Directory', '/usr/libexec/hdp/ganglia',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
+                      owner = 'root',
+                      group = 'root',
+                      recursive = True,
+    )
     self.assertResourceCalled('File', '/etc/init.d/hdp-gmetad',
-                              content = StaticFile('gmetad.init'),
-                              mode = 0755,
-                              )
+      content = StaticFile('gmetad.init'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/etc/init.d/hdp-gmond',
-                              content = StaticFile('gmond.init'),
-                              mode = 0755,
-                              )
+      content = StaticFile('gmond.init'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkGmond.sh',
-                              content = StaticFile('checkGmond.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('checkGmond.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkRrdcached.sh',
-                              content = StaticFile('checkRrdcached.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('checkRrdcached.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmetadLib.sh',
-                              content = StaticFile('gmetadLib.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('gmetadLib.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmondLib.sh',
-                              content = StaticFile('gmondLib.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('gmondLib.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/rrdcachedLib.sh',
-                              content = StaticFile('rrdcachedLib.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('rrdcachedLib.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/setupGanglia.sh',
-                              content = StaticFile('setupGanglia.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('setupGanglia.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmetad.sh',
-                              content = StaticFile('startGmetad.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('startGmetad.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmond.sh',
-                              content = StaticFile('startGmond.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('startGmond.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startRrdcached.sh',
-                              content = StaticFile('startRrdcached.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('startRrdcached.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmetad.sh',
-                              content = StaticFile('stopGmetad.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('stopGmetad.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmond.sh',
-                              content = StaticFile('stopGmond.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('stopGmond.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopRrdcached.sh',
-                              content = StaticFile('stopRrdcached.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('stopRrdcached.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/teardownGanglia.sh',
-                              content = StaticFile('teardownGanglia.sh'),
-                              mode = 0755,
-                              )
+      content = StaticFile('teardownGanglia.sh'),
+      mode = 0755,
+    )
     self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaClusters.conf',
-                              owner = 'root',
-                              template_tag = None,
-                              group = 'root',
-                              mode = 0755,
-                              )
+      owner = 'root',
+      template_tag = None,
+      group = 'root',
+      mode = 0755,
+    )
     self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaEnv.sh',
-                              owner = 'root',
-                              template_tag = None,
-                              group = 'root',
-                              mode = 0755,
-                              )
+      owner = 'root',
+      template_tag = None,
+      group = 'root',
+      mode = 0755,
+    )
     self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaLib.sh',
-                              owner = 'root',
-                              template_tag = None,
-                              group = 'root',
-                              mode = 0755,
-                              )
+      owner = 'root',
+      template_tag = None,
+      group = 'root',
+      mode = 0755,
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNameNode -m -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
+      path = ['/usr/libexec/hdp/ganglia',
+     '/usr/sbin',
+     '/sbin:/usr/local/bin',
+     '/bin',
+     '/usr/bin'],
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseMaster -m -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
+      path = ['/usr/libexec/hdp/ganglia',
+     '/usr/sbin',
+     '/sbin:/usr/local/bin',
+     '/bin',
+     '/usr/bin'],
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPResourceManager -m -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
+      path = ['/usr/libexec/hdp/ganglia',
+     '/usr/sbin',
+     '/sbin:/usr/local/bin',
+     '/bin',
+     '/usr/bin'],
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPNodeManager -m -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
+      path = ['/usr/libexec/hdp/ganglia',
+     '/usr/sbin',
+     '/sbin:/usr/local/bin',
+     '/bin',
+     '/usr/bin'],
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHistoryServer -m -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
+      path = ['/usr/libexec/hdp/ganglia',
+     '/usr/sbin',
+     '/sbin:/usr/local/bin',
+     '/bin',
+     '/usr/bin'],
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPDataNode -m -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
+      path = ['/usr/libexec/hdp/ganglia',
+     '/usr/sbin',
+     '/sbin:/usr/local/bin',
+     '/bin',
+     '/usr/bin'],
+    )
     self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -c HDPHBaseRegionServer -m -o root -g hadoop',
                               path = ['/usr/libexec/hdp/ganglia',
                                       '/usr/sbin',
@@ -175,14 +175,6 @@ class TestGangliaServer(RMFTestCase):
                                   '/bin',
                                   '/usr/bin'],
                           )
-
-    self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -t -o root -g hadoop',
-                              path = ['/usr/libexec/hdp/ganglia',
-                                      '/usr/sbin',
-                                      '/sbin:/usr/local/bin',
-                                      '/bin',
-                                      '/usr/bin'],
-                              )
     self.assertResourceCalled('Directory', '/var/lib/ganglia/dwoo',
                               owner = 'nobody',
                               recursive = True,