Selaa lähdekoodia

Committing AMBARI-812, AMBARI-820, AMBARI-812

git-svn-id: https://svn.apache.org/repos/asf/incubator/ambari/branches/AMBARI-666@1395430 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 12 vuotta sitten
vanhempi
commit
0179792b0c
100 muutettua tiedostoa jossa 6084 lisäystä ja 2010 poistoa
  1. 20 68
      AMBARI-666-CHANGES.txt
  2. 275 235
      ambari-agent/src/main/puppet/manifestloader/site.pp
  3. 9 3
      ambari-agent/src/main/puppet/modules/configgenerator/manifests/configfile.pp
  4. 17 23
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
  5. 2 3
      ambari-agent/src/main/puppet/modules/hdp-hcat-old/manifests/init.pp
  6. 2 3
      ambari-agent/src/main/puppet/modules/hdp-hive/manifests/init.pp
  7. 3 4
      ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/init.pp
  8. 2 3
      ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/init.pp
  9. 58 199
      ambari-agent/src/main/python/ambari_agent/ActionQueue.py
  10. 34 16
      ambari-agent/src/main/python/ambari_agent/Controller.py
  11. 8 0
      ambari-agent/src/main/python/ambari_agent/Hardware.py
  12. 4 2
      ambari-agent/src/main/python/ambari_agent/Heartbeat.py
  13. 1 1
      ambari-agent/src/main/python/ambari_agent/security.py
  14. 36 19
      ambari-agent/src/main/python/ambari_agent/shell.py
  15. 12 0
      ambari-agent/src/main/python/manifestGenerator/imports.txt
  16. 98 0
      ambari-agent/src/main/python/manifestGenerator/manifestGenerator.py
  17. 10 3
      ambari-api/pom.xml
  18. 30 0
      ambari-api/src/main/java/org/apache/ambari/api/controller/ProviderModule.java
  19. 5 5
      ambari-api/src/main/java/org/apache/ambari/api/controller/ganglia/GangliaPropertyProvider.java
  20. 130 18
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/ClusterControllerImpl.java
  21. 48 0
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/DefaultProviderModule.java
  22. 51 18
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/PropertyIdImpl.java
  23. 62 0
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/PropertyPredicateVisitor.java
  24. 33 7
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/RequestImpl.java
  25. 25 3
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/ResourceImpl.java
  26. 528 83
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/ResourceProviderImpl.java
  27. 32 17
      ambari-api/src/main/java/org/apache/ambari/api/controller/internal/SchemaImpl.java
  28. 8 1
      ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/ConnectionFactory.java
  29. 582 47
      ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/JDBCManagementController.java
  30. 44 0
      ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/JDBCProviderModule.java
  31. 424 0
      ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/JDBCResourceProvider.java
  32. 15 7
      ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/SQLPredicateVisitor.java
  33. 19 2
      ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/SQLiteConnectionFactory.java
  34. 5 5
      ambari-api/src/main/java/org/apache/ambari/api/controller/jmx/JMXPropertyProvider.java
  35. 15 40
      ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/ClusterControllerHelper.java
  36. 0 1
      ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/DBHelper.java
  37. 11 11
      ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/PredicateBuilder.java
  38. 11 3
      ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/PredicateHelper.java
  39. 2 2
      ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/Properties.java
  40. 33 0
      ambari-api/src/main/java/org/apache/ambari/api/handlers/CreateHandler.java
  41. 11 2
      ambari-api/src/main/java/org/apache/ambari/api/handlers/DelegatingRequestHandler.java
  42. 33 0
      ambari-api/src/main/java/org/apache/ambari/api/handlers/DeleteHandler.java
  43. 53 0
      ambari-api/src/main/java/org/apache/ambari/api/handlers/ReadHandler.java
  44. 8 1
      ambari-api/src/main/java/org/apache/ambari/api/handlers/RequestHandler.java
  45. 17 4
      ambari-api/src/main/java/org/apache/ambari/api/handlers/RequestHandlerFactory.java
  46. 33 0
      ambari-api/src/main/java/org/apache/ambari/api/handlers/UpdateHandler.java
  47. 25 13
      ambari-api/src/main/java/org/apache/ambari/api/query/Query.java
  48. 133 78
      ambari-api/src/main/java/org/apache/ambari/api/query/QueryImpl.java
  49. 83 17
      ambari-api/src/main/java/org/apache/ambari/api/resource/BaseResourceDefinition.java
  50. 14 25
      ambari-api/src/main/java/org/apache/ambari/api/resource/ClusterResourceDefinition.java
  51. 79 27
      ambari-api/src/main/java/org/apache/ambari/api/resource/ComponentResourceDefinition.java
  52. 99 31
      ambari-api/src/main/java/org/apache/ambari/api/resource/HostComponentResourceDefinition.java
  53. 23 30
      ambari-api/src/main/java/org/apache/ambari/api/resource/HostResourceDefinition.java
  54. 70 9
      ambari-api/src/main/java/org/apache/ambari/api/resource/ResourceDefinition.java
  55. 23 30
      ambari-api/src/main/java/org/apache/ambari/api/resource/ServiceResourceDefinition.java
  56. 36 7
      ambari-api/src/main/java/org/apache/ambari/api/services/BaseService.java
  57. 57 8
      ambari-api/src/main/java/org/apache/ambari/api/services/ClusterService.java
  58. 2 2
      ambari-api/src/main/java/org/apache/ambari/api/services/ComponentService.java
  59. 2 2
      ambari-api/src/main/java/org/apache/ambari/api/services/HostComponentService.java
  60. 2 2
      ambari-api/src/main/java/org/apache/ambari/api/services/HostService.java
  61. 71 22
      ambari-api/src/main/java/org/apache/ambari/api/services/Request.java
  62. 11 2
      ambari-api/src/main/java/org/apache/ambari/api/services/RequestFactory.java
  63. 49 28
      ambari-api/src/main/java/org/apache/ambari/api/services/RequestImpl.java
  64. 8 1
      ambari-api/src/main/java/org/apache/ambari/api/services/ResponseFactory.java
  65. 9 8
      ambari-api/src/main/java/org/apache/ambari/api/services/Result.java
  66. 10 20
      ambari-api/src/main/java/org/apache/ambari/api/services/ResultImpl.java
  67. 33 0
      ambari-api/src/main/java/org/apache/ambari/api/services/ResultPostProcessor.java
  68. 115 0
      ambari-api/src/main/java/org/apache/ambari/api/services/ResultPostProcessorImpl.java
  69. 55 7
      ambari-api/src/main/java/org/apache/ambari/api/services/ServiceService.java
  70. 139 0
      ambari-api/src/main/java/org/apache/ambari/api/services/serializers/JsonSerializer.java
  71. 38 0
      ambari-api/src/main/java/org/apache/ambari/api/services/serializers/ResultSerializer.java
  72. 101 0
      ambari-api/src/main/java/org/apache/ambari/api/util/TreeNode.java
  73. 127 0
      ambari-api/src/main/java/org/apache/ambari/api/util/TreeNodeImpl.java
  74. 7 21
      ambari-api/src/test/java/org/apache/ambari/api/TestSuite.java
  75. 135 70
      ambari-api/src/test/java/org/apache/ambari/api/controller/internal/ClusterControllerImplTest.java
  76. 1 1
      ambari-api/src/test/java/org/apache/ambari/api/controller/internal/PropertyIdImplTest.java
  77. 2 2
      ambari-api/src/test/java/org/apache/ambari/api/controller/internal/RequestImplTest.java
  78. 2 2
      ambari-api/src/test/java/org/apache/ambari/api/controller/internal/ResourceImplTest.java
  79. 22 27
      ambari-api/src/test/java/org/apache/ambari/api/controller/internal/SchemaImplTest.java
  80. 827 131
      ambari-api/src/test/java/org/apache/ambari/api/controller/jdbc/JDBCManagementControllerTest.java
  81. 3 3
      ambari-api/src/test/java/org/apache/ambari/api/controller/utilities/PredicateBuilderTest.java
  82. 2 2
      ambari-api/src/test/java/org/apache/ambari/api/controller/utilities/PropertiesTest.java
  83. 34 34
      ambari-api/src/test/java/org/apache/ambari/api/handlers/DelegatingRequestHandlerTest.java
  84. 56 0
      ambari-api/src/test/java/org/apache/ambari/api/handlers/ReadHandlerTest.java
  85. 246 246
      ambari-api/src/test/java/org/apache/ambari/api/query/QueryImplTest.java
  86. 126 42
      ambari-api/src/test/java/org/apache/ambari/api/services/ClusterServiceTest.java
  87. 19 40
      ambari-api/src/test/java/org/apache/ambari/api/services/ComponentServiceTest.java
  88. 18 41
      ambari-api/src/test/java/org/apache/ambari/api/services/HostComponentServiceTest.java
  89. 18 41
      ambari-api/src/test/java/org/apache/ambari/api/services/HostServiceTest.java
  90. 133 42
      ambari-api/src/test/java/org/apache/ambari/api/services/ServiceServiceTest.java
  91. BIN
      ambari-api/src/test/resources/data.db
  92. 38 13
      ambari-project/pom.xml
  93. 35 7
      ambari-server/pom.xml
  94. 7 3
      ambari-server/src/main/assemblies/server.xml
  95. 3 2
      ambari-server/src/main/java/org/apache/ambari/server/ServiceComponentHostNotFoundException.java
  96. 31 0
      ambari-server/src/main/java/org/apache/ambari/server/ServiceComponentNotFoundException.java
  97. 28 0
      ambari-server/src/main/java/org/apache/ambari/server/ServiceNotFoundException.java
  98. 3 0
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
  99. 14 9
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBInMemoryImpl.java
  100. 1 3
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java

+ 20 - 68
AMBARI-666-CHANGES.txt

@@ -12,11 +12,22 @@ AMBARI-666 branch (unreleased changes)
 
   NEW FEATURES
 
-  AMBARI-816. Update Cluster Management -> Services pages to use a left nav 
-  rather than tabs for service selection. (yusaku)
+  AMBARI-812. In API , improve partial response support to drill down n levels
+  (John Speidel)
 
-  AMBARI-815. Ensure data persistence during navigation, refresh and
-  ensure necessary restrictions. (Jaimin Jetly via yusaku)
+  AMBARI-791. Add unit tests and java docs for SPI code. (Tom Beerbower)
+
+  AMBARI-820. Remove JAXB dependencies in Server Agent protocol and move to
+  POJO based jackson serializer. (mahadev)
+
+  AMBARI-819. Management controller implementation work. (hitesh)
+
+  AMBARI-811. Bug fix in jaxb serialization for maps. (jitendra)
+
+  AMBARI-810. Controller layer implementation part 1. (hitesh)
+
+  AMBARI-807. Fix Action scheduler tests because of fsm interface changes.
+  (jitendra)
 
   AMBARI-806. Remove State object as configs/stack version/running state are
   handled as standalone entities. (hitesh)
@@ -25,10 +36,8 @@ AMBARI-666 branch (unreleased changes)
 
   AMBARI-803. FSM initial refactoring for eventual live/desired objects. (hitesh)
 
-  AMBARI-802. Enhance Cluster Management pages. (yusaku)
-
   AMBARI-800. Hack to add a stage for testing in in-memory db. (jitendra)
-
+  
   AMBARI-801. Fix heartbeat message from the agent which is causing NPE at the
   server. (mahadev)
 
@@ -48,10 +57,8 @@ AMBARI-666 branch (unreleased changes)
 
   AMBARI-790. OK in registration response. (jitendra)
 
-  AMBARI-789. Prototype for management spi interface. (hitesh)
-
   AMBARI-787. Registration throws HostNotFoundException for new hosts. (jitendra)
-
+  
   AMBARI-788. Fix server and agent startup for end to end testing. (mahadev)
 
   AMBARI-785. Action response unit test. (jitendra)
@@ -65,32 +72,11 @@ AMBARI-666 branch (unreleased changes)
   AMBARI-754. Heartbeat handler: Registration response should query component 
   status. (jitendra)
 
-  AMBARI-779. Introduce ManagementController interface. (Tom Beerbower via hitesh)
-
-  AMBARI-755. Heartbeat handler: Update state as reported in heartbeat.
+  AMBARI-755. Heartbeat handler: Update state as reported in heartbeat. 
   (jitendra)
 
-  AMBARI-777. Add NOTICE file for copyright of stdlib from puppetlabs.
-  (mahadev)
-
-  AMBARI-776. Puppet scripts for all the modules to install/configure the
-  stack. (mahadev)
-
   AMBARI-756. Heartbeat handler: Handle heartbeat timeout. (jitendra)
 
-  AMBARI-772. Stylize main nav. (yusaku)
-
-  AMBARI-771. Add security between the server and agent and authentication for
-  the API. (mahadev)
-
-  AMBARI-770. Cluster Management pages for Ambari Web. (yusaku)
-
-  AMBARI-769. Implement step 9 (Install,start and test) of installer wizard.
-  (Jaimin Jetly via yusaku)
-
-  AMBARI-768. Implement step 5 of installer wizard (Assign Masters).
-  (Ananya Sen via yusaku)
-
   AMBARI-767. Add bootstrap script to ssh in parallel and setup agents on a
   list of hosts. (mahadev)
 
@@ -115,41 +101,16 @@ AMBARI-666 branch (unreleased changes)
 
   AMBARI-751. Re-structure servicecomponenthost fsm layout. (hitesh)
 
-  AMBARI-747. Add unit tests for step2 (Install option page) of installer.
-  (Jaimin Jetly via yusaku)
-
-  AMBARI-746. Integrate configuration properties and custom configuration 
-  file overrides in Customize Services page. (yusaku)
-
   AMBARI-732. Action scheduler unit tests. (jitendra)
 
   AMBARI-739. Cluster fsm implementation. (hitesh)
 
-  AMBARI-745. Add unit tests for Installer Step 1 (Welcome page). (yusaku)
-
-  AMBARI-744. Add definition for service config properties. (yusaku)
-
-  AMBARI-743. Add unit testing framework for Ambari Web. (yusaku)
-
-  AMBARI-742. Implement running a bootstrap api, keep a fifo queue and logic
-  for getting a request. (mahadev)
-
-  AMBARI-741. Use ember-i18n to externalize string resources in Ambari Web.
-  (yusaku)
-
-  AMBARI-735. Clean up Installer Welcome page and Install Options page.
-  (Jaimin Jetly via yusaku)
-
   AMBARI-738. s/Node/Host/g. (hitesh)
 
   AMBARI-737. ServiceComponentNode FSM implementation. (hitesh)
 
-  AMBARI-734. Initial work for adding DataNode/TaskTracker/RegionServer
-  configuration overrides on groups of hosts in Installer Customize Services
-  page. (yusaku)
-
-  AMBARI-736. Initial work on Cluster Management pages. (yusaku)
-
+  AMBARI-722. Action scheduler implementation. (jitendra)
+  
   AMBARI-733. Add Jersey Resource for BootStrapping and JAXB elements for API
   entities. (mahadev)
 
@@ -235,11 +196,6 @@ AMBARI-666 branch (unreleased changes)
 
   BUG FIXES
 
-  AMBARI-817. Fix import dependencies so that unit test suites can be run
-  again for Ambari Web. (yusaku)
-
-  AMBARI-804. Fix routing issues with Admin pages. (yusaku)
-
   AMBARI-798. Fix import issue due to move of Predicate class. (hitesh)
 
   AMBARI-780. Make FSM related changes for heartbeat handler. (hitesh)
@@ -248,14 +204,10 @@ AMBARI-666 branch (unreleased changes)
 
   AMBARI-773. Change Host FSM as per new requirements of heartbeat handler. (hitesh)
 
-  AMBARI-761. Fix broken build for adding guice servlet dependency. (mahadev)
-
   AMBARI-753. Fix broken compile as a result of re-factor of FSM layout. (hitesh)
 
   AMBARI-752. Add missing license header to TestServiceComponentHostState. (hitesh)
 
-  AMBARI-750. Fix build compilation issue. (mahadev)
-
   AMBARI-718. Fix installer navigation. (yusaku)
 
   AMBARI-684. Remove non-required dependencies from pom files (hitesh via jitendra)

+ 275 - 235
ambari-agent/src/main/puppet/manifestloader/site.pp

@@ -19,250 +19,290 @@
 #
 #
 
-$hdp_hadoop_mapred_queue_acls_props => {'mapred.queue.default.acl-submit-job' => '*',
-  'mapred.queue.default.acl-administer-jobs' => '*',}
+configgenerator::configfile::configuration {'hdp_hadoop__mapred_queue_acls':
+  filename => 'mapred-queue-acls.xml',
+  module => 'hdp-hadoop',
+  properties => {'mapred.queue.default.acl-submit-job' => '*',
+    'mapred.queue.default.acl-administer-jobs' => '*',}
+  }
 
-$hdp_hadoop_policy_props => {'security.client.protocol.acl' => '*',
-  'security.client.datanode.protocol.acl' => '*',
-  'security.datanode.protocol.acl' => '*',
-  'security.inter.datanode.protocol.acl' => '*',
-  'security.namenode.protocol.acl' => '*',
-  'security.inter.tracker.protocol.acl' => '*',
-  'security.job.submission.protocol.acl' => '*',
-  'security.task.umbilical.protocol.acl' => '*',
-  'security.admin.operations.protocol.acl' => '',
-  'security.refresh.usertogroups.mappings.protocol.acl' => '',
-  'security.refresh.policy.protocol.acl' => '',}
+configgenerator::configfile::configuration {'hdp_hadoop__hadoop_policy':
+  filename => 'hadoop-policy.xml',
+  module => 'hdp-hadoop',
+  properties=> {'security.client.protocol.acl' => '*',
+    'security.client.datanode.protocol.acl' => '*',
+    'security.datanode.protocol.acl' => '*',
+    'security.inter.datanode.protocol.acl' => '*',
+    'security.namenode.protocol.acl' => '*',
+    'security.inter.tracker.protocol.acl' => '*',
+    'security.job.submission.protocol.acl' => '*',
+    'security.task.umbilical.protocol.acl' => '*',
+    'security.admin.operations.protocol.acl' => '',
+    'security.refresh.usertogroups.mappings.protocol.acl' => '',
+    'security.refresh.policy.protocol.acl' => '',}
+   }
 
-$hdp_hadoop_core_site_props => {'io.file.buffer.size' => '131072',
-  'io.serializations' => 'org.apache.hadoop.io.serializer.WritableSerialization',
-  'io.compression.codecs' => '',
-  'io.compression.codec.lzo.class' => 'com.hadoop.compression.lzo.LzoCodec',
-  'fs.default.name' => '',
-  'fs.trash.interval' => '360',
-  'fs.checkpoint.dir' => '',
-  'fs.checkpoint.edits.dir' => '',
-  'fs.checkpoint.period' => '21600',
-  'fs.checkpoint.size' => '536870912',
-  'ipc.client.idlethreshold' => '8000',
-  'ipc.client.connection.maxidletime' => '30000',
-  'ipc.client.connect.max.retries' => '50',
-  'webinterface.private.actions' => 'false',
-  'hadoop.security.authentication' => '',
-  'hadoop.security.authorization' => '',
-  'hadoop.security.auth_to_local' => '',}
+configgenerator::configfile::configuration {'hdp_hadoop__core_site':
+  filename => 'core-site.xml',
+  module => 'hdp-hadoop',
+  properties => {'io.file.buffer.size' => '131072',
+    'io.serializations' => 'org.apache.hadoop.io.serializer.WritableSerialization',
+    'io.compression.codecs' => '',
+    'io.compression.codec.lzo.class' => 'com.hadoop.compression.lzo.LzoCodec',
+    'fs.default.name' => '',
+    'fs.trash.interval' => '360',
+    'fs.checkpoint.dir' => '',
+    'fs.checkpoint.edits.dir' => '',
+    'fs.checkpoint.period' => '21600',
+    'fs.checkpoint.size' => '536870912',
+    'ipc.client.idlethreshold' => '8000',
+    'ipc.client.connection.maxidletime' => '30000',
+    'ipc.client.connect.max.retries' => '50',
+    'webinterface.private.actions' => 'false',
+    'hadoop.security.authentication' => '',
+    'hadoop.security.authorization' => '',
+    'hadoop.security.auth_to_local' => '',}
+  }
 
-$hdp_hadoop_mapred_site_props => {'io.sort.mb' => '',
-  'io.sort.record.percent' => '.2',
-  'io.sort.spill.percent' => '',
-  'io.sort.factor' => '100',
-  'mapred.tasktracker.tasks.sleeptime-before-sigkill' => '250',
-  'mapred.job.tracker.handler.count' => '50',
-  'mapred.system.dir' => '',
-  'mapred.job.tracker' => '',
-  'mapred.job.tracker.http.address' => '',
-  'mapred.local.dir' => '',
-  'mapreduce.cluster.administrators' => ' hadoop',
-  'mapred.reduce.parallel.copies' => '30',
-  'mapred.tasktracker.map.tasks.maximum' => '',
-  'mapred.tasktracker.reduce.tasks.maximum' => '',
-  'tasktracker.http.threads' => '50',
-  'mapred.map.tasks.speculative.execution' => 'false',
-  'mapred.reduce.tasks.speculative.execution' => 'false',
-  'mapred.reduce.slowstart.completed.maps' => '0.05',
-  'mapred.inmem.merge.threshold' => '1000',
-  'mapred.job.shuffle.merge.percent' => '0.66',
-  'mapred.job.shuffle.input.buffer.percent'  => '0.7',
-  'mapred.map.output.compression.codec' => '',
-  'mapred.output.compression.type' => 'BLOCK',
-  'mapred.jobtracker.completeuserjobs.maximum' => '0',
-  'mapred.jobtracker.taskScheduler' => '',
-  'mapred.jobtracker.restart.recover' => 'false',
-  'mapred.job.reduce.input.buffer.percent' => '0.0',
-  'mapreduce.reduce.input.limit' => '10737418240',
-  'mapred.compress.map.output' => '',
-  'mapred.task.timeout' => '600000',
-  'jetty.connector' => 'org.mortbay.jetty.nio.SelectChannelConnector',
-  'mapred.task.tracker.task-controller' => '',
-  'mapred.child.root.logger' => 'INFO,TLA',
-  'mapred.child.java.opts' => '',
-  'mapred.cluster.map.memory.mb' => '',
-  'mapred.cluster.reduce.memory.mb' => '',
-  'mapred.job.map.memory.mb' => '',
-  'mapred.job.reduce.memory.mb' => '',
-  'mapred.cluster.max.map.memory.mb' => '',
-  'mapred.cluster.max.reduce.memory.mb' => '',
-  'mapred.hosts' => '',
-  'mapred.hosts.exclude' => '',
-  'mapred.max.tracker.blacklists' => '16',
-  'mapred.healthChecker.script.path' => '',
-  'mapred.healthChecker.interval' => '135000',
-  'mapred.healthChecker.script.timeout' => '60000',
-  'mapred.job.tracker.persist.jobstatus.active' => 'false',
-  'mapred.job.tracker.persist.jobstatus.hours' => '1',
-  'mapred.job.tracker.persist.jobstatus.dir' => '',
-  'mapred.jobtracker.retirejob.check' => '10000',
-  'mapred.jobtracker.retirejob.interval' => '0',
-  'mapred.job.tracker.history.completed.location' => '/mapred/history/done',
-  'mapred.task.maxvmem' => '',
-  'mapred.jobtracker.maxtasks.per.job' => '',
-  'mapreduce.fileoutputcommitter.marksuccessfuljobs' => 'false',
-  'mapred.userlog.retain.hours' => '',
-  'mapred.job.reuse.jvm.num.tasks' => '1',
-  'mapreduce.jobtracker.kerberos.principal' => '',
-  'mapreduce.tasktracker.kerberos.principal' => '',
-  'hadoop.job.history.user.location' => 'none',
-  'mapreduce.jobtracker.keytab.file' => '',
-  'mapreduce.tasktracker.keytab.file' => '',
-  'mapreduce.jobtracker.staging.root.dir' => '/user',
-  'mapreduce.tasktracker.group' => 'hadoop',
-  'mapreduce.jobtracker.split.metainfo.maxsize' => '50000000',
-  'mapreduce.history.server.embedded' => 'false',
-  'mapreduce.history.server.http.address' => '',
-  'mapreduce.jobhistory.kerberos.principal' => '',
-  'mapreduce.jobhistory.keytab.file' => '',
-  'mapred.jobtracker.blacklist.fault-timeout-window' => '180',
-  'mapred.jobtracker.blacklist.fault-bucket-width' => '15',
-  'mapred.queue.names' => 'default',}
+configgenerator::configfile::configuration {'hdp_hadoop__mapred_site':
+  filename => 'mapred-site.xml',
+  module => 'hdp-hadoop',
+  properties => {'io.sort.mb' => '',
+    'io.sort.record.percent' => '.2',
+    'io.sort.spill.percent' => '',
+    'io.sort.factor' => '100',
+    'mapred.tasktracker.tasks.sleeptime-before-sigkill' => '250',
+    'mapred.job.tracker.handler.count' => '50',
+    'mapred.system.dir' => '',
+    'mapred.job.tracker' => '',
+    'mapred.job.tracker.http.address' => '',
+    'mapred.local.dir' => '',
+    'mapreduce.cluster.administrators' => ' hadoop',
+    'mapred.reduce.parallel.copies' => '30',
+    'mapred.tasktracker.map.tasks.maximum' => '',
+    'mapred.tasktracker.reduce.tasks.maximum' => '',
+    'tasktracker.http.threads' => '50',
+    'mapred.map.tasks.speculative.execution' => 'false',
+    'mapred.reduce.tasks.speculative.execution' => 'false',
+    'mapred.reduce.slowstart.completed.maps' => '0.05',
+    'mapred.inmem.merge.threshold' => '1000',
+    'mapred.job.shuffle.merge.percent' => '0.66',
+    'mapred.job.shuffle.input.buffer.percent'  => '0.7',
+    'mapred.map.output.compression.codec' => '',
+    'mapred.output.compression.type' => 'BLOCK',
+    'mapred.jobtracker.completeuserjobs.maximum' => '0',
+    'mapred.jobtracker.taskScheduler' => '',
+    'mapred.jobtracker.restart.recover' => 'false',
+    'mapred.job.reduce.input.buffer.percent' => '0.0',
+    'mapreduce.reduce.input.limit' => '10737418240',
+    'mapred.compress.map.output' => '',
+    'mapred.task.timeout' => '600000',
+    'jetty.connector' => 'org.mortbay.jetty.nio.SelectChannelConnector',
+    'mapred.task.tracker.task-controller' => '',
+    'mapred.child.root.logger' => 'INFO,TLA',
+    'mapred.child.java.opts' => '',
+    'mapred.cluster.map.memory.mb' => '',
+    'mapred.cluster.reduce.memory.mb' => '',
+    'mapred.job.map.memory.mb' => '',
+    'mapred.job.reduce.memory.mb' => '',
+    'mapred.cluster.max.map.memory.mb' => '',
+    'mapred.cluster.max.reduce.memory.mb' => '',
+    'mapred.hosts' => '',
+    'mapred.hosts.exclude' => '',
+    'mapred.max.tracker.blacklists' => '16',
+    'mapred.healthChecker.script.path' => '',
+    'mapred.healthChecker.interval' => '135000',
+    'mapred.healthChecker.script.timeout' => '60000',
+    'mapred.job.tracker.persist.jobstatus.active' => 'false',
+    'mapred.job.tracker.persist.jobstatus.hours' => '1',
+    'mapred.job.tracker.persist.jobstatus.dir' => '',
+    'mapred.jobtracker.retirejob.check' => '10000',
+    'mapred.jobtracker.retirejob.interval' => '0',
+    'mapred.job.tracker.history.completed.location' => '/mapred/history/done',
+    'mapred.task.maxvmem' => '',
+    'mapred.jobtracker.maxtasks.per.job' => '',
+    'mapreduce.fileoutputcommitter.marksuccessfuljobs' => 'false',
+    'mapred.userlog.retain.hours' => '',
+    'mapred.job.reuse.jvm.num.tasks' => '1',
+    'mapreduce.jobtracker.kerberos.principal' => '',
+    'mapreduce.tasktracker.kerberos.principal' => '',
+    'hadoop.job.history.user.location' => 'none',
+    'mapreduce.jobtracker.keytab.file' => '',
+    'mapreduce.tasktracker.keytab.file' => '',
+    'mapreduce.jobtracker.staging.root.dir' => '/user',
+    'mapreduce.tasktracker.group' => 'hadoop',
+    'mapreduce.jobtracker.split.metainfo.maxsize' => '50000000',
+    'mapreduce.history.server.embedded' => 'false',
+    'mapreduce.history.server.http.address' => '',
+    'mapreduce.jobhistory.kerberos.principal' => '',
+    'mapreduce.jobhistory.keytab.file' => '',
+    'mapred.jobtracker.blacklist.fault-timeout-window' => '180',
+    'mapred.jobtracker.blacklist.fault-bucket-width' => '15',
+    'mapred.queue.names' => 'default',}
+  }
 
-$hdp_hadoop_capacity_scheduler_props => {'mapred.capacity-scheduler.queue.default.capacity' => '100',
-  'mapred.capacity-scheduler.queue.default.supports-priority' => 'false',
-  'mapred.capacity-scheduler.queue.default.minimum-user-limit-percent' => '100',
-  'mapred.capacity-scheduler.queue.default.maximum-initialized-jobs-per-user' => '25',}
+configgenerator::configfile::configuration {'hdp_hadoop__capacity_scheduler':
+  filename => 'capacity-scheduler.xml',
+  module => 'hdp-hadoop',
+  properties => {'mapred.capacity-scheduler.queue.default.capacity' => '100',
+    'mapred.capacity-scheduler.queue.default.supports-priority' => 'false',
+    'mapred.capacity-scheduler.queue.default.minimum-user-limit-percent' => '100',
+    'mapred.capacity-scheduler.queue.default.maximum-initialized-jobs-per-user' => '25',}
+  }
 
-$hdp_hadoop_hdfs_site_props => {'dfs.name.dir' => '',
-  'dfs.support.append' => '',
-  'dfs.webhdfs.enabled' => '',
-  'dfs.datanode.failed.volumes.tolerated' => '',
-  'dfs.block.local-path-access.user' => '',
-  'dfs.data.dir' => '',
-  'dfs.hosts.exclude' => '',
-  'dfs.hosts' => '',
-  'dfs.replication.max' => '50',
-  'dfs.replication' => '',
-  'dfs.heartbeat.interval' => '3',
-  'dfs.safemode.threshold.pct' => '1.0f',
-  'dfs.balance.bandwidthPerSec' => '6250000',
-  'dfs.datanode.address' => '',
-  'dfs.datanode.http.address' => '',
-  'dfs.block.size' => '134217728',
-  'dfs.http.address' => '',
-  'dfs.datanode.du.reserved' => '',
-  'dfs.datanode.ipc.address' => '0.0.0.0:8010',
-  'dfs.blockreport.initialDelay' => '120',
-  'dfs.datanode.du.pct' => '0.85f',
-  'dfs.namenode.handler.count' => '40',
-  'dfs.datanode.max.xcievers' => '1024',
-  'dfs.umaskmode' => '077',
-  'dfs.web.ugi' => 'gopher,gopher',
-  'dfs.permissions' => 'true',
-  'dfs.permissions.supergroup' => 'hdfs',
-  'dfs.namenode.handler.count' => '100',
-  'ipc.server.max.response.size' => '5242880',
-  'dfs.block.access.token.enable' => 'true',
-  'dfs.namenode.kerberos.principal' => '',
-  'dfs.secondary.namenode.kerberos.principal' => '',
-  'dfs.namenode.kerberos.https.principal' => '',
-  'dfs.secondary.namenode.kerberos.https.principal' => '',
-  'dfs.secondary.http.address' => '',
-  'dfs.secondary.https.port' => '50490',
-  'dfs.web.authentication.kerberos.principal' => '',
-  'dfs.web.authentication.kerberos.keytab' => '',
-  'dfs.datanode.kerberos.principal' => '',
-  'dfs.namenode.keytab.file' => '',
-  'dfs.secondary.namenode.keytab.file' => '',
-  'dfs.datanode.keytab.file' => '',
-  'dfs.https.port' => '50470',
-  'dfs.https.address' => '',
-  'dfs.datanode.data.dir.perm' => '',
-  'dfs.access.time.precision' => '0',
-  'dfs.cluster.administrators' => ' hdfs',
-  'ipc.server.read.threadpool.size' => '5',
-  'dfs.namenode.kerberos.internal.spnego.principal' => '',
-  'dfs.secondary.namenode.kerberos.internal.spnego.principal' => '',}
+configgenerator::configfile::configuration {'hdp_hadoop__hdfs_site':
+  filename => 'hdfs-site.xml',
+  module => 'hdp-hadoop',
+  properties => {'dfs.name.dir' => '',
+    'dfs.support.append' => '',
+    'dfs.webhdfs.enabled' => '',
+    'dfs.datanode.failed.volumes.tolerated' => '',
+    'dfs.block.local-path-access.user' => '',
+    'dfs.data.dir' => '',
+    'dfs.hosts.exclude' => '',
+    'dfs.hosts' => '',
+    'dfs.replication.max' => '50',
+    'dfs.replication' => '',
+    'dfs.heartbeat.interval' => '3',
+    'dfs.safemode.threshold.pct' => '1.0f',
+    'dfs.balance.bandwidthPerSec' => '6250000',
+    'dfs.datanode.address' => '',
+    'dfs.datanode.http.address' => '',
+    'dfs.block.size' => '134217728',
+    'dfs.http.address' => '',
+    'dfs.datanode.du.reserved' => '',
+    'dfs.datanode.ipc.address' => '0.0.0.0:8010',
+    'dfs.blockreport.initialDelay' => '120',
+    'dfs.datanode.du.pct' => '0.85f',
+    'dfs.namenode.handler.count' => '40',
+    'dfs.datanode.max.xcievers' => '1024',
+    'dfs.umaskmode' => '077',
+    'dfs.web.ugi' => 'gopher,gopher',
+    'dfs.permissions' => 'true',
+    'dfs.permissions.supergroup' => 'hdfs',
+    'dfs.namenode.handler.count' => '100',
+    'ipc.server.max.response.size' => '5242880',
+    'dfs.block.access.token.enable' => 'true',
+    'dfs.namenode.kerberos.principal' => '',
+    'dfs.secondary.namenode.kerberos.principal' => '',
+    'dfs.namenode.kerberos.https.principal' => '',
+    'dfs.secondary.namenode.kerberos.https.principal' => '',
+    'dfs.secondary.http.address' => '',
+    'dfs.secondary.https.port' => '50490',
+    'dfs.web.authentication.kerberos.principal' => '',
+    'dfs.web.authentication.kerberos.keytab' => '',
+    'dfs.datanode.kerberos.principal' => '',
+    'dfs.namenode.keytab.file' => '',
+    'dfs.secondary.namenode.keytab.file' => '',
+    'dfs.datanode.keytab.file' => '',
+    'dfs.https.port' => '50470',
+    'dfs.https.address' => '',
+    'dfs.datanode.data.dir.perm' => '',
+    'dfs.access.time.precision' => '0',
+    'dfs.cluster.administrators' => ' hdfs',
+    'ipc.server.read.threadpool.size' => '5',
+    'dfs.namenode.kerberos.internal.spnego.principal' => '',
+    'dfs.secondary.namenode.kerberos.internal.spnego.principal' => '',}
+  }
 
-$hdp_hcat_old_hive_site_props => {'hive.metastore.local' => 'false',
-  'javax.jdo.option.ConnectionURL' => '',
-  'javax.jdo.option.ConnectionDriverName' => 'com.mysql.jdbc.Driver',
-  'javax.jdo.option.ConnectionUserName' => '',
-  'javax.jdo.option.ConnectionPassword' => '',
-  'hive.metastore.warehouse.dir' => '/apps/hive/warehouse',
-  'hive.metastore.sasl.enabled' => '',
-  'hive.metastore.kerberos.keytab.file'  => '',
-  'hive.metastore.kerberos.principal' => '',
-  'hive.metastore.cache.pinobjtypes' => 'Table,Database,Type,FieldSchema,Order',
-  'hive.metastore.uris' => '',
-  'hive.semantic.analyzer.factory.impl' => 'org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory',
-  'hadoop.clientside.fs.operations',
-  'hive.metastore.client.socket.timeout' => '60',
-  'hive.metastore.execute.setugi' => 'true',}
+configgenerator::configfile::configuration {'hdp_hcat_old__hive_site':
+  filename => 'hive-site.xml',
+  module => 'hdp-hcat-old',
+  properties => {'hive.metastore.local' => 'false',
+    'javax.jdo.option.ConnectionURL' => '',
+    'javax.jdo.option.ConnectionDriverName' => 'com.mysql.jdbc.Driver',
+    'javax.jdo.option.ConnectionUserName' => '',
+    'javax.jdo.option.ConnectionPassword' => '',
+    'hive.metastore.warehouse.dir' => '/apps/hive/warehouse',
+    'hive.metastore.sasl.enabled' => '',
+    'hive.metastore.kerberos.keytab.file'  => '',
+    'hive.metastore.kerberos.principal' => '',
+    'hive.metastore.cache.pinobjtypes' => 'Table,Database,Type,FieldSchema,Order',
+    'hive.metastore.uris' => '',
+    'hive.semantic.analyzer.factory.impl' => 'org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory',
+    'hadoop.clientside.fs.operations',
+    'hive.metastore.client.socket.timeout' => '60',
+    'hive.metastore.execute.setugi' => 'true',}
+  }
 
-$hdp_hive_hive_site_props => {'hive.metastore.local' => 'false',
-  'javax.jdo.option.ConnectionURL' => '',
-  'javax.jdo.option.ConnectionDriverName' => 'com.mysql.jdbc.Driver',
-  'javax.jdo.option.ConnectionUserName' => '',
-  'javax.jdo.option.ConnectionPassword' => '',
-  'hive.metastore.warehouse.dir' => '/apps/hive/warehouse',
-  'hive.metastore.sasl.enabled' => '',
-  'hive.metastore.kerberos.keytab.file' => '',
-  'hive.metastore.kerberos.principal' => '',
-  'hive.metastore.cache.pinobjtypes' => 'Table,Database,Type,FieldSchema,Order',
-  'hive.metastore.uris' => '',
-  'hive.semantic.analyzer.factory.impl' => 'org.apache.hivealog.cli.HCatSemanticAnalyzerFactory',
-  'hadoop.clientside.fs.operations' => 'true',
-  'hive.metastore.client.socket.timeout' => '60',
-  'hive.metastore.execute.setugi' => 'true',
-  'hive.security.authorization.enabled' => 'true',
-  'hive.security.authorization.manager' => 'org.apache.hcatalog.security.HdfsAuthorizationProvider',}
+configgenerator::configfile::configuration {'hdp_hive__hive_site':
+  filename => 'hive-site.xml',
+  module => 'hdp-hive',
+  properties => {'hive.metastore.local' => 'false',
+    'javax.jdo.option.ConnectionURL' => '',
+    'javax.jdo.option.ConnectionDriverName' => 'com.mysql.jdbc.Driver',
+    'javax.jdo.option.ConnectionUserName' => '',
+    'javax.jdo.option.ConnectionPassword' => '',
+    'hive.metastore.warehouse.dir' => '/apps/hive/warehouse',
+    'hive.metastore.sasl.enabled' => '',
+    'hive.metastore.kerberos.keytab.file' => '',
+    'hive.metastore.kerberos.principal' => '',
+    'hive.metastore.cache.pinobjtypes' => 'Table,Database,Type,FieldSchema,Order',
+    'hive.metastore.uris' => '',
+    'hive.semantic.analyzer.factory.impl' => 'org.apache.hivealog.cli.HCatSemanticAnalyzerFactory',
+    'hadoop.clientside.fs.operations' => 'true',
+    'hive.metastore.client.socket.timeout' => '60',
+    'hive.metastore.execute.setugi' => 'true',
+    'hive.security.authorization.enabled' => 'true',
+    'hive.security.authorization.manager' => 'org.apache.hcatalog.security.HdfsAuthorizationProvider',}
+  }
 		
-$hdp_oozie_oozie_site_props => {'oozie.base.url' => '',
-  'oozie.system.id' => '',
-  'oozie.systemmode' => 'NORMAL',
-  'oozie.service.AuthorizationService.security.enabled' => 'true',
-  'oozie.service.PurgeService.older.than' => '30',
-  'oozie.service.PurgeService.purge.interval' => '3600',
-  'oozie.service.CallableQueueService.queue.size' => '1000',
-  'oozie.service.CallableQueueService.threads' => '10',
-  'oozie.service.CallableQueueService.callable.concurrency' => '3',
-  'oozie.service.coord.normal.default.timeout' => '120',
-  'oozie.db.schema.name' => 'oozie',
-  'oozie.service.StoreService.create.db.schema' => 'true',
-  'oozie.service.StoreService.jdbc.driver' => 'org.apache.derby.jdbc.EmbeddedDriver',
-  'oozie.service.StoreService.jdbc.url' => '',
-  'oozie.service.StoreService.jdbc.username' => 'sa',
-  'oozie.service.StoreService.jdbc.password' => ' ',
-  'oozie.service.StoreService.pool.max.active.conn' => '10',
-  'oozie.service.HadoopAccessorService.kerberos.enabled' => '',
-  'local.realm' => '',
-  'oozie.service.HadoopAccessorService.keytab.file' => '',
-  'oozie.service.HadoopAccessorService.kerberos.principal' => '',
-  'oozie.service.HadoopAccessorService.jobTracker.whitelist' => ' ',
-  'oozie.authentication.type' => '',
-  'oozie.authentication.kerberos.principal' => '',
-  'oozie.authentication.kerberos.keytab' => '',
-  'oozie.service.HadoopAccessorService.nameNode.whitelist' => ' ',
-  'oozie.service.WorkflowAppService.system.libpath' => '',
-  'use.system.libpath.for.mapreduce.and.pig.jobs' => 'false',
-  'oozie.authentication.kerberos.name.rules' => '',}
+configgenerator::configfile::configuration {'hdp_oozie__oozie_site':
+  filename => 'oozie-site.xml',
+  module => 'hdp-oozie',
+  properties => {'oozie.base.url' => '',
+    'oozie.system.id' => '',
+    'oozie.systemmode' => 'NORMAL',
+    'oozie.service.AuthorizationService.security.enabled' => 'true',
+    'oozie.service.PurgeService.older.than' => '30',
+    'oozie.service.PurgeService.purge.interval' => '3600',
+    'oozie.service.CallableQueueService.queue.size' => '1000',
+    'oozie.service.CallableQueueService.threads' => '10',
+    'oozie.service.CallableQueueService.callable.concurrency' => '3',
+    'oozie.service.coord.normal.default.timeout' => '120',
+    'oozie.db.schema.name' => 'oozie',
+    'oozie.service.StoreService.create.db.schema' => 'true',
+    'oozie.service.StoreService.jdbc.driver' => 'org.apache.derby.jdbc.EmbeddedDriver',
+    'oozie.service.StoreService.jdbc.url' => '',
+    'oozie.service.StoreService.jdbc.username' => 'sa',
+    'oozie.service.StoreService.jdbc.password' => ' ',
+    'oozie.service.StoreService.pool.max.active.conn' => '10',
+    'oozie.service.HadoopAccessorService.kerberos.enabled' => '',
+    'local.realm' => '',
+    'oozie.service.HadoopAccessorService.keytab.file' => '',
+    'oozie.service.HadoopAccessorService.kerberos.principal' => '',
+    'oozie.service.HadoopAccessorService.jobTracker.whitelist' => ' ',
+    'oozie.authentication.type' => '',
+    'oozie.authentication.kerberos.principal' => '',
+    'oozie.authentication.kerberos.keytab' => '',
+    'oozie.service.HadoopAccessorService.nameNode.whitelist' => ' ',
+    'oozie.service.WorkflowAppService.system.libpath' => '',
+    'use.system.libpath.for.mapreduce.and.pig.jobs' => 'false',
+    'oozie.authentication.kerberos.name.rules' => '',}
+  }
 
-$hdp_templeton_templeton_site_props => {'templeton.port' => '50111',
-  'templeton.hadoop.conf.dir' => '',
-  'templeton.jar' => '',
-  'templeton.libjars' => '',
-  'templeton.hadoop' => '',
-  'templeton.pig.archive' => '',
-  'templeton.pig.path' => '',
-  'templeton.hcat' => '',
-  'templeton.hive.archive' => '',
-  'templeton.hive.path' => '',
-  'templeton.hive.properties' => '',
-  'templeton.zookeeper.hosts' => '',
-  'templeton.storage.class' => 'org.apache.hcatalog.templeton.tool.ZooKeeperStorage',
-  'templeton.override.enabled' => 'false',
-  'templeton.streaming.jar' => 'hdfs:///apps/templeton/hadoop-streaming.jar',
-  'templeton.kerberos.principal' => '',
-  'templeton.kerberos.keytab' => '',
-  'templeton.kerberos.secret' => 'secret',}
+configgenerator::configfile::configuration {'hdp_templeton__templeton_site':
+  filename => 'templeton-site.xml',
+  module => 'hdp-templeton',
+  configuration => {'templeton.port' => '50111',
+    'templeton.hadoop.conf.dir' => '',
+    'templeton.jar' => '',
+    'templeton.libjars' => '',
+    'templeton.hadoop' => '',
+    'templeton.pig.archive' => '',
+    'templeton.pig.path' => '',
+    'templeton.hcat' => '',
+    'templeton.hive.archive' => '',
+    'templeton.hive.path' => '',
+    'templeton.hive.properties' => '',
+    'templeton.zookeeper.hosts' => '',
+    'templeton.storage.class' => 'org.apache.hcatalog.templeton.tool.ZooKeeperStorage',
+    'templeton.override.enabled' => 'false',
+    'templeton.streaming.jar' => 'hdfs:///apps/templeton/hadoop-streaming.jar',
+    'templeton.kerberos.principal' => '',
+    'templeton.kerberos.keytab' => '',
+    'templeton.kerberos.secret' => 'secret',}
+  }
     
 class manifestloader () {
     file { '/etc/puppet/agent/modules.tgz':

+ 9 - 3
ambari-agent/src/main/puppet/modules/configgenerator/manifests/configfile.pp

@@ -43,9 +43,9 @@
 # Note: Set correct $modulespath in the configgenerator (or pass it as parameter)
 #
 
-define configgenerator::configfile ($configname=$title, $modulespath='/etc/puppet/modules', $module, $properties) {
+define configgenerator::configfile ($modulespath='/etc/puppet/modules', $configuration) {
   $configcontent = inline_template('<configuration>
-  <% properties.each do |key,value| -%>
+  <% configuration.props.each do |key,value| -%>
   <property>
     <name><%=key %></name>
     <value><%=value %></value>
@@ -56,6 +56,12 @@ define configgenerator::configfile ($configname=$title, $modulespath='/etc/puppe
 file {'config':
   ensure  => present,
   content => $configcontent,
-  path => "${modulespath}/${module}/templates/${configname}",
+  path => "${modulespath}/${configuration::modulename}/templates/${configuration::configfile}",
 }
 } 
+
+define configgenerator::configfile::configuration($filename, $module, $nameproperties) {
+  $configfile = $filename
+  $modulename = $module
+  $props = $properties
+}

+ 17 - 23
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp

@@ -41,34 +41,28 @@ class hdp-hadoop::initialize()
 #Configs generation  
   include manifestloader
 
-  configgenerator::configfile{'hdfs-site.xml': 
-    module => 'hdp-hadoop',
-    properties => $manifestloader::hdp_hadoop_hdfs_site_props
+  configgenerator::configfile{'mapred_queue_acls_xml': 
+    configuration => $manifestloader::Hdp_hadoop__mapred_queue_acls
   }
-
-  configgenerator::configfile{'capacity-scheduler.xml': 
-    module => 'hdp-hadoop',
-    properties => $manifestloader::hdp_hadoop_capacity_scheduler_props
+  
+  configgenerator::configfile{'hadoop_policy_xml': 
+    configuration => $manifestloader::Hdp_hadoop__hadoop_policy
   }
-
-  configgenerator::configfile{'mapred-site.xml': 
-    module => 'hdp-hadoop',
-    properties => $manifestloader::hdp_hadoop_mapred_site_props
+  
+  configgenerator::configfile{'core_site_xml': 
+    configuration => $manifestloader::Hdp_hadoop__core_site
   }
-      
-  configgenerator::configfile{'core-site.xml': 
-    module => 'hdp-hadoop',
-    properties => $manifestloader::hdp_hadoop_core_site_props
+
+  configgenerator::configfile{'mapred_site_xml': 
+    configuration => $manifestloader::Hdp_hadoop__mapred_site
   }
-      
-  configgenerator::configfile{'hadoop-policy.xml': 
-    module => 'hdp-hadoop',
-    properties => $manifestloader::hdp_hadoop_policy_props
+  
+  configgenerator::configfile{'capacity_scheduler_xml': 
+    configuration => $manifestloader::Hdp_hadoop__capacity_scheduler
   }
-      
-  configgenerator::configfile{'mapred-queue-acls.xml.erb': 
-    module => 'hdp-hadoop',
-    properties => $manifestloader::hdp_hadoop_mapred_queue_acls_props
+
+  configgenerator::configfile{'hdfs_site_xml': 
+    configuration => $manifestloader::Hdp_hadoop__hdfs_site
   }
 }
 

+ 2 - 3
ambari-agent/src/main/puppet/modules/hdp-hcat-old/manifests/init.pp

@@ -27,9 +27,8 @@ class hdp-hcat(
 
 #Configs generation  
 
-  configgenerator::configfile{'hive-site.xml': 
-    module => 'hdp-hcat-old',
-    properties => $manifestloader::hdp_hcat_old_hive_site_props
+  configgenerator::configfile{'hive_site_xml': 
+    configuration => $manifestloader::Hdp_hcat_old__hive_site
   }
 
   $hcat_user = $hdp::params::hcat_user

+ 2 - 3
ambari-agent/src/main/puppet/modules/hdp-hive/manifests/init.pp

@@ -29,9 +29,8 @@ class hdp-hive(
   $hive_user = $hdp-hive::params::hive_user
   $hive_config_dir = $hdp-hive::params::hive_conf_dir
 
-  configgenerator::configfile{'hive-site.xml.erb': 
-    module => 'hdp-hive',
-    properties => $manifestloader::hdp_hive_hive_site_props
+  configgenerator::configfile{'hive_site_xml': 
+    configuration => $manifestloader::Hdp_hive__hive_site
   }
 
   anchor { 'hdp-hive::begin': }

+ 3 - 4
ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/init.pp

@@ -27,12 +27,11 @@ class hdp-oozie(
   include hdp-oozie::params 
   include manifestloader
  
-   configgenerator::configfile{'oozie-site.xml': 
-    module => 'hdp-oozie',
-    properties => $manifestloader::hdp_oozie_oozie_site_props
+  configgenerator::configfile{'oozie_site_xml': 
+    configuration => $manifestloader::Hdp_oozie__oozie_site
   }
 
- $oozie_user = $hdp-oozie::params::oozie_user
+  $oozie_user = $hdp-oozie::params::oozie_user
   $oozie_config_dir = $hdp-oozie::params::conf_dir
   
   if ($service_state == 'uninstalled') {

+ 2 - 3
ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/init.pp

@@ -25,9 +25,8 @@ class hdp-templeton(
 {
   include manifestloader
   
-  configgenerator::configfile{'templeton-site.xml: 
-    module => 'hdp-templeton',
-    properties => $manifestloader::hdp_templeton_templeton_site_props
+  configgenerator::configfile{'templeton_site_xml': 
+    configuration => $manifestloader::Hdp_templeton__templeton_site
   }
 
  include hdp-templeton::params 

+ 58 - 199
ambari-agent/src/main/python/ambari_agent/ActionQueue.py

@@ -28,6 +28,7 @@ from shell import shellRunner
 from FileUtil import writeFile, createStructure, deleteStructure, getFilePath, appendToFile
 from shell import shellRunner
 import json
+import pprint
 import os
 import time
 import subprocess
@@ -37,14 +38,11 @@ logger = logging.getLogger()
 installScriptHash = -1
 
 class ActionQueue(threading.Thread):
-  global q, r, clusterId, clusterDefinitionRevision
-  q = Queue.Queue()
-  r = Queue.Queue()
-  clusterId = 'unknown'
-  clusterDefinitionRevision = 0
-
+  global commandQueue, resultQueue
+  commandQueue = Queue.Queue()
+  resultQueue = Queue.Queue()
+ 
   def __init__(self, config):
-    global clusterId, clusterDefinitionRevision 
     super(ActionQueue, self).__init__()
     #threading.Thread.__init__(self)
     self.config = config
@@ -59,219 +57,80 @@ class ActionQueue(threading.Thread):
   def stopped(self):
     return self._stop.isSet()
 
-  #For unittest
   def getshellinstance(self):
+    """ For Testing purpose only.""" 
     return self.sh
 
-  def put(self, response):
-    if 'actions' in response:
-      actions = response['actions']
-      logger.debug(actions)
-      # for the servers, take a diff of what's running, and what the controller
-      # asked the agent to start. Kill all those servers that the controller
-      # didn't ask us to start
-      sh = shellRunner()
-      runningServers = sh.getServerTracker()
-
-      # get the list of servers the controller wants running
-      serversToRun = {}
-      for action in actions:
-        if action['kind'] == 'START_ACTION':
-          processKey = sh.getServerKey(action['clusterId'],action['clusterDefinitionRevision'],
-            action['component'], action['role'])
-          serversToRun[processKey] = 1
-
-      # create stop actions for the servers that the controller wants stopped
-      for server in runningServers.keys():
-        if server not in serversToRun:
-          sh.stopProcess(server)
-      # now put all the actions in the queue. The ordering is important (we stopped
-      # all unneeded servers first)
-      for action in actions:
-        q.put(action)
+  def put(self, command):
+    logger.info("The command from the server is \n" + pprint.pformat(command))
+    commandQueue.put(command)
+    pass
 
   def run(self):
-    global clusterId, clusterDefinitionRevision
+    result = []
     while not self.stopped():
-      while not q.empty():
-        action = q.get()
-        switches = {
-                     'START_ACTION'              : self.startAction,
-                     'RUN_ACTION'                : self.runAction,
-                     'CREATE_STRUCTURE_ACTION'   : self.createStructureAction,
-                     'DELETE_STRUCTURE_ACTION'   : self.deleteStructureAction,
-                     'WRITE_FILE_ACTION'         : self.writeFileAction,
-                     'INSTALL_AND_CONFIG_ACTION' : self.installAndConfigAction,
-                     'NO_OP_ACTION'              : self.noOpAction
-                   }
-        
-        exitCode = 1
-        retryCount = 1
-        while (exitCode != 0 and retryCount <= self.maxRetries):
-          result={}
-          try:
-            #pass a copy of action since we don't want anything to change in the 
-            #action dict 
-            actionCopy = copy.copy(action)
-            result = switches.get(action['kind'], self.unknownAction)(actionCopy)
-            if ('commandResult' in result):
-              commandResult = result['commandResult']
-              exitCode = commandResult['exitCode']
-              if (exitCode == 0):
-                break
-              else:
-                logger.warn(str(action) + " exited with code " + str(exitCode))
-            else:
-              #Really, no commandResult? Is this possible?
-              #TODO: check
-              exitCode = 0
-              break
-          except Exception, err:
-            traceback.print_exc()  
-            logger.warn(err)
-            if ('commandResult' in result):
-              commandResult = result['commandResult']
-              if ('exitCode' in commandResult):
-                exitCode = commandResult['exitCode']
-              else:
-                exitCode = 1
-            else:
-              result['commandResult'] = {'exitCode': 1, 'output':"", 'error':""}
-
-          #retry in some time  
-          logger.warn("Retrying %s in %d seconds" % (str(action),self.sleepInterval))
-          time.sleep(self.sleepInterval)
-          retryCount += 1
+      while not commandQueue.empty():
+        command = commandQueue.get()
+        try:
+          #pass a copy of action since we don't want anything to change in the 
+          #action dict 
+          commandCopy = copy.copy(command)
+          result = self.executeCommand(commandCopy)
           
-        if (exitCode != 0):
-          result['exitCode']=exitCode
-          result['retryActionCount'] = retryCount - 1
-        else:
-          result['retryActionCount'] = retryCount
-        # Update the result
-        r.put(result)
+        except Exception, err:
+          traceback.print_exc()  
+          logger.warn(err)
+          pass
+        
+        for entry in result:
+          resultQueue.put(entry)
+        pass
       if not self.stopped():
         time.sleep(5)
 
   # Store action result to agent response queue
   def result(self):
     result = []
-    while not r.empty():
-      result.append(r.get())
+    while not resultQueue.empty():
+      result.append(resultQueue.get())
     return result
 
-  # Generate default action response
-  def genResult(self, action):
-    result={}
-    if (action['kind'] == 'INSTALL_AND_CONFIG_ACTION' or action['kind'] == 'NO_OP_ACTION'):
-      result = {
-               'id'                        : action['id'],
-               'kind'                      : action['kind'],
-             }
-    else:
-      result = { 
-               'id'                        : action['id'],
-               'clusterId'                 : action['clusterId'],
-               'kind'                      : action['kind'],
-               'clusterDefinitionRevision' : action['clusterDefinitionRevision'],
-               'componentName'             : action['component'],
-               'role'                      : action['role']
-             }
+  def registerCommand(self, command):
+    return {}
+  
+  def statusCommand(self, command):
+    return {}
+  
+  def executeCommand(self, command):
+    logger.info("Executing command \n" + pprint.pformat(command))
+    clusterName = command['clusterName']
+    commandId = command['commandId']
+    hostname = command['hostname']
+    params = command['params']
+    clusterHostInfo = command['clusterHostInfo']
+    roleCommands = command['roleCommands']
+    configurations = command['configurations']
+    result = []
+    for roleCommand in roleCommands:
+      # assume some puppet pluing to run these commands
+      roleResult = {'role' : roleCommand['role'],
+                    'actionId' : commandId,
+                    'stdout' : "DONE",
+                    'stderr' : "DONE",
+                    'exitCode' : 0,
+                    'status' : "COMPLETED"}
+      result.append(roleResult)
+      pass
     return result
 
-  # Run start action, start a server process and
-  # track the liveness of the children process
-  def startAction(self, action):
-    result = self.genResult(action)
-    return self.sh.startProcess(action['clusterId'],
-      action['clusterDefinitionRevision'],
-      action['component'], 
-      action['role'], 
-      action['command'], 
-      action['user'], result)
-
-  # Write file action
-  def writeFileAction(self, action, fileName=""):
-    result = self.genResult(action)
-    return writeFile(action, result, fileName)
-
-  # get the install file
-  def getInstallFilename(self,id):
-    return "ambari-install-file-"+id
-
-  # Install and configure action
-  def installAndConfigAction(self, action):
-    global installScriptHash
-    r=self.genResult(action)
-    w = self.writeFileAction(action,self.getInstallFilename(action['id']))
-    commandResult = {}
-    if w['exitCode']!=0:
-      commandResult['error'] = w['stderr'] 
-      commandResult['exitCode'] = w['exitCode']
-      r['commandResult'] = commandResult
-      return r
-     
-    if 'command' not in action:
-      # this is hardcoded to do puppet specific stuff for now
-      # append the content of the puppet file to the file written above
-      filepath = getFilePath(action,self.getInstallFilename(action['id'])) 
-      logger.info("File path for puppet top level script: " + filepath)
-      p = self.sh.run(['/bin/cat',AmbariConfig.config.get('puppet','driver')])
-      if p['exitCode']!=0:
-        commandResult['error'] = p['error']
-        commandResult['exitCode'] = p['exitCode']
-        r['commandResult'] = commandResult
-        return r
-      logger.debug("The contents of the static file " + p['output'])
-      appendToFile(p['output'],filepath) 
-      arr = [AmbariConfig.config.get('puppet','commandpath') , filepath]
-      logger.debug(arr)
-      action['command'] = arr
-    logger.debug(action['command'])
-    commandResult = self.sh.run(action['command'])
-    logger.debug("PUPPET COMMAND OUTPUT: " + commandResult['output'])
-    logger.debug("PUPPET COMMAND ERROR: " + commandResult['error'])
-    if commandResult['exitCode'] == 0:
-      installScriptHash = action['id'] 
-    r['commandResult'] = commandResult
-    return r
-
-  # Run command action
-  def runAction(self, action):
-    result = self.genResult(action)
-    return self.sh.runAction(action['clusterId'], 
-      action['component'],
-      action['role'],
-      action['user'], 
-      action['command'], 
-      action['cleanUpCommand'], result)
-
-  # Create directory structure for cluster
-  def createStructureAction(self, action):
-    result = self.genResult(action)
-    result['exitCode'] = 0
-    return createStructure(action, result)
-
-  # Delete directory structure for cluster
-  def deleteStructureAction(self, action):
-    result = self.genResult(action)
-    result['exitCode'] = 0
-    return deleteStructure(action, result)
-
-  def noOpAction(self, action):
-    r = {'id' : action['id']}
-    return r
+  def noOpCommand(self, command):
+    result = {'commandId' : command['Id']}
+    return result
 
-  # Handle unknown action
   def unknownAction(self, action):
     logger.error('Unknown action: %s' % action['id'])
     result = { 'id': action['id'] }
     return result
 
-  # Discover agent idle state
   def isIdle(self):
-    return q.empty()
-
-  # Get the hash of the script currently used for install/config
-  def getInstallScriptHash(self):
-    return installScriptHash
+    return commandQueue.empty()

+ 34 - 16
ambari-agent/src/main/python/ambari_agent/Controller.py

@@ -51,11 +51,6 @@ class Controller(threading.Thread):
     self.safeMode = True
     self.credential = None
     self.config = config
-    #Disabled security until we have fix for AMBARI-157
-    #if(config.get('controller', 'user')!=None and config.get('controller', 'password')!=None):
-    #  self.credential = { 'user' : config.get('controller', 'user'),
-    #                      'password' : config.get('controller', 'password')
-    #  }
     self.hostname = socket.gethostname()
     self.registerUrl = config.get('server', 'secured_url') + \
       '/agent/register/' + self.hostname
@@ -82,6 +77,7 @@ class Controller(threading.Thread):
     while registered == False:
       try:
         data = json.dumps(self.register.build(id))
+        logger.info("Registering with the server " + pprint.pformat(data))
         req = urllib2.Request(self.registerUrl, data, {'Content-Type': 
                                                       'application/json'})
         stream = security.secured_url_open(req)
@@ -99,6 +95,23 @@ class Controller(threading.Thread):
       pass  
     return ret
   
+  
+  def addToQueue(self, commands):
+    """Add to the queue for running the commands """
+    """ Put the required actions into the Queue """ 
+    """ Verify if the action is to reboot or not """
+    if not commands:
+      logger.info("No commands from the server.")
+    else:
+      """Only add to the queue if not empty list """
+      for command in commands:
+        logger.info("Adding command to the action queue: \n" +
+                     pprint.pformat(command)) 
+        self.actionQueue.put(command)
+        pass
+      pass
+    pass
+  
   def heartbeatWithServer(self):
     retry = False
     #TODO make sure the response id is monotonically increasing
@@ -109,29 +122,33 @@ class Controller(threading.Thread):
           data = json.dumps(self.heartbeat.build(id))
           pass
         logger.info("Sending HeartBeat " + pprint.pformat(data))
-        req = urllib2.Request(self.heartbeatUrl, data, {'Content-Type': 'application/json'})
-        
-        logger.info(data)
-        
+        req = urllib2.Request(self.heartbeatUrl, data, {'Content-Type': 
+                                                        'application/json'})
         f = security.secured_url_open(req)
         response = f.read()
         f.close()
-        data = json.loads(response)
-        id=int(data['responseId'])
-        logger.info("HeartBeat Response from Server: \n" + pprint.pformat(data))
+        response = json.loads(response)
+        id=int(response['responseId'])
+        
+        if 'executionCommands' in response.keys():
+          self.addToQueue(response['executionCommands'])
+          pass
+        else:
+          logger.info("No commands sent from the Server.")
+          pass
         retry=False
       except Exception, err:
         retry=True
         if "code" in err:
           logger.error(err.code)
         else:
-          logger.error("Unable to connect to: "+self.heartbeatUrl,exc_info=True)
+          logger.error("Unable to connect to: "+ 
+                       self.heartbeatUrl,exc_info=True)
       if self.actionQueue.isIdle():
-        time.sleep(30)
+        time.sleep(3)
       else:
         time.sleep(1) 
     pass
-  
 
   def run(self):
     opener = urllib2.build_opener()
@@ -148,7 +165,8 @@ def main(argv=None):
   signal.signal(signal.SIGINT, signal.SIG_DFL)
 
   logger.setLevel(logging.INFO)
-  formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - %(message)s")
+  formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - \
+    %(message)s")
   stream_handler = logging.StreamHandler()
   stream_handler.setFormatter(formatter)
   logger.addHandler(stream_handler)

+ 8 - 0
ambari-agent/src/main/python/ambari_agent/Hardware.py

@@ -96,6 +96,14 @@ class Hardware:
           pass
         pass
       pass
+    """ Convert the needed types to the true values """
+    if 'physicalprocessorcount' in retDict.keys():
+      retDict['physicalprocessorcount'] = int(retDict['physicalprocessorcount'])
+      pass
+    if 'is_virtual' in retDict.keys():
+      retDict['is_virtual'] = ("true" == retDict['is_virtual'])
+      pass
+    
     logger.info("Facter info : \n" + pprint.pformat(retDict))
     return retDict
   

+ 4 - 2
ambari-agent/src/main/python/ambari_agent/Heartbeat.py

@@ -39,17 +39,19 @@ class Heartbeat:
     timestamp = int(time.time()*1000)
     queueResult = self.actionQueue.result()
     installedRoleStates = serverStatus.build()
+    
     nodeStatus = { "status" : "HEALTHY",
                    "cause" : "NONE"}
     
     heartbeat = { 'responseId'        : int(id),
                   'timestamp'         : timestamp,
                   'hostname'          : socket.gethostname(),
-                 # 'reports'           : self.reports,
                  # 'componentStatus'   : self.componentStatus,
                   'nodeStatus'        : nodeStatus
                 }
-  
+    if len(queueResult) != 0:
+      heartbeat['reports'] = queueResult
+      pass
     
     return heartbeat
 

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/security.py

@@ -119,7 +119,7 @@ class CertificateManager():
     response = f.read()
     f.close()
     data = json.loads(response)
-    logger.info("Sign response from Server: \n" + pprint.pformat(data))
+    logger.debug("Sign response from Server: \n" + pprint.pformat(data))
     result=data['result']
     if result == 'OK':
       agentCrtContent=data['signedCa']

+ 36 - 19
ambari-agent/src/main/python/ambari_agent/shell.py

@@ -88,18 +88,19 @@ class shellRunner:
     code = 0
     cmd = " "
     cmd = cmd.join(script)
-    p = subprocess.Popen(cmd, preexec_fn=changeUid, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, close_fds=True)
+    p = subprocess.Popen(cmd, preexec_fn=changeUid, stdout=subprocess.PIPE, 
+                         stderr=subprocess.PIPE, shell=True, close_fds=True)
     out, err = p.communicate()
     code = p.wait()
     logger.debug("Exitcode for %s is %d" % (cmd,code))
     return {'exitCode': code, 'output': out, 'error': err}
 
   # dispatch action types
-  def runAction(self, clusterId, component, role, user, command, cleanUpCommand, result):
+  def runAction(self, clusterId, component, role, 
+                user, command, cleanUpCommand, result):
     oldDir = os.getcwd()
     #TODO: handle this better. Don't like that it is doing a chdir for the main process
     os.chdir(self.getWorkDir(clusterId, role))
-    oldUid = os.getuid()
     try:
       if user is not None:
         user=getpwnam(user)[2]
@@ -107,7 +108,8 @@ class shellRunner:
         user = oldUid
       threadLocal.uid = user
     except Exception:
-      logger.warn("%s %s %s can not switch user for RUN_ACTION." % (clusterId, component, role))
+      logger.warn("%s %s %s can not switch user for RUN_ACTION." 
+                  % (clusterId, component, role))
     code = 0
     cmd = sys.executable
     tempfilename = tempfile.mktemp()
@@ -116,7 +118,8 @@ class shellRunner:
     tmp.close()
     cmd = "%s %s %s" % (cmd, tempfilename, " ".join(command['param']))
     commandResult = {}
-    p = subprocess.Popen(cmd, preexec_fn=changeUid, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, close_fds=True)
+    p = subprocess.Popen(cmd, preexec_fn=changeUid, stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE, shell=True, close_fds=True)
     out, err = p.communicate()
     code = p.wait()
     if code != 0:
@@ -134,7 +137,8 @@ class shellRunner:
       cmd = "%s %s %s" % (cmd, tempfilename, " ".join(cleanUpCommand['param']))
       cleanUpCode = 0
       cleanUpResult = {}
-      p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, close_fds=True)
+      p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                            shell=True, close_fds=True)
       out, err = p.communicate()
       cleanUpCode = p.wait()
       if cleanUpCode != 0:
@@ -147,17 +151,20 @@ class shellRunner:
     try:
       os.chdir(oldDir)
     except Exception:
-      logger.warn("%s %s %s can not restore environment for RUN_ACTION." % (clusterId, component, role))
+      logger.warn("%s %s %s can not restore environment for RUN_ACTION."
+                   % (clusterId, component, role))
     return result
 
   # Start a process and presist its state
-  def startProcess(self, clusterId, clusterDefinitionRevision, component, role, script, user, result):
+  def startProcess(self, clusterId, clusterDefinitionRevision, component,
+                    role, script, user, result):
     global serverTracker
     oldDir = os.getcwd()
     try:
       os.chdir(self.getWorkDir(clusterId,role))
     except Exception:
-      logger.warn("%s %s %s can not switch dir for START_ACTION." % (clusterId, component, role))
+      logger.warn("%s %s %s can not switch dir for START_ACTION."
+                   % (clusterId, component, role))
     oldUid = os.getuid()
     try:
       if user is not None:
@@ -166,10 +173,12 @@ class shellRunner:
         user = os.getuid()
       threadLocal.uid = user
     except Exception:
-      logger.warn("%s %s %s can not switch user for START_ACTION." % (clusterId, component, role))
+      logger.warn("%s %s %s can not switch user for START_ACTION." 
+                  % (clusterId, component, role))
     code = 0
     commandResult = {}
-    process = self.getServerKey(clusterId,clusterDefinitionRevision,component,role)
+    process = self.getServerKey(clusterId,clusterDefinitionRevision,
+                                component,role)
     if not process in serverTracker:
       try:
         plauncher = processlauncher(script,user)
@@ -177,7 +186,8 @@ class shellRunner:
         plauncher.blockUntilProcessCreation()
       except Exception:
         traceback.print_exc()
-        logger.warn("Can not launch process for %s %s %s" % (clusterId, component, role))
+        logger.warn("Can not launch process for %s %s %s" 
+                    % (clusterId, component, role))
         code = -1
       serverTracker[process] = plauncher
       commandResult['exitCode'] = code 
@@ -185,16 +195,19 @@ class shellRunner:
     try:
       os.chdir(oldDir)
     except Exception:
-      logger.warn("%s %s %s can not restore environment for START_ACTION." % (clusterId, component, role))
+      logger.warn("%s %s %s can not restore environment for START_ACTION." \
+                   % (clusterId, component, role))
     return result
 
   # Stop a process and remove presisted state
   def stopProcess(self, processKey):
     global serverTracker
     keyFragments = processKey.split('/')
-    process = self.getServerKey(keyFragments[0],keyFragments[1],keyFragments[2],keyFragments[3])
+    process = self.getServerKey(keyFragments[0],keyFragments[1],
+                                keyFragments[2],keyFragments[3])
     if process in serverTracker:
-      logger.info ("Sending %s with PID %d the SIGTERM signal" % (process,serverTracker[process].getpid()))
+      logger.info ("Sending %s with PID %d the SIGTERM signal"
+                    % (process,serverTracker[process].getpid()))
       killprocessgrp(serverTracker[process].getpid())
       del serverTracker[process]
 
@@ -227,10 +240,13 @@ class processlauncher(threading.Thread):
       tmp.write(self.script['script'])
       tmp.close()
       threadLocal.uid = self.uid
-      self.cmd = "%s %s %s" % (pythoncmd, tempfilename, " ".join(self.script['param']))
+      self.cmd = "%s %s %s" % (pythoncmd, tempfilename,
+                                " ".join(self.script['param']))
       logger.info("Launching %s as uid %d" % (self.cmd,self.uid) )
-      p = subprocess.Popen(self.cmd, preexec_fn=self.changeUidAndSetSid, stdout=subprocess.PIPE, 
-                           stderr=subprocess.PIPE, shell=True, close_fds=True)
+      p = subprocess.Popen(self.cmd,
+                            preexec_fn=self.changeUidAndSetSid, 
+                            stdout=subprocess.PIPE, 
+                            stderr=subprocess.PIPE, shell=True, close_fds=True)
       logger.info("Launched %s; PID %d" % (self.cmd,p.pid))
       self.serverpid = p.pid
       self.out, self.err = p.communicate()
@@ -253,7 +269,8 @@ class processlauncher(threading.Thread):
       time.sleep(1)
       logger.info("Waiting for process %s to start" % self.cmd)
       if sleepCount > 10:
-        logger.warn("Couldn't start process %s even after %d seconds" % (self.cmd,sleepCount))
+        logger.warn("Couldn't start process %s even after %d seconds"
+                     % (self.cmd,sleepCount))
         os._exit(1)
     return self.serverpid
 

+ 12 - 0
ambari-agent/src/main/python/manifestGenerator/imports.txt

@@ -0,0 +1,12 @@
+import "/etc/puppet/agent/modules/hdp/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-hadoop/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-hbase/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-zookeeper/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-oozie/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-pig/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-sqoop/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-templeton/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-hive/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-hcat/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-mysql/manifests/*.pp"
+import "/etc/puppet/agent/modules/hdp-monitor-webserver/manifests/*.pp"

+ 98 - 0
ambari-agent/src/main/python/manifestGenerator/manifestGenerator.py

@@ -0,0 +1,98 @@
+import json
+
+def generateManifest(inputJsonStr):
+#reading json
+  parsedJson = json.loads(inputJsonStr)
+  hostname = parsedJson['hostname']
+  clusterHostInfo = parsedJson['clusterHostInfo']
+  params = parsedJson['params']
+  configurations = parsedJson['configurations']
+  hostAttributes = parsedJson['hostAttributes']
+  roles = parsedJson['roles']
+  
+#writing manifest
+  manifest = open('site.pp', 'w')
+
+  #writing imports from external static file
+  writeImports(manifest)
+  
+  #writing nodes
+  writeNodes(manifest, clusterHostInfo)
+  
+  #writing params from map
+  writeParams(manifest, params)
+  
+  #writing config maps
+  writeConfigurations(manifest, configurations)
+
+  #cycle here - writing host attributes
+  writeHostAttributes(manifest, hostAttributes)
+
+  #writing task definitions 
+  writeTasks(manifest, roles)
+     
+  manifest.close()
+    
+  
+#read static imports from file and write them to manifest
+def writeImports(outputFile, inputFileName='imports.txt'):
+  inputFile = open(inputFileName, 'r')
+
+  for line in inputFile:
+    outputFile.write(line)
+
+  inputFile.close()
+
+#write nodes
+def writeNodes(outputFile, clusterHostInfo):
+  for node in clusterHostInfo.iterkeys():
+    outputFile.write('$' + node + '= [')
+
+    coma = ''
+    for host in clusterHostInfo[node]:
+      outputFile.write(coma + '\'' + host + '\'')
+      coma = ', '
+
+    outputFile.write(']\n')
+
+#write params
+def writeParams(outputFile, params):
+  for param in params.iterkeys():
+    outputFile.write('$' + param + '="' + params[param] + '"\n')
+
+#write host attributes
+def writeHostAttributes(outputFile, hostAttributes):
+  outputFile.write('$hostAttributes={\n')
+
+  coma = ''
+  for attribute in hostAttributes.iterkeys():
+    outputFile.write(coma + '"' + attribute + '" => "' + hostAttributes[attribute] + '"')
+    coma = ',\n'
+
+  outputFile.write('}\n')
+
+#write configurations
+def writeConfigurations(outputFile, configs):
+  outputFile.write('$configuration =  {\n')
+
+  for configName in configs.iterkeys():
+    outputFile.write('$' + configName + '=> {\n')
+    config = configs[configName]
+
+    coma = ''
+    for configParam in config.iterkeys():
+      outputFile.write(coma + '"' + configParam + '" => "' + config[configParam] + '"')
+      coma = ',\n'
+
+    outputFile.write('\n}\n')
+
+  outputFile.write('\n}\n')
+
+#write node tasks
+def writeTasks(outputFile, tasks):
+  for task in tasks:
+    nodename = task['role']
+    command = task['roleCommand']
+    taskParams = task['params']
+    #TODO: write node task to file
+      

+ 10 - 3
ambari-api/pom.xml

@@ -29,6 +29,11 @@
   <profiles>
     </profiles>
   <dependencies>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-server</artifactId>
+      <version>1.0.3-SNAPSHOT</version>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
@@ -62,17 +67,14 @@
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-core-asl</artifactId>
-      <version>1.9.2</version>
     </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-jaxrs</artifactId>
-      <version>1.9.2</version>
     </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-xc</artifactId>
-      <version>1.9.7</version>
     </dependency>
     <dependency>
       <groupId>asm</groupId>
@@ -99,5 +101,10 @@
       <groupId>com.sun.grizzly</groupId>
       <artifactId>grizzly-comet-webserver</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+      <version>3.0</version>
+    </dependency>
   </dependencies>
 </project>

+ 30 - 0
ambari-api/src/main/java/org/apache/ambari/api/controller/ProviderModule.java

@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.controller;
+
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+
+
+/**
+ *
+ */
+public interface ProviderModule {
+  public ResourceProvider getResourceProvider(Resource.Type type);
+}

+ 5 - 5
ambari-api/src/main/java/org/apache/ambari/api/controller/ganglia/GangliaPropertyProvider.java

@@ -19,11 +19,11 @@
 package org.apache.ambari.api.controller.ganglia;
 
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.api.controller.utilities.PredicateHelper;
 
 import java.util.HashMap;

+ 130 - 18
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/ClusterControllerImpl.java

@@ -15,17 +15,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.ambari.api.controller.internal;
 
-import org.apache.ambari.api.controller.spi.ClusterController;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.ResourceProvider;
-import org.apache.ambari.api.controller.spi.Schema;
+import org.apache.ambari.api.controller.ProviderModule;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.spi.ClusterController;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.Schema;
 
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
@@ -36,16 +40,32 @@ import java.util.Set;
  */
 public class ClusterControllerImpl implements ClusterController {
 
-  private final Map<Resource.Type, Schema> schemas;
+  /**
+   * Module of providers for this controller.
+   */
+  private final ProviderModule providerModule;
+
+  /**
+   * Map of resource providers keyed by resource type.
+   */
+  private final Map<Resource.Type, ResourceProvider> resourceProviders;
+
 
-  public ClusterControllerImpl(Map<Resource.Type, Schema> schemas) {
-    this.schemas = schemas;
+  // ----- Constructors ------------------------------------------------------
+
+  public ClusterControllerImpl(ProviderModule providerModule) {
+    this.providerModule = providerModule;
+    this.resourceProviders = getResourceSchemas();
   }
 
+
+  // ----- ClusterController -------------------------------------------------
+
   @Override
-  public Iterable<Resource> getResources(Resource.Type type, Request request, Predicate predicate) {
-    ResourceProvider provider = schemas.get(type).getResourceProvider();
-    Set<Resource> resources = null;
+  public Iterable<Resource> getResources(Resource.Type type, Request request, Predicate predicate)
+      throws AmbariException{
+    ResourceProvider provider = resourceProviders.get(type);
+    Set<Resource> resources;
 
     if (provider == null) {
       resources = Collections.emptySet();
@@ -58,49 +78,134 @@ public class ClusterControllerImpl implements ClusterController {
 
   @Override
   public Schema getSchema(Resource.Type type) {
-    return schemas.get(type);
+    return resourceProviders.get(type).getSchema();
+  }
+
+  @Override
+  public void createResources(Resource.Type type, Request request) throws AmbariException {
+    ResourceProvider provider = resourceProviders.get(type);
+    if (provider != null) {
+      provider.createResources(request);
+    }
   }
 
+  @Override
+  public void updateResources(Resource.Type type, Request request, Predicate predicate) throws AmbariException {
+    ResourceProvider provider = resourceProviders.get(type);
+    if (provider != null) {
+      provider.updateResources(request, predicate);
+    }
+  }
+
+  @Override
+  public void deleteResources(Resource.Type type, Predicate predicate) throws AmbariException {
+    ResourceProvider provider = resourceProviders.get(type);
+    if (provider != null) {
+      provider.deleteResources(predicate);
+    }
+  }
+
+
+  // ----- helper methods ----------------------------------------------------
+
   private Set<Resource> populateResources(Resource.Type type,
                                           Set<Resource> resources,
                                           Request request,
-                                          Predicate predicate) {
+                                          Predicate predicate) throws AmbariException{
     Set<Resource> keepers = resources;
 
-    for (PropertyProvider propertyProvider : schemas.get(type).getPropertyProviders()) {
+    for (PropertyProvider propertyProvider : resourceProviders.get(type).getPropertyProviders()) {
       //TODO : only call the provider if it provides properties that we need ...
       keepers = propertyProvider.populateResources(keepers, request, predicate);
     }
     return keepers;
   }
 
+  private Map<Resource.Type, ResourceProvider> getResourceSchemas() {
+    Map<Resource.Type, ResourceProvider> resourceProviders = new HashMap<Resource.Type, ResourceProvider>();
+
+    resourceProviders.put(Resource.Type.Cluster, providerModule.getResourceProvider(Resource.Type.Cluster));
+    resourceProviders.put(Resource.Type.Service, providerModule.getResourceProvider(Resource.Type.Service));
+    resourceProviders.put(Resource.Type.Host, providerModule.getResourceProvider(Resource.Type.Host));
+    resourceProviders.put(Resource.Type.Component, providerModule.getResourceProvider(Resource.Type.Component));
+    resourceProviders.put(Resource.Type.HostComponent, providerModule.getResourceProvider(Resource.Type.HostComponent));
+
+    return resourceProviders;
+  }
+
+
+  // ----- ResourceIterable inner class --------------------------------------
+
   private static class ResourceIterable implements Iterable<Resource> {
+
+    /**
+     * The resources to iterate over.
+     */
     private final Set<Resource> resources;
+
+    /**
+     * The predicate used to filter the set.
+     */
     private final Predicate predicate;
 
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a ResourceIterable.
+     *
+     * @param resources  the set of resources to iterate over
+     * @param predicate  the predicate used to filter the set of resources
+     */
     private ResourceIterable(Set<Resource> resources, Predicate predicate) {
       this.resources = resources;
       this.predicate = predicate;
     }
 
+    // ----- Iterable --------------------------------------------------------
+
     @Override
     public Iterator<Resource> iterator() {
       return new ResourceIterator(resources, predicate);
     }
   }
 
+
+  // ----- ResourceIterator inner class --------------------------------------
+
   private static class ResourceIterator implements Iterator<Resource> {
 
+    /**
+     * The underlying iterator.
+     */
     private final Iterator<Resource> iterator;
+
+    /**
+     * The predicate used to filter the resource being iterated over.
+     */
     private final Predicate predicate;
+
+    /**
+     * The next resource.
+     */
     private Resource nextResource;
 
+
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a new ResourceIterator.
+     *
+     * @param resources  the set of resources to iterate over
+     * @param predicate  the predicate used to filter the set of resources
+     */
     private ResourceIterator(Set<Resource> resources, Predicate predicate) {
-      this.iterator = resources.iterator();
-      this.predicate = predicate;
+      this.iterator     = resources.iterator();
+      this.predicate    = predicate;
       this.nextResource = getNextResource();
     }
 
+    // ----- Iterator --------------------------------------------------------
+
     @Override
     public boolean hasNext() {
       return nextResource != null;
@@ -123,6 +228,13 @@ public class ClusterControllerImpl implements ClusterController {
       throw new UnsupportedOperationException("Remove not supported.");
     }
 
+    // ----- helper methods --------------------------------------------------
+
+    /**
+     * Get the next resource.
+     *
+     * @return the next resource.
+     */
     private Resource getNextResource() {
       while (iterator.hasNext()) {
         Resource next = iterator.next();

+ 48 - 0
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/DefaultProviderModule.java

@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.controller.internal;
+
+import org.apache.ambari.api.controller.ProviderModule;
+import org.apache.ambari.api.controller.utilities.Properties;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ *
+ */
+public class DefaultProviderModule implements ProviderModule {
+
+  private static final List<PropertyProvider> PROPERTY_PROVIDERS =
+      new LinkedList<PropertyProvider>();
+
+  @Override
+  public ResourceProvider getResourceProvider(Resource.Type type) {
+    AmbariManagementController managementController =
+        new AmbariManagementControllerImpl(null, null);
+    return ResourceProviderImpl.getResourceProvider(type,
+        PROPERTY_PROVIDERS, Properties.getPropertyIds(type, "DB"),
+        Properties.getKeyPropertyIds(type), managementController);
+  }
+}

+ 51 - 18
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/PropertyIdImpl.java

@@ -18,26 +18,52 @@
 
 package org.apache.ambari.api.controller.internal;
 
-import org.apache.ambari.api.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyId;
 
 /**
- *
+ * Simple PropertyId implementation.
  */
 public class PropertyIdImpl implements PropertyId {
+  /**
+   * The property name.
+   */
   private String name;
+
+  /**
+   * The category name.
+   */
   private String category;
+
+  /**
+   * Indicates whether or not this property is temporal.
+   */
   private boolean temporal;
 
-  public PropertyIdImpl() {
 
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a property id.  Required for JSON serialization.
+   */
+  public PropertyIdImpl() {
   }
 
+  /**
+   * Create a property id.
+   *
+   * @param name      the property name.
+   * @param category  the property category.
+   * @param temporal  a temporal indicator
+   */
   public PropertyIdImpl(String name, String category, boolean temporal) {
-    this.name = name;
+    this.name     = name;
     this.category = category;
     this.temporal = temporal;
   }
 
+
+  // ----- PropertyId --------------------------------------------------------
+
   public String getName() {
     return name;
   }
@@ -50,17 +76,27 @@ public class PropertyIdImpl implements PropertyId {
     return temporal;
   }
 
+
+  // ----- Object overrides --------------------------------------------------
+
   @Override
   public int hashCode() {
-    return name.hashCode() + (category == null ? 0 : category.hashCode()) + (temporal ? 1 : 0);
+    return name.hashCode() +
+        (category == null ? 0 : category.hashCode()) +
+        (temporal ? 1 : 0);
   }
 
   @Override
   public boolean equals(Object o) {
 
-    if (!(o instanceof PropertyIdImpl)) {
+    if (this == o) {
+      return true;
+    }
+
+    if (o == null || !o.getClass().equals(PropertyIdImpl.class)) {
       return false;
     }
+
     PropertyIdImpl that = (PropertyIdImpl) o;
 
     return this.name.equals(that.getName()) &&
@@ -68,21 +104,18 @@ public class PropertyIdImpl implements PropertyId {
         this.isTemporal() == that.isTemporal();
   }
 
+  @Override
+  public String toString() {
+    return "PropertyId[" + category + ", " + name + "]";
+  }
+
+
+  // ----- helper methods ----------------------------------------------------
+
   private static boolean equals(Object o1, Object o2) {
     if (o1 == null) {
       return o2 == null;
     }
-
-    if (o2 == null) {
-      return o1 == null;
-    }
-
-    return o1.equals(o2);
-  }
-
-
-  @Override
-  public String toString() {
-    return "PropertyId[" + category + ", " + name + "]";
+    return o2 != null && o1.equals(o2);
   }
 }

+ 62 - 0
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/PropertyPredicateVisitor.java

@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.controller.internal;
+
+import org.apache.ambari.server.controller.predicate.ArrayPredicate;
+import org.apache.ambari.server.controller.predicate.ComparisonPredicate;
+import org.apache.ambari.server.controller.predicate.PredicateVisitor;
+import org.apache.ambari.server.controller.predicate.UnaryPredicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Predicate visitor for extracting property values from the PropertyPredicates of a predicate graph.
+ */
+public class PropertyPredicateVisitor implements PredicateVisitor {
+  private final Map<PropertyId, Object> properties = new HashMap<PropertyId, Object>();
+
+  @Override
+  public void acceptComparisonPredicate(ComparisonPredicate predicate) {
+    properties.put(predicate.getPropertyId(), predicate.getValue());
+  }
+
+  @Override
+  public void acceptArrayPredicate(ArrayPredicate predicate) {
+    //Do nothing
+  }
+
+  @Override
+  public void acceptUnaryPredicate(UnaryPredicate predicate) {
+    //Do nothing
+  }
+
+
+  // ----- accessors ---------------------------------------------------------
+
+  /**
+   * Get the properties.
+   *
+   * @return the properties
+   */
+  public Map<PropertyId, Object> getProperties() {
+    return properties;
+  }
+}

+ 33 - 7
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/RequestImpl.java

@@ -18,8 +18,8 @@
 
 package org.apache.ambari.api.controller.internal;
 
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Request;
 
 import java.util.Collections;
 import java.util.HashSet;
@@ -31,21 +31,47 @@ import java.util.Set;
  */
 public class RequestImpl implements Request {
 
+  /**
+   * The property ids associated with this request.  Used for requests that
+   * get resource values.
+   */
   private final Set<PropertyId> propertyIds;
-  private final Set<Map<PropertyId, String>> properties;
 
-  public RequestImpl(Set<PropertyId> propertyIds, Set<Map<PropertyId, String>> properties) {
-    this.propertyIds = propertyIds == null ? Collections.unmodifiableSet(new HashSet<PropertyId>()) : Collections.unmodifiableSet(propertyIds);
-    this.properties = properties == null ? Collections.unmodifiableSet(new HashSet<Map<PropertyId, String>>()) : Collections.unmodifiableSet(properties);
+  /**
+   * The properties associated with this request.  Used for requests that create
+   * resources or update resource values.
+   */
+  private final Set<Map<PropertyId, Object>> properties;
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a request.
+   *
+   * @param propertyIds  the property ids associated with the request; may be null
+   * @param properties   the properties associated with the request; may be null
+   */
+  public RequestImpl(Set<PropertyId> propertyIds, Set<Map<PropertyId, Object>> properties) {
+    this.propertyIds = propertyIds == null ?
+        Collections.unmodifiableSet(new HashSet<PropertyId>()) :
+        Collections.unmodifiableSet(propertyIds);
+
+    this.properties = properties == null ?
+        Collections.unmodifiableSet(new HashSet<Map<PropertyId, Object>>()) :
+        Collections.unmodifiableSet(properties);
   }
 
+
+  // ----- Request -----------------------------------------------------------
+
   @Override
   public Set<PropertyId> getPropertyIds() {
     return propertyIds;
   }
 
   @Override
-  public Set<Map<PropertyId, String>> getProperties() {
+  public Set<Map<PropertyId, Object>> getProperties() {
     return properties;
   }
 

+ 25 - 3
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/ResourceImpl.java

@@ -18,23 +18,42 @@
 
 package org.apache.ambari.api.controller.internal;
 
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 
 import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Default resource implementation.
+ * Simple resource implementation.
  */
 public class ResourceImpl implements Resource {
+
+  /**
+   * The resource type.
+   */
   private final Type type;
+
+  /**
+   * The map of categories/properties for this resource.
+   */
   private final Map<String, Map<String, String>> categories = new HashMap<String, Map<String, String>>();
 
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a resource of the given type.
+   *
+   * @param type  the resource type
+   */
   public ResourceImpl(Type type) {
     this.type = type;
   }
 
+
+  // ----- Resource ----------------------------------------------------------
+
   @Override
   public Type getType() {
     return type;
@@ -90,6 +109,9 @@ public class ResourceImpl implements Resource {
     return null;
   }
 
+
+  // ----- Object overrides --------------------------------------------------
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();

+ 528 - 83
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/ResourceProviderImpl.java

@@ -18,201 +18,646 @@
 
 package org.apache.ambari.api.controller.internal;
 
-import org.apache.ambari.api.controller.spi.ManagementController;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.ResourceProvider;
-
+import org.apache.ambari.api.controller.utilities.PredicateHelper;
+import org.apache.ambari.api.controller.utilities.Properties;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ClusterRequest;
+import org.apache.ambari.server.controller.ClusterResponse;
+import org.apache.ambari.server.controller.HostRequest;
+import org.apache.ambari.server.controller.HostResponse;
+import org.apache.ambari.server.controller.ServiceComponentHostRequest;
+import org.apache.ambari.server.controller.ServiceComponentHostResponse;
+import org.apache.ambari.server.controller.ServiceComponentRequest;
+import org.apache.ambari.server.controller.ServiceComponentResponse;
+import org.apache.ambari.server.controller.ServiceRequest;
+import org.apache.ambari.server.controller.ServiceResponse;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.Schema;
+
+import java.util.Collections;
 import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
 /**
+ * Basic resource provider implementation that maps to a management controller.
  */
 public abstract class ResourceProviderImpl implements ResourceProvider {
 
-  protected final Set<PropertyId> propertyIds;
-
-  private final ManagementController managementController;
-
-  private ResourceProviderImpl(Set<PropertyId> propertyIds, ManagementController managementController) {
-    this.propertyIds = propertyIds;
+  /**
+   * The list of property providers for this provider's resource type.
+   */
+  private final List<PropertyProvider> propertyProviders;
+
+  /**
+   * The set of property ids supported by this resource provider.
+   */
+  private final Set<PropertyId> propertyIds;
+
+  /**
+   * The management controller to delegate to.
+   */
+  private final AmbariManagementController managementController;
+
+  /**
+   * The schema for this provider's resource type.
+   */
+  private final Schema schema;
+
+
+  // ----- Property ID constants ---------------------------------------------
+
+  // Clusters
+  private static final PropertyId CLUSTER_ID_PROPERTY_ID      = Properties.getPropertyId("cluster_id", "Clusters");
+  private static final PropertyId CLUSTER_NAME_PROPERTY_ID    = Properties.getPropertyId("cluster_name", "Clusters");
+  private static final PropertyId CLUSTER_VERSION_PROPERTY_ID = Properties.getPropertyId("version", "Clusters");
+  // Services
+  private static final PropertyId SERVICE_CLUSTER_NAME_PROPERTY_ID  = Properties.getPropertyId("cluster_name", "ServiceInfo");
+  private static final PropertyId SERVICE_SERVICE_NAME_PROPERTY_ID  = Properties.getPropertyId("service_name", "ServiceInfo");
+  private static final PropertyId SERVICE_SERVICE_STATE_PROPERTY_ID = Properties.getPropertyId("state", "ServiceInfo");
+  // Components
+  private static final PropertyId COMPONENT_CLUSTER_NAME_PROPERTY_ID   = Properties.getPropertyId("cluster_name", "ServiceComponentInfo");
+  private static final PropertyId COMPONENT_SERVICE_NAME_PROPERTY_ID   = Properties.getPropertyId("service_name", "ServiceComponentInfo");
+  private static final PropertyId COMPONENT_COMPONENT_NAME_PROPERTY_ID = Properties.getPropertyId("component_name", "ServiceComponentInfo");
+  private static final PropertyId COMPONENT_STATE_PROPERTY_ID          = Properties.getPropertyId("state", "ServiceComponentInfo");
+  // Hosts
+  private static final PropertyId HOST_CLUSTER_NAME_PROPERTY_ID = Properties.getPropertyId("cluster_name", "Hosts");
+  private static final PropertyId HOST_NAME_PROPERTY_ID         = Properties.getPropertyId("host_name", "Hosts");
+  private static final PropertyId HOST_IP_PROPERTY_ID           = Properties.getPropertyId("ip", "Hosts");
+  private static final PropertyId HOST_TOTAL_MEM_PROPERTY_ID    = Properties.getPropertyId("total_mem", "Hosts");
+  private static final PropertyId HOST_CPU_COUNT_PROPERTY_ID    = Properties.getPropertyId("cpu_count", "Hosts");
+  private static final PropertyId HOST_OS_ARCH_PROPERTY_ID      = Properties.getPropertyId("os_arch", "Hosts");
+  private static final PropertyId HOST_OS_TYPE_PROPERTY_ID      = Properties.getPropertyId("os_type", "Hosts");
+  // Host Components
+  private static final PropertyId HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID   = Properties.getPropertyId("cluster_name", "HostRoles");
+  private static final PropertyId HOST_COMPONENT_SERVICE_NAME_PROPERTY_ID   = Properties.getPropertyId("service_name", "HostRoles");
+  private static final PropertyId HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID = Properties.getPropertyId("component_name", "HostRoles");
+  private static final PropertyId HOST_COMPONENT_HOST_NAME_PROPERTY_ID      = Properties.getPropertyId("host_name", "HostRoles");
+  private static final PropertyId HOST_COMPONENT_STATE_PROPERTY_ID          = Properties.getPropertyId("state", "HostRoles");
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a new resource provider for the given management controller.
+   *
+   * @param propertyProviders     the property providers for this provider's resource type
+   * @param propertyIds           the property ids supported by this provider
+   * @param keyPropertyIds        the key property ids, keyed by resource type
+   * @param managementController  the management controller to delegate to
+   */
+  private ResourceProviderImpl(List<PropertyProvider> propertyProviders,
+                               Set<PropertyId> propertyIds,
+                               Map<Resource.Type, PropertyId> keyPropertyIds,
+                               AmbariManagementController managementController) {
+    this.propertyProviders    = propertyProviders;
+    this.propertyIds          = propertyIds;
     this.managementController = managementController;
+    this.schema               = new SchemaImpl(this, keyPropertyIds);
   }
 
+
+  // ----- ResourceProvider --------------------------------------------------
+
   @Override
   public Set<PropertyId> getPropertyIds() {
     return propertyIds;
   }
 
-  public ManagementController getManagementController() {
+  @Override
+  public List<PropertyProvider> getPropertyProviders() {
+    return propertyProviders;
+  }
+
+  @Override
+  public Schema getSchema() {
+    return schema;
+  }
+
+
+  // ----- accessors ---------------------------------------------------------
+
+  /**
+   * Get the associated management controller.
+   *
+   * @return the associated management controller
+   */
+  public AmbariManagementController getManagementController() {
     return managementController;
   }
 
-  public static ResourceProvider getResourceProvider(Resource.Type type, Set<PropertyId> propertyIds, ManagementController managementController)  {
 
+  // ----- utility methods ---------------------------------------------------
+
+  /**
+   * Get a map of property values from a given predicate.
+   *
+   * @param predicate  the predicate
+   *
+   * @return the map of properties
+   */
+  private static Map<PropertyId, Object> getProperties(Predicate predicate) {
+    PropertyPredicateVisitor visitor = new PropertyPredicateVisitor();
+    PredicateHelper.visit(predicate, visitor);
+    return visitor.getProperties();
+  }
+
+  /**
+   * Transfer property values from one map to another for the given list of property ids.
+   *
+   * @param to           the target map
+   * @param from         the source map
+   * @param propertyIds  the list of property ids
+   */
+  private static void setProperties(Map<PropertyId, Object> to, Map<PropertyId, Object> from, PropertyId ... propertyIds) {
+    for (PropertyId propertyId : propertyIds) {
+      if (from.containsKey(propertyId)) {
+        to.put(propertyId, from.get(propertyId));
+      }
+    }
+  }
+
+  /**
+   * Set a string property value on the given resource for the given id and value.
+   * Make sure that the id is in the given set of requested ids.
+   *
+   * @param resource      the resource
+   * @param propertyId    the property id
+   * @param value         the value to set
+   * @param requestedIds  the requested set of property ids
+   */
+  private static void setResourceProperty(Resource resource, PropertyId propertyId, String value, Set<PropertyId> requestedIds) {
+    if (requestedIds.contains(propertyId)) {
+      resource.setProperty(propertyId, value);
+    }
+  }
+
+  /**
+   * Set a long property value on the given resource for the given id and value.
+   * Make sure that the id is in the given set of requested ids.
+   *
+   * @param resource      the resource
+   * @param propertyId    the property id
+   * @param value         the value to set
+   * @param requestedIds  the requested set of property ids
+   */
+  private static void setResourceProperty(Resource resource, PropertyId propertyId, Long value, Set<PropertyId> requestedIds) {
+    if (requestedIds.contains(propertyId)) {
+      resource.setProperty(propertyId, value);
+    }
+  }
+
+  /**
+   * Set a integer property value on the given resource for the given id and value.
+   * Make sure that the id is in the given set of requested ids.
+   *
+   * @param resource      the resource
+   * @param propertyId    the property id
+   * @param value         the value to set
+   * @param requestedIds  the requested set of property ids
+   */
+  private static void setResourceProperty(Resource resource, PropertyId propertyId, Integer value, Set<PropertyId> requestedIds) {
+    if (requestedIds.contains(propertyId)) {
+      resource.setProperty(propertyId, value);
+    }
+  }
+
+  /**
+   * Factory method for obtaining a resource provider based on a given type and management controller.
+   *
+   * @param type                  the resource type
+   * @param propertyIds           the property ids
+   * @param managementController  the management controller
+   *
+   * @return a new resource provider
+   */
+  public static ResourceProvider getResourceProvider(Resource.Type type,
+                                                     List<PropertyProvider> propertyProviders,
+                                                     Set<PropertyId> propertyIds,
+                                                     Map<Resource.Type, PropertyId> keyPropertyIds,
+                                                     AmbariManagementController managementController) {
     switch (type) {
       case Cluster:
-        return new ClusterResourceProvider(propertyIds, managementController);
+        return new ClusterResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
       case Service:
-        return new ServiceResourceProvider(propertyIds, managementController);
+        return new ServiceResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
       case Component:
-        return new ComponentResourceProvider(propertyIds, managementController);
+        return new ComponentResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
       case Host:
-        return new HostResourceProvider(propertyIds, managementController);
+        return new HostResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
       case HostComponent:
-        return new HostComponentResourceProvider(propertyIds, managementController);
+        return new HostComponentResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
     }
     throw new IllegalArgumentException("Unknown type " + type);
   }
 
-  protected Request getRequest(Request request) {
-    Set<PropertyId> propertyIds = new HashSet<PropertyId>(request.getPropertyIds());
-    if (propertyIds.size() == 0) {
-      request = new RequestImpl(this.propertyIds, null);
-    }
-    return request;
-  }
+
+  // ------ ClusterResourceProvider inner class ------------------------------
 
   private static class ClusterResourceProvider extends ResourceProviderImpl{
 
-    private ClusterResourceProvider(Set<PropertyId> propertyIds, ManagementController managementController) {
-      super(propertyIds, managementController);
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a  new resource provider for the given management controller.
+     *
+     * @param propertyIds           the property ids
+     * @param managementController  the management controller
+     */
+    private ClusterResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
+      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
     }
 
+// ----- ResourceProvider ------------------------------------------------
+
     @Override
-    public void createResources(Request request) {
-      getManagementController().createClusters(request);
+    public void createResources(Request request) throws AmbariException {
+
+      for (Map<PropertyId, Object> properties : request.getProperties()) {
+        getManagementController().createCluster(getRequest(properties));
+      }
     }
 
     @Override
-    public Set<Resource> getResources(Request request, Predicate predicate) {
-      request = getRequest(request);
-      return getManagementController().getClusters(request, predicate);
+    public Set<Resource> getResources(Request request, Predicate predicate) throws AmbariException {
+      Set<PropertyId> requestedIds   = request.getPropertyIds();
+      ClusterRequest  clusterRequest = getRequest(getProperties(predicate));
+
+      Set<ClusterResponse> responses = getManagementController().getClusters(clusterRequest);
+
+      Set<Resource> resources = new HashSet<Resource>();
+      for (ClusterResponse response : responses) {
+        Resource resource = new ResourceImpl(Resource.Type.Cluster);
+        setResourceProperty(resource, CLUSTER_ID_PROPERTY_ID, response.getClusterId(), requestedIds);
+        setResourceProperty(resource, CLUSTER_NAME_PROPERTY_ID, response.getClusterName(), requestedIds);
+        resources.add(resource);
+      }
+      return resources;
     }
 
     @Override
-    public void updateResources(Request request, Predicate predicate) {
-      getManagementController().updateClusters(request, predicate);
+    public void updateResources(Request request, Predicate predicate) throws AmbariException {
+      // get the cluster request properties from the given request
+      Map<PropertyId, Object> properties = request.getProperties().iterator().next();
+      // get the id for the cluster request from the predicate
+      setProperties(properties, getProperties(predicate), CLUSTER_ID_PROPERTY_ID);
+
+      ClusterRequest clusterRequest = getRequest(properties);
+      getManagementController().updateCluster(clusterRequest);
     }
 
     @Override
-    public void deleteResources(Predicate predicate) {
-      getManagementController().deleteClusters(predicate);
+    public void deleteResources(Predicate predicate) throws AmbariException {
+      ClusterRequest clusterRequest = getRequest(getProperties(predicate));
+      getManagementController().deleteCluster(clusterRequest);
+    }
+
+    // ----- utility methods -------------------------------------------------
+
+    /**
+     * Get a cluster request object from a map of property values.
+     *
+     * @param properties  the predicate
+     *
+     * @return the cluster request object
+     */
+    private ClusterRequest getRequest(Map<PropertyId, Object> properties) {
+
+      return new ClusterRequest(
+          (Long) properties.get(CLUSTER_ID_PROPERTY_ID),
+          (String) properties.get(CLUSTER_NAME_PROPERTY_ID),
+          (String) properties.get(CLUSTER_VERSION_PROPERTY_ID),
+          null);  // TODO : host names
     }
   }
 
+  // ------ ServiceResourceProvider inner class ------------------------------
+
   private static class ServiceResourceProvider extends ResourceProviderImpl{
 
-    private ServiceResourceProvider(Set<PropertyId> propertyIds, ManagementController managementController) {
-      super(propertyIds, managementController);
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a  new resource provider for the given management controller.
+     *
+     * @param propertyIds           the property ids
+     * @param managementController  the management controller
+     */
+    private ServiceResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
+      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
     }
 
+    // ----- ResourceProvider ------------------------------------------------
+
     @Override
-    public void createResources(Request request) {
-      getManagementController().createServices(request);
+    public void createResources(Request request) throws AmbariException {
+      for (Map<PropertyId, Object> properties : request.getProperties()) {
+        getManagementController().createService(getRequest(properties));
+      }
     }
 
     @Override
-    public Set<Resource> getResources(Request request, Predicate predicate) {
-      request = getRequest(request);
-      return getManagementController().getServices(request, predicate);
+    public Set<Resource> getResources(Request request, Predicate predicate) throws AmbariException {
+      Set<PropertyId> requestedIds   = request.getPropertyIds();
+      ServiceRequest  serviceRequest = getRequest(getProperties(predicate));
+
+      Set<ServiceResponse> responses = getManagementController().getServices(serviceRequest);
+
+      Set<Resource> resources = new HashSet<Resource>();
+      for (ServiceResponse response : responses) {
+        Resource resource = new ResourceImpl(Resource.Type.Service);
+//        setResourceProperty(resource, SERVICE_CLUSTER_ID_PROPERTY_ID, response.getClusterId(), requestedIds);
+        setResourceProperty(resource, SERVICE_CLUSTER_NAME_PROPERTY_ID, response.getClusterName(), requestedIds);
+        setResourceProperty(resource, SERVICE_SERVICE_NAME_PROPERTY_ID, response.getServiceName(), requestedIds);
+//        setResourceProperty(resource, SERVICE_VERSION_PROPERTY_ID, response.getCurrentStackVersion(), requestedIds);
+        resources.add(resource);
+      }
+      return resources;
     }
 
     @Override
-    public void updateResources(Request request, Predicate predicate) {
-      getManagementController().updateServices(request, predicate);
+    public void updateResources(Request request, Predicate predicate) throws AmbariException {
+      // get the service request properties from the given request
+      Map<PropertyId, Object> properties = request.getProperties().iterator().next();
+      // get the pk for the service request from the predicate
+      setProperties(properties, getProperties(predicate),
+          SERVICE_CLUSTER_NAME_PROPERTY_ID, SERVICE_SERVICE_NAME_PROPERTY_ID);
+
+      ServiceRequest serviceRequest = getRequest(properties);
+      getManagementController().updateService(serviceRequest);
     }
 
     @Override
-    public void deleteResources(Predicate predicate) {
-      getManagementController().deleteServices(predicate);
+    public void deleteResources(Predicate predicate) throws AmbariException {
+      ServiceRequest serviceRequest = getRequest(getProperties(predicate));
+      getManagementController().deleteService(serviceRequest);
+    }
+
+    // ----- utility methods -------------------------------------------------
+
+    /**
+     * Get a service request object from a map of property values.
+     *
+     * @param properties  the predicate
+     *
+     * @return the service request object
+     */
+    private ServiceRequest getRequest(Map<PropertyId, Object> properties) {
+      return new ServiceRequest(
+          (String) properties.get(SERVICE_CLUSTER_NAME_PROPERTY_ID),
+          (String) properties.get(SERVICE_SERVICE_NAME_PROPERTY_ID),
+          null,
+          (String) properties.get(SERVICE_SERVICE_STATE_PROPERTY_ID));
     }
   }
 
+  // ------ ComponentResourceProvider inner class ----------------------------
+
   private static class ComponentResourceProvider extends ResourceProviderImpl{
 
-    private ComponentResourceProvider(Set<PropertyId> propertyIds, ManagementController managementController) {
-      super(propertyIds, managementController);
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a  new resource provider for the given management controller.
+     *
+     * @param propertyIds           the property ids
+     * @param managementController  the management controller
+     */
+    private ComponentResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
+      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
     }
 
+    // ----- ResourceProvider ------------------------------------------------
+
     @Override
-    public void createResources(Request request) {
-      getManagementController().createComponents(request);
+    public void createResources(Request request) throws AmbariException {
+      for (Map<PropertyId, Object> properties : request.getProperties()) {
+        getManagementController().createComponent(getRequest(properties));
+      }
     }
 
     @Override
-    public Set<Resource> getResources(Request request, Predicate predicate) {
-      request = getRequest(request);
-      return getManagementController().getComponents(request, predicate);
+    public Set<Resource> getResources(Request request, Predicate predicate) throws AmbariException {
+      Set<PropertyId>          requestedIds            = request.getPropertyIds();
+      ServiceComponentRequest  serviceComponentRequest = getRequest(getProperties(predicate));
+
+      Set<ServiceComponentResponse> responses = getManagementController().getComponents(serviceComponentRequest);
+
+      Set<Resource> resources = new HashSet<Resource>();
+      for (ServiceComponentResponse response : responses) {
+        Resource resource = new ResourceImpl(Resource.Type.Component);
+//        setResourceProperty(resource, COMPONENT_CLUSTER_ID_PROPERTY_ID, response.getClusterId(), requestedIds);
+        setResourceProperty(resource, COMPONENT_CLUSTER_NAME_PROPERTY_ID, response.getClusterName(), requestedIds);
+        setResourceProperty(resource, COMPONENT_SERVICE_NAME_PROPERTY_ID, response.getServiceName(), requestedIds);
+        setResourceProperty(resource, COMPONENT_COMPONENT_NAME_PROPERTY_ID, response.getComponentName(), requestedIds);
+//        setResourceProperty(resource, COMPONENT_VERSION_PROPERTY_ID, response.getCurrentStackVersion(), requestedIds);
+        resources.add(resource);
+      }
+      return resources;
     }
 
     @Override
-    public void updateResources(Request request, Predicate predicate) {
-      getManagementController().updateComponents(request, predicate);
+    public void updateResources(Request request, Predicate predicate) throws AmbariException {
+      // get the component request properties from the given request
+      Map<PropertyId, Object> properties = request.getProperties().iterator().next();
+      // get the pk for the service request from the predicate
+      setProperties(properties, getProperties(predicate),
+          COMPONENT_CLUSTER_NAME_PROPERTY_ID, COMPONENT_SERVICE_NAME_PROPERTY_ID, COMPONENT_COMPONENT_NAME_PROPERTY_ID);
+
+      ServiceComponentRequest serviceComponentRequest = getRequest(properties);
+      getManagementController().updateComponent(serviceComponentRequest);
     }
 
     @Override
-    public void deleteResources(Predicate predicate) {
-      getManagementController().deleteComponents(predicate);
+    public void deleteResources(Predicate predicate) throws AmbariException {
+      ServiceComponentRequest serviceComponentRequest = getRequest(getProperties(predicate));
+      getManagementController().deleteComponent(serviceComponentRequest);
+    }
+
+    // ----- utility methods -------------------------------------------------
+
+    /**
+     * Get a component request object from a map of property values.
+     *
+     * @param properties  the predicate
+     *
+     * @return the component request object
+     */
+    private ServiceComponentRequest getRequest(Map<PropertyId, Object> properties) {
+      return new ServiceComponentRequest(
+          (String) properties.get(COMPONENT_CLUSTER_NAME_PROPERTY_ID),
+          (String) properties.get(COMPONENT_SERVICE_NAME_PROPERTY_ID),
+          (String) properties.get(COMPONENT_COMPONENT_NAME_PROPERTY_ID),
+          null,
+          (String) properties.get(COMPONENT_STATE_PROPERTY_ID));
     }
   }
 
+  // ------ HostResourceProvider inner class ---------------------------------
+
   private static class HostResourceProvider extends ResourceProviderImpl{
 
-    private HostResourceProvider(Set<PropertyId> propertyIds, ManagementController managementController) {
-      super(propertyIds, managementController);
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a  new resource provider for the given management controller.
+     *
+     * @param propertyIds           the property ids
+     * @param managementController  the management controller
+     */
+    private HostResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
+      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
     }
 
+    // ----- ResourceProvider ------------------------------------------------
+
     @Override
-    public void createResources(Request request) {
-      getManagementController().createHosts(request);
+    public void createResources(Request request) throws AmbariException {
+      for (Map<PropertyId, Object> properties : request.getProperties()) {
+        getManagementController().createHost(getRequest(properties));
+      }
     }
 
     @Override
-    public Set<Resource> getResources(Request request, Predicate predicate) {
-      request = getRequest(request);
-      return getManagementController().getHosts(request, predicate);
+    public Set<Resource> getResources(Request request, Predicate predicate) throws AmbariException {
+      Set<PropertyId> requestedIds = request.getPropertyIds();
+      HostRequest     hostRequest  = getRequest(getProperties(predicate));
+
+      Set<HostResponse> responses = getManagementController().getHosts(hostRequest);
+
+      Set<Resource> resources = new HashSet<Resource>();
+      for (HostResponse response : responses) {
+        Resource resource = new ResourceImpl(Resource.Type.Host);
+        // TODO : more than one cluster
+//        setResourceProperty(resource, HOST_CLUSTER_NAME_PROPERTY_ID, response.getClusterNames(), requestedIds);
+        setResourceProperty(resource, HOST_NAME_PROPERTY_ID, response.getHostname(), requestedIds);
+        setResourceProperty(resource, HOST_IP_PROPERTY_ID, response.getIpv4(), requestedIds);
+        setResourceProperty(resource, HOST_TOTAL_MEM_PROPERTY_ID, response.getTotalMemBytes(), requestedIds);
+        setResourceProperty(resource, HOST_CPU_COUNT_PROPERTY_ID, response.getCpuCount(), requestedIds);
+        setResourceProperty(resource, HOST_OS_ARCH_PROPERTY_ID, response.getOsArch(), requestedIds);
+        setResourceProperty(resource, HOST_OS_TYPE_PROPERTY_ID, response.getOsType(), requestedIds);
+        // TODO ...
+        resources.add(resource);
+      }
+      return resources;
     }
 
     @Override
-    public void updateResources(Request request, Predicate predicate) {
-      getManagementController().updateHosts(request, predicate);
+    public void updateResources(Request request, Predicate predicate) throws AmbariException {
+      // get the host request properties from the given request
+      Map<PropertyId, Object> properties = request.getProperties().iterator().next();
+      // get the pk for the service request from the predicate
+      setProperties(properties, getProperties(predicate), HOST_CLUSTER_NAME_PROPERTY_ID);
+
+      HostRequest hostRequest = getRequest(properties);
+      getManagementController().updateHost(hostRequest);
     }
 
     @Override
-    public void deleteResources(Predicate predicate) {
-      getManagementController().deleteHosts(predicate);
+    public void deleteResources(Predicate predicate) throws AmbariException {
+      HostRequest hostRequest = getRequest(getProperties(predicate));
+      getManagementController().deleteHost(hostRequest);
+    }
+
+    // ----- utility methods -------------------------------------------------
+
+    /**
+     * Get a component request object from a map of property values.
+     *
+     * @param properties  the predicate
+     *
+     * @return the component request object
+     */
+    private HostRequest getRequest(Map<PropertyId, Object> properties) {
+      return new HostRequest(
+          (String) properties.get(HOST_NAME_PROPERTY_ID),
+          // TODO : more than one cluster
+          Collections.singletonList((String) properties.get(HOST_CLUSTER_NAME_PROPERTY_ID)),
+          null);
     }
   }
 
+  // ------ HostComponentResourceProvider inner class ------------------------
+
   private static class HostComponentResourceProvider extends ResourceProviderImpl{
 
-    private HostComponentResourceProvider(Set<PropertyId> propertyIds, ManagementController managementController) {
-      super(propertyIds, managementController);
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Create a  new resource provider for the given management controller.
+     *
+     * @param propertyIds           the property ids
+     * @param managementController  the management controller
+     */
+    private HostComponentResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
+      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
     }
 
+    // ----- ResourceProvider ------------------------------------------------
+
     @Override
-    public void createResources(Request request) {
-      getManagementController().createHostComponents(request);
+    public void createResources(Request request) throws AmbariException {
+      for (Map<PropertyId, Object> properties : request.getProperties()) {
+        getManagementController().createHostComponent(getRequest(properties));
+      }
     }
 
     @Override
-    public Set<Resource> getResources(Request request, Predicate predicate) {
-      request = getRequest(request);
-      return getManagementController().getHostComponents(request, predicate);
+    public Set<Resource> getResources(Request request, Predicate predicate) throws AmbariException {
+      Set<PropertyId>             requestedIds         = request.getPropertyIds();
+      ServiceComponentHostRequest hostComponentRequest = getRequest(getProperties(predicate));
+
+      Set<ServiceComponentHostResponse> responses = getManagementController().getHostComponents(hostComponentRequest);
+
+      Set<Resource> resources = new HashSet<Resource>();
+      for (ServiceComponentHostResponse response : responses) {
+        Resource resource = new ResourceImpl(Resource.Type.HostComponent);
+        setResourceProperty(resource, HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID, response.getClusterName(), requestedIds);
+        setResourceProperty(resource, HOST_COMPONENT_SERVICE_NAME_PROPERTY_ID, response.getServiceName(), requestedIds);
+        setResourceProperty(resource, HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID, response.getComponentName(), requestedIds);
+        setResourceProperty(resource, HOST_COMPONENT_HOST_NAME_PROPERTY_ID, response.getHostname(), requestedIds);
+        setResourceProperty(resource, HOST_COMPONENT_STATE_PROPERTY_ID, response.getLiveState(), requestedIds);
+        resources.add(resource);
+      }
+      return resources;
     }
 
     @Override
-    public void updateResources(Request request, Predicate predicate) {
-      getManagementController().updateHostComponents(request, predicate);
+    public void updateResources(Request request, Predicate predicate) throws AmbariException {
+      // get the host request properties from the given request
+      Map<PropertyId, Object> properties = request.getProperties().iterator().next();
+      // get the pk for the service request from the predicate
+      setProperties(properties, getProperties(predicate),
+          HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID, HOST_COMPONENT_SERVICE_NAME_PROPERTY_ID,
+          HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID, HOST_COMPONENT_HOST_NAME_PROPERTY_ID);
+
+      ServiceComponentHostRequest hostComponentRequest = getRequest(properties);
+      getManagementController().updateHostComponent(hostComponentRequest);
     }
 
     @Override
-    public void deleteResources(Predicate predicate) {
-      getManagementController().deleteHostComponents(predicate);
+    public void deleteResources(Predicate predicate) throws AmbariException {
+      ServiceComponentHostRequest clusterRequest = getRequest(getProperties(predicate));
+      getManagementController().deleteHostComponent(clusterRequest);
+    }
+
+    // ----- utility methods -------------------------------------------------
+
+    /**
+     * Get a component request object from a map of property values.
+     *
+     * @param properties  the predicate
+     *
+     * @return the component request object
+     */
+    private ServiceComponentHostRequest getRequest(Map<PropertyId, Object> properties) {
+      return new ServiceComponentHostRequest(
+          (String) properties.get(HOST_COMPONENT_CLUSTER_NAME_PROPERTY_ID),
+          (String) properties.get(HOST_COMPONENT_SERVICE_NAME_PROPERTY_ID),
+          (String) properties.get(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID),
+          (String) properties.get(HOST_COMPONENT_HOST_NAME_PROPERTY_ID),
+          null,
+          (String) properties.get(HOST_COMPONENT_STATE_PROPERTY_ID));
     }
   }
 }
-

+ 32 - 17
ambari-api/src/main/java/org/apache/ambari/api/controller/internal/SchemaImpl.java

@@ -18,11 +18,11 @@
 
 package org.apache.ambari.api.controller.internal;
 
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.ResourceProvider;
-import org.apache.ambari.api.controller.spi.Schema;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.Schema;
 
 import java.util.HashMap;
 import java.util.HashSet;
@@ -31,21 +31,43 @@ import java.util.Map;
 import java.util.Set;
 
 /**
- * Default schema implementation.
+ * Simple schema implementation.
  */
 public class SchemaImpl implements Schema {
+  /**
+   * The associated resource provider.
+   */
   private final ResourceProvider resourceProvider;
+
+  /**
+   * The list of associated property providers.
+   */
   private final List<PropertyProvider> propertyProviders;
+
+  /**
+   * Key property mapping by resource type.
+   */
   private final Map<Resource.Type, PropertyId> keyPropertyIds;
 
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a new schema for the given providers.
+   *
+   * @param resourceProvider   the resource provider
+   * @param keyPropertyIds     the key property mapping
+   */
   public SchemaImpl(ResourceProvider resourceProvider,
-                    List<PropertyProvider> propertyProviders,
                     Map<Resource.Type, PropertyId> keyPropertyIds) {
     this.resourceProvider = resourceProvider;
-    this.propertyProviders = propertyProviders;
+    this.propertyProviders = resourceProvider.getPropertyProviders();
     this.keyPropertyIds = keyPropertyIds;
   }
 
+
+  // ----- Schema ------------------------------------------------------------
+
   @Override
   public PropertyId getKeyPropertyId(Resource.Type type) {
     return keyPropertyIds.get(type);
@@ -67,17 +89,10 @@ public class SchemaImpl implements Schema {
     return categories;
   }
 
-  @Override
-  public ResourceProvider getResourceProvider() {
-    return resourceProvider;
-  }
 
-  @Override
-  public List<PropertyProvider> getPropertyProviders() {
-    return propertyProviders;
-  }
+  // ----- helper methods ----------------------------------------------------
 
-  public Set<PropertyId> getPropertyIds() {
+  private Set<PropertyId> getPropertyIds() {
     Set<PropertyId> propertyIds = new HashSet<PropertyId>(resourceProvider.getPropertyIds());
     for (PropertyProvider propertyProvider : propertyProviders) {
       propertyIds.addAll(propertyProvider.getPropertyIds());

+ 8 - 1
ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/ConnectionFactory.java

@@ -22,8 +22,15 @@ import java.sql.Connection;
 import java.sql.SQLException;
 
 /**
- *
+ * Simple JDBC connection factory interface.
  */
 public interface ConnectionFactory {
+  /**
+   * Get a connection.
+   *
+   * @return the connection
+   *
+   * @throws SQLException thrown if the connection cannot be obtained
+   */
   public Connection getConnection() throws SQLException;
 }

+ 582 - 47
ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/JDBCManagementController.java

@@ -18,121 +18,656 @@
 
 package org.apache.ambari.api.controller.jdbc;
 
-import org.apache.ambari.api.controller.spi.ManagementController;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.api.controller.internal.PropertyIdImpl;
+import org.apache.ambari.api.controller.internal.ResourceImpl;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ClusterRequest;
+import org.apache.ambari.server.controller.ClusterResponse;
+import org.apache.ambari.server.controller.HostRequest;
+import org.apache.ambari.server.controller.HostResponse;
+import org.apache.ambari.server.controller.ServiceComponentHostRequest;
+import org.apache.ambari.server.controller.ServiceComponentHostResponse;
+import org.apache.ambari.server.controller.ServiceComponentRequest;
+import org.apache.ambari.server.controller.ServiceComponentResponse;
+import org.apache.ambari.server.controller.ServiceRequest;
+import org.apache.ambari.server.controller.ServiceResponse;
+import org.apache.ambari.server.controller.TrackActionResponse;
+import org.apache.ambari.server.controller.predicate.AndPredicate;
+import org.apache.ambari.server.controller.predicate.BasePredicate;
+import org.apache.ambari.server.controller.predicate.EqualsPredicate;
+import org.apache.ambari.server.controller.predicate.PredicateVisitorAcceptor;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.controller.utilities.PredicateHelper;
+import org.apache.ambari.api.controller.utilities.Properties;
 
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
 import java.util.Set;
 
 /**
  * Generic JDBC implementation of a management controller.
  */
-public class JDBCManagementController implements ManagementController {
-
+public class JDBCManagementController implements AmbariManagementController {
+  /**
+   * The connection factory.
+   */
   private final ConnectionFactory connectionFactory;
 
-  public JDBCManagementController(ConnectionFactory connectionFactory) {
+  /**
+   * Mapping of resource type to the name of the primary table for the resource.
+   */
+  private final Map<Resource.Type, String> resourceTables;
+
+  /**
+   * Primary key mappings.
+   */
+  private final Map<String, Set<PropertyId>> primaryKeys = new HashMap<String, Set<PropertyId>>();
+
+  /**
+   * Key mappings used for joins.
+   */
+  private final Map<String, Map<PropertyId, PropertyId>> importedKeys = new HashMap<String, Map<PropertyId, PropertyId>>();
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Construct a new JDBC management controller with the given JDBC connection.
+   *
+   * @param connectionFactory  the connection factory
+   * @param resourceTables     the mapping of resource type to primary table name
+   */
+  public JDBCManagementController(ConnectionFactory connectionFactory, Map<Resource.Type, String> resourceTables) {
     this.connectionFactory = connectionFactory;
+    this.resourceTables = resourceTables;
   }
 
+  // ----- AmbariManagementController ----------------------------------------
+
   @Override
-  public void createClusters(Request request) {
-    JDBCHelper.createResources(connectionFactory, request);
+  public TrackActionResponse createCluster(ClusterRequest request) throws AmbariException {
+//    createResources(Resource.Type.Cluster, request);
+    return null;
   }
 
   @Override
-  public void createServices(Request request) {
-    JDBCHelper.createResources(connectionFactory, request);
+  public TrackActionResponse createService(ServiceRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void createComponents(Request request) {
-    JDBCHelper.createResources(connectionFactory, request);
+  public TrackActionResponse createComponent(ServiceComponentRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void createHosts(Request request) {
-    JDBCHelper.createResources(connectionFactory, request);
+  public TrackActionResponse createHost(HostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void createHostComponents(Request request) {
-    JDBCHelper.createResources(connectionFactory, request);
+  public TrackActionResponse createHostComponent(ServiceComponentHostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public Set<Resource> getClusters(Request request, Predicate predicate) {
-    return JDBCHelper.getResources(connectionFactory, Resource.Type.Cluster, request, predicate);
+  public Set<ClusterResponse> getClusters(ClusterRequest request) throws AmbariException {
+//    return getResources(Resource.Type.Cluster, request, predicate);
+    return null;
   }
 
   @Override
-  public Set<Resource> getServices(Request request, Predicate predicate) {
-    return JDBCHelper.getResources(connectionFactory, Resource.Type.Service, request, predicate);
+  public Set<ServiceResponse> getServices(ServiceRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public Set<Resource> getComponents(Request request, Predicate predicate) {
-    return JDBCHelper.getResources(connectionFactory, Resource.Type.Component, request, predicate);
+  public Set<ServiceComponentResponse> getComponents(ServiceComponentRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public Set<Resource> getHosts(Request request, Predicate predicate) {
-    return JDBCHelper.getResources(connectionFactory, Resource.Type.Host, request, predicate);
+  public Set<HostResponse> getHosts(HostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public Set<Resource> getHostComponents(Request request, Predicate predicate) {
-    return JDBCHelper.getResources(connectionFactory, Resource.Type.HostComponent, request, predicate);
+  public Set<ServiceComponentHostResponse> getHostComponents(ServiceComponentHostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void updateClusters(Request request, Predicate predicate) {
-    JDBCHelper.updateResources(connectionFactory, request, predicate);
+  public TrackActionResponse updateCluster(ClusterRequest request) throws AmbariException {
+//    updateResources(Resource.Type.Cluster, request, predicate);
+    return null;
   }
 
   @Override
-  public void updateServices(Request request, Predicate predicate) {
-    JDBCHelper.updateResources(connectionFactory, request, predicate);
+  public TrackActionResponse updateService(ServiceRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void updateComponents(Request request, Predicate predicate) {
-    JDBCHelper.updateResources(connectionFactory, request, predicate);
+  public TrackActionResponse updateComponent(ServiceComponentRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void updateHosts(Request request, Predicate predicate) {
-    JDBCHelper.updateResources(connectionFactory, request, predicate);
+  public TrackActionResponse updateHost(HostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void updateHostComponents(Request request, Predicate predicate) {
-    JDBCHelper.updateResources(connectionFactory, request, predicate);
+  public TrackActionResponse updateHostComponent(ServiceComponentHostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void deleteClusters(Predicate predicate) {
-    JDBCHelper.deleteResources(connectionFactory, predicate);
+  public TrackActionResponse deleteCluster(ClusterRequest request) throws AmbariException {
+//    deleteResources(Resource.Type.Cluster, predicate);
+    return null;
   }
 
   @Override
-  public void deleteServices(Predicate predicate) {
-    JDBCHelper.deleteResources(connectionFactory, predicate);
+  public TrackActionResponse deleteService(ServiceRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void deleteComponents(Predicate predicate) {
-    JDBCHelper.deleteResources(connectionFactory, predicate);
+  public TrackActionResponse deleteComponent(ServiceComponentRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void deleteHosts(Predicate predicate) {
-    JDBCHelper.deleteResources(connectionFactory, predicate);
+  public TrackActionResponse deleteHost(HostRequest request) throws AmbariException {
+    return null;
   }
 
   @Override
-  public void deleteHostComponents(Predicate predicate) {
-    JDBCHelper.deleteResources(connectionFactory, predicate);
+  public TrackActionResponse deleteHostComponent(ServiceComponentHostRequest request) throws AmbariException {
+    return null;
+  }
+
+
+  // ----- Helper methods ----------------------------------------------------
+
+  /**
+   * Create the resources defined by the properties in the given request object.
+   *
+   * @param type     the resource type
+   * @param request  the request object which defines the set of properties
+   *                 for the resource to be created
+   */
+  private void createResources(Resource.Type type, Request request) {
+    try {
+      Connection connection = connectionFactory.getConnection();
+
+      try {
+
+        Set<Map<PropertyId, Object>> propertySet = request.getProperties();
+
+        for (Map<PropertyId, Object> properties : propertySet) {
+          String sql = getInsertSQL(resourceTables.get(type), properties);
+
+          Statement statement = connection.createStatement();
+
+          statement.execute(sql);
+        }
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+  }
+
+  /**
+   * Get a set of {@link Resource resources} based on the given request and predicate
+   * information.
+   *
+   * @param type       the resource type
+   * @param request    the request object which defines the desired set of properties
+   * @param predicate  the predicate object which can be used to filter which
+   *                   resources are returned
+   * @return a set of resources based on the given request and predicate information
+   */
+  private Set<Resource> getResources(Resource.Type type, Request request, Predicate predicate) {
+
+    Set<Resource> resources = new HashSet<Resource>();
+    Set<PropertyId> propertyIds = new HashSet<PropertyId>(request.getPropertyIds());
+    if (predicate != null) {
+      propertyIds.addAll(PredicateHelper.getPropertyIds(predicate));
+    }
+
+    try {
+      Connection connection = connectionFactory.getConnection();
+
+      try {
+
+        for (String table : getTables(propertyIds)) {
+          getImportedKeys(connection, table);
+        }
+        String sql = getSelectSQL(propertyIds, predicate);
+        Statement statement = connection.createStatement();
+
+        ResultSet rs = statement.executeQuery(sql);
+        ResultSetMetaData metaData = rs.getMetaData();
+        int columnCount = metaData.getColumnCount();
+
+        while (rs.next()) {
+          final ResourceImpl resource = new ResourceImpl(type);
+          for (int i = 1; i <= columnCount; ++i) {
+            PropertyIdImpl propertyId = new PropertyIdImpl(metaData.getColumnName(i), metaData.getTableName(i), false);
+            if (propertyIds.contains(propertyId)) {
+              resource.setProperty(propertyId, rs.getString(i));
+            }
+          }
+          resources.add(resource);
+        }
+
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+
+    return resources;
+  }
+
+  /**
+   * Update the host resources selected by the given predicate with the properties
+   * from the given request object.
+   *
+   * @param type       the resource type
+   * @param request    the request object which defines the set of properties
+   *                   for the resources to be updated
+   * @param predicate  the predicate object which can be used to filter which
+   *                   host resources are updated
+   */
+  private void updateResources(Resource.Type type, Request request, Predicate predicate) {
+    try {
+      Connection connection = connectionFactory.getConnection();
+      try {
+        Set<Map<PropertyId, Object>> propertySet = request.getProperties();
+
+        Map<PropertyId, Object> properties = propertySet.iterator().next();
+
+        String resourceTable = resourceTables.get(type);
+
+        predicate = getPredicate(connection, resourceTable, predicate);
+
+        if (predicate == null) {
+          return;
+        }
+
+        String sql = getUpdateSQL(resourceTable, properties, predicate);
+
+        Statement statement = connection.createStatement();
+
+        statement.execute(sql);
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+  }
+
+  /**
+   * Delete the resources selected by the given predicate.
+   *
+   * @param type      the resource type
+   * @param predicate the predicate object which can be used to filter which
+   *                  resources are deleted
+   */
+  private void deleteResources(Resource.Type type, Predicate predicate) {
+    try {
+      Connection connection = connectionFactory.getConnection();
+      try {
+        String resourceTable = resourceTables.get(type);
+
+        predicate = getPredicate(connection, resourceTable, predicate);
+
+        if (predicate == null) {
+          return;
+        }
+
+        String sql = getDeleteSQL(resourceTable, predicate);
+
+        Statement statement = connection.createStatement();
+        statement.execute(sql);
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+  }
+
+  /**
+   * Lazily populate the imported key mappings for the given table.
+   *
+   * @param connection  the connection to use to obtain the database meta data
+   * @param table       the table
+   *
+   * @throws SQLException thrown if the meta data for the given connection cannot be obtained
+   */
+  private void getImportedKeys(Connection connection, String table) throws SQLException {
+    if (!this.importedKeys.containsKey(table)) {
+
+      Map<PropertyId, PropertyId> importedKeys = new HashMap<PropertyId, PropertyId>();
+      this.importedKeys.put(table, importedKeys);
+
+      DatabaseMetaData metaData = connection.getMetaData();
+
+      ResultSet rs = metaData.getImportedKeys(connection.getCatalog(), null, table);
+
+      while (rs.next()) {
+
+        PropertyId pkPropertyId = Properties.getPropertyId(
+            rs.getString("PKCOLUMN_NAME"), rs.getString("PKTABLE_NAME"));
+
+        PropertyId fkPropertyId = Properties.getPropertyId(
+            rs.getString("FKCOLUMN_NAME"), rs.getString("FKTABLE_NAME"));
+
+        importedKeys.put(pkPropertyId, fkPropertyId);
+      }
+    }
+  }
+
+  /**
+   * Lazily populate the primary key mappings for the given table.
+   *
+   * @param connection  the connection to use to obtain the database meta data
+   * @param table       the table
+   *
+   * @throws SQLException thrown if the meta data for the given connection cannot be obtained
+   */
+  private void getPrimaryKeys(Connection connection, String table) throws SQLException {
+
+    if (!this.primaryKeys.containsKey(table)) {
+
+      Set<PropertyId> primaryKeys = new HashSet<PropertyId>();
+      this.primaryKeys.put(table, primaryKeys);
+
+      DatabaseMetaData metaData = connection.getMetaData();
+
+      ResultSet rs = metaData.getPrimaryKeys(connection.getCatalog(), null, table);
+
+      while (rs.next()) {
+
+        PropertyId pkPropertyId = Properties.getPropertyId(
+            rs.getString("COLUMN_NAME"), rs.getString("TABLE_NAME"));
+
+        primaryKeys.add(pkPropertyId);
+      }
+    }
+  }
+
+  /**
+   * Create a new predicate if the given predicate doesn't work for the given table.  Use the
+   * given predicate and join to the given table to get the primary key values to create a new
+   * predicate. (TODO: consider replacing this two-step lookup with an INNER JOIN.)
+   *
+   * @param connection  the JDBC connection
+   * @param table       the resource table
+   * @param predicate   the predicate
+   *
+   * @return the new predicate
+   *
+   * @throws SQLException thrown if an exception occurred operating on the given connection
+   */
+  private Predicate getPredicate(Connection connection, String table, Predicate predicate) throws SQLException {
+
+    Set<String> predicateTables = getTables(PredicateHelper.getPropertyIds(predicate));
+
+    if (predicateTables.size() > 1 || !predicateTables.contains(table)) {
+      for (String predicateTable : predicateTables){
+        getImportedKeys(connection, predicateTable);
+      }
+
+      getPrimaryKeys(connection, table);
+      getImportedKeys(connection, table);
+
+      Set<PropertyId>   pkPropertyIds = primaryKeys.get(table);
+      String            sql           = getSelectSQL(pkPropertyIds, predicate);
+      Statement         statement     = connection.createStatement();
+      ResultSet         rs            = statement.executeQuery(sql);
+      ResultSetMetaData metaData      = rs.getMetaData();
+      int               columnCount   = metaData.getColumnCount();
+
+      Set<BasePredicate> predicates = new HashSet<BasePredicate>();
+      while (rs.next()) {
+        for (int i = 1; i <= columnCount; ++i) {
+          PropertyIdImpl propertyId = new PropertyIdImpl(metaData.getColumnName(i), metaData.getTableName(i), false);
+          if (pkPropertyIds.contains(propertyId)) {
+            predicates.add(new EqualsPredicate(propertyId, rs.getString(i)));
+          }
+        }
+      }
+
+      predicate = predicates.size() == 0 ? null : predicates.size() > 1 ?
+          new AndPredicate(predicates.toArray(new BasePredicate[2])) :
+          predicates.iterator().next();
+    }
+    return predicate;
+  }
+
+  /**
+   * Get an insert SQL statement based on the given properties.
+   *
+   * @param table      the table
+   * @param properties  the properties
+   *
+   * @return the insert SQL
+   */
+  private String getInsertSQL(String table, Map<PropertyId, Object> properties) {
+
+    StringBuilder columns = new StringBuilder();
+    StringBuilder values = new StringBuilder();
+
+    for (Map.Entry<PropertyId, Object> entry : properties.entrySet()) {
+      PropertyId propertyId = entry.getKey();
+      Object propertyValue = entry.getValue();
+
+      if (columns.length() > 0) {
+        columns.append(", ");
+      }
+      columns.append(propertyId.getName());
+
+      if (values.length() > 0) {
+        values.append(", ");
+      }
+
+      if (propertyValue instanceof String) {
+        values.append("'");
+        values.append(propertyValue);
+        values.append("'");
+      } else {
+        values.append(propertyValue);
+      }
+    }
+
+    return "insert into " + table + " (" +
+        columns + ") values (" + values + ")";
+  }
+
+  /**
+   * Get a select SQL statement based on the given property ids and predicate.
+   *
+   * @param propertyIds  the property ids
+   * @param predicate    the predicate
+   *
+   * @return the select SQL
+   */
+  private String getSelectSQL(Set<PropertyId> propertyIds, Predicate predicate) {
+
+    StringBuilder columns = new StringBuilder();
+    Set<String> tableSet = new HashSet<String>();
+
+    for (PropertyId propertyId : propertyIds) {
+      if (columns.length() > 0) {
+        columns.append(", ");
+      }
+      columns.append(propertyId.getCategory()).append(".").append(propertyId.getName());
+      tableSet.add(propertyId.getCategory());
+    }
+
+    boolean haveWhereClause = false;
+    StringBuilder whereClause = new StringBuilder();
+    if (predicate != null && predicate instanceof PredicateVisitorAcceptor) {
+
+      SQLPredicateVisitor visitor = new SQLPredicateVisitor();
+      PredicateHelper.visit(predicate, visitor);
+      whereClause.append(visitor.getSQL());
+
+      for (PropertyId propertyId : PredicateHelper.getPropertyIds(predicate)) {
+        tableSet.add(propertyId.getCategory());
+      }
+      haveWhereClause = true;
+    }
+
+    StringBuilder joinClause = new StringBuilder();
+
+    if (tableSet.size() > 1) {
+
+      for (String table : tableSet) {
+        Map<PropertyId, PropertyId> joinKeys = importedKeys.get(table);
+        if (joinKeys != null) {
+          for (Map.Entry<PropertyId, PropertyId> entry : joinKeys.entrySet()) {
+            String category1 = entry.getKey().getCategory();
+            String category2 = entry.getValue().getCategory();
+            if (tableSet.contains(category1) && tableSet.contains(category2)) {
+              if (haveWhereClause) {
+                joinClause.append(" AND ");
+              }
+              joinClause.append(category1).append(".").append(entry.getKey().getName());
+              joinClause.append(" = ");
+              joinClause.append(category2).append(".").append(entry.getValue().getName());
+              tableSet.add(category1);
+              tableSet.add(category2);
+
+              haveWhereClause = true;
+            }
+          }
+        }
+      }
+    }
+
+    StringBuilder tables = new StringBuilder();
+
+    for (String table : tableSet) {
+      if (tables.length() > 0) {
+        tables.append(", ");
+      }
+      tables.append(table);
+    }
+
+    String sql = "select " + columns + " from " + tables;
+
+    if (haveWhereClause) {
+      sql = sql + " where " + whereClause + joinClause;
+    }
+    return sql;
+  }
+
+  /**
+   * Get a delete SQL statement based on the given predicate.
+   *
+   * @param table      the table
+   * @param predicate  the predicate
+   *
+   * @return the delete SQL statement
+   */
+  private String getDeleteSQL(String table, Predicate predicate) {
+
+    StringBuilder whereClause = new StringBuilder();
+    if (predicate instanceof BasePredicate) {
+
+      BasePredicate basePredicate = (BasePredicate) predicate;
+
+      SQLPredicateVisitor visitor = new SQLPredicateVisitor();
+      basePredicate.accept(visitor);
+      whereClause.append(visitor.getSQL());
+
+      return "delete from " + table + " where " + whereClause;
+    }
+    throw new IllegalStateException("Can't generate SQL.");
+  }
+
+  /**
+   * Get an update SQL statement based on the given properties and predicate.
+   *
+   * @param table       the table
+   * @param properties  the properties
+   * @param predicate   the predicate
+   *
+   * @return the update SQL statement
+   */
+  private String getUpdateSQL(String table, Map<PropertyId, Object> properties, Predicate predicate) {
+
+    if (predicate instanceof BasePredicate) {
+
+      StringBuilder whereClause = new StringBuilder();
+
+      BasePredicate basePredicate = (BasePredicate) predicate;
+
+      SQLPredicateVisitor visitor = new SQLPredicateVisitor();
+      basePredicate.accept(visitor);
+      whereClause.append(visitor.getSQL());
+
+      StringBuilder setClause = new StringBuilder();
+      for (Map.Entry<PropertyId, Object> entry : properties.entrySet()) {
+
+        if (setClause.length() > 0) {
+          setClause.append(", ");
+        }
+        setClause.append(entry.getKey().getName());
+        setClause.append(" = ");
+        Object propertyValue = entry.getValue();
+
+        if (propertyValue instanceof String) {
+          setClause.append("'");
+          setClause.append(propertyValue);
+          setClause.append("'");
+        } else {
+          setClause.append(propertyValue);
+        }
+      }
+
+      return "update " + table + " set " + setClause + " where " + whereClause;
+    }
+    throw new IllegalStateException("Can't generate SQL.");
+  }
+
+  /**
+   * Get the set of tables associated with the given property ids.
+   *
+   * @param propertyIds  the property ids
+   *
+   * @return the set of tables
+   */
+  private static Set<String> getTables(Set<PropertyId> propertyIds) {
+    Set<String> tables = new HashSet<String>();
+    for (PropertyId propertyId : propertyIds) {
+      tables.add(propertyId.getCategory());
+    }
+    return tables;
   }
 }
+

+ 44 - 0
ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/JDBCProviderModule.java

@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.controller.jdbc;
+
+import org.apache.ambari.api.controller.ProviderModule;
+import org.apache.ambari.api.controller.utilities.DBHelper;
+import org.apache.ambari.api.controller.utilities.Properties;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * A provider module that supplies JDBC-backed resource providers for each resource type.
+ */
+public class JDBCProviderModule implements ProviderModule {
+
+  private static final List<PropertyProvider> PROPERTY_PROVIDERS = new LinkedList<PropertyProvider>();
+
+  @Override
+  public ResourceProvider getResourceProvider(Resource.Type type) {
+    return new JDBCResourceProvider(DBHelper.CONNECTION_FACTORY, type,
+        PROPERTY_PROVIDERS, Properties.getPropertyIds(type, "DB"),
+        Properties.getKeyPropertyIds(type));
+  }
+}

+ 424 - 0
ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/JDBCResourceProvider.java

@@ -0,0 +1,424 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.controller.jdbc;
+
+import org.apache.ambari.api.controller.internal.PropertyIdImpl;
+import org.apache.ambari.api.controller.internal.ResourceImpl;
+import org.apache.ambari.api.controller.internal.SchemaImpl;
+import org.apache.ambari.api.controller.utilities.PredicateHelper;
+import org.apache.ambari.api.controller.utilities.Properties;
+import org.apache.ambari.server.controller.predicate.BasePredicate;
+import org.apache.ambari.server.controller.predicate.PredicateVisitorAcceptor;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.Schema;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Generic JDBC based resource provider.
+ */
+public class JDBCResourceProvider implements ResourceProvider {
+
+  private final Resource.Type type;
+
+  private final Set<PropertyId> propertyIds;
+
+  private final ConnectionFactory connectionFactory;
+
+  /**
+   * The list of property providers for this provider's resource type.
+   */
+  private final List<PropertyProvider> propertyProviders;
+
+  /**
+   * The schema for this provider's resource type.
+   */
+  private final Schema schema;
+
+  /**
+   * Key mappings used for joins.
+   */
+  private final Map<String, Map<PropertyId, PropertyId>> importedKeys = new HashMap<String, Map<PropertyId, PropertyId>>();
+
+  public JDBCResourceProvider(ConnectionFactory connectionFactory,
+                              Resource.Type type,
+                              List<PropertyProvider> propertyProviders,
+                              Set<PropertyId> propertyIds,
+                              Map<Resource.Type, PropertyId> keyPropertyIds) {
+    this.connectionFactory = connectionFactory;
+    this.type = type;
+    this.propertyProviders = propertyProviders;
+    this.propertyIds = propertyIds;
+    this.schema = new SchemaImpl(this, keyPropertyIds);
+  }
+
+  @Override
+  public Set<Resource> getResources(Request request, Predicate predicate) {
+
+    Set<Resource> resources = new HashSet<Resource>();
+    Set<PropertyId> propertyIds = new HashSet<PropertyId>(request.getPropertyIds());
+    if (propertyIds.isEmpty()) {
+      propertyIds.addAll(this.propertyIds);
+    } else {
+      if (predicate != null) {
+        propertyIds.addAll(PredicateHelper.getPropertyIds(predicate));
+      }
+      propertyIds.retainAll(this.propertyIds);
+    }
+
+    try {
+      Connection connection = connectionFactory.getConnection();
+
+      try {
+
+        for (String table : getTables(propertyIds)) {
+          getImportedKeys(connection, table);
+        }
+
+        String sql = getSelectSQL(propertyIds, predicate);
+        Statement statement = connection.createStatement();
+
+        ResultSet rs = statement.executeQuery(sql);
+
+        while (rs.next()) {
+          ResultSetMetaData metaData = rs.getMetaData();
+          int columnCount = metaData.getColumnCount();
+
+          final ResourceImpl resource = new ResourceImpl(type);
+          for (int i = 1; i <= columnCount; ++i) {
+            PropertyIdImpl propertyId = new PropertyIdImpl(metaData.getColumnName(i), metaData.getTableName(i), false);
+            if (propertyIds.contains(propertyId)) {
+              resource.setProperty(propertyId, rs.getString(i));
+            }
+          }
+          resources.add(resource);
+        }
+
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+
+    return resources;
+  }
+
+  @Override
+  public void createResources(Request request) {
+    try {
+      Connection connection = connectionFactory.getConnection();
+
+      try {
+
+        Set<Map<PropertyId, Object>> propertySet = request.getProperties();
+
+        for (Map<PropertyId, Object> properties : propertySet) {
+          String sql = getInsertSQL(properties);
+
+          Statement statement = connection.createStatement();
+
+          statement.execute(sql);
+        }
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+  }
+
+  @Override
+  public void updateResources(Request request, Predicate predicate) {
+
+    try {
+      Connection connection = connectionFactory.getConnection();
+      try {
+        Set<Map<PropertyId, Object>> propertySet = request.getProperties();
+
+        Map<PropertyId, Object> properties = propertySet.iterator().next();
+
+        String sql = getUpdateSQL(properties, predicate);
+
+        System.out.println(sql);
+
+        Statement statement = connection.createStatement();
+
+        statement.execute(sql);
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+  }
+
+  @Override
+  public void deleteResources(Predicate predicate) {
+    try {
+      Connection connection = connectionFactory.getConnection();
+      try {
+        String sql = getDeleteSQL(predicate);
+
+        Statement statement = connection.createStatement();
+        statement.execute(sql);
+      } finally {
+        connection.close();
+      }
+
+    } catch (SQLException e) {
+      throw new IllegalStateException("DB error : ", e);
+    }
+  }
+
+
+  private String getInsertSQL(Map<PropertyId, Object> properties) {
+
+    StringBuilder columns = new StringBuilder();
+    StringBuilder values = new StringBuilder();
+    String table = null;
+
+
+    for (Map.Entry<PropertyId, Object> entry : properties.entrySet()) {
+      PropertyId propertyId    = entry.getKey();
+      String     propertyValue = (String) entry.getValue();
+
+      table = propertyId.getCategory();
+
+
+      if (columns.length() > 0) {
+        columns.append(", ");
+      }
+      columns.append(propertyId.getName());
+
+      if (values.length() > 0) {
+        values.append(", ");
+      }
+      values.append("'");
+      values.append(propertyValue);
+      values.append("'");
+    }
+
+    return "insert into " + table + " (" +
+      columns + ") values (" +values + ")";
+  }
+
+  private String getSelectSQL(Set<PropertyId> propertyIds, Predicate predicate) {
+
+    StringBuilder columns = new StringBuilder();
+    Set<String> tableSet = new HashSet<String>();
+
+    for (PropertyId propertyId : propertyIds) {
+      if (columns.length() > 0) {
+        columns.append(", ");
+      }
+      columns.append(propertyId.getCategory()).append(".").append(propertyId.getName());
+      tableSet.add(propertyId.getCategory());
+    }
+
+
+    boolean haveWhereClause = false;
+    StringBuilder whereClause = new StringBuilder();
+    if (predicate != null &&
+        propertyIds.containsAll(PredicateHelper.getPropertyIds(predicate)) &&
+        predicate instanceof PredicateVisitorAcceptor) {
+
+      SQLPredicateVisitor visitor = new SQLPredicateVisitor();
+      ((PredicateVisitorAcceptor) predicate).accept(visitor);
+      whereClause.append(visitor.getSQL());
+      haveWhereClause = true;
+    }
+
+    StringBuilder joinClause = new StringBuilder();
+
+    if (tableSet.size() > 1) {
+
+      for (String table : tableSet) {
+        Map<PropertyId, PropertyId> joinKeys = importedKeys.get(table);
+        if (joinKeys != null) {
+          for (Map.Entry<PropertyId, PropertyId> entry : joinKeys.entrySet()) {
+            String category1 = entry.getKey().getCategory();
+            String category2 = entry.getValue().getCategory();
+            if (tableSet.contains(category1) && tableSet.contains(category2)) {
+              if (haveWhereClause) {
+                joinClause.append(" AND ");
+              }
+              joinClause.append(category1).append(".").append(entry.getKey().getName());
+              joinClause.append(" = ");
+              joinClause.append(category2).append(".").append(entry.getValue().getName());
+              tableSet.add(category1);
+              tableSet.add(category2);
+
+              haveWhereClause = true;
+            }
+          }
+        }
+      }
+    }
+
+    StringBuilder tables = new StringBuilder();
+
+    for (String table : tableSet) {
+      if (tables.length() > 0) {
+        tables.append(", ");
+      }
+      tables.append(table);
+    }
+
+    String sql = "select " + columns + " from " + tables;
+
+    if (haveWhereClause) {
+      sql = sql + " where " + whereClause + joinClause;
+    }
+
+    System.out.println(sql);
+
+    return sql;
+  }
+
+  private String getDeleteSQL(Predicate predicate) {
+
+    StringBuilder whereClause = new StringBuilder();
+    if (predicate instanceof BasePredicate) {
+
+      BasePredicate basePredicate = (BasePredicate) predicate;
+
+      SQLPredicateVisitor visitor = new SQLPredicateVisitor();
+      basePredicate.accept(visitor);
+      whereClause.append(visitor.getSQL());
+
+      String table = basePredicate.getPropertyIds().iterator().next().getCategory();
+
+      return "delete from " + table + " where " + whereClause;
+    }
+    throw new IllegalStateException("Can't generate SQL.");
+  }
+
+  private String getUpdateSQL(Map<PropertyId, Object> properties, Predicate predicate) {
+
+    if (predicate instanceof BasePredicate) {
+
+      StringBuilder whereClause = new StringBuilder();
+
+      BasePredicate basePredicate = (BasePredicate) predicate;
+
+      SQLPredicateVisitor visitor = new SQLPredicateVisitor();
+      basePredicate.accept(visitor);
+      whereClause.append(visitor.getSQL());
+
+      String table = basePredicate.getPropertyIds().iterator().next().getCategory();
+
+
+      StringBuilder setClause = new StringBuilder();
+      for (Map.Entry<PropertyId, Object> entry : properties.entrySet()) {
+
+        if (setClause.length() > 0) {
+          setClause.append(", ");
+        }
+        setClause.append(entry.getKey().getName());
+        setClause.append(" = ");
+        setClause.append("'");
+        setClause.append(entry.getValue());
+        setClause.append("'");
+      }
+
+      return "update " + table + " set " + setClause + " where " + whereClause;
+    }
+    throw new IllegalStateException("Can't generate SQL.");
+  }
+
+  @Override
+  public Set<PropertyId> getPropertyIds() {
+    return propertyIds;
+  }
+
+  @Override
+  public List<PropertyProvider> getPropertyProviders() {
+    return propertyProviders;
+  }
+
+  @Override
+  public Schema getSchema() {
+    return schema;
+  }
+
+  /**
+   * Lazily populate the imported key mappings for the given table.
+   *
+   * @param connection  the connection to use to obtain the database meta data
+   * @param table       the table
+   *
+   * @throws SQLException thrown if the meta data for the given connection cannot be obtained
+   */
+  private void getImportedKeys(Connection connection, String table) throws SQLException {
+    if (!this.importedKeys.containsKey(table)) {
+
+      Map<PropertyId, PropertyId> importedKeys = new HashMap<PropertyId, PropertyId>();
+      this.importedKeys.put(table, importedKeys);
+
+      DatabaseMetaData metaData = connection.getMetaData();
+
+      ResultSet rs = metaData.getImportedKeys(connection.getCatalog(), null, table);
+
+      while (rs.next()) {
+
+        PropertyId pkPropertyId = Properties.getPropertyId(
+            rs.getString("PKCOLUMN_NAME"), rs.getString("PKTABLE_NAME"));
+
+        PropertyId fkPropertyId = Properties.getPropertyId(
+            rs.getString("FKCOLUMN_NAME"), rs.getString("FKTABLE_NAME"));
+
+        importedKeys.put(pkPropertyId, fkPropertyId);
+      }
+    }
+  }
+
+  /**
+   * Get the set of tables associated with the given property ids.
+   *
+   * @param propertyIds  the property ids
+   *
+   * @return the set of tables
+   */
+  private static Set<String> getTables(Set<PropertyId> propertyIds) {
+    Set<String> tables = new HashSet<String>();
+    for (PropertyId propertyId : propertyIds) {
+      tables.add(propertyId.getCategory());
+    }
+    return tables;
+  }
+}

+ 15 - 7
ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/SQLPredicateVisitor.java

@@ -18,20 +18,26 @@
 
 package org.apache.ambari.api.controller.jdbc;
 
-import org.apache.ambari.api.controller.predicate.ArrayPredicate;
-import org.apache.ambari.api.controller.predicate.BasePredicate;
-import org.apache.ambari.api.controller.predicate.ComparisonPredicate;
-import org.apache.ambari.api.controller.predicate.PredicateVisitor;
-import org.apache.ambari.api.controller.predicate.UnaryPredicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.predicate.ArrayPredicate;
+import org.apache.ambari.server.controller.predicate.BasePredicate;
+import org.apache.ambari.server.controller.predicate.ComparisonPredicate;
+import org.apache.ambari.server.controller.predicate.PredicateVisitor;
+import org.apache.ambari.server.controller.predicate.UnaryPredicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
 
 /**
- *
+ * Predicate visitor used to generate a SQL where clause from a predicate graph.
  */
 public class SQLPredicateVisitor implements PredicateVisitor {
 
+  /**
+   * The string builder.
+   */
   private final StringBuilder stringBuilder = new StringBuilder();
 
+
+  // ----- PredicateVisitor --------------------------------------------------
+
   @Override
   public void acceptComparisonPredicate(ComparisonPredicate predicate) {
     PropertyId propertyId = predicate.getPropertyId();
@@ -71,6 +77,8 @@ public class SQLPredicateVisitor implements PredicateVisitor {
   }
 
 
+  // ----- SQLPredicateVisitor -----------------------------------------------
+
   public String getSQL() {
     return stringBuilder.toString();
   }

+ 19 - 2
ambari-api/src/main/java/org/apache/ambari/api/controller/jdbc/SQLiteConnectionFactory.java

@@ -23,14 +23,28 @@ import java.sql.DriverManager;
 import java.sql.SQLException;
 
 /**
- *
+ * Connection factory implementation for SQLite.
  */
 public class SQLiteConnectionFactory implements ConnectionFactory {
 
-  public static final String CONNECTION_URL = "jdbc:sqlite:";
+  /**
+   * The connection URL minus the db file.
+   */
+  private static final String CONNECTION_URL = "jdbc:sqlite:";
 
+  /**
+   * The filename of the SQLite db file.
+   */
   private final String dbFile;
 
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a connection factory.
+   *
+   * @param dbFile  the SQLite DB filename
+   */
   public SQLiteConnectionFactory(String dbFile) {
     this.dbFile = dbFile;
     try {
@@ -40,6 +54,9 @@ public class SQLiteConnectionFactory implements ConnectionFactory {
     }
   }
 
+
+  // ----- ConnectionFactory -------------------------------------------------
+
   @Override
   public Connection getConnection() throws SQLException {
     return DriverManager.getConnection(CONNECTION_URL + dbFile);

+ 5 - 5
ambari-api/src/main/java/org/apache/ambari/api/controller/jmx/JMXPropertyProvider.java

@@ -19,11 +19,11 @@
 package org.apache.ambari.api.controller.jmx;
 
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.api.controller.utilities.PredicateHelper;
 import org.apache.ambari.api.controller.utilities.Properties;
 

+ 15 - 40
ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/ClusterControllerHelper.java

@@ -18,57 +18,32 @@
 
 package org.apache.ambari.api.controller.utilities;
 
+import org.apache.ambari.api.controller.ProviderModule;
 import org.apache.ambari.api.controller.internal.ClusterControllerImpl;
-import org.apache.ambari.api.controller.internal.PropertyIdImpl;
-import org.apache.ambari.api.controller.internal.ResourceProviderImpl;
-import org.apache.ambari.api.controller.internal.SchemaImpl;
-import org.apache.ambari.api.controller.jdbc.JDBCManagementController;
-import org.apache.ambari.api.controller.spi.ClusterController;
-import org.apache.ambari.api.controller.spi.ManagementController;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.ResourceProvider;
-import org.apache.ambari.api.controller.spi.Schema;
-
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
+import org.apache.ambari.api.controller.internal.DefaultProviderModule;
+import org.apache.ambari.server.controller.spi.ClusterController;
 
 /**
  * Temporary class to bootstrap a cluster controller.  TODO : Replace this global state with injection.
  */
 public class ClusterControllerHelper {
+
+  private static String PROVIDER_MODULE_CLASS = System.getProperty("provider.module.class",
+      "org.apache.ambari.api.controller.internal.DefaultProviderModule");
+
   private static ClusterController controller;
 
   public static synchronized ClusterController getClusterController() {
     if (controller == null) {
-      controller = new ClusterControllerImpl(getResourceSchemas());
+      try {
+        Class implClass = Class.forName(PROVIDER_MODULE_CLASS);
+        ProviderModule providerModule = (ProviderModule) implClass.newInstance();
+        return new ClusterControllerImpl(providerModule);
+
+      } catch (Exception e) {
+        throw new IllegalStateException("Can't create provider module " + PROVIDER_MODULE_CLASS);
+      }
     }
     return controller;
   }
-
-  private static Map<Resource.Type, Schema> getResourceSchemas() {
-    Map<Resource.Type, Schema> schemas = new HashMap<Resource.Type, Schema>();
-
-    schemas.put(Resource.Type.Cluster, getResourceSchema(Resource.Type.Cluster));
-    schemas.put(Resource.Type.Service, getResourceSchema(Resource.Type.Service));
-    schemas.put(Resource.Type.Host, getResourceSchema(Resource.Type.Host));
-    schemas.put(Resource.Type.Component, getResourceSchema(Resource.Type.Component));
-    schemas.put(Resource.Type.HostComponent, getResourceSchema(Resource.Type.HostComponent));
-
-    return schemas;
-  }
-
-  private static Schema getResourceSchema(Resource.Type type) {
-
-    ManagementController managementController = new JDBCManagementController(DBHelper.CONNECTION_FACTORY);
-
-    ResourceProvider resourceProvider = ResourceProviderImpl.getResourceProvider(type, Properties.getPropertyIds(type, "DB"), managementController);
-
-    List<PropertyProvider> propertyProviders = new LinkedList<PropertyProvider>();
-
-    return new SchemaImpl(resourceProvider, propertyProviders, Properties.getKeyPropertyIds(type));
-  }
 }

+ 0 - 1
ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/DBHelper.java

@@ -54,7 +54,6 @@ public class DBHelper {
         String sql = "select attributes from hosts";
 
         Statement statement = connection.createStatement();
-        statement.setQueryTimeout(30);  // set timeout to 30 sec.
 
         ResultSet rs = statement.executeQuery(sql);
 

+ 11 - 11
ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/PredicateBuilder.java

@@ -18,17 +18,17 @@
 package org.apache.ambari.api.controller.utilities;
 
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
-import org.apache.ambari.api.controller.predicate.AndPredicate;
-import org.apache.ambari.api.controller.predicate.BasePredicate;
-import org.apache.ambari.api.controller.predicate.Comparables;
-import org.apache.ambari.api.controller.predicate.EqualsPredicate;
-import org.apache.ambari.api.controller.predicate.GreaterEqualsPredicate;
-import org.apache.ambari.api.controller.predicate.GreaterPredicate;
-import org.apache.ambari.api.controller.predicate.LessEqualsPredicate;
-import org.apache.ambari.api.controller.predicate.LessPredicate;
-import org.apache.ambari.api.controller.predicate.NotPredicate;
-import org.apache.ambari.api.controller.predicate.OrPredicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.predicate.AndPredicate;
+import org.apache.ambari.server.controller.predicate.BasePredicate;
+import org.apache.ambari.server.controller.predicate.Comparables;
+import org.apache.ambari.server.controller.predicate.EqualsPredicate;
+import org.apache.ambari.server.controller.predicate.GreaterEqualsPredicate;
+import org.apache.ambari.server.controller.predicate.GreaterPredicate;
+import org.apache.ambari.server.controller.predicate.LessEqualsPredicate;
+import org.apache.ambari.server.controller.predicate.LessPredicate;
+import org.apache.ambari.server.controller.predicate.NotPredicate;
+import org.apache.ambari.server.controller.predicate.OrPredicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
 
 import java.util.LinkedList;
 import java.util.List;

+ 11 - 3
ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/PredicateHelper.java

@@ -17,9 +17,11 @@
  */
 package org.apache.ambari.api.controller.utilities;
 
-import org.apache.ambari.api.controller.predicate.BasePredicate;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.predicate.BasePredicate;
+import org.apache.ambari.server.controller.predicate.PredicateVisitor;
+import org.apache.ambari.server.controller.predicate.PredicateVisitorAcceptor;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
 
 import java.util.Collections;
 import java.util.Set;
@@ -35,4 +37,10 @@ public class PredicateHelper {
     }
     return Collections.emptySet();
   }
+
+  public static void visit(Predicate predicate, PredicateVisitor visitor) {
+    if (predicate instanceof PredicateVisitorAcceptor) {
+      ((PredicateVisitorAcceptor) predicate).accept(visitor);
+    }
+  }
 }

+ 2 - 2
ambari-api/src/main/java/org/apache/ambari/api/controller/utilities/Properties.java

@@ -18,8 +18,8 @@
 package org.apache.ambari.api.controller.utilities;
 
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 

+ 33 - 0
ambari-api/src/main/java/org/apache/ambari/api/handlers/CreateHandler.java

@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.handlers;
+
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.api.services.Result;
+
+/**
+ * Responsible for create requests.
+ */
+public class CreateHandler implements RequestHandler {
+  @Override
+  public Result handleRequest(Request request) {
+    //TODO: implement
+    return null;
+  }
+}

+ 11 - 2
ambari-api/src/main/java/org/apache/ambari/api/handlers/DelegatingRequestHandler.java

@@ -22,14 +22,23 @@ import org.apache.ambari.api.services.Request;
 import org.apache.ambari.api.services.Result;
 
 /**
- *
+ * Request handler implementation that all requests are funneled through.
+ * Provides common handler functionality and delegates to concrete handler.
  */
 public class DelegatingRequestHandler implements RequestHandler {
   @Override
   public Result handleRequest(Request request) {
-    return getRequestHandlerFactory().getRequestHandler(request.getRequestType()).handleRequest(request);
+    Result result = getRequestHandlerFactory().getRequestHandler(request.getRequestType()).handleRequest(request);
+    request.getResultPostProcessor().process(result);
+
+    return result;
   }
 
+  /**
+   * Obtain a factory for the request specific concrete request handlers.
+   *
+   * @return A request handler factory
+   */
   RequestHandlerFactory getRequestHandlerFactory() {
     return new RequestHandlerFactory();
   }

+ 33 - 0
ambari-api/src/main/java/org/apache/ambari/api/handlers/DeleteHandler.java

@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.handlers;
+
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.api.services.Result;
+
+/**
+ * Responsible for delete requests.
+ */
+public class DeleteHandler implements RequestHandler {
+  @Override
+  public Result handleRequest(Request request) {
+    //TODO: implement
+    return null;
+  }
+}

+ 53 - 0
ambari-api/src/main/java/org/apache/ambari/api/handlers/ReadHandler.java

@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.handlers;
+
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.api.services.Result;
+import org.apache.ambari.api.query.Query;
+import org.apache.ambari.server.AmbariException;
+
+/**
+ * Responsible for read requests.
+ */
+public class ReadHandler implements RequestHandler {
+
+  @Override
+  public Result handleRequest(Request request) {
+    Query query = request.getResourceDefinition().getQuery();
+
+    //Partial response
+    //todo: could be encapsulated in request/query
+    for (String s : request.getPartialResponseFields()) {
+      int i = s.lastIndexOf('/');
+      if (i == -1) {
+        query.addProperty(null, s);
+      } else {
+        query.addProperty(s.substring(0, i), s.substring(i + 1));
+      }
+    }
+
+    try {
+      return query.execute();
+    } catch (AmbariException e) {
+      //TODO: exceptions
+      throw new RuntimeException("An exception occurred processing the request: " + e, e);
+    }
+  }
+}

+ 8 - 1
ambari-api/src/main/java/org/apache/ambari/api/handlers/RequestHandler.java

@@ -20,10 +20,17 @@ package org.apache.ambari.api.handlers;
 
 import org.apache.ambari.api.services.Request;
 import org.apache.ambari.api.services.Result;
+import org.apache.ambari.server.AmbariException;
 
 /**
- *
+ * Responsible for handling of requests and returning a result.
  */
 public interface RequestHandler {
+  /**
+   * Handle the given request and return a result.
+   *
+   * @param request the request to handle
+   * @return the result of the request
+   */
   public Result handleRequest(Request request);
 }

+ 17 - 4
ambari-api/src/main/java/org/apache/ambari/api/handlers/RequestHandlerFactory.java

@@ -21,16 +21,29 @@ package org.apache.ambari.api.handlers;
 import org.apache.ambari.api.services.Request;
 
 /**
- *
+ * Factory for {@link RequestHandler}
+ * Returns the appropriate request handler based on the request.
  */
 public class RequestHandlerFactory {
-  public RequestHandler getRequestHandler(Request.RequestType requestType) {
+  /**
+   * Return an instance of the correct request handler based on the request type.
+   *
+   * @param requestType the request type.  Is one of {@link Request.Type}
+   * @return a request handler for the request
+   */
+  public RequestHandler getRequestHandler(Request.Type requestType) {
     switch (requestType) {
       case GET:
-        return new ReadRequestHandler();
+        return new ReadHandler();
+      case PUT:
+        return new CreateHandler();
+      case POST:
+        return new UpdateHandler();
+      case DELETE:
+        return new DeleteHandler();
       default:
         //todo:
-        throw new UnsupportedOperationException("Only GET requests are supported at this time");
+        throw new UnsupportedOperationException("Unsupported Request Type: " + requestType);
     }
   }
 }

+ 33 - 0
ambari-api/src/main/java/org/apache/ambari/api/handlers/UpdateHandler.java

@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.handlers;
+
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.api.services.Result;
+
+/**
+ * Responsible for update requests.
+ */
+public class UpdateHandler implements RequestHandler {
+  @Override
+  public Result handleRequest(Request request) {
+    //TODO: implement
+    return null;
+  }
+}

+ 25 - 13
ambari-api/src/main/java/org/apache/ambari/api/query/Query.java

@@ -19,25 +19,37 @@
 package org.apache.ambari.api.query;
 
 import org.apache.ambari.api.services.Result;
-import org.apache.ambari.api.controller.spi.PropertyId;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.spi.PropertyId;
 
-import java.util.Map;
-import java.util.Set;
 
 /**
- *
+ * Responsible for querying the back end for read requests
  */
 public interface Query {
-  public void addAllProperties(Map<String, Set<String>> setProperties);
-
-  public void addProperty(String path, String property);
 
+  /**
+   * Add a property to the query.
+   * This is the select portion of the query.
+   *
+   * @param group    the group name that contains the property
+   * @param property the property name
+   */
+  public void addProperty(String group, String property);
+
+  /**
+   * Add a property to the query.
+   * This is the select portion of the query.
+   *
+   * @param property the property id which contains the group, property name
+   *                 and whether the property is temporal
+   */
   public void addProperty(PropertyId property);
 
-  //todo: signature - need path
-  public void retainAllProperties(Set<String> setFields);
-
-  public void clearAllProperties();
-
-  public Result execute();
+  /**
+   * Execute the query.
+   *
+   * @return the result of the query.
+   */
+  public Result execute() throws AmbariException;
 }

+ 133 - 78
ambari-api/src/main/java/org/apache/ambari/api/query/QueryImpl.java

@@ -20,107 +20,162 @@ package org.apache.ambari.api.query;
 
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
 import org.apache.ambari.api.controller.internal.RequestImpl;
-import org.apache.ambari.api.controller.predicate.AndPredicate;
-import org.apache.ambari.api.controller.predicate.BasePredicate;
-import org.apache.ambari.api.controller.predicate.EqualsPredicate;
 import org.apache.ambari.api.controller.utilities.ClusterControllerHelper;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.predicate.AndPredicate;
+import org.apache.ambari.server.controller.predicate.BasePredicate;
+import org.apache.ambari.server.controller.predicate.EqualsPredicate;
 import org.apache.ambari.api.services.Result;
 import org.apache.ambari.api.services.ResultImpl;
-import org.apache.ambari.api.controller.spi.*;
+import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.api.resource.ResourceDefinition;
+import org.apache.ambari.api.util.TreeNode;
 
 import java.util.*;
 
 /**
- *
+ * Default read query.
  */
 public class QueryImpl implements Query {
+  /**
+   * Resource definition of resource being operated on.
+   */
   ResourceDefinition m_resourceDefinition;
-  Predicate m_predicate;
-  private Map<String, Set<String>> m_mapProperties = new HashMap<String, Set<String>>();
-  private Map<ResourceDefinition, Query> m_mapSubQueries = new HashMap<ResourceDefinition, Query>();
 
+  /**
+   * Properties of the query which make up the select portion of the query.
+   */
+  private Map<String, Set<String>> m_mapQueryProperties = new HashMap<String, Set<String>>();
+
+  /**
+   * All properties that are available for the resource.
+   */
+  private Map<String, Set<String>> m_mapAllProperties;
+
+  /**
+   * Sub-resources of the resource which is being operated on.
+   */
+  private Map<String, ResourceDefinition> m_mapSubResources = new HashMap<String, ResourceDefinition>();
 
+
+  /**
+   * Constructor.
+   *
+   * @param resourceDefinition the resource definition of the resource being operated on
+   */
   public QueryImpl(ResourceDefinition resourceDefinition) {
     m_resourceDefinition = resourceDefinition;
+    m_mapAllProperties = Collections.unmodifiableMap(getClusterController().
+        getSchema(resourceDefinition.getType()).getCategories());
+  }
+
+  @Override
+  public void addProperty(String path, String property) {
+    if (m_mapAllProperties.containsKey(path) && m_mapAllProperties.get(path).contains(property)) {
+      // local property
+      Set<String> setProps = m_mapQueryProperties.get(path);
+      if (setProps == null) {
+        setProps = new HashSet<String>();
+        m_mapQueryProperties.put(path, setProps);
+      }
+      setProps.add(property);
+    } else if (m_mapAllProperties.containsKey(property)) {
+      // no path specified because path is provided as property
+      //local category
+      Set<String> setProps = m_mapQueryProperties.get(property);
+      if (setProps == null) {
+        setProps = new HashSet<String>();
+        m_mapQueryProperties.put(property, setProps);
+      }
+      // add all props for category
+      setProps.addAll(m_mapAllProperties.get(property));
+    } else {
+      // not a local category/property
+      boolean success = addPropertyToSubResource(path, property);
+      if (!success) {
+        //TODO
+        throw new RuntimeException("Attempted to add invalid property to resource.  Resource=" +
+            m_resourceDefinition.getType() + ", Property: Category=" + path + " Field=" + property);
+      }
+    }
   }
 
   @Override
-  public Result execute() {
-    initialize();
+  public void addProperty(PropertyId property) {
+    addProperty(property.getCategory(), property.getName());
+  }
 
+  @Override
+  public Result execute() throws AmbariException {
     Result result = createResult();
+
+    if (m_resourceDefinition.getId() == null) {
+      // collection, add pk only
+      Schema schema = getClusterController().getSchema(m_resourceDefinition.getType());
+      addProperty(schema.getKeyPropertyId(m_resourceDefinition.getType()));
+      result.getResultTree().setProperty("isCollection", "true");
+    }
+
+    if (m_mapQueryProperties.isEmpty() && m_mapSubResources.isEmpty()) {
+      //Add sub resource properties for default case where no fields are specified.
+      m_mapSubResources.putAll(m_resourceDefinition.getSubResources());
+    }
+
+    Predicate predicate = createPredicate(m_resourceDefinition);
     Iterable<Resource> iterResource = getClusterController().getResources(
-        m_resourceDefinition.getType(), createRequest(), m_predicate);
+        m_resourceDefinition.getType(), createRequest(), predicate);
 
-    List<Resource> listResources = new ArrayList<Resource>();
     for (Resource resource : iterResource) {
-      listResources.add(resource);
-    }
-    //todo: tree?
-    result.addResources("/", listResources);
-
-    for (Map.Entry<ResourceDefinition, Query> entry : m_mapSubQueries.entrySet()) {
-      Query query = entry.getValue();
-      ResourceDefinition resDef = entry.getKey();
-
-      //todo: this ensures that the sub query is only executed if needed.  Refactor.
-      if (m_mapProperties.isEmpty() || m_mapProperties.containsKey(resDef.getId() == null ?
-          resDef.getPluralName() : resDef.getSingularName())) {
-        Map<String, List<Resource>> mapSubResults = query.execute().getResources();
-        //todo: only getting sub-resource one level deep at this time
-        List<Resource> listSubResources = mapSubResults.get("/");
-        String subResourceName = resDef.getId() == null ? resDef.getPluralName() : resDef.getSingularName();
-        result.addResources(subResourceName, listSubResources);
+      TreeNode<Resource> node = result.getResultTree().addChild(resource, null);
+
+      for (Map.Entry<String, ResourceDefinition> entry : m_mapSubResources.entrySet()) {
+        String subResCategory = entry.getKey();
+        ResourceDefinition r = entry.getValue();
+
+        r.setParentId(m_resourceDefinition.getType(), resource.getPropertyValue(
+            getClusterController().getSchema(m_resourceDefinition.getType()).
+                getKeyPropertyId(m_resourceDefinition.getType())));
+
+        TreeNode<Resource> childResult = r.getQuery().execute().getResultTree();
+        childResult.setName(subResCategory);
+        childResult.setProperty("isCollection", "false");
+        node.addChild(childResult);
       }
     }
 
     return result;
   }
 
-  //todo: refactor
-  public void initialize() {
-    m_predicate = createPredicate(m_resourceDefinition);
 
-    if (m_resourceDefinition.getId() != null) {
-      //sub-resource queries
-      for (ResourceDefinition resource : m_resourceDefinition.getChildren()) {
-        m_mapSubQueries.put(resource, resource.getQuery());
-      }
-      for (ResourceDefinition resource : m_resourceDefinition.getRelations()) {
-        m_mapSubQueries.put(resource, resource.getQuery());
-      }
-    }
-  }
+  private boolean addPropertyToSubResource(String path, String property) {
+    boolean resourceAdded = false;
 
-  @Override
-  public void addAllProperties(Map<String, Set<String>> mapProperties) {
-    m_mapProperties.putAll(mapProperties);
-  }
+    // cases:
+    // path is null, property is path
+    // path is single token and prop in non null
+    // path is multi level and prop is non null
 
-  @Override
-  public void addProperty(String path, String property) {
-    Set<String> setProps = m_mapProperties.get(path);
-    if (setProps == null) {
-      setProps = new HashSet<String>();
-      m_mapProperties.put(path, setProps);
+    if (path == null) {
+      path = property;
+      property = null;
     }
-    setProps.add(property);
-  }
 
-  @Override
-  public void addProperty(PropertyId property) {
-    addProperty(property.getCategory(), property.getName());
-  }
+    int i = path.indexOf("/");
+    String p = i == -1 ? path : path.substring(0, i);
 
-  @Override
-  public void retainAllProperties(Set<String> setFields) {
-    //todo
-  }
+    ResourceDefinition subResource = m_resourceDefinition.getSubResources().get(p);
+    if (subResource != null) {
+      m_mapSubResources.put(p, subResource);
+      //todo: handle case of trailing /
+      //todo: for example fields=subResource/
 
-  @Override
-  public void clearAllProperties() {
-    m_mapProperties.clear();
+      if (property != null || !path.equals(p)) {
+        //only add if a sub property is set or if a sub category is specified
+        subResource.getQuery().addProperty(i == -1 ? null : path.substring(i + 1), property);
+      }
+      resourceAdded = true;
+    }
+    return resourceAdded;
   }
 
   private Predicate createPredicate(ResourceDefinition resourceDefinition) {
@@ -129,31 +184,33 @@ public class QueryImpl implements Query {
     Map<Resource.Type, String> mapResourceIds = resourceDefinition.getResourceIds();
     Schema schema = getClusterController().getSchema(resourceType);
 
-    BasePredicate[] predicates = new BasePredicate[mapResourceIds.size()];
-    int count = 0;
+    Set<Predicate> setPredicates = new HashSet<Predicate>();
     for (Map.Entry<Resource.Type, String> entry : mapResourceIds.entrySet()) {
-      predicates[count++] = new EqualsPredicate(schema.getKeyPropertyId(entry.getKey()), entry.getValue());
+      //todo: this is a hack for host_component and component queries where serviceId is not available for
+      //todo: host_component queries and host is not available for component queries.
+      //todo: this should be rectified when the data model is changed for host_component
+      if (entry.getValue() != null) {
+        setPredicates.add(new EqualsPredicate(schema.getKeyPropertyId(entry.getKey()), entry.getValue()));
+      }
     }
 
-    if (predicates.length == 1) {
-      return predicates[0];
-    } else if (predicates.length > 1) {
-      return new AndPredicate(predicates);
+    if (setPredicates.size() == 1) {
+      return setPredicates.iterator().next();
+    } else if (setPredicates.size() > 1) {
+      return new AndPredicate(setPredicates.toArray(new BasePredicate[setPredicates.size()]));
     } else {
       return null;
     }
   }
 
-  //todo: how to get Controller?
   ClusterController getClusterController() {
     return ClusterControllerHelper.getClusterController();
   }
 
-  //todo
   Request createRequest() {
     Set<PropertyId> setProperties = new HashSet<PropertyId>();
-    //todo: convert property names to PropertyId's.
-    for (Map.Entry<String, Set<String>> entry : m_mapProperties.entrySet()) {
+
+    for (Map.Entry<String, Set<String>> entry : m_mapQueryProperties.entrySet()) {
       String group = entry.getKey();
       for (String property : entry.getValue()) {
         setProperties.add(new PropertyIdImpl(property, group, false));
@@ -162,10 +219,8 @@ public class QueryImpl implements Query {
     return new RequestImpl(setProperties, null);
   }
 
-  //todo
   Result createResult() {
     return new ResultImpl();
   }
 
-
 }

+ 83 - 17
ambari-api/src/main/java/org/apache/ambari/api/resource/BaseResourceDefinition.java

@@ -22,29 +22,58 @@ package org.apache.ambari.api.resource;
 import org.apache.ambari.api.controller.utilities.ClusterControllerHelper;
 import org.apache.ambari.api.query.Query;
 import org.apache.ambari.api.query.QueryImpl;
-import org.apache.ambari.api.controller.spi.ClusterController;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.server.controller.spi.ClusterController;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Schema;
+import org.apache.ambari.api.util.TreeNode;
 
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 /**
- *
+ * Base resource definition.  Contains behavior common to all resource types.
  */
 public abstract class BaseResourceDefinition implements ResourceDefinition {
 
+  /**
+   * Resource type.  One of {@link Resource.Type}
+   */
   private Resource.Type m_type;
+
+  /**
+   * Value of the id property for the resource.
+   */
   private String m_id;
-  private Query m_query = new QueryImpl(this);
-  Map<Resource.Type, String> m_mapResourceIds = new HashMap<Resource.Type, String>();
 
+  /**
+   * Query associated with the resource definition.
+   */
+  private Query m_query;
+
+  /**
+   * Map of primary and foreign keys and values necessary to identify the resource.
+   */
+  private Map<Resource.Type, String> m_mapResourceIds = new HashMap<Resource.Type, String>();
+
+
+  /**
+   * Constructor.
+   *
+   * @param resourceType resource type
+   * @param id           value of primary key
+   */
   public BaseResourceDefinition(Resource.Type resourceType, String id) {
     m_type = resourceType;
-    m_id = id;
+    setId(id);
+    m_query = new QueryImpl(this);
+  }
 
-    if (id != null) {
-      setResourceId(resourceType, id);
-    }
+  @Override
+  public void setParentId(Resource.Type type, String value) {
+    setResourceId(type, value);
   }
 
   @Override
@@ -52,6 +81,11 @@ public abstract class BaseResourceDefinition implements ResourceDefinition {
     return m_id;
   }
 
+  void setId(String val) {
+    setResourceId(getType(), val);
+    m_id = val;
+  }
+
   @Override
   public Resource.Type getType() {
     return m_type;
@@ -64,10 +98,7 @@ public abstract class BaseResourceDefinition implements ResourceDefinition {
   }
 
   protected void setResourceId(Resource.Type resourceType, String val) {
-    //todo: hack for case where service id is null when getting a component from hostComponent
-    if (val != null) {
-      m_mapResourceIds.put(resourceType, val);
-    }
+    m_mapResourceIds.put(resourceType, val);
   }
 
   @Override
@@ -75,6 +106,19 @@ public abstract class BaseResourceDefinition implements ResourceDefinition {
     return m_mapResourceIds;
   }
 
+  ClusterController getClusterController() {
+    return ClusterControllerHelper.getClusterController();
+  }
+
+  @Override
+  public List<PostProcessor> getPostProcessors() {
+    List<PostProcessor> listProcessors = new ArrayList<PostProcessor>();
+    listProcessors.add(new BaseHrefPostProcessor());
+
+    return listProcessors;
+  }
+
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
@@ -85,9 +129,9 @@ public abstract class BaseResourceDefinition implements ResourceDefinition {
     if (m_id != null ? !m_id.equals(that.m_id) : that.m_id != null) return false;
     if (m_mapResourceIds != null ? !m_mapResourceIds.equals(that.m_mapResourceIds) : that.m_mapResourceIds != null)
       return false;
-    if (m_type != that.m_type) return false;
 
-    return true;
+    return m_type == that.m_type;
+
   }
 
   @Override
@@ -98,7 +142,29 @@ public abstract class BaseResourceDefinition implements ResourceDefinition {
     return result;
   }
 
-  ClusterController getClusterController() {
-    return ClusterControllerHelper.getClusterController();
+  class BaseHrefPostProcessor implements PostProcessor {
+    @Override
+    public void process(Request request, TreeNode<Resource> resultNode, String href) {
+      Resource r = resultNode.getObject();
+      TreeNode<Resource> parent = resultNode.getParent();
+
+      if (parent.getName() != null) {
+        String parentName = parent.getName();
+        Schema schema = getClusterController().getSchema(r.getType());
+        String id = r.getPropertyValue(schema.getKeyPropertyId(r.getType()));
+
+        int i = href.indexOf("?");
+        if (i != -1) {
+          href = href.substring(0, i);
+        }
+
+        if (!href.endsWith("/")) {
+          href = href + '/';
+        }
+        String isCollectionResource = parent.getProperty("isCollection");
+        href = "true".equals(isCollectionResource) ? href + id : href + parentName + '/' + id;
+      }
+      resultNode.setProperty("href", href);
+    }
   }
 }

+ 14 - 25
ambari-api/src/main/java/org/apache/ambari/api/resource/ClusterResourceDefinition.java

@@ -18,21 +18,21 @@
 
 package org.apache.ambari.api.resource;
 
-import org.apache.ambari.api.services.formatters.ClusterInstanceFormatter;
-import org.apache.ambari.api.services.formatters.CollectionFormatter;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
- *
+ * Cluster resource definition.
  */
 public class ClusterResourceDefinition extends BaseResourceDefinition {
 
+  /**
+   * Constructor.
+   *
+   * @param id value of primary key
+   */
   public ClusterResourceDefinition(String id) {
     super(Resource.Type.Cluster, id);
 
@@ -53,32 +53,21 @@ public class ClusterResourceDefinition extends BaseResourceDefinition {
   }
 
   @Override
-  public Set<ResourceDefinition> getChildren() {
-    Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
+  public Map<String, ResourceDefinition> getSubResources() {
+    Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
 
     ServiceResourceDefinition serviceResource = new ServiceResourceDefinition(null, getId());
     PropertyId serviceIdProperty = getClusterController().getSchema(
         Resource.Type.Service).getKeyPropertyId(Resource.Type.Service);
     serviceResource.getQuery().addProperty(serviceIdProperty);
-    setChildren.add(serviceResource);
+    mapChildren.put(serviceResource.getPluralName(), serviceResource);
 
     HostResourceDefinition hostResource = new HostResourceDefinition(null, getId());
     PropertyId hostIdProperty = getClusterController().getSchema(
         Resource.Type.Host).getKeyPropertyId(Resource.Type.Host);
     hostResource.getQuery().addProperty(hostIdProperty);
-    setChildren.add(hostResource);
-
-    return setChildren;
-  }
+    mapChildren.put(hostResource.getPluralName(), hostResource);
 
-  @Override
-  public Set<ResourceDefinition> getRelations() {
-    return Collections.emptySet();
-  }
-
-  @Override
-  public ResultFormatter getResultFormatter() {
-    //todo: instance formatter
-    return getId() == null ? new CollectionFormatter(this) : new ClusterInstanceFormatter(this);
+    return mapChildren;
   }
 }

+ 79 - 27
ambari-api/src/main/java/org/apache/ambari/api/resource/ComponentResourceDefinition.java

@@ -16,36 +16,43 @@
  * limitations under the License.
  */
 
+
 package org.apache.ambari.api.resource;
 
-import org.apache.ambari.api.services.formatters.CollectionFormatter;
-import org.apache.ambari.api.services.formatters.ComponentInstanceFormatter;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.api.controller.utilities.ClusterControllerHelper;
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Schema;
+import org.apache.ambari.api.util.TreeNode;
+
+import org.apache.ambari.api.services.ResultPostProcessor;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
- *
+ * Component resource definition.
  */
 public class ComponentResourceDefinition extends BaseResourceDefinition {
 
+  /**
+   * value of clusterId foreign key
+   */
   private String m_clusterId;
-  private String m_serviceId;
 
-  @Override
-  public String getPluralName() {
-    return "components";
-  }
+  /**
+   * value of serviceId foreign key
+   */
+  private String m_serviceId;
 
-  @Override
-  public String getSingularName() {
-    return "component";
-  }
 
+  /**
+   * Constructor.
+   *
+   * @param id        value of component id
+   * @param clusterId value of cluster id
+   * @param serviceId value of service id
+   */
   public ComponentResourceDefinition(String id, String clusterId, String serviceId) {
     super(Resource.Type.Component, id);
     m_clusterId = clusterId;
@@ -55,26 +62,71 @@ public class ComponentResourceDefinition extends BaseResourceDefinition {
   }
 
   @Override
-  public Set<ResourceDefinition> getChildren() {
-    return Collections.emptySet();
+  public String getPluralName() {
+    return "components";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "component";
   }
 
+
   @Override
-  public Set<ResourceDefinition> getRelations() {
-    Set<ResourceDefinition> setResourceDefinitions = new HashSet<ResourceDefinition>();
+  public Map<String, ResourceDefinition> getSubResources() {
+    Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
+
     // for host_component collection need host id property
     HostComponentResourceDefinition hostComponentResource = new HostComponentResourceDefinition(
         getId(), m_clusterId, null);
     PropertyId hostIdProperty = getClusterController().getSchema(
         Resource.Type.HostComponent).getKeyPropertyId(Resource.Type.Host);
     hostComponentResource.getQuery().addProperty(hostIdProperty);
-    setResourceDefinitions.add(hostComponentResource);
-    return setResourceDefinitions;
+    mapChildren.put(hostComponentResource.getPluralName(), hostComponentResource);
+    return mapChildren;
+
+  }
+
+  @Override
+  public List<PostProcessor> getPostProcessors() {
+    List<PostProcessor> listProcessors = super.getPostProcessors();
+    listProcessors.add(new ComponentHrefProcessor());
+
+    return listProcessors;
   }
 
   @Override
-  public ResultFormatter getResultFormatter() {
-    //todo: instance formatter
-    return getId() == null ? new CollectionFormatter(this) : new ComponentInstanceFormatter(this);
+  public void setParentId(Resource.Type type, String value) {
+    if (type == Resource.Type.HostComponent) {
+      setId(value);
+    } else {
+      super.setParentId(type, value);
+    }
+  }
+
+  /**
+   * Base resource processor which generates href's.  This is called by the {@link ResultPostProcessor} during post
+   * processing of a result.
+   */
+  private class ComponentHrefProcessor extends BaseHrefPostProcessor {
+    @Override
+    public void process(Request request, TreeNode<Resource> resultNode, String href) {
+      TreeNode<Resource> parent = resultNode.getParent();
+
+      if (parent.getParent() != null && parent.getParent().getObject().getType() == Resource.Type.HostComponent) {
+        Resource r = resultNode.getObject();
+        String clusterId = getResourceIds().get(Resource.Type.Cluster);
+        Schema schema = ClusterControllerHelper.getClusterController().getSchema(r.getType());
+        String serviceId = r.getPropertyValue(schema.getKeyPropertyId(Resource.Type.Service));
+        String componentId = r.getPropertyValue(schema.getKeyPropertyId(r.getType()));
+
+        href = href.substring(0, href.indexOf(clusterId) + clusterId.length() + 1) +
+            "services/" + serviceId + "/components/" + componentId;
+
+        resultNode.setProperty("href", href);
+      } else {
+        super.process(request, resultNode, href);
+      }
+    }
   }
 }

+ 99 - 31
ambari-api/src/main/java/org/apache/ambari/api/resource/HostComponentResourceDefinition.java

@@ -18,34 +18,42 @@
 
 package org.apache.ambari.api.resource;
 
-import org.apache.ambari.api.services.formatters.CollectionFormatter;
-import org.apache.ambari.api.services.formatters.HostComponentInstanceFormatter;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.api.controller.internal.ClusterControllerImpl;
+import org.apache.ambari.api.controller.internal.PropertyIdImpl;
+import org.apache.ambari.api.controller.utilities.ClusterControllerHelper;
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Schema;
+import org.apache.ambari.api.util.TreeNode;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
+import org.apache.ambari.api.services.ResultPostProcessor;
+
+import java.util.*;
 
 /**
- *
+ * Host_Component resource definition.
  */
 public class HostComponentResourceDefinition extends BaseResourceDefinition {
 
+  /**
+   * value of cluster id foreign key
+   */
   private String m_clusterId;
+
+  /**
+   * value of host id foreign key
+   */
   private String m_hostId;
 
-  @Override
-  public String getPluralName() {
-    return "host_components";
-  }
-
-  @Override
-  public String getSingularName() {
-    return "host_component";
-  }
 
+  /**
+   * Constructor.
+   *
+   * @param id        value of host_component id
+   * @param clusterId value of cluster id foreign key
+   * @param hostId    value of host id foreign key
+   */
   public HostComponentResourceDefinition(String id, String clusterId, String hostId) {
     super(Resource.Type.HostComponent, id);
     m_clusterId = clusterId;
@@ -55,30 +63,90 @@ public class HostComponentResourceDefinition extends BaseResourceDefinition {
   }
 
   @Override
-  public Set<ResourceDefinition> getChildren() {
-    return Collections.emptySet();
+  public String getPluralName() {
+    return "host_components";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "host_component";
   }
 
+
   @Override
-  public Set<ResourceDefinition> getRelations() {
-    Set<ResourceDefinition> setRelated = new HashSet<ResourceDefinition>();
-    // already have all information necessary for host
-    //todo: adding host here causes a cycle
-    //setRelated.add(new HostResourceDefinition(m_hostId, m_clusterId));
-    // for component need service id property
+  public Map<String, ResourceDefinition> getSubResources() {
+    Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
+
     ComponentResourceDefinition componentResource = new ComponentResourceDefinition(
         getId(), m_clusterId, null);
     PropertyId serviceIdProperty = getClusterController().getSchema(
         Resource.Type.Component).getKeyPropertyId(Resource.Type.Service);
     componentResource.getQuery().addProperty(serviceIdProperty);
-    setRelated.add(componentResource);
+    mapChildren.put(componentResource.getSingularName(), componentResource);
 
-    return setRelated;
+    return mapChildren;
   }
 
   @Override
-  public ResultFormatter getResultFormatter() {
-    //todo: instance formatter
-    return getId() == null ? new CollectionFormatter(this) : new HostComponentInstanceFormatter(this);
+  public List<PostProcessor> getPostProcessors() {
+    List<PostProcessor> listProcessors = new ArrayList<PostProcessor>();
+    listProcessors.add(new HostComponentHrefProcessor());
+    listProcessors.add(new HostComponentHostProcessor());
+
+    return listProcessors;
+  }
+
+  @Override
+  public void setParentId(Resource.Type type, String value) {
+    if (type == Resource.Type.Component) {
+      setId(value);
+    } else {
+      super.setParentId(type, value);
+    }
+  }
+
+
+  /**
+   * Host_Component resource processor which is responsible for generating href's for host components.
+   * This is called by the {@link ResultPostProcessor} during post processing of a result.
+   */
+  private class HostComponentHrefProcessor extends BaseHrefPostProcessor {
+    @Override
+    public void process(Request request, TreeNode<Resource> resultNode, String href) {
+      TreeNode<Resource> parent = resultNode.getParent();
+
+      if (parent.getParent() != null && parent.getParent().getObject().getType() == Resource.Type.Component) {
+        Resource r = resultNode.getObject();
+        String clusterId = getResourceIds().get(Resource.Type.Cluster);
+        Schema schema = ClusterControllerHelper.getClusterController().getSchema(r.getType());
+        String host = r.getPropertyValue(schema.getKeyPropertyId(Resource.Type.Host));
+        String hostComponent = r.getPropertyValue(schema.getKeyPropertyId(r.getType()));
+
+        href = href.substring(0, href.indexOf(clusterId) + clusterId.length() + 1) +
+            "hosts/" + host + "/host_components/" + hostComponent;
+
+        resultNode.setProperty("href", href);
+      } else {
+        super.process(request, resultNode, href);
+      }
+
+    }
+  }
+
+  /**
+   * Host_Component resource processor which is responsible for generating a host section for host components.
+   * This is called by the {@link ResultPostProcessor} during post processing of a result.
+   */
+  private class HostComponentHostProcessor implements PostProcessor {
+    @Override
+    public void process(Request request, TreeNode<Resource> resultNode, String href) {
+      //todo: look at partial request fields to ensure that hosts should be returned
+      if (request.getResourceDefinition().getType() == getType()) {
+        // only add host if query host_resource was directly queried
+        String nodeHref = resultNode.getProperty("href");
+        resultNode.getObject().setProperty(new PropertyIdImpl("href", "host", false),
+            nodeHref.substring(0, nodeHref.indexOf("/host_components/")));
+      }
+    }
   }
 }

+ 23 - 30
ambari-api/src/main/java/org/apache/ambari/api/resource/HostResourceDefinition.java

@@ -18,23 +18,33 @@
 
 package org.apache.ambari.api.resource;
 
-import org.apache.ambari.api.services.formatters.CollectionFormatter;
-import org.apache.ambari.api.services.formatters.HostInstanceFormatter;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
- *
+ * Host resource definition.
  */
 public class HostResourceDefinition extends BaseResourceDefinition {
 
+  /**
+   * value of cluster id foreign key
+   */
   private String m_clusterId;
 
+  /**
+   * Constructor.
+   *
+   * @param id        host id value
+   * @param clusterId cluster id value
+   */
+  public HostResourceDefinition(String id, String clusterId) {
+    super(Resource.Type.Host, id);
+    m_clusterId = clusterId;
+    setResourceId(Resource.Type.Cluster, m_clusterId);
+  }
+
   @Override
   public String getPluralName() {
     return "hosts";
@@ -45,33 +55,16 @@ public class HostResourceDefinition extends BaseResourceDefinition {
     return "host";
   }
 
-  public HostResourceDefinition(String id, String clusterId) {
-    super(Resource.Type.Host, id);
-    m_clusterId = clusterId;
-    setResourceId(Resource.Type.Cluster, m_clusterId);
-  }
-
   @Override
-  public Set<ResourceDefinition> getChildren() {
-    Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
+  public Map<String, ResourceDefinition> getSubResources() {
+    Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
 
     HostComponentResourceDefinition hostComponentResource = new HostComponentResourceDefinition(
         null, m_clusterId, getId());
     PropertyId hostComponentIdProperty = getClusterController().getSchema(
         Resource.Type.HostComponent).getKeyPropertyId(Resource.Type.HostComponent);
     hostComponentResource.getQuery().addProperty(hostComponentIdProperty);
-    setChildren.add(hostComponentResource);
-    return setChildren;
-  }
-
-  @Override
-  public Set<ResourceDefinition> getRelations() {
-    return Collections.emptySet();
-  }
-
-  @Override
-  public ResultFormatter getResultFormatter() {
-    //todo: instance formatter
-    return getId() == null ? new CollectionFormatter(this) : new HostInstanceFormatter(this);
+    mapChildren.put(hostComponentResource.getPluralName(), hostComponentResource);
+    return mapChildren;
   }
 }

+ 70 - 9
ambari-api/src/main/java/org/apache/ambari/api/resource/ResourceDefinition.java

@@ -19,31 +19,92 @@
 package org.apache.ambari.api.resource;
 
 import org.apache.ambari.api.query.Query;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.util.TreeNode;
 
+import org.apache.ambari.api.services.ResultPostProcessor;
+
+import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 /**
- *
+ * Resource Definition.
+ * Provides information specific to a particular resource type.
  */
 public interface ResourceDefinition {
+  /**
+   * Obtain the plural name of the resource.
+   *
+   * @return the plural name of the resource
+   */
   public String getPluralName();
 
+  /**
+   * Obtain the singular name of the resource.
+   *
+   * @return the singular name of the resource
+   */
   public String getSingularName();
 
+  /**
+   * Obtain the value of the primary id of the resource.
+   *
+   * @return the value of the primary id of the resource
+   */
   public String getId();
 
-  public Set<ResourceDefinition> getChildren();
+  /**
+   * Obtain the type of resource.  Is one of {@link Resource.Type}.
+   *
+   * @return the type of resource
+   */
+  public Resource.Type getType();
 
-  public Set<ResourceDefinition> getRelations();
+  /**
+   * Set the value of the parent foreign key.
+   *
+   * @param type  resource type of the parent
+   * @param value value of the parent id
+   */
+  public void setParentId(Resource.Type type, String value);
 
+  /**
+   * Obtain the primary and foreign key properties for the resource.
+   *
+   * @return map of primary and foreign key values keyed by resource type
+   */
   public Map<Resource.Type, String> getResourceIds();
 
-  public ResultFormatter getResultFormatter();
-
-  public Resource.Type getType();
+  /**
+   * Obtain sub-resources of this resource.  A sub-resource is a resource that is contained in
+   * another parent resource.
+   *
+   * @return map of sub resource definitions keyed by resource name
+   */
+  public Map<String, ResourceDefinition> getSubResources();
 
+  /**
+   * Return the query associated with the resource.
+   * Each resource has one query.
+   *
+   * @return the associated query
+   */
   public Query getQuery();
+
+  /**
+   * Obtain any resource post processors.  A resource processor is used to provide resource specific processing of
+   * results and is called by the {@link ResultPostProcessor} while post processing a result.
+   *
+   * @return list of resource specific result processors
+   */
+  public List<PostProcessor> getPostProcessors();
+
+  /**
+   * Resource specific result processor.
+   * Used to provide resource specific processing of a result.
+   */
+  public interface PostProcessor {
+    public void process(Request request, TreeNode<Resource> resultNode, String href);
+  }
 }

+ 23 - 30
ambari-api/src/main/java/org/apache/ambari/api/resource/ServiceResourceDefinition.java

@@ -18,23 +18,33 @@
 
 package org.apache.ambari.api.resource;
 
-import org.apache.ambari.api.services.formatters.CollectionFormatter;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
-import org.apache.ambari.api.services.formatters.ServiceInstanceFormatter;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
- *
+ * Service resource definition.
  */
 public class ServiceResourceDefinition extends BaseResourceDefinition {
 
+  /**
+   * value of cluster id foreign key
+   */
   private String m_clusterId;
 
+  /**
+   * Constructor.
+   *
+   * @param id        service id value
+   * @param clusterId cluster id value
+   */
+  public ServiceResourceDefinition(String id, String clusterId) {
+    super(Resource.Type.Service, id);
+    m_clusterId = clusterId;
+    setResourceId(Resource.Type.Cluster, m_clusterId);
+  }
+
   @Override
   public String getPluralName() {
     return "services";
@@ -45,33 +55,16 @@ public class ServiceResourceDefinition extends BaseResourceDefinition {
     return "service";
   }
 
-  public ServiceResourceDefinition(String id, String clusterId) {
-    super(Resource.Type.Service, id);
-    m_clusterId = clusterId;
-    setResourceId(Resource.Type.Cluster, m_clusterId);
-  }
-
   @Override
-  public Set<ResourceDefinition> getChildren() {
-    Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
+  public Map<String, ResourceDefinition> getSubResources() {
+    Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
     // for component collection need id property
     ComponentResourceDefinition componentResourceDefinition =
         new ComponentResourceDefinition(null, m_clusterId, getId());
     PropertyId componentIdProperty = getClusterController().getSchema(
         Resource.Type.Component).getKeyPropertyId(Resource.Type.Component);
     componentResourceDefinition.getQuery().addProperty(componentIdProperty);
-    setChildren.add(componentResourceDefinition);
-    return setChildren;
-  }
-
-  @Override
-  public Set<ResourceDefinition> getRelations() {
-    return Collections.emptySet();
-  }
-
-  @Override
-  public ResultFormatter getResultFormatter() {
-    //todo: instance formatter
-    return getId() == null ? new CollectionFormatter(this) : new ServiceInstanceFormatter(this);
+    mapChildren.put(componentResourceDefinition.getPluralName(), componentResourceDefinition);
+    return mapChildren;
   }
 }

+ 36 - 7
ambari-api/src/main/java/org/apache/ambari/api/services/BaseService.java

@@ -21,33 +21,62 @@ package org.apache.ambari.api.services;
 import org.apache.ambari.api.handlers.DelegatingRequestHandler;
 import org.apache.ambari.api.handlers.RequestHandler;
 import org.apache.ambari.api.resource.ResourceDefinition;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.UriInfo;
 
 /**
- *
+ * Provides common functionality to all services.
  */
-public class BaseService {
+public abstract class BaseService {
 
-  protected Response handleRequest(HttpHeaders headers, UriInfo uriInfo, Request.RequestType requestType,
+  /**
+   * All requests are funneled through this method so that common logic can be executed.
+   * This consists of creating a {@link Request} instance, invoking the correct {@link RequestHandler} and
+   * applying the proper {@link ResultSerializer} to the result.
+   *
+   * @param headers            http headers
+   * @param uriInfo            uri information
+   * @param requestType        http request type
+   * @param resourceDefinition resource definition that is being acted on
+   * @return the response of the operation in serialized form
+   */
+  protected Response handleRequest(HttpHeaders headers, UriInfo uriInfo, Request.Type requestType,
                                    ResourceDefinition resourceDefinition) {
 
-    Request req = getRequestFactory().createRequest(headers, uriInfo, requestType, resourceDefinition);
-    Result result = getRequestHandler().handleRequest(req);
-    Object formattedResult = resourceDefinition.getResultFormatter().format(result, uriInfo);
-    return getResponseFactory().createResponse(req.getSerializer().serialize(formattedResult));
+    Request request = getRequestFactory().createRequest(headers, uriInfo, requestType, resourceDefinition);
+    Result result = getRequestHandler().handleRequest(request);
+
+    return getResponseFactory().createResponse(request.getResultSerializer().serialize(result, uriInfo));
   }
 
+  /**
+   * Obtain the factory from which to create Request instances.
+   *
+   * @return the Request factory
+   */
   RequestFactory getRequestFactory() {
     return new RequestFactory();
   }
 
+  /**
+   * Obtain the factory from which to create Response instances.
+   *
+   * @return the Response factory
+   */
   ResponseFactory getResponseFactory() {
     return new ResponseFactory();
   }
 
+  /**
+   * Obtain the appropriate RequestHandler for the request.  At this time all requests are funneled through
+   * a delegating request handler which will ultimately delegate the request to the appropriate concrete
+   * request handler.
+   *
+   * @return the request handler to invoke
+   */
   RequestHandler getRequestHandler() {
     return new DelegatingRequestHandler();
   }

+ 57 - 8
ambari-api/src/main/java/org/apache/ambari/api/services/ClusterService.java

@@ -21,12 +21,10 @@ package org.apache.ambari.api.services;
 import org.apache.ambari.api.resource.ClusterResourceDefinition;
 import org.apache.ambari.api.resource.ResourceDefinition;
 
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
+import javax.ws.rs.*;
 import javax.ws.rs.core.*;
 
+
 /**
  * Service responsible for cluster resource requests.
  */
@@ -34,7 +32,7 @@ import javax.ws.rs.core.*;
 public class ClusterService extends BaseService {
 
   /**
-   * Handles URL: /clusters/{clusterID}
+   * Handles: GET /clusters/{clusterID}
    * Get a specific cluster.
    *
    * @param headers     http headers
@@ -48,11 +46,11 @@ public class ClusterService extends BaseService {
   public Response getCluster(@Context HttpHeaders headers, @Context UriInfo ui,
                              @PathParam("clusterName") String clusterName) {
 
-    return handleRequest(headers, ui, Request.RequestType.GET, createResourceDefinition(clusterName));
+    return handleRequest(headers, ui, Request.Type.GET, createResourceDefinition(clusterName));
   }
 
   /**
-   * Handles URL:  /clusters
+   * Handles: GET  /clusters
    * Get all clusters.
    *
    * @param headers http headers
@@ -62,7 +60,58 @@ public class ClusterService extends BaseService {
   @GET
   @Produces("text/plain")
   public Response getClusters(@Context HttpHeaders headers, @Context UriInfo ui) {
-    return handleRequest(headers, ui, Request.RequestType.GET, createResourceDefinition(null));
+    return handleRequest(headers, ui, Request.Type.GET, createResourceDefinition(null));
+  }
+
+  /**
+   * Handles: PUT /clusters/{clusterID}
+   * Create a specific cluster.
+   *
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param clusterName cluster id
+   * @return information regarding the created cluster
+   */
+  @PUT
+  @Produces("text/plain")
+  public Response createCluster(@Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("clusterName") String clusterName) {
+
+    return handleRequest(headers, ui, Request.Type.PUT, createResourceDefinition(clusterName));
+  }
+
+  /**
+   * Handles: POST /clusters/{clusterID}
+   * Update a specific cluster.
+   *
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param clusterName cluster id
+   * @return information regarding the updated cluster
+   */
+  @POST
+  @Produces("text/plain")
+  public Response updateCluster(@Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("clusterName") String clusterName) {
+
+    return handleRequest(headers, ui, Request.Type.POST, createResourceDefinition(clusterName));
+  }
+
+  /**
+   * Handles: DELETE /clusters/{clusterID}
+   * Delete a specific cluster.
+   *
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param clusterName cluster id
+   * @return information regarding the deleted cluster
+   */
+  @DELETE
+  @Produces("text/plain")
+  public Response deleteCluster(@Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("clusterName") String clusterName) {
+
+    return handleRequest(headers, ui, Request.Type.DELETE, createResourceDefinition(clusterName));
   }
 
   /**

+ 2 - 2
ambari-api/src/main/java/org/apache/ambari/api/services/ComponentService.java

@@ -67,7 +67,7 @@ public class ComponentService extends BaseService {
   public Response getComponent(@Context HttpHeaders headers, @Context UriInfo ui,
                                @PathParam("componentName") String componentName) {
 
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(componentName, m_clusterName, m_serviceName));
   }
 
@@ -82,7 +82,7 @@ public class ComponentService extends BaseService {
   @GET
   @Produces("text/plain")
   public Response getComponents(@Context HttpHeaders headers, @Context UriInfo ui) {
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(null, m_clusterName, m_serviceName));
   }
 

+ 2 - 2
ambari-api/src/main/java/org/apache/ambari/api/services/HostComponentService.java

@@ -67,7 +67,7 @@ public class HostComponentService extends BaseService {
   public Response getHostComponent(@Context HttpHeaders headers, @Context UriInfo ui,
                                    @PathParam("hostComponentName") String hostComponentName) {
 
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(hostComponentName, m_clusterName, m_hostName));
   }
 
@@ -82,7 +82,7 @@ public class HostComponentService extends BaseService {
   @GET
   @Produces("text/plain")
   public Response getHostComponents(@Context HttpHeaders headers, @Context UriInfo ui) {
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(null, m_clusterName, m_hostName));
   }
 

+ 2 - 2
ambari-api/src/main/java/org/apache/ambari/api/services/HostService.java

@@ -61,7 +61,7 @@ public class HostService extends BaseService {
   public Response getHost(@Context HttpHeaders headers, @Context UriInfo ui,
                           @PathParam("hostName") String hostName) {
 
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(hostName, m_clusterName));
   }
 
@@ -76,7 +76,7 @@ public class HostService extends BaseService {
   @GET
   @Produces("text/plain")
   public Response getHosts(@Context HttpHeaders headers, @Context UriInfo ui) {
-    return handleRequest(headers, ui, Request.RequestType.GET, createResourceDefinition(null, m_clusterName));
+    return handleRequest(headers, ui, Request.Type.GET, createResourceDefinition(null, m_clusterName));
   }
 
   /**

+ 71 - 22
ambari-api/src/main/java/org/apache/ambari/api/services/Request.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.api.services;
 
 import org.apache.ambari.api.resource.ResourceDefinition;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 
 import java.net.URI;
 import java.util.List;
@@ -26,41 +27,89 @@ import java.util.Map;
 import java.util.Set;
 
 /**
- *
+ * Provides information on the current request.
  */
 public interface Request {
 
-  public enum RequestType {
+  /**
+   * Enum of request types.
+   */
+  public enum Type {
     GET,
     PUT,
     POST,
     DELETE
   }
 
-  public enum ResponseType {JSON}
-
-  public ResourceDefinition getResource();
-
+  /**
+   * Obtain the resource definition which corresponds to the resource being operated on by the request.
+   * The resource definition provides information about the resource type.
+   *
+   * @return the associated {@link ResourceDefinition}
+   */
+  public ResourceDefinition getResourceDefinition();
+
+  /**
+   * Obtain the URI of this request.
+   *
+   * @return the request uri
+   */
   public URI getURI();
 
-  public RequestType getRequestType();
-
+  /**
+   * Obtain the http request type.  Type is one of {@link Type}.
+   *
+   * @return the http request type
+   */
+  public Type getRequestType();
+
+  /**
+   * Obtain the api version of the request.  The api version is specified in the request URI.
+   *
+   * @return the api version of the request
+   */
   public int getAPIVersion();
 
-  public Map<String, List<String>> getQueryParameters();
-
-  public Map<String, List<String>> getQueryPredicates();
-
+  /**
+   * Obtain the query predicates that were provided in the URL query string.
+   *
+   * @return a map of request predicates
+   */
+  public Map<String, String> getQueryPredicates();
+
+  /**
+   * Obtain the set of partial response fields which were provided in the query string of the request uri.
+   *
+   * @return a set of the provided partial response fields
+   */
   public Set<String> getPartialResponseFields();
 
-  public Set<String> getExpandEntities();
-
-  public Map<String, List<String>> getHeaders();
-
-  public String getBody();
-
-  public Serializer getSerializer();
-
-  //todo: temporal information.  For now always specify in PR for each field.  Could use *[...] ?
-  //public Map<String, TemporalData> getTemporalFields();
+  /**
+   * Obtain the result serializer for the request. The default serializer is of type JSON.
+   *
+   * @return the result serializer for the request
+   */
+  public ResultSerializer getResultSerializer();
+
+  /**
+   * Obtain the processor which processes the result returned from the request handler.
+   * The post processor adds additional information such as href fields to the result.
+   *
+   * @return the result processor associated with the request
+   */
+  public ResultPostProcessor getResultPostProcessor();
+
+  /**
+   * Obtain the http headers associated with the request.
+   *
+   * @return the http headers
+   */
+  public Map<String, List<String>> getHttpHeaders();
+
+  /**
+   * Obtain the http body associated with the request.
+   *
+   * @return the http body
+   */
+  public String getHttpBody();
 }

+ 11 - 2
ambari-api/src/main/java/org/apache/ambari/api/services/RequestFactory.java

@@ -24,10 +24,19 @@ import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.UriInfo;
 
 /**
- *
+ * Factory for {@link Request} instances.
  */
 public class RequestFactory {
-  public Request createRequest(HttpHeaders headers, UriInfo uriInfo, Request.RequestType requestType,
+  /**
+   * Create a request instance.
+   *
+   * @param headers            http headers
+   * @param uriInfo            uri information
+   * @param requestType        http request type
+   * @param resourceDefinition associated resource definition
+   * @return a new Request instance
+   */
+  public Request createRequest(HttpHeaders headers, UriInfo uriInfo, Request.Type requestType,
                                ResourceDefinition resourceDefinition) {
 
     return new RequestImpl(headers, uriInfo, requestType, resourceDefinition);

+ 49 - 28
ambari-api/src/main/java/org/apache/ambari/api/services/RequestImpl.java

@@ -19,34 +19,56 @@
 package org.apache.ambari.api.services;
 
 import org.apache.ambari.api.resource.*;
+import org.apache.ambari.api.services.serializers.JsonSerializer;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.UriInfo;
 import java.net.URI;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 
 /**
- *
+ * Request implementation.
  */
 public class RequestImpl implements Request {
 
+  /**
+   * URI information
+   */
   private UriInfo m_uriInfo;
+
+  /**
+   * Http headers
+   */
   private HttpHeaders m_headers;
-  private RequestType m_requestType;
-  private ResourceDefinition m_resourceDefinition;
 
+  /**
+   * Http request type
+   */
+  private Type m_Type;
 
-  public RequestImpl(HttpHeaders headers, UriInfo uriInfo, RequestType requestType, ResourceDefinition resourceDefinition) {
+  /**
+   * Associated resource definition
+   */
+  private ResourceDefinition m_resourceDefinition;
+
+  /**
+   * Constructor.
+   *
+   * @param headers            http headers
+   * @param uriInfo            uri information
+   * @param requestType        http request type
+   * @param resourceDefinition associated resource definition
+   */
+  public RequestImpl(HttpHeaders headers, UriInfo uriInfo, Type requestType, ResourceDefinition resourceDefinition) {
     m_uriInfo = uriInfo;
     m_headers = headers;
-    m_requestType = requestType;
+    m_Type = requestType;
     m_resourceDefinition = resourceDefinition;
   }
 
   @Override
-  public ResourceDefinition getResource() {
+  public ResourceDefinition getResourceDefinition() {
     return m_resourceDefinition;
   }
 
@@ -56,8 +78,8 @@ public class RequestImpl implements Request {
   }
 
   @Override
-  public RequestType getRequestType() {
-    return m_requestType;
+  public Type getRequestType() {
+    return m_Type;
   }
 
   @Override
@@ -66,38 +88,37 @@ public class RequestImpl implements Request {
   }
 
   @Override
-  public Map<String, List<String>> getQueryParameters() {
-    return m_uriInfo.getQueryParameters();
-  }
-
-  @Override
-  public Map<String, List<String>> getQueryPredicates() {
-    //todo: handle expand/fields ...
-    return getQueryParameters();
+  public Map<String, String> getQueryPredicates() {
+    return null;
   }
 
   @Override
   public Set<String> getPartialResponseFields() {
-    return null;
+    String partialResponseFields = m_uriInfo.getQueryParameters().getFirst("fields");
+    if (partialResponseFields == null) {
+      return Collections.emptySet();
+    } else {
+      return new HashSet<String>(Arrays.asList(partialResponseFields.split(",")));
+    }
   }
 
   @Override
-  public Set<String> getExpandEntities() {
-    return null;  //To change body of implemented methods use File | Settings | File Templates.
+  public Map<String, List<String>> getHttpHeaders() {
+    return m_headers.getRequestHeaders();
   }
 
   @Override
-  public Map<String, List<String>> getHeaders() {
-    return m_headers.getRequestHeaders();
+  public String getHttpBody() {
+    return null;
   }
 
   @Override
-  public String getBody() {
-    return null;
+  public ResultSerializer getResultSerializer() {
+    return new JsonSerializer();
   }
 
   @Override
-  public Serializer getSerializer() {
-    return new JSONSerializer();
+  public ResultPostProcessor getResultPostProcessor() {
+    return new ResultPostProcessorImpl(this);
   }
 }

+ 8 - 1
ambari-api/src/main/java/org/apache/ambari/api/services/ResponseFactory.java

@@ -21,9 +21,16 @@ package org.apache.ambari.api.services;
 import javax.ws.rs.core.Response;
 
 /**
- *
+ * Factory for creating jax-rs responses from results.
  */
 public class ResponseFactory {
+  /**
+   * Create a response from a provided result.
+   *
+   * @param result  the result to wrap
+   *
+   * @return a new jax-rs Response instance for the provided result
+   */
   public Response createResponse(Object result) {
     return Response.ok(result).build();
   }

+ 9 - 8
ambari-api/src/main/java/org/apache/ambari/api/services/Result.java

@@ -19,16 +19,17 @@
 package org.apache.ambari.api.services;
 
 
-import org.apache.ambari.api.controller.spi.Resource;
-
-import java.util.List;
-import java.util.Map;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.util.TreeNode;
 
 /**
- *
+ * Represents a result from a request handler invocation.
  */
 public interface Result {
-  public void addResources(String groupName, List<Resource> listResources);
-
-  public Map<String, List<Resource>> getResources();
+  /**
+   * Obtain the results of the request invocation as a Tree structure.
+   *
+   * @return the results of the request as a Tree structure
+   */
+  public TreeNode<Resource> getResultTree();
 }

+ 10 - 20
ambari-api/src/main/java/org/apache/ambari/api/services/ResultImpl.java

@@ -19,35 +19,25 @@
 package org.apache.ambari.api.services;
 
 
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.util.TreeNode;
+import org.apache.ambari.api.util.TreeNodeImpl;
 
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 /**
- *
+ * Result implementation.
  */
-//todo: at the moment only supports one level of nesting.
-//todo: need to allow arbitrary nesting depth for expansion.
-//todo: consider building a tree structure.
 public class ResultImpl implements Result {
 
-  private Map<String, List<Resource>> m_mapResources = new HashMap<String, List<Resource>>();
+  /**
+   * Tree structure which holds the results
+   */
+  private TreeNode<Resource> m_tree = new TreeNodeImpl<Resource>(null, null, null);
 
-  @Override
-  public void addResources(String groupName, List<Resource> listResources) {
-    List<Resource> resources = m_mapResources.get(groupName);
-    if (resources == null) {
-      m_mapResources.put(groupName, listResources);
-    } else {
-      resources.addAll(listResources);
-    }
-  }
 
   @Override
-  public Map<String, List<Resource>> getResources() {
-    return m_mapResources;
+  public TreeNode<Resource> getResultTree() {
+    return m_tree;
   }
 }
 

+ 33 - 0
ambari-api/src/main/java/org/apache/ambari/api/services/ResultPostProcessor.java

@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.services;
+
+/**
+ * Processor which processes result objects prior to them being returned to the service.
+ * Processing can include adding additional data such as hrefs, or modifying/deleting existing data.
+ */
+public interface ResultPostProcessor {
+  /**
+   * Process the given result.
+   * The passed in result is directly modified.
+   *
+   * @param result the result to process.
+   */
+  public void process(Result result);
+}

+ 115 - 0
ambari-api/src/main/java/org/apache/ambari/api/services/ResultPostProcessorImpl.java

@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.services;
+
+import org.apache.ambari.api.resource.ResourceDefinition;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.util.TreeNode;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Processes returned results to add href's and other content.
+ */
+public class ResultPostProcessorImpl implements ResultPostProcessor {
+  /**
+   * the associated request
+   */
+  private Request m_request;
+
+  /**
+   * Map of resource post processors keyed by resource type.
+   * These are used to act on specific resource types contained in the result.
+   */
+  Map<Resource.Type, List<ResourceDefinition.PostProcessor>> m_mapPostProcessors =
+      new HashMap<Resource.Type, List<ResourceDefinition.PostProcessor>>();
+
+
+  /**
+   * Constructor.
+   *
+   * @param request the associated request
+   */
+  public ResultPostProcessorImpl(Request request) {
+    m_request = request;
+
+    registerResourceProcessors(m_request.getResourceDefinition());
+  }
+
+  @Override
+  public void process(Result result) {
+    processNode(result.getResultTree(), m_request.getURI().toString());
+  }
+
+  /**
+   * Process a node of the result tree.  Recursively calls child nodes.
+   *
+   * @param node the node to process
+   * @param href the current href
+   */
+  private void processNode(TreeNode<Resource> node, String href) {
+    Resource r = node.getObject();
+    if (r != null) {
+      List<ResourceDefinition.PostProcessor> listProcessors = m_mapPostProcessors.get(r.getType());
+      for (ResourceDefinition.PostProcessor processor : listProcessors) {
+        processor.process(m_request, node, href);
+      }
+      href = node.getProperty("href");
+      int i = href.indexOf('?');
+      if (i != -1) {
+        href = href.substring(0, i);
+      }
+    } else {
+      String isItemsCollection = node.getProperty("isCollection");
+      if (node.getName() == null && "true".equals(isItemsCollection)) {
+        node.setName("items");
+        node.setProperty("href", href);
+      }
+    }
+    for (TreeNode<Resource> child : node.getChildren()) {
+      processNode(child, href);
+    }
+  }
+
+  /**
+   * Registers the resource processors.
+   * Recursively registers child resource processors.
+   *
+   * @param resource the root resource
+   */
+  private void registerResourceProcessors(ResourceDefinition resource) {
+    List<ResourceDefinition.PostProcessor> listProcessors = m_mapPostProcessors.get(resource.getType());
+    if (listProcessors == null) {
+      listProcessors = new ArrayList<ResourceDefinition.PostProcessor>();
+      m_mapPostProcessors.put(resource.getType(), listProcessors);
+    }
+    listProcessors.addAll(resource.getPostProcessors());
+
+    for (ResourceDefinition child : resource.getSubResources().values()) {
+      // avoid cycle
+      if (!m_mapPostProcessors.containsKey(child.getType())) {
+        registerResourceProcessors(child);
+      }
+    }
+  }
+
+}

+ 55 - 7
ambari-api/src/main/java/org/apache/ambari/api/services/ServiceService.java

@@ -21,10 +21,7 @@ package org.apache.ambari.api.services;
 import org.apache.ambari.api.resource.ResourceDefinition;
 import org.apache.ambari.api.resource.ServiceResourceDefinition;
 
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
+import javax.ws.rs.*;
 import javax.ws.rs.core.*;
 
 /**
@@ -60,12 +57,12 @@ public class ServiceService extends BaseService {
   public Response getService(@Context HttpHeaders headers, @Context UriInfo ui,
                              @PathParam("serviceName") String serviceName) {
 
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(serviceName, m_clusterName));
   }
 
   /**
-   * Handles URL: /clusters/{clusterID}/services
+   * Handles URL: /clusters/{clusterId}/services
    * Get all services for a cluster.
    *
    * @param headers http headers
@@ -75,10 +72,61 @@ public class ServiceService extends BaseService {
   @GET
   @Produces("text/plain")
   public Response getServices(@Context HttpHeaders headers, @Context UriInfo ui) {
-    return handleRequest(headers, ui, Request.RequestType.GET,
+    return handleRequest(headers, ui, Request.Type.GET,
         createResourceDefinition(null, m_clusterName));
   }
 
+  /**
+   * Handles: PUT /clusters/{clusterId}/services/{serviceId}
+   * Create a specific service.
+   *
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param serviceName service id
+   * @return information regarding the created service
+   */
+  @PUT
+  @Produces("text/plain")
+  public Response createService(@Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("serviceName") String serviceName) {
+
+    return handleRequest(headers, ui, Request.Type.PUT, createResourceDefinition(serviceName, m_clusterName));
+  }
+
+  /**
+   * Handles: POST /clusters/{clusterId}/services/{serviceId}
+   * Update a specific service.
+   *
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param serviceName service id
+   * @return information regarding the updated service
+   */
+  @POST
+  @Produces("text/plain")
+  public Response updateService(@Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("serviceName") String serviceName) {
+
+    return handleRequest(headers, ui, Request.Type.POST, createResourceDefinition(serviceName, m_clusterName));
+  }
+
+  /**
+   * Handles: DELETE /clusters/{clusterId}/services/{serviceId}
+   * Delete a specific service.
+   *
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param serviceName service id
+   * @return information regarding the deleted service
+   */
+  @DELETE
+  @Produces("text/plain")
+  public Response deleteService(@Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("serviceName") String serviceName) {
+
+    return handleRequest(headers, ui, Request.Type.DELETE, createResourceDefinition(serviceName, m_clusterName));
+  }
+
   /**
    * Get the components sub-resource.
    *

+ 139 - 0
ambari-api/src/main/java/org/apache/ambari/api/services/serializers/JsonSerializer.java

@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.services.serializers;
+
+import org.apache.ambari.api.services.Result;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.util.TreeNode;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.util.DefaultPrettyPrinter;
+
+import javax.ws.rs.core.UriInfo;
+import java.io.*;
+import java.nio.charset.Charset;
+import java.util.Map;
+
+/**
+ * JSON serializer.
+ * Responsible for representing a result as JSON.
+ */
+public class JsonSerializer implements ResultSerializer {
+
+  /**
+   * Factory used to create JSON generator.
+   */
+  JsonFactory m_factory = new JsonFactory();
+
+  /**
+   * Generator which writes JSON.
+   */
+  JsonGenerator m_generator;
+
+  @Override
+  public Object serialize(Result result, UriInfo uriInfo) {
+    try {
+      ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
+      m_generator = createJsonGenerator(bytesOut);
+
+      DefaultPrettyPrinter p = new DefaultPrettyPrinter();
+      p.indentArraysWith(new DefaultPrettyPrinter.Lf2SpacesIndenter());
+      m_generator.setPrettyPrinter(p);
+
+      processNode(result.getResultTree());
+
+      m_generator.close();
+      return bytesOut.toString("UTF-8");
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to serialize to json: " + e, e);
+    }
+  }
+
+  private void processNode(TreeNode<Resource> node) throws IOException {
+    String name = node.getName();
+    Resource r = node.getObject();
+
+    if (r == null) {
+      if (name != null) {
+        if (node.getParent() == null) {
+          m_generator.writeStartObject();
+          writeHref(node);
+        }
+        m_generator.writeArrayFieldStart(name);
+      }
+    } else {
+      m_generator.writeStartObject();
+      writeHref(node);
+      // resource props
+      handleResourceProperties(r.getCategories());
+    }
+
+    for (TreeNode<Resource> child : node.getChildren()) {
+      processNode(child);
+    }
+
+    if (r == null) {
+      if (name != null) {
+        m_generator.writeEndArray();
+        if (node.getParent() == null) {
+          m_generator.writeEndObject();
+        }
+      }
+    } else {
+      m_generator.writeEndObject();
+    }
+  }
+
+  private void handleResourceProperties(Map<String, Map<String, String>> mapCatProps) throws IOException {
+    for (Map.Entry<String, Map<String, String>> categoryEntry : mapCatProps.entrySet()) {
+      String category = categoryEntry.getKey();
+      Map<String, String> mapProps = categoryEntry.getValue();
+      if (category != null) {
+        m_generator.writeFieldName(category);
+        m_generator.writeStartObject();
+      }
+
+      for (Map.Entry<String, String> propEntry : mapProps.entrySet()) {
+        m_generator.writeStringField(propEntry.getKey(), propEntry.getValue());
+      }
+
+      if (category != null) {
+        m_generator.writeEndObject();
+      }
+    }
+  }
+
+  private JsonGenerator createJsonGenerator(ByteArrayOutputStream baos) throws IOException {
+    JsonGenerator generator = m_factory.createJsonGenerator(new OutputStreamWriter(baos,
+        Charset.forName("UTF-8").newEncoder()));
+
+    DefaultPrettyPrinter p = new DefaultPrettyPrinter();
+    p.indentArraysWith(new DefaultPrettyPrinter.Lf2SpacesIndenter());
+    generator.setPrettyPrinter(p);
+
+    return generator;
+  }
+
+  private void writeHref(TreeNode<Resource> node) throws IOException {
+    String hrefProp = node.getProperty("href");
+    if (hrefProp != null) {
+      m_generator.writeStringField("href", hrefProp);
+    }
+  }
+}

+ 38 - 0
ambari-api/src/main/java/org/apache/ambari/api/services/serializers/ResultSerializer.java

@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.services.serializers;
+
+
+import org.apache.ambari.api.services.Result;
+
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * Formats an internal result into the format expected by the client.
+ */
+public interface ResultSerializer {
+  /**
+   * Serialize the given result to a format expected by client.
+   *
+   * @param result  internal result
+   * @param uriInfo URL info for request
+   * @return the serialized result
+   */
+  Object serialize(Result result, UriInfo uriInfo);
+}

+ 101 - 0
ambari-api/src/main/java/org/apache/ambari/api/util/TreeNode.java

@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.util;
+
+import java.util.List;
+
+/**
+ * Tree where each node can have a name, properties and an associated object.
+ */
+public interface TreeNode<T> {
+  /**
+   * Obtain the parent node or null if this node is the root.
+   *
+   * @return the parent node or null if this node is the root
+   */
+  public TreeNode<T> getParent();
+
+  /**
+   * Obtain the list of child nodes.
+   *
+   * @return a list of child nodes, or an empty list if this is a leaf node
+   */
+  public List<TreeNode<T>> getChildren();
+
+  /**
+   * Obtain the object associated with this node.
+   *
+   * @return the object associated with this node or null
+   */
+  public T getObject();
+
+  /**
+   * Obtain the name of the node.
+   *
+   * @return the name of the node or null
+   */
+  public String getName();
+
+  /**
+   * Set the name of the node.
+   *
+   * @param name the name to set
+   */
+  public void setName(String name);
+
+  /**
+   * Set the parent node.
+   *
+   * @param parent the parent node to set
+   */
+  public void setParent(TreeNode<T> parent);
+
+  /**
+   * Add a child node for the provided object.
+   *
+   * @param child the object associated with the new child node
+   * @param name  the name of the child node
+   * @return the newly created child node
+   */
+  public TreeNode<T> addChild(T child, String name);
+
+  /**
+   * Add the specified child node.
+   *
+   * @param child the child node to add
+   * @return the added child node
+   */
+  public TreeNode<T> addChild(TreeNode<T> child);
+
+  /**
+   * Set a property on the node.
+   *
+   * @param name  the name of the property
+   * @param value the value of the property
+   */
+  public void setProperty(String name, String value);
+
+  /**
+   * Get the specified node property.
+   *
+   * @param name property name
+   * @return the requested property value or null
+   */
+  public String getProperty(String name);
+}

+ 127 - 0
ambari-api/src/main/java/org/apache/ambari/api/util/TreeNodeImpl.java

@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.api.util;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Basic implementation of TreeNode.
+ */
+public class TreeNodeImpl<T> implements TreeNode<T> {
+
+  /**
+   * name of the node
+   */
+  private String m_name;
+
+  /**
+   * parent of the node
+   */
+  private TreeNode<T> m_parent;
+
+  /**
+   * child nodes
+   */
+  private List<TreeNode<T>> m_listChildren = new ArrayList<TreeNode<T>>();
+
+  /**
+   * associated object
+   */
+  private T m_object;
+
+  /**
+   * properties
+   */
+  private Map<String, String> m_mapNodeProps;
+
+  /**
+   * Constructor.
+   *
+   * @param parent parent node
+   * @param object associated object
+   * @param name   node name
+   */
+  public TreeNodeImpl(TreeNode<T> parent, T object, String name) {
+    m_parent = parent;
+    m_object = object;
+    m_name = name;
+  }
+
+  @Override
+  public TreeNode<T> getParent() {
+    return m_parent;
+  }
+
+  @Override
+  public List<TreeNode<T>> getChildren() {
+    return m_listChildren;
+  }
+
+  @Override
+  public T getObject() {
+    return m_object;
+  }
+
+  @Override
+  public void setName(String name) {
+    m_name = name;
+  }
+
+  @Override
+  public String getName() {
+    return m_name;
+  }
+
+  @Override
+  public void setParent(TreeNode<T> parent) {
+    m_parent = parent;
+  }
+
+  @Override
+  public TreeNode<T> addChild(T child, String name) {
+    TreeNodeImpl<T> node = new TreeNodeImpl<T>(this, child, name);
+    m_listChildren.add(node);
+
+    return node;
+  }
+
+  @Override
+  public TreeNode<T> addChild(TreeNode<T> child) {
+    child.setParent(this);
+    m_listChildren.add(child);
+
+    return child;
+  }
+
+  @Override
+  public void setProperty(String name, String value) {
+    if (m_mapNodeProps == null) {
+      m_mapNodeProps = new HashMap<String, String>();
+    }
+    m_mapNodeProps.put(name, value);
+  }
+
+  @Override
+  public String getProperty(String name) {
+    return m_mapNodeProps == null ? null : m_mapNodeProps.get(name);
+  }
+}

+ 7 - 21
ambari-api/src/test/java/org/apache/ambari/api/TestSuite.java

@@ -1,20 +1,3 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api;
 
 /**
@@ -22,15 +5,18 @@ package org.apache.ambari.api;
  */
 
 import org.apache.ambari.api.handlers.DelegatingRequestHandlerTest;
-import org.apache.ambari.api.handlers.ReadRequestHandlerTest;
-import org.apache.ambari.api.query.QueryImplTest;
-import org.apache.ambari.api.services.*;
+import org.apache.ambari.api.handlers.ReadHandlerTest;
+import org.apache.ambari.api.services.ClusterServiceTest;
+import org.apache.ambari.api.services.ComponentServiceTest;
+import org.apache.ambari.api.services.HostComponentServiceTest;
+import org.apache.ambari.api.services.HostServiceTest;
+import org.apache.ambari.api.services.ServiceServiceTest;
 import org.junit.runner.RunWith;
 import org.junit.runners.Suite;
 
 @RunWith(Suite.class)
 @Suite.SuiteClasses({ClusterServiceTest.class, HostServiceTest.class, ServiceServiceTest.class,
     ComponentServiceTest.class, HostComponentServiceTest.class, DelegatingRequestHandlerTest.class,
-    ReadRequestHandlerTest.class, QueryImplTest.class})
+    ReadHandlerTest.class})
 public class TestSuite {
 }

+ 135 - 70
ambari-api/src/test/java/org/apache/ambari/api/controller/internal/ClusterControllerImplTest.java

@@ -19,19 +19,19 @@
 package org.apache.ambari.api.controller.internal;
 
 import junit.framework.Assert;
-import org.apache.ambari.api.controller.spi.ClusterController;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.ResourceProvider;
-import org.apache.ambari.api.controller.spi.Schema;
+import org.apache.ambari.api.controller.ProviderModule;
+import org.apache.ambari.server.controller.spi.ClusterController;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.Schema;
 import org.apache.ambari.api.controller.utilities.PredicateBuilder;
 import org.apache.ambari.api.controller.utilities.Properties;
 import org.junit.Test;
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedList;
@@ -44,6 +44,44 @@ import java.util.Set;
  */
 public class ClusterControllerImplTest {
 
+  private static final Set<PropertyId> propertyProviderProperties = new HashSet<PropertyId>();
+
+  static {
+    propertyProviderProperties.add(Properties.getPropertyId("p5", "c3"));
+    propertyProviderProperties.add(Properties.getPropertyId("p6", "c3"));
+    propertyProviderProperties.add(Properties.getPropertyId("p7", "c4"));
+    propertyProviderProperties.add(Properties.getPropertyId("p8", "c4"));
+  }
+
+  private static final PropertyProvider propertyProvider = new PropertyProvider() {
+    @Override
+    public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) {
+
+      int cnt = 0;
+      for (Resource resource : resources){
+        resource.setProperty(Properties.getPropertyId("p5", "c3"), cnt + 100);
+        resource.setProperty(Properties.getPropertyId("p6", "c3"), cnt % 2);
+        resource.setProperty(Properties.getPropertyId("p7", "c4"), "monkey");
+        resource.setProperty(Properties.getPropertyId("p8", "c4"), "runner");
+        ++cnt;
+      }
+      return resources;
+    }
+
+    @Override
+    public Set<PropertyId> getPropertyIds() {
+      return propertyProviderProperties;
+    }
+  };
+
+  private static final List<PropertyProvider> propertyProviders = new LinkedList<PropertyProvider>();
+
+  static {
+    propertyProviders.add(propertyProvider);
+  }
+
+  private static final Map<Resource.Type, PropertyId> keyPropertyIds = new HashMap<Resource.Type, PropertyId>();
+
   private static final Set<PropertyId> resourceProviderProperties = new HashSet<PropertyId>();
 
   static {
@@ -74,79 +112,35 @@ public class ClusterControllerImplTest {
 
     @Override
     public void createResources(Request request) {
-      //To change body of implemented methods use File | Settings | File Templates.
+
     }
 
     @Override
     public void updateResources(Request request, Predicate predicate) {
-      //To change body of implemented methods use File | Settings | File Templates.
+
     }
 
     @Override
     public void deleteResources(Predicate predicate) {
-      //To change body of implemented methods use File | Settings | File Templates.
+
     }
 
     @Override
     public Set<PropertyId> getPropertyIds() {
       return resourceProviderProperties;
     }
-  };
 
-  private static final Set<PropertyId> propertyProviderProperties = new HashSet<PropertyId>();
-
-  static {
-    propertyProviderProperties.add(Properties.getPropertyId("p5", "c3"));
-    propertyProviderProperties.add(Properties.getPropertyId("p6", "c3"));
-    propertyProviderProperties.add(Properties.getPropertyId("p7", "c4"));
-    propertyProviderProperties.add(Properties.getPropertyId("p8", "c4"));
-  }
-
-  private static final PropertyProvider propertyProvider = new PropertyProvider() {
     @Override
-    public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) {
-
-      int cnt = 0;
-      for (Resource resource : resources){
-        resource.setProperty(Properties.getPropertyId("p5", "c3"), cnt + 100);
-        resource.setProperty(Properties.getPropertyId("p6", "c3"), cnt % 2);
-        resource.setProperty(Properties.getPropertyId("p7", "c4"), "monkey");
-        resource.setProperty(Properties.getPropertyId("p8", "c4"), "runner");
-        ++cnt;
-      }
-      return resources;
+    public List<PropertyProvider> getPropertyProviders() {
+      return propertyProviders;
     }
 
     @Override
-    public Set<PropertyId> getPropertyIds() {
-      return propertyProviderProperties;
+    public Schema getSchema() {
+      return new SchemaImpl(this, keyPropertyIds);
     }
   };
 
-  private static final List<PropertyProvider> propertyProviders = new LinkedList<PropertyProvider>();
-
-  static {
-    propertyProviders.add(propertyProvider);
-  }
-
-  private static final Map<Resource.Type, PropertyId> keyPropertyIds = new HashMap<Resource.Type, PropertyId>();
-
-  private static Map<Resource.Type, Schema> schemas = new HashMap<Resource.Type, Schema>();
-
-  private static final SchemaImpl hostSchema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-  private static final SchemaImpl serviceSchema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-  private static final SchemaImpl clusterSchema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-  private static final SchemaImpl componentSchema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-  private static final SchemaImpl hostComponentSchema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-
-  static {
-    schemas.put(Resource.Type.Host, hostSchema);
-    schemas.put(Resource.Type.Service, serviceSchema);
-    schemas.put(Resource.Type.Cluster, clusterSchema);
-    schemas.put(Resource.Type.Component, componentSchema);
-    schemas.put(Resource.Type.HostComponent, hostComponentSchema);
-  }
-
   private static final Set<PropertyId> propertyIds = new HashSet<PropertyId>();
 
   static {
@@ -159,8 +153,8 @@ public class ClusterControllerImplTest {
   }
 
   @Test
-  public void testGetResources() {
-    ClusterController controller = new ClusterControllerImpl(schemas);
+  public void testGetResources() throws Exception{
+    ClusterController controller = new ClusterControllerImpl(new TestProviderModule());
 
     Request request = new RequestImpl(propertyIds, null);
 
@@ -175,8 +169,8 @@ public class ClusterControllerImplTest {
   }
 
   @Test
-  public void testGetResourcesWithPredicate() {
-    ClusterController controller = new ClusterControllerImpl(schemas);
+  public void testGetResourcesWithPredicate() throws Exception{
+    ClusterController controller = new ClusterControllerImpl(new TestProviderModule());
 
     Request request = new RequestImpl(propertyIds, null);
 
@@ -194,14 +188,85 @@ public class ClusterControllerImplTest {
 
   @Test
   public void testGetSchema() {
-    ClusterController controller = new ClusterControllerImpl(schemas);
+    ProviderModule module = new TestProviderModule();
+    ClusterController controller = new ClusterControllerImpl(module);
+
+    Assert.assertSame(module.getResourceProvider(Resource.Type.Host).getSchema(), controller.getSchema(Resource.Type.Host));
+    Assert.assertSame(module.getResourceProvider(Resource.Type.Service).getSchema(), controller.getSchema(Resource.Type.Service));
+    Assert.assertSame(module.getResourceProvider(Resource.Type.Cluster).getSchema(), controller.getSchema(Resource.Type.Cluster));
+    Assert.assertSame(module.getResourceProvider(Resource.Type.Component).getSchema(), controller.getSchema(Resource.Type.Component));
+    Assert.assertSame(module.getResourceProvider(Resource.Type.HostComponent).getSchema(), controller.getSchema(Resource.Type.HostComponent));
+  }
 
-    Assert.assertSame(hostSchema, controller.getSchema(Resource.Type.Host));
-    Assert.assertSame(serviceSchema, controller.getSchema(Resource.Type.Service));
-    Assert.assertSame(clusterSchema, controller.getSchema(Resource.Type.Cluster));
-    Assert.assertSame(componentSchema, controller.getSchema(Resource.Type.Component));
-    Assert.assertSame(hostComponentSchema, controller.getSchema(Resource.Type.HostComponent));
+  private static class TestProviderModule implements ProviderModule {
+    private Map<Resource.Type, ResourceProvider> providers = new HashMap<Resource.Type, ResourceProvider>();
+
+    private TestProviderModule() {
+      providers.put(Resource.Type.Cluster, new TestResourceProvider());
+      providers.put(Resource.Type.Service, new TestResourceProvider());
+      providers.put(Resource.Type.Component, new TestResourceProvider());
+      providers.put(Resource.Type.Host, new TestResourceProvider());
+      providers.put(Resource.Type.HostComponent, new TestResourceProvider());
+    }
+
+    @Override
+    public ResourceProvider getResourceProvider(Resource.Type type) {
+      return providers.get(type);
+    }
   }
+
+  private static class TestResourceProvider implements ResourceProvider {
+    private Schema schema = new SchemaImpl(this, keyPropertyIds);
+
+    @Override
+    public Set<Resource> getResources(Request request, Predicate predicate) {
+
+      Set<Resource> resources = new HashSet<Resource>();
+
+      for (int cnt = 0; cnt < 4; ++ cnt) {
+        ResourceImpl resource = new ResourceImpl(Resource.Type.Host);
+
+        resource.setProperty(Properties.getPropertyId("p1", "c1"), cnt);
+        resource.setProperty(Properties.getPropertyId("p2", "c1"), cnt % 2);
+        resource.setProperty(Properties.getPropertyId("p3", "c1"), "foo");
+        resource.setProperty(Properties.getPropertyId("p4", "c2"), "bar");
+        resources.add(resource);
+      }
+
+      return resources;
+    }
+
+    @Override
+    public void createResources(Request request) {
+
+    }
+
+    @Override
+    public void updateResources(Request request, Predicate predicate) {
+
+    }
+
+    @Override
+    public void deleteResources(Predicate predicate) {
+
+    }
+
+    @Override
+    public Set<PropertyId> getPropertyIds() {
+      return resourceProviderProperties;
+    }
+
+    @Override
+    public List<PropertyProvider> getPropertyProviders() {
+      return propertyProviders;
+    }
+
+    @Override
+    public Schema getSchema() {
+      return schema;
+    }
+  }
+
 }
 
 

+ 1 - 1
ambari-api/src/test/java/org/apache/ambari/api/controller/internal/PropertyIdImplTest.java

@@ -19,7 +19,7 @@
 package org.apache.ambari.api.controller.internal;
 
 import junit.framework.Assert;
-import org.apache.ambari.api.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyId;
 import org.junit.Test;
 
 /**

+ 2 - 2
ambari-api/src/test/java/org/apache/ambari/api/controller/internal/RequestImplTest.java

@@ -19,8 +19,8 @@
 package org.apache.ambari.api.controller.internal;
 
 import junit.framework.Assert;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.api.controller.utilities.Properties;
 import org.junit.Test;
 

+ 2 - 2
ambari-api/src/test/java/org/apache/ambari/api/controller/internal/ResourceImplTest.java

@@ -19,8 +19,8 @@
 package org.apache.ambari.api.controller.internal;
 
 import junit.framework.Assert;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.api.controller.utilities.Properties;
 import org.junit.Test;
 

+ 22 - 27
ambari-api/src/test/java/org/apache/ambari/api/controller/internal/SchemaImplTest.java

@@ -19,13 +19,13 @@
 package org.apache.ambari.api.controller.internal;
 
 import junit.framework.Assert;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.PropertyProvider;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.ResourceProvider;
-import org.apache.ambari.api.controller.spi.Schema;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.Schema;
 import org.apache.ambari.api.controller.utilities.Properties;
 import org.junit.Test;
 
@@ -58,23 +58,33 @@ public class SchemaImplTest {
 
     @Override
     public void createResources(Request request) {
-      //To change body of implemented methods use File | Settings | File Templates.
+
     }
 
     @Override
     public void updateResources(Request request, Predicate predicate) {
-      //To change body of implemented methods use File | Settings | File Templates.
+
     }
 
     @Override
     public void deleteResources(Predicate predicate) {
-      //To change body of implemented methods use File | Settings | File Templates.
+
     }
 
     @Override
     public Set<PropertyId> getPropertyIds() {
       return resourceProviderProperties;
     }
+
+    @Override
+    public List<PropertyProvider> getPropertyProviders() {
+      return propertyProviders;
+    }
+
+    @Override
+    public Schema getSchema() {
+      return null;
+    }
   };
 
   private static final Set<PropertyId> propertyProviderProperties = new HashSet<PropertyId>();
@@ -114,7 +124,7 @@ public class SchemaImplTest {
 
   @Test
   public void testGetKeyPropertyId() {
-    Schema schema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
+    Schema schema = new SchemaImpl(resourceProvider, keyPropertyIds);
 
     Assert.assertEquals(Properties.getPropertyId("p1", "c1"), schema.getKeyPropertyId(Resource.Type.Cluster));
     Assert.assertEquals(Properties.getPropertyId("p2", "c1"), schema.getKeyPropertyId(Resource.Type.Host));
@@ -123,7 +133,7 @@ public class SchemaImplTest {
 
   @Test
   public void testGetCategories() {
-    Schema schema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
+    Schema schema = new SchemaImpl(resourceProvider, keyPropertyIds);
 
     Map<String, Set<String>> categories = schema.getCategories();
     Assert.assertEquals(4, categories.size());
@@ -152,19 +162,4 @@ public class SchemaImplTest {
     Assert.assertTrue(properties.contains("p7"));
     Assert.assertTrue(properties.contains("p8"));
   }
-
-  @Test
-  public void testGetResourceProvider() {
-    Schema schema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-
-    Assert.assertSame(resourceProvider, schema.getResourceProvider());
-  }
-
-  @Test
-  public void testGetPropertyProviders() {
-    Schema schema = new SchemaImpl(resourceProvider, propertyProviders, keyPropertyIds);
-
-    Assert.assertSame(propertyProviders, schema.getPropertyProviders());
-  }
-
 }

+ 827 - 131
ambari-api/src/test/java/org/apache/ambari/api/controller/jdbc/JDBCManagementControllerTest.java

@@ -18,18 +18,25 @@
 
 package org.apache.ambari.api.controller.jdbc;
 
+import junit.framework.Assert;
 import org.apache.ambari.api.controller.internal.RequestImpl;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.api.controller.utilities.ClusterControllerHelper;
 import org.apache.ambari.api.controller.utilities.PredicateBuilder;
 import org.apache.ambari.api.controller.utilities.Properties;
+import org.easymock.EasyMock;
 import org.junit.Test;
 
 import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
 import java.sql.Statement;
-import java.util.HashMap;
-import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Set;
 
@@ -39,132 +46,821 @@ import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 
 /**
- *
- */
+*
+*/
 public class JDBCManagementControllerTest {
 
-  @Test
-  public void testCreateCluster() throws Exception {
-    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
-    Connection connection = createNiceMock(Connection.class);
-    Statement statement = createNiceMock(Statement.class);
-
-    expect(connectionFactory.getConnection()).andReturn(connection).once();
-    expect(connection.createStatement()).andReturn(statement).once();
-    expect(statement.execute("insert into Clusters (state, version, cluster_name) values ('initial', '1.0', 'MyCluster')")).andReturn(true).once();
-
-    replay(connectionFactory, connection, statement);
-
-    JDBCManagementController provider =  new JDBCManagementController(connectionFactory);
-
-    Map<PropertyId, String> properties = new HashMap<PropertyId, String>();
-
-    PropertyId id = Properties.getPropertyId("cluster_name", "Clusters");
-    properties.put(id, "MyCluster");
-
-    id = Properties.getPropertyId("version", "Clusters");
-    properties.put(id, "1.0");
-
-    id = Properties.getPropertyId("state", "Clusters");
-    properties.put(id, "initial");
-
-    Set<Map<PropertyId, String>> propertySet = new HashSet<Map<PropertyId, String>>();
-    propertySet.add(properties);
-
-    Request request = new RequestImpl(null, propertySet);
-
-    provider.createClusters(request);
-
-    verify(connectionFactory, connection, statement);
-  }
-
-  @Test
-  public void testCreateService() throws Exception{
-
-    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
-    Connection connection = createNiceMock(Connection.class);
-    Statement statement = createNiceMock(Statement.class);
-
-    expect(connectionFactory.getConnection()).andReturn(connection).once();
-    expect(connection.createStatement()).andReturn(statement).once();
-    expect(statement.execute("insert into ServiceInfo (service_name, cluster_name, state) values ('MyService', 'MyCluster', 'initial')")).andReturn(true).once();
-
-    replay(connectionFactory, connection, statement);
-
-    JDBCManagementController provider =  new JDBCManagementController(connectionFactory);
-
-    Map<PropertyId, String> properties = new HashMap<PropertyId, String>();
-
-    PropertyId id = Properties.getPropertyId("cluster_name", "ServiceInfo");
-    properties.put(id, "MyCluster");
-
-    id = Properties.getPropertyId("service_name", "ServiceInfo");
-    properties.put(id, "MyService");
-
-    id = Properties.getPropertyId("state", "ServiceInfo");
-    properties.put(id, "initial");
-
-    Set<Map<PropertyId, String>> propertySet = new HashSet<Map<PropertyId, String>>();
-    propertySet.add(properties);
-
-    Request request = new RequestImpl(null, propertySet);
-
-    provider.createServices(request);
-
-    verify(connectionFactory, connection, statement);
-  }
-
-  @Test
-  public void testDeleteCluster() throws Exception{
-
-    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
-    Connection connection = createNiceMock(Connection.class);
-    Statement statement = createNiceMock(Statement.class);
-
-    expect(connectionFactory.getConnection()).andReturn(connection).once();
-    expect(connection.createStatement()).andReturn(statement).once();
-    expect(statement.execute("delete from Clusters where Clusters.cluster_name = \"MyCluster\"")).andReturn(true).once();
-
-    replay(connectionFactory, connection, statement);
-
-    JDBCManagementController provider =  new JDBCManagementController(connectionFactory);
-
-    Predicate predicate = new PredicateBuilder().property("cluster_name", "Clusters").equals("MyCluster").toPredicate();
-
-    provider.deleteServices(predicate);
-
-    verify(connectionFactory, connection, statement);
-  }
-
-  @Test
-  public void testUpdateCluster() throws Exception{
-
-    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
-    Connection connection = createNiceMock(Connection.class);
-    Statement statement = createNiceMock(Statement.class);
-
-    expect(connectionFactory.getConnection()).andReturn(connection).once();
-    expect(connection.createStatement()).andReturn(statement).once();
-    expect(statement.execute("update Clusters set state = 'running' where Clusters.cluster_name = \"MyCluster\"")).andReturn(true).once();
-
-    replay(connectionFactory, connection, statement);
-
-    JDBCManagementController provider =  new JDBCManagementController(connectionFactory);
-
-    Map<PropertyId, String> properties = new HashMap<PropertyId, String>();
-
-    PropertyId id = Properties.getPropertyId("state", "Clusters");
-    properties.put(id, "running");
-
-    Predicate predicate = new PredicateBuilder().property("cluster_name", "Clusters").equals("MyCluster").toPredicate();
-
-    Set<Map<PropertyId, String>> propertySet = new HashSet<Map<PropertyId, String>>();
-    propertySet.add(properties);
-
-    Request request = new RequestImpl(null, propertySet);
-
-    provider.updateClusters(request, predicate);
-
-    verify(connectionFactory, connection, statement);
-  }
+//  @Test
+//  public void testCreateClusters() throws Exception {
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("insert into Clusters (cluster_name, version, state) values ('MyCluster', '1.0', 'initial')")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("cluster_name", "Clusters");
+//    properties.put(id, "MyCluster");
+//
+//    id = Properties.getPropertyId("version", "Clusters");
+//    properties.put(id, "1.0");
+//
+//    id = Properties.getPropertyId("state", "Clusters");
+//    properties.put(id, "initial");
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.createClusters(request);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testCreateServices() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("insert into ServiceInfo (cluster_name, service_name, state) values ('MyCluster', 'MyService', 'initial')")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("cluster_name", "ServiceInfo");
+//    properties.put(id, "MyCluster");
+//
+//    id = Properties.getPropertyId("service_name", "ServiceInfo");
+//    properties.put(id, "MyService");
+//
+//    id = Properties.getPropertyId("state", "ServiceInfo");
+//    properties.put(id, "initial");
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.createServices(request);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testCreateHosts() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("insert into Hosts (cluster_name, host_name, ip) values ('MyCluster', 'MyHost1', '10.68.18.171')")).andReturn(true).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("insert into Hosts (cluster_name, host_name, ip) values ('MyCluster', 'MyHost2', '10.111.35.113')")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//
+//    // first host
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("cluster_name", "Hosts");
+//    properties.put(id, "MyCluster");
+//
+//    id = Properties.getPropertyId("host_name", "Hosts");
+//    properties.put(id, "MyHost1");
+//
+//    id = Properties.getPropertyId("ip", "Hosts");
+//    properties.put(id, "10.68.18.171");
+//
+//    propertySet.add(properties);
+//
+//    // second host
+//    properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    id = Properties.getPropertyId("cluster_name", "Hosts");
+//    properties.put(id, "MyCluster");
+//
+//    id = Properties.getPropertyId("host_name", "Hosts");
+//    properties.put(id, "MyHost2");
+//
+//    id = Properties.getPropertyId("ip", "Hosts");
+//    properties.put(id, "10.111.35.113");
+//
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.createHosts(request);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testCreateComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("insert into ServiceComponentInfo (cluster_name, service_name, component_name, description) values ('MyCluster', 'MyService', 'MyComponent', 'This is my component')")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("cluster_name", "ServiceComponentInfo");
+//    properties.put(id, "MyCluster");
+//
+//    id = Properties.getPropertyId("service_name", "ServiceComponentInfo");
+//    properties.put(id, "MyService");
+//
+//    id = Properties.getPropertyId("component_name", "ServiceComponentInfo");
+//    properties.put(id, "MyComponent");
+//
+//    id = Properties.getPropertyId("description", "ServiceComponentInfo");
+//    properties.put(id, "This is my component");
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.createComponents(request);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testCreateHostComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("insert into HostRoles (cluster_name, host_name, component_name, role_id) values ('MyCluster', 'MyHost', 'MyComponent', 1)")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("cluster_name", "HostRoles");
+//    properties.put(id, "MyCluster");
+//
+//    id = Properties.getPropertyId("host_name", "HostRoles");
+//    properties.put(id, "MyHost");
+//
+//    id = Properties.getPropertyId("component_name", "HostRoles");
+//    properties.put(id, "MyComponent");
+//
+//    id = Properties.getPropertyId("role_id", "HostRoles");
+//    properties.put(id, 1);
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.createHostComponents(request);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testDeleteClusters() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("delete from Clusters where Clusters.cluster_name = \"MyCluster\"")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("cluster_name", "Clusters").equals("MyCluster").toPredicate();
+//
+//    controller.deleteClusters(predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testDeleteServices() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).atLeastOnce();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("PKCOLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("PKTABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.getString("FKCOLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("FKTABLE_NAME")).andReturn("Services").once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(databaseMetaData.getPrimaryKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("COLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("TABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("COLUMN_NAME")).andReturn("cluster_name").once();
+//    expect(metaDataResultSet.getString("TABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(connection.createStatement()).andReturn(statement).atLeastOnce();
+//    expect(statement.executeQuery("select ServiceInfo.service_name, ServiceInfo.cluster_name from Services, ServiceInfo where Services.display_name = \"my service\" AND ServiceInfo.service_name = Services.service_name")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("service_name").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("ServiceInfo").once();
+//    expect(resultSet.getString(1)).andReturn("MyService").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("cluster_name").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("ServiceInfo").once();
+//    expect(resultSet.getString(2)).andReturn("MyCluster").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//    expect(statement.execute("delete from ServiceInfo where (ServiceInfo.cluster_name = \"MyCluster\" AND ServiceInfo.service_name = \"MyService\")")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("display_name", "Services").equals("my service").toPredicate();
+//
+//    controller.deleteServices(predicate);
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
+//
+//  @Test
+//  public void testDeleteHosts() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("delete from Hosts where (Hosts.host_name = \"MyHost1\" OR Hosts.host_name = \"MyHost2\")")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("host_name", "Hosts").equals("MyHost1").or().
+//                                                 property("host_name", "Hosts").equals("MyHost2").toPredicate();
+//
+//    controller.deleteHosts(predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testDeleteComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("delete from ServiceComponentInfo where ServiceComponentInfo.service_name = \"MyService\"")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("service_name", "ServiceComponentInfo").equals("MyService").toPredicate();
+//
+//    controller.deleteComponents(predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testDeleteHostComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("delete from HostRoles where HostRoles.component_name = \"MyComponent\"")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("component_name", "HostRoles").equals("MyComponent").toPredicate();
+//
+//    controller.deleteHostComponents(predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testUpdateClusters() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("update Clusters set state = 'running' where Clusters.cluster_name = \"MyCluster\"")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("state", "Clusters");
+//    properties.put(id, "running");
+//
+//    Predicate predicate = new PredicateBuilder().property("cluster_name", "Clusters").equals("MyCluster").toPredicate();
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.updateClusters(request, predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testUpdateServices() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).atLeastOnce();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("PKCOLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("PKTABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.getString("FKCOLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("FKTABLE_NAME")).andReturn("Services").once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(databaseMetaData.getPrimaryKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("COLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("TABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("COLUMN_NAME")).andReturn("cluster_name").once();
+//    expect(metaDataResultSet.getString("TABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(connection.createStatement()).andReturn(statement).atLeastOnce();
+//    expect(statement.executeQuery("select ServiceInfo.service_name, ServiceInfo.cluster_name from Services, ServiceInfo where (ServiceInfo.service_name = \"MyService\" AND Services.display_name = \"my service\") AND ServiceInfo.service_name = Services.service_name")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("service_name").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("ServiceInfo").once();
+//    expect(resultSet.getString(1)).andReturn("MyService").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("cluster_name").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("ServiceInfo").once();
+//    expect(resultSet.getString(2)).andReturn("MyCluster").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//    expect(statement.execute("update ServiceInfo set state = 'running' where (ServiceInfo.cluster_name = \"MyCluster\" AND ServiceInfo.service_name = \"MyService\")")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("state", "ServiceInfo");
+//    properties.put(id, "running");
+//
+//    Predicate predicate = new PredicateBuilder().property("service_name", "ServiceInfo").equals("MyService").and().
+//                                                 property("display_name", "Services").equals("my service").toPredicate();
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.updateServices(request, predicate);
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
+//
+//  @Test
+//  public void testUpdateHosts() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("update Hosts set cpu_count = 4 where (Hosts.host_name = \"MyHost1\" OR Hosts.host_name = \"MyHost2\")")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("cpu_count", "Hosts");
+//    properties.put(id, 4);
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Predicate predicate = new PredicateBuilder().property("host_name", "Hosts").equals("MyHost1").or().
+//                                                 property("host_name", "Hosts").equals("MyHost2").toPredicate();
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.updateHosts(request, predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testUpdateComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("update ServiceComponentInfo set description = 'new description' where ServiceComponentInfo.service_name = \"MyService\"")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("description", "ServiceComponentInfo");
+//    properties.put(id, "new description");
+//
+//    Predicate predicate = new PredicateBuilder().property("service_name", "ServiceComponentInfo").equals("MyService").toPredicate();
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.updateComponents(request, predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testUpdateHostComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    Statement statement = createNiceMock(Statement.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.execute("update HostRoles set state = 'running' where HostRoles.component_name = \"MyComponent\"")).andReturn(true).once();
+//
+//    replay(connectionFactory, connection, statement);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Map<PropertyId, Object> properties = new LinkedHashMap<PropertyId, Object>();
+//
+//    PropertyId id = Properties.getPropertyId("state", "HostRoles");
+//    properties.put(id, "running");
+//
+//    Predicate predicate = new PredicateBuilder().property("component_name", "HostRoles").equals("MyComponent").toPredicate();
+//
+//    Set<Map<PropertyId, Object>> propertySet = new LinkedHashSet<Map<PropertyId, Object>>();
+//    propertySet.add(properties);
+//
+//    Request request = new RequestImpl(null, propertySet);
+//
+//    controller.updateHostComponents(request, predicate);
+//
+//    verify(connectionFactory, connection, statement);
+//  }
+//
+//  @Test
+//  public void testGetClusters() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).once();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(false).atLeastOnce();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.executeQuery("select Clusters.state, Clusters.cluster_name from Clusters where Clusters.cluster_name = \"MyCluster\"")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("state").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("Clusters").once();
+//    expect(resultSet.getString(1)).andReturn("running").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("cluster_name").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("Clusters").once();
+//    expect(resultSet.getString(2)).andReturn("MyCluster").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("cluster_name", "Clusters").equals("MyCluster").toPredicate();
+//
+//    Set<PropertyId> propertyIds = new LinkedHashSet<PropertyId>();
+//    propertyIds.add(Properties.getPropertyId("state", "Clusters"));
+//
+//    Request request = new RequestImpl(propertyIds, null);
+//
+//    Set<Resource> resources = controller.getClusters(request, predicate);
+//
+//    Assert.assertEquals(1, resources.size());
+//
+//    Resource resource = resources.iterator().next();
+//
+//    Assert.assertEquals(Resource.Type.Cluster, resource.getType());
+//
+//    Assert.assertEquals("running", resource.getPropertyValue(Properties.getPropertyId("state", "Clusters")));
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
+//
+//  @Test
+//  public void testGetServices() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).atLeastOnce();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("PKCOLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("PKTABLE_NAME")).andReturn("ServiceInfo").once();
+//    expect(metaDataResultSet.getString("FKCOLUMN_NAME")).andReturn("service_name").once();
+//    expect(metaDataResultSet.getString("FKTABLE_NAME")).andReturn("Services").once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.executeQuery("select ServiceInfo.service_name, Services.description from Services, ServiceInfo where ServiceInfo.service_name = \"MyService\" AND ServiceInfo.service_name = Services.service_name")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("service_name").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("ServiceInfo").once();
+//    expect(resultSet.getString(1)).andReturn("MyService").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("description").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("Services").once();
+//    expect(resultSet.getString(2)).andReturn("some description").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("service_name", "ServiceInfo").equals("MyService").toPredicate();
+//
+//    Set<PropertyId> propertyIds = new LinkedHashSet<PropertyId>();
+//    propertyIds.add(Properties.getPropertyId("service_name", "ServiceInfo"));
+//    propertyIds.add(Properties.getPropertyId("description", "Services"));
+//
+//    Request request = new RequestImpl(propertyIds, null);
+//
+//    Set<Resource> resources = controller.getServices(request, predicate);
+//
+//    Assert.assertEquals(1, resources.size());
+//
+//    Resource resource = resources.iterator().next();
+//
+//    Assert.assertEquals(Resource.Type.Service, resource.getType());
+//
+//    Assert.assertEquals("MyService", resource.getPropertyValue(Properties.getPropertyId("service_name", "ServiceInfo")));
+//    Assert.assertEquals("some description", resource.getPropertyValue(Properties.getPropertyId("description", "Services")));
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
+//
+//  @Test
+//  public void testGetComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).atLeastOnce();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(true).once();
+//    expect(metaDataResultSet.getString("PKCOLUMN_NAME")).andReturn("component_name").once();
+//    expect(metaDataResultSet.getString("PKTABLE_NAME")).andReturn("ServiceComponentInfo").once();
+//    expect(metaDataResultSet.getString("FKCOLUMN_NAME")).andReturn("component_name").once();
+//    expect(metaDataResultSet.getString("FKTABLE_NAME")).andReturn("ServiceComponents").once();
+//    expect(metaDataResultSet.next()).andReturn(false).once();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.executeQuery("select ServiceComponentInfo.component_name, ServiceComponents.description from ServiceComponentInfo, ServiceComponents where ServiceComponentInfo.component_name = \"MyService\" AND ServiceComponentInfo.component_name = ServiceComponents.component_name")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("component_name").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("ServiceComponentInfo").once();
+//    expect(resultSet.getString(1)).andReturn("MyService").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("description").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("ServiceComponents").once();
+//    expect(resultSet.getString(2)).andReturn("some description").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("component_name", "ServiceComponentInfo").equals("MyService").toPredicate();
+//
+//    Set<PropertyId> propertyIds = new LinkedHashSet<PropertyId>();
+//    propertyIds.add(Properties.getPropertyId("component_name", "ServiceComponentInfo"));
+//    propertyIds.add(Properties.getPropertyId("description", "ServiceComponents"));
+//
+//    Request request = new RequestImpl(propertyIds, null);
+//
+//    Set<Resource> resources = controller.getComponents(request, predicate);
+//
+//    Assert.assertEquals(1, resources.size());
+//
+//    Resource resource = resources.iterator().next();
+//
+//    Assert.assertEquals(Resource.Type.Component, resource.getType());
+//
+//    Assert.assertEquals("MyService", resource.getPropertyValue(Properties.getPropertyId("component_name", "ServiceComponentInfo")));
+//    Assert.assertEquals("some description", resource.getPropertyValue(Properties.getPropertyId("description", "ServiceComponents")));
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
+//
+//  @Test
+//  public void testGetHosts() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).once();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(false).atLeastOnce();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.executeQuery("select Hosts.cpu_count, Hosts.host_name from Hosts where Hosts.host_name = \"MyHost\"")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("cpu_count").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("Hosts").once();
+//    expect(resultSet.getString(1)).andReturn("4").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("host_name").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("Hosts").once();
+//    expect(resultSet.getString(2)).andReturn("MyHost").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("host_name", "Hosts").equals("MyHost").toPredicate();
+//
+//    Set<PropertyId> propertyIds = new LinkedHashSet<PropertyId>();
+//    propertyIds.add(Properties.getPropertyId("cpu_count", "Hosts"));
+//
+//    Request request = new RequestImpl(propertyIds, null);
+//
+//    Set<Resource> resources = controller.getHosts(request, predicate);
+//
+//    Assert.assertEquals(1, resources.size());
+//
+//    Resource resource = resources.iterator().next();
+//
+//    Assert.assertEquals(Resource.Type.Host, resource.getType());
+//
+//    Assert.assertEquals("4", resource.getPropertyValue(Properties.getPropertyId("cpu_count", "Hosts")));
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
+//
+//  @Test
+//  public void testGetHostComponents() throws Exception{
+//
+//    ConnectionFactory connectionFactory = createNiceMock(ConnectionFactory.class);
+//    Connection connection = createNiceMock(Connection.class);
+//    DatabaseMetaData databaseMetaData = createNiceMock(DatabaseMetaData.class);
+//    ResultSet metaDataResultSet = createNiceMock(ResultSet.class);
+//    Statement statement = createNiceMock(Statement.class);
+//    ResultSet resultSet = createNiceMock(ResultSet.class);
+//    ResultSetMetaData resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+//
+//    expect(connectionFactory.getConnection()).andReturn(connection).once();
+//    expect(connection.getMetaData()).andReturn(databaseMetaData).once();
+//    expect(databaseMetaData.getImportedKeys((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), (String) EasyMock.anyObject())).andReturn(metaDataResultSet).atLeastOnce();
+//    expect(metaDataResultSet.next()).andReturn(false).atLeastOnce();
+//    expect(connection.createStatement()).andReturn(statement).once();
+//    expect(statement.executeQuery("select HostRoles.host_name, HostRoles.state from HostRoles where HostRoles.host_name = \"MyHost\"")).andReturn(resultSet).once();
+//    expect(resultSet.getMetaData()).andReturn(resultSetMetaData).once();
+//    expect(resultSetMetaData.getColumnCount()).andReturn(2).once();
+//    expect(resultSet.next()).andReturn(true).once();
+//    expect(resultSetMetaData.getColumnName(1)).andReturn("state").once();
+//    expect(resultSetMetaData.getTableName(1)).andReturn("HostRoles").once();
+//    expect(resultSet.getString(1)).andReturn("running").once();
+//    expect(resultSetMetaData.getColumnName(2)).andReturn("host_name").once();
+//    expect(resultSetMetaData.getTableName(2)).andReturn("HostRoles").once();
+//    expect(resultSet.getString(2)).andReturn("MyHost").once();
+//    expect(resultSet.next()).andReturn(false).once();
+//
+//    replay(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//
+//    JDBCManagementController controller =  new JDBCManagementController(connectionFactory, ClusterControllerHelper.RESOURCE_TABLES);
+//
+//    Predicate predicate = new PredicateBuilder().property("host_name", "HostRoles").equals("MyHost").toPredicate();
+//
+//    Set<PropertyId> propertyIds = new LinkedHashSet<PropertyId>();
+//    propertyIds.add(Properties.getPropertyId("state", "HostRoles"));
+//
+//    Request request = new RequestImpl(propertyIds, null);
+//
+//    Set<Resource> resources = controller.getHostComponents(request, predicate);
+//
+//    Assert.assertEquals(1, resources.size());
+//
+//    Resource resource = resources.iterator().next();
+//
+//    Assert.assertEquals(Resource.Type.HostComponent, resource.getType());
+//
+//    Assert.assertEquals("running", resource.getPropertyValue(Properties.getPropertyId("state", "HostRoles")));
+//
+//    verify(connectionFactory, connection, databaseMetaData, metaDataResultSet, statement, resultSet, resultSetMetaData);
+//  }
 }
+

+ 3 - 3
ambari-api/src/test/java/org/apache/ambari/api/controller/utilities/PredicateBuilderTest.java

@@ -20,9 +20,9 @@ package org.apache.ambari.api.controller.utilities;
 import junit.framework.Assert;
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
 import org.apache.ambari.api.controller.internal.ResourceImpl;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 import org.junit.Test;
 
 /**

+ 2 - 2
ambari-api/src/test/java/org/apache/ambari/api/controller/utilities/PropertiesTest.java

@@ -17,8 +17,8 @@
  */
 package org.apache.ambari.api.controller.utilities;
 
-import org.apache.ambari.api.controller.spi.PropertyId;
-import org.apache.ambari.api.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Resource;
 import org.junit.Ignore;
 import org.junit.Test;
 

+ 34 - 34
ambari-api/src/test/java/org/apache/ambari/api/handlers/DelegatingRequestHandlerTest.java

@@ -1,31 +1,19 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.handlers;
 
 import org.apache.ambari.api.services.Request;
 import org.apache.ambari.api.services.Result;
+import org.apache.ambari.api.services.ResultPostProcessor;
 import org.junit.Test;
 
 import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertSame;
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 9/12/12
+ * Time: 12:06 PM
+ * To change this template use File | Settings | File Templates.
  */
 public class DelegatingRequestHandlerTest {
 
@@ -35,18 +23,21 @@ public class DelegatingRequestHandlerTest {
     RequestHandlerFactory factory = createStrictMock(RequestHandlerFactory.class);
     RequestHandler readRequestHandler = createStrictMock(RequestHandler.class);
     Result result = createStrictMock(Result.class);
+    ResultPostProcessor resultProcessor = createStrictMock(ResultPostProcessor.class);
 
     // expectations
-    expect(request.getRequestType()).andReturn(Request.RequestType.GET);
-    expect(factory.getRequestHandler(Request.RequestType.GET)).andReturn(readRequestHandler);
+    expect(request.getRequestType()).andReturn(Request.Type.GET);
+    expect(factory.getRequestHandler(Request.Type.GET)).andReturn(readRequestHandler);
     expect(readRequestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultPostProcessor()).andReturn(resultProcessor);
+    resultProcessor.process(result);
 
-    replay(request, factory, readRequestHandler, result);
+    replay(request, factory, readRequestHandler, result, resultProcessor);
 
     RequestHandler delegatingRequestHandler = new TestDelegatingRequestHandler(factory);
 
     assertSame(result, delegatingRequestHandler.handleRequest(request));
-    verify(request, factory, readRequestHandler, result);
+    verify(request, factory, readRequestHandler, result, resultProcessor);
   }
 
   @Test
@@ -55,18 +46,21 @@ public class DelegatingRequestHandlerTest {
     RequestHandlerFactory factory = createStrictMock(RequestHandlerFactory.class);
     RequestHandler requestHandler = createStrictMock(RequestHandler.class);
     Result result = createStrictMock(Result.class);
+    ResultPostProcessor resultProcessor = createStrictMock(ResultPostProcessor.class);
 
     // expectations
-    expect(request.getRequestType()).andReturn(Request.RequestType.PUT);
-    expect(factory.getRequestHandler(Request.RequestType.PUT)).andReturn(requestHandler);
+    expect(request.getRequestType()).andReturn(Request.Type.PUT);
+    expect(factory.getRequestHandler(Request.Type.PUT)).andReturn(requestHandler);
     expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultPostProcessor()).andReturn(resultProcessor);
+    resultProcessor.process(result);
 
-    replay(request, factory, requestHandler, result);
+    replay(request, factory, requestHandler, result, resultProcessor);
 
     RequestHandler delegatingRequestHandler = new TestDelegatingRequestHandler(factory);
 
     assertSame(result, delegatingRequestHandler.handleRequest(request));
-    verify(request, factory, requestHandler, result);
+    verify(request, factory, requestHandler, result, resultProcessor);
   }
 
   @Test
@@ -75,18 +69,21 @@ public class DelegatingRequestHandlerTest {
     RequestHandlerFactory factory = createStrictMock(RequestHandlerFactory.class);
     RequestHandler requestHandler = createStrictMock(RequestHandler.class);
     Result result = createStrictMock(Result.class);
+    ResultPostProcessor resultProcessor = createStrictMock(ResultPostProcessor.class);
 
     // expectations
-    expect(request.getRequestType()).andReturn(Request.RequestType.POST);
-    expect(factory.getRequestHandler(Request.RequestType.POST)).andReturn(requestHandler);
+    expect(request.getRequestType()).andReturn(Request.Type.POST);
+    expect(factory.getRequestHandler(Request.Type.POST)).andReturn(requestHandler);
     expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultPostProcessor()).andReturn(resultProcessor);
+    resultProcessor.process(result);
 
-    replay(request, factory, requestHandler, result);
+    replay(request, factory, requestHandler, result, resultProcessor);
 
     RequestHandler delegatingRequestHandler = new TestDelegatingRequestHandler(factory);
 
     assertSame(result, delegatingRequestHandler.handleRequest(request));
-    verify(request, factory, requestHandler, result);
+    verify(request, factory, requestHandler, result, resultProcessor);
   }
 
   @Test
@@ -95,18 +92,21 @@ public class DelegatingRequestHandlerTest {
     RequestHandlerFactory factory = createStrictMock(RequestHandlerFactory.class);
     RequestHandler requestHandler = createStrictMock(RequestHandler.class);
     Result result = createStrictMock(Result.class);
+    ResultPostProcessor resultProcessor = createStrictMock(ResultPostProcessor.class);
 
     // expectations
-    expect(request.getRequestType()).andReturn(Request.RequestType.DELETE);
-    expect(factory.getRequestHandler(Request.RequestType.DELETE)).andReturn(requestHandler);
+    expect(request.getRequestType()).andReturn(Request.Type.DELETE);
+    expect(factory.getRequestHandler(Request.Type.DELETE)).andReturn(requestHandler);
     expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultPostProcessor()).andReturn(resultProcessor);
+    resultProcessor.process(result);
 
-    replay(request, factory, requestHandler, result);
+    replay(request, factory, requestHandler, result, resultProcessor);
 
     RequestHandler delegatingRequestHandler = new TestDelegatingRequestHandler(factory);
 
     assertSame(result, delegatingRequestHandler.handleRequest(request));
-    verify(request, factory, requestHandler, result);
+    verify(request, factory, requestHandler, result, resultProcessor);
   }
 
   private class TestDelegatingRequestHandler extends DelegatingRequestHandler {

+ 56 - 0
ambari-api/src/test/java/org/apache/ambari/api/handlers/ReadHandlerTest.java

@@ -0,0 +1,56 @@
+package org.apache.ambari.api.handlers;
+
+import org.apache.ambari.api.query.Query;
+import org.apache.ambari.api.resource.ResourceDefinition;
+import org.apache.ambari.api.services.Request;
+import org.apache.ambari.api.services.Result;
+import org.junit.Test;
+
+
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertSame;
+
+/**
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 9/12/12
+ * Time: 12:45 PM
+ * To change this template use File | Settings | File Templates.
+ */
+public class ReadHandlerTest {
+
+  @Test
+  public void testHandlerRequest() throws Exception {
+    Request request = createStrictMock(Request.class);
+    ResourceDefinition resourceDefinition = createStrictMock(ResourceDefinition.class);
+    Query query = createMock(Query.class);
+    Result result = createStrictMock(Result.class);
+
+    Set<String> setPartialResponseFields = new HashSet<String>();
+    setPartialResponseFields.add("foo");
+    setPartialResponseFields.add("bar/c");
+    setPartialResponseFields.add("bar/d/e");
+
+    //expectations
+    expect(request.getResourceDefinition()).andReturn(resourceDefinition);
+    expect(resourceDefinition.getQuery()).andReturn(query);
+
+    expect(request.getPartialResponseFields()).andReturn(setPartialResponseFields);
+    query.addProperty(null, "foo");
+    query.addProperty("bar", "c");
+    query.addProperty("bar/d", "e");
+    expect(query.execute()).andReturn(result);
+
+    replay(request, resourceDefinition, query, result);
+
+    //test
+    ReadHandler handler = new ReadHandler();
+    assertSame(result, handler.handleRequest(request));
+
+    verify(request, resourceDefinition, query, result);
+
+  }
+}

+ 246 - 246
ambari-api/src/test/java/org/apache/ambari/api/query/QueryImplTest.java

@@ -1,32 +1,12 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.query;
 
 import org.apache.ambari.api.controller.internal.PropertyIdImpl;
-import org.apache.ambari.api.controller.predicate.EqualsPredicate;
+import org.apache.ambari.server.controller.predicate.EqualsPredicate;
 import org.apache.ambari.api.resource.ResourceDefinition;
 import org.apache.ambari.api.services.Result;
-import org.apache.ambari.api.controller.spi.ClusterController;
-import org.apache.ambari.api.controller.spi.Predicate;
-import org.apache.ambari.api.controller.spi.Request;
-import org.apache.ambari.api.controller.spi.Resource;
-import org.apache.ambari.api.controller.spi.Schema;
+import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.api.controller.utilities.PredicateBuilder;
+import org.junit.After;
 import org.junit.Test;
 
 import java.util.*;
@@ -37,230 +17,250 @@ import static org.junit.Assert.assertSame;
 
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 9/12/12
+ * Time: 12:55 PM
+ * To change this template use File | Settings | File Templates.
  */
 public class QueryImplTest {
-  @Test
-  public void testExecute__Component_Instance() {
-    ResourceDefinition componentResourceDef = createMock(ResourceDefinition.class);
-    ResourceDefinition hostComponentResourceDef = createStrictMock(ResourceDefinition.class);
-    Request request = createStrictMock(Request.class);
-    Result result = createStrictMock(Result.class);
-    ClusterController controller = createStrictMock(ClusterController.class);
-    Schema componentSchema = createMock(Schema.class);
-    Resource componentResource = createStrictMock(Resource.class);
-    Query hostComponentQuery = createStrictMock(Query.class);
-    Result hostComponentQueryResult = createStrictMock(Result.class);
-    Resource hostComponentResource = createStrictMock(Resource.class);
-
-    List<Resource> listResources = new ArrayList<Resource>();
-    listResources.add(componentResource);
-
-    Map<Resource.Type, String> mapResourceIds = new HashMap<Resource.Type, String>();
-    mapResourceIds.put(Resource.Type.Cluster, "clusterName");
-    mapResourceIds.put(Resource.Type.Service, "serviceName");
-    mapResourceIds.put(Resource.Type.Component, "componentName");
-
-    Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
-    Set<ResourceDefinition> setForeign = new HashSet<ResourceDefinition>();
-    setForeign.add(hostComponentResourceDef);
-
-    Map<String, List<Resource>> mapHostComponentResources = new HashMap<String, List<Resource>>();
-    mapHostComponentResources.put("/", Collections.singletonList(hostComponentResource));
-
-    // expectations
-    expect(componentResourceDef.getType()).andReturn(Resource.Type.Component).atLeastOnce();
-    expect(componentResourceDef.getResourceIds()).andReturn(mapResourceIds);
-    expect(controller.getSchema(Resource.Type.Component)).andReturn(componentSchema).atLeastOnce();
-    expect(componentSchema.getKeyPropertyId(Resource.Type.Cluster)).andReturn(new PropertyIdImpl("clusterId", "", false));
-    expect(componentSchema.getKeyPropertyId(Resource.Type.Service)).andReturn(new PropertyIdImpl("serviceId", "", false));
-    expect(componentSchema.getKeyPropertyId(Resource.Type.Component)).andReturn(new PropertyIdImpl("componentId", "", false));
-
-    expect(componentResourceDef.getId()).andReturn("componentName").atLeastOnce();
-    expect(componentResourceDef.getChildren()).andReturn(setChildren);
-    expect(componentResourceDef.getRelations()).andReturn(setForeign);
-    expect(hostComponentResourceDef.getQuery()).andReturn(hostComponentQuery);
-
-    PredicateBuilder pb = new PredicateBuilder();
-    Predicate predicate = pb.property("clusterId", "").equals("clusterName").and().
-        property("serviceId", "").equals("serviceName").and().
-        property("componentId", "").equals("componentName").toPredicate();
-
-    expect(controller.getResources(eq(Resource.Type.Component), eq(request), eq(predicate))).
-        andReturn(listResources);
-
-    result.addResources("/", listResources);
-
-    expect(hostComponentQuery.execute()).andReturn(hostComponentQueryResult);
-    expect(hostComponentQueryResult.getResources()).andReturn(mapHostComponentResources);
-    expect(hostComponentResourceDef.getId()).andReturn("hostComponentName");
-    expect(hostComponentResourceDef.getSingularName()).andReturn("host_component");
-    result.addResources("host_component", Collections.singletonList(hostComponentResource));
-
-    replay(componentResourceDef, request, result, controller, componentSchema, componentResource,
-        hostComponentResourceDef, hostComponentQuery, hostComponentQueryResult, hostComponentResource);
-
-    QueryImpl query = new TestQuery(componentResourceDef, result, request, controller);
-    Result testResult = query.execute();
-    // todo: assert return value.  This is currently a mock.
-
-    verify(componentResourceDef, request, result, controller, componentSchema, componentResource,
-        hostComponentResourceDef, hostComponentQuery, hostComponentQueryResult, hostComponentResource);
-  }
-
-  @Test
-  public void testExecute__Component_Collection() {
-    ResourceDefinition componentResourceDef = createMock(ResourceDefinition.class);
-    Request request = createStrictMock(Request.class);
-    Result result = createStrictMock(Result.class);
-    ClusterController controller = createStrictMock(ClusterController.class);
-    Schema componentSchema = createMock(Schema.class);
-    Resource componentResource1 = createStrictMock(Resource.class);
-    Resource componentResource2 = createStrictMock(Resource.class);
-
-    List<Resource> listResources = new ArrayList<Resource>();
-    listResources.add(componentResource1);
-    listResources.add(componentResource2);
-
-    Map<Resource.Type, String> mapResourceIds = new HashMap<Resource.Type, String>();
-    mapResourceIds.put(Resource.Type.Cluster, "clusterName");
-    mapResourceIds.put(Resource.Type.Service, "serviceName");
-
-    // expectations
-    expect(componentResourceDef.getType()).andReturn(Resource.Type.Component).atLeastOnce();
-    expect(componentResourceDef.getResourceIds()).andReturn(mapResourceIds);
-    expect(controller.getSchema(Resource.Type.Component)).andReturn(componentSchema).atLeastOnce();
-    expect(componentSchema.getKeyPropertyId(Resource.Type.Cluster)).andReturn(new PropertyIdImpl("clusterId", "", false));
-    expect(componentSchema.getKeyPropertyId(Resource.Type.Service)).andReturn(new PropertyIdImpl("serviceId", "", false));
-
-    expect(componentResourceDef.getId()).andReturn(null).atLeastOnce();
-
-    PredicateBuilder pb = new PredicateBuilder();
-    Predicate predicate = pb.property("clusterId", "").equals("clusterName").and().
-        property("serviceId", "").equals("serviceName").toPredicate();
-
-    expect(controller.getResources(eq(Resource.Type.Component), eq(request), eq(predicate))).
-        andReturn(listResources);
-
-    result.addResources("/", listResources);
-
-    replay(componentResourceDef, request, result, controller, componentSchema, componentResource1, componentResource2);
-
-    QueryImpl query = new TestQuery(componentResourceDef, result, request, controller);
-    Result testResult = query.execute();
-    // todo: assert return value.  This is currently a mock.
-
-    verify(componentResourceDef, request, result, controller, componentSchema, componentResource1, componentResource2);
-  }
-
-  @Test
-  public void testExecute__Cluster_Instance() {
-    ResourceDefinition clusterResourceDef = createMock(ResourceDefinition.class);
-    ResourceDefinition serviceResourceDef = createMock(ResourceDefinition.class);
-    ResourceDefinition hostResourceDef = createMock(ResourceDefinition.class);
-    Request request = createStrictMock(Request.class);
-    Result result = createMock(Result.class);
-    ClusterController controller = createStrictMock(ClusterController.class);
-    Schema clusterSchema = createMock(Schema.class);
-    Resource clusterResource = createStrictMock(Resource.class);
-    Query serviceQuery = createStrictMock(Query.class);
-    Result serviceQueryResult = createStrictMock(Result.class);
-    Resource serviceResource = createStrictMock(Resource.class);
-    Resource serviceResource2 = createStrictMock(Resource.class);
-    Query hostQuery = createStrictMock(Query.class);
-    Result hostQueryResult = createStrictMock(Result.class);
-    Resource hostResource = createStrictMock(Resource.class);
-
-    List<Resource> listResources = new ArrayList<Resource>();
-    listResources.add(clusterResource);
-
-    Map<Resource.Type, String> mapResourceIds = new HashMap<Resource.Type, String>();
-    mapResourceIds.put(Resource.Type.Cluster, "clusterName");
-
-    Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
-    setChildren.add(serviceResourceDef);
-    setChildren.add(hostResourceDef);
-    Set<ResourceDefinition> setForeign = new HashSet<ResourceDefinition>();
-
-    Map<String, List<Resource>> mapServiceResources = new HashMap<String, List<Resource>>();
-    List<Resource> listServiceResources = new ArrayList<Resource>();
-    listServiceResources.add(serviceResource);
-    listServiceResources.add(serviceResource2);
-    mapServiceResources.put("/", listServiceResources);
-
-    Map<String, List<Resource>> mapHostResources = new HashMap<String, List<Resource>>();
-    mapHostResources.put("/", Collections.singletonList(hostResource));
-
-    // expectations
-    expect(clusterResourceDef.getType()).andReturn(Resource.Type.Cluster).atLeastOnce();
-    expect(clusterResourceDef.getResourceIds()).andReturn(mapResourceIds);
-    expect(controller.getSchema(Resource.Type.Cluster)).andReturn(clusterSchema).atLeastOnce();
-    expect(clusterSchema.getKeyPropertyId(Resource.Type.Cluster)).andReturn(new PropertyIdImpl("clusterId", "", false));
-    expect(clusterResourceDef.getId()).andReturn("clusterName").atLeastOnce();
-
-    expect(clusterResourceDef.getChildren()).andReturn(setChildren);
-    expect(serviceResourceDef.getQuery()).andReturn(serviceQuery);
-    expect(hostResourceDef.getQuery()).andReturn(hostQuery);
-    expect(clusterResourceDef.getRelations()).andReturn(setForeign);
-
-    Predicate clusterEqualsPredicate = new EqualsPredicate(new PropertyIdImpl("clusterId", "", false), "clusterName");
-
-    expect(controller.getResources(eq(Resource.Type.Cluster), eq(request), eq(clusterEqualsPredicate))).
-        andReturn(listResources);
-
-    result.addResources("/", listResources);
-
-    expect(serviceQuery.execute()).andReturn(serviceQueryResult);
-    expect(serviceQueryResult.getResources()).andReturn(mapServiceResources);
-    expect(serviceResourceDef.getId()).andReturn(null);
-    expect(serviceResourceDef.getPluralName()).andReturn("services");
-    result.addResources("services", listServiceResources);
-
-    expect(hostQuery.execute()).andReturn(hostQueryResult);
-    expect(hostQueryResult.getResources()).andReturn(mapHostResources);
-    expect(hostResourceDef.getId()).andReturn(null);
-    expect(hostResourceDef.getPluralName()).andReturn("hosts");
-    result.addResources("hosts", Collections.singletonList(hostResource));
-
-    replay(clusterResourceDef, request, result, controller, clusterSchema, clusterResource,
-        serviceResourceDef, serviceQuery, serviceQueryResult, serviceResource, serviceResource2,
-        hostResourceDef, hostQuery, hostQueryResult, hostResource);
-
-    QueryImpl query = new TestQuery(clusterResourceDef, result, request, controller);
-    Result testResult = query.execute();
-    // todo: assert return value.  This is currently a mock.
-
-    verify(clusterResourceDef, request, result, controller, clusterSchema, clusterResource,
-        serviceResourceDef, serviceQuery, serviceQueryResult, serviceResource, serviceResource2,
-        hostResourceDef, hostQuery, hostQueryResult, hostResource);
-  }
-
-  private class TestQuery extends QueryImpl {
-
-    private Result m_result;
-    private Request m_request;
-    private ClusterController m_clusterController;
-
-    public TestQuery(ResourceDefinition resourceDefinition, Result result, Request request, ClusterController controller) {
-      super(resourceDefinition);
-      m_result = result;
-      m_request = request;
-      m_clusterController = controller;
-    }
-
-    @Override
-    Result createResult() {
-      return m_result;
-    }
-
-    @Override
-    Request createRequest() {
-      return m_request;
-    }
 
-    @Override
-    ClusterController getClusterController() {
-      return m_clusterController;
-    }
-  }
+//    ClusterController m_controller = createStrictMock(ClusterController.class);
+//    @Test
+//    public void testExecute__Component_Instance() throws Exception {
+//        ResourceDefinition componentResourceDef = createMock(ResourceDefinition.class);
+//        ResourceDefinition hostComponentResourceDef = createStrictMock(ResourceDefinition.class);
+//        Request request = createStrictMock(Request.class);
+//        Result result = createStrictMock(Result.class);
+//
+//        Schema componentSchema = createMock(Schema.class);
+//        Resource componentResource = createStrictMock(Resource.class);
+//        Query hostComponentQuery = createStrictMock(Query.class);
+//        Result hostComponentQueryResult  = createStrictMock(Result.class);
+//        Resource hostComponentResource = createStrictMock(Resource.class);
+//
+//        List<Resource> listResources = new ArrayList<Resource>();
+//        listResources.add(componentResource);
+//
+//        Map<Resource.Type, String> mapResourceIds = new HashMap<Resource.Type, String>();
+//        mapResourceIds.put(Resource.Type.Cluster, "clusterName");
+//        mapResourceIds.put(Resource.Type.Service, "serviceName");
+//        mapResourceIds.put(Resource.Type.Component, "componentName");
+//
+//        Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
+//        Set<ResourceDefinition> setForeign = new HashSet<ResourceDefinition>();
+//        setForeign.add(hostComponentResourceDef);
+//
+//        Map<String, List<Resource>> mapHostComponentResources = new HashMap<String, List<Resource>>();
+//        mapHostComponentResources.put("/", Collections.singletonList(hostComponentResource));
+//
+//        // expectations
+//        expect(componentResourceDef.getType()).andReturn(Resource.Type.Component).atLeastOnce();
+//        expect(componentResourceDef.getResourceIds()).andReturn(mapResourceIds);
+//        expect(m_controller.getSchema(Resource.Type.Component)).andReturn(componentSchema).atLeastOnce();
+//        expect(componentSchema.getKeyPropertyId(Resource.Type.Cluster)).andReturn(new PropertyIdImpl("clusterId", "", false));
+//        expect(componentSchema.getKeyPropertyId(Resource.Type.Service)).andReturn(new PropertyIdImpl("serviceId", "", false));
+//        expect(componentSchema.getKeyPropertyId(Resource.Type.Component)).andReturn(new PropertyIdImpl("componentId", "", false));
+//
+//        expect(componentResourceDef.getId()).andReturn("componentName").atLeastOnce();
+//        //expect(componentResourceDef.getChildren()).andReturn(setChildren);
+//        //expect(componentResourceDef.getRelations()).andReturn(setForeign);
+//        expect(hostComponentResourceDef.getQuery()).andReturn(hostComponentQuery);
+//
+//        PredicateBuilder pb = new PredicateBuilder();
+//        Predicate predicate = pb.property("clusterId", "").equals("clusterName").and().
+//                property("serviceId", "").equals("serviceName").and().
+//                property("componentId", "").equals("componentName").toPredicate();
+//
+//        expect(m_controller.getResources(eq(Resource.Type.Component), eq(request), eq(predicate))).
+//                andReturn(listResources);
+//
+//        result.addResources("/", listResources);
+//
+//        expect(hostComponentQuery.execute()).andReturn(hostComponentQueryResult);
+//        expect(hostComponentQueryResult.getResources()).andReturn(mapHostComponentResources);
+//        expect(hostComponentResourceDef.getId()).andReturn("hostComponentName");
+//        expect(hostComponentResourceDef.getSingularName()).andReturn("host_component");
+//        result.addResources("host_component", Collections.singletonList(hostComponentResource));
+//
+//        replay(componentResourceDef, request, result, m_controller, componentSchema, componentResource,
+//                hostComponentResourceDef, hostComponentQuery, hostComponentQueryResult, hostComponentResource);
+//
+//        QueryImpl query = new TestQuery(componentResourceDef, result, request);
+//        Result testResult = query.execute();
+//        // todo: assert return value.  This is currently a mock.
+//
+//        verify(componentResourceDef, request, result, m_controller, componentSchema, componentResource,
+//                hostComponentResourceDef, hostComponentQuery, hostComponentQueryResult, hostComponentResource);
+//    }
+//
+//    @Test
+//    public void testExecute__Component_Collection() {
+//        ResourceDefinition componentResourceDef = createMock(ResourceDefinition.class);
+//        Request request = createStrictMock(Request.class);
+//        Result result = createStrictMock(Result.class);
+//
+//        Schema componentSchema = createMock(Schema.class);
+//        Resource componentResource1 = createStrictMock(Resource.class);
+//        Resource componentResource2 = createStrictMock(Resource.class);
+//
+//        List<Resource> listResources = new ArrayList<Resource>();
+//        listResources.add(componentResource1);
+//        listResources.add(componentResource2);
+//
+//        Map<Resource.Type, String> mapResourceIds = new HashMap<Resource.Type, String>();
+//        mapResourceIds.put(Resource.Type.Cluster, "clusterName");
+//        mapResourceIds.put(Resource.Type.Service, "serviceName");
+//
+//        // expectations
+//        expect(componentResourceDef.getType()).andReturn(Resource.Type.Component).atLeastOnce();
+//        expect(componentResourceDef.getResourceIds()).andReturn(mapResourceIds);
+//        expect(m_controller.getSchema(Resource.Type.Component)).andReturn(componentSchema).atLeastOnce();
+//        expect(componentSchema.getKeyPropertyId(Resource.Type.Cluster)).andReturn(new PropertyIdImpl("clusterId", "", false));
+//        expect(componentSchema.getKeyPropertyId(Resource.Type.Service)).andReturn(new PropertyIdImpl("serviceId", "", false));
+//
+//        expect(componentResourceDef.getId()).andReturn(null).atLeastOnce();
+//
+//        PredicateBuilder pb = new PredicateBuilder();
+//        Predicate predicate = pb.property("clusterId", "").equals("clusterName").and().
+//                property("serviceId", "").equals("serviceName").toPredicate();
+//
+//        expect(m_controller.getResources(eq(Resource.Type.Component), eq(request), eq(predicate))).
+//                andReturn(listResources);
+//
+//        result.addResources("/", listResources);
+//
+//        replay(componentResourceDef, request, result, m_controller, componentSchema, componentResource1, componentResource2);
+//
+//        QueryImpl query = new TestQuery(componentResourceDef, result, request);
+//        Result testResult = query.execute();
+//        // todo: assert return value.  This is currently a mock.
+//
+//        verify(componentResourceDef, request, result, m_controller, componentSchema, componentResource1, componentResource2);
+//    }
+//
+//    @Test
+//    public void testExecute__Cluster_Instance() {
+//        ResourceDefinition clusterResourceDef = createMock(ResourceDefinition.class);
+//        ResourceDefinition serviceResourceDef = createMock(ResourceDefinition.class);
+//        ResourceDefinition hostResourceDef = createMock(ResourceDefinition.class);
+//        Request request = createStrictMock(Request.class);
+//        Result result = createMock(Result.class);
+//
+//        Schema clusterSchema = createMock(Schema.class);
+//        Resource clusterResource = createStrictMock(Resource.class);
+//        Query serviceQuery = createStrictMock(Query.class);
+//        Result serviceQueryResult  = createStrictMock(Result.class);
+//        Resource serviceResource = createStrictMock(Resource.class);
+//        Resource serviceResource2 = createStrictMock(Resource.class);
+//        Query hostQuery = createStrictMock(Query.class);
+//        Result hostQueryResult  = createStrictMock(Result.class);
+//        Resource hostResource = createStrictMock(Resource.class);
+//
+//        List<Resource> listResources = new ArrayList<Resource>();
+//        listResources.add(clusterResource);
+//
+//        Map<Resource.Type, String> mapResourceIds = new HashMap<Resource.Type, String>();
+//        mapResourceIds.put(Resource.Type.Cluster, "clusterName");
+//
+//        Set<ResourceDefinition> setChildren = new HashSet<ResourceDefinition>();
+//        setChildren.add(serviceResourceDef);
+//        setChildren.add(hostResourceDef);
+//        Set<ResourceDefinition> setForeign = new HashSet<ResourceDefinition>();
+//
+//        Map<String, List<Resource>> mapServiceResources = new HashMap<String, List<Resource>>();
+//        List<Resource> listServiceResources = new ArrayList<Resource>();
+//        listServiceResources.add(serviceResource);
+//        listServiceResources.add(serviceResource2);
+//        mapServiceResources.put("/", listServiceResources);
+//
+//        Map<String, List<Resource>> mapHostResources = new HashMap<String, List<Resource>>();
+//        mapHostResources.put("/", Collections.singletonList(hostResource));
+//
+//        Map<String, Set<String>> mapPropertiesAll = new HashMap<String, Set<String>>();
+//        Set<String> setRootProps = new HashSet<String>();
+//        Set<String> setCategoryProps = new HashSet<String>();
+//        mapPropertiesAll.put(null,setRootProps);
+//        mapPropertiesAll.put("category", setCategoryProps);
+//
+//
+//        // expectations
+//        expect(clusterResourceDef.getType()).andReturn(Resource.Type.Cluster).atLeastOnce();
+//        expect(m_controller.getSchema(Resource.Type.Cluster)).andReturn(clusterSchema).atLeastOnce();
+//        expect(clusterSchema.getCategories()).andReturn(mapPropertiesAll);
+//
+//        expect(clusterResourceDef.getResourceIds()).andReturn(mapResourceIds);
+//        expect(clusterSchema.getKeyPropertyId(Resource.Type.Cluster)).andReturn(new PropertyIdImpl("clusterId", "", false));
+//        expect(clusterResourceDef.getId()).andReturn("clusterName").atLeastOnce();
+//
+//
+//        //expect(clusterResourceDef.getChildren()).andReturn(setChildren);
+//        expect(serviceResourceDef.getQuery()).andReturn(serviceQuery);
+//        expect(hostResourceDef.getQuery()).andReturn(hostQuery);
+//        //expect(clusterResourceDef.getRelations()).andReturn(setForeign);
+//
+//        Predicate clusterEqualsPredicate = new EqualsPredicate(new PropertyIdImpl("clusterId", "", false), "clusterName");
+//
+//        expect(m_controller.getResources(eq(Resource.Type.Cluster), eq(request), eq(clusterEqualsPredicate))).
+//                andReturn(listResources);
+//
+//        result.addResources("/", listResources);
+//
+//        expect(serviceQuery.execute()).andReturn(serviceQueryResult);
+//        expect(serviceQueryResult.getResources()).andReturn(mapServiceResources);
+//        expect(serviceResourceDef.getId()).andReturn(null);
+//        expect(serviceResourceDef.getPluralName()).andReturn("services");
+//        result.addResources("services",listServiceResources);
+//
+//        expect(hostQuery.execute()).andReturn(hostQueryResult);
+//        expect(hostQueryResult.getResources()).andReturn(mapHostResources);
+//        expect(hostResourceDef.getId()).andReturn(null);
+//        expect(hostResourceDef.getPluralName()).andReturn("hosts");
+//        result.addResources("hosts", Collections.singletonList(hostResource));
+//
+//        replay(clusterResourceDef, request, result, m_controller, clusterSchema, clusterResource,
+//                serviceResourceDef, serviceQuery, serviceQueryResult, serviceResource, serviceResource2,
+//                hostResourceDef, hostQuery, hostQueryResult, hostResource);
+//
+//        // test
+//        QueryImpl query = new TestQuery(clusterResourceDef, result, request);
+//        Result testResult = query.execute();
+//        // todo: assert return value.  This is currently a mock.
+//
+//        verify(clusterResourceDef, request, result, m_controller, clusterSchema, clusterResource,
+//                serviceResourceDef, serviceQuery, serviceQueryResult, serviceResource, serviceResource2,
+//                hostResourceDef, hostQuery, hostQueryResult, hostResource);
+//    }
+//
+//    private class TestQuery extends QueryImpl {
+//
+//        private Result m_result;
+//        private Request m_request;
+//
+//        public TestQuery(ResourceDefinition resourceDefinition, Result result, Request request) {
+//            super(resourceDefinition);
+//            m_result = result;
+//            m_request = request;
+//        }
+//
+//        @Override
+//        Result createResult() {
+//            return m_result;
+//        }
+//
+//        @Override
+//        Request createRequest() {
+//            return m_request;
+//        }
+//
+//        @Override
+//        ClusterController getClusterController() {
+//            return m_controller;
+//        }
+//    }
+//
+//    @After
+//    public void resetGlobalMocks() {
+//        reset(m_controller);
+//    }
 }

+ 126 - 42
ambari-api/src/test/java/org/apache/ambari/api/services/ClusterServiceTest.java

@@ -1,25 +1,8 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.services;
 
 import org.apache.ambari.api.handlers.RequestHandler;
 import org.apache.ambari.api.resource.ResourceDefinition;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 import org.junit.Test;
 
 import javax.ws.rs.core.HttpHeaders;
@@ -32,16 +15,18 @@ import static org.junit.Assert.assertSame;
 
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 6/21/12
+ * Time: 2:06 PM
+ * To change this template use File | Settings | File Templates.
  */
 public class ClusterServiceTest {
 
   @Test
   public void testGetCluster() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -49,41 +34,35 @@ public class ClusterServiceTest {
     RequestHandler requestHandler = createStrictMock(RequestHandler.class);
     Result result = createStrictMock(Result.class);
     Response response = createStrictMock(Response.class);
-
     HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
     UriInfo uriInfo = createNiceMock(UriInfo.class);
 
     String clusterName = "clusterName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     ClusterService clusterService = new TestClusterService(resourceDef, clusterName, requestFactory, responseFactory, requestHandler);
     assertSame(response, clusterService.getCluster(httpHeaders, uriInfo, clusterName));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 
   @Test
   public void testGetClusters() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -96,25 +75,130 @@ public class ClusterServiceTest {
     UriInfo uriInfo = createNiceMock(UriInfo.class);
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     ClusterService clusterService = new TestClusterService(resourceDef, null, requestFactory, responseFactory, requestHandler);
     assertSame(response, clusterService.getClusters(httpHeaders, uriInfo));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+  }
+
+  @Test
+  public void testCreateCluster() {
+    ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
+    Object serializedResult = new Object();
+    RequestFactory requestFactory = createStrictMock(RequestFactory.class);
+    ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
+    Request request = createNiceMock(Request.class);
+    RequestHandler requestHandler = createStrictMock(RequestHandler.class);
+    Result result = createStrictMock(Result.class);
+    Response response = createStrictMock(Response.class);
+    HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+
+    String clusterName = "clusterName";
+
+    // expectations
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.PUT),
+        eq(resourceDef))).andReturn(request);
+
+    expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
+    expect(responseFactory.createResponse(serializedResult)).andReturn(response);
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+
+    //test
+    ClusterService clusterService = new TestClusterService(resourceDef, clusterName, requestFactory, responseFactory, requestHandler);
+    assertSame(response, clusterService.createCluster(httpHeaders, uriInfo, clusterName));
+
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+  }
+
+  @Test
+  public void testUpdateCluster() {
+    ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
+    Object serializedResult = new Object();
+    RequestFactory requestFactory = createStrictMock(RequestFactory.class);
+    ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
+    Request request = createNiceMock(Request.class);
+    RequestHandler requestHandler = createStrictMock(RequestHandler.class);
+    Result result = createStrictMock(Result.class);
+    Response response = createStrictMock(Response.class);
+    HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+
+    String clusterName = "clusterName";
+
+    // expectations
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.POST),
+        eq(resourceDef))).andReturn(request);
+
+    expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
+    expect(responseFactory.createResponse(serializedResult)).andReturn(response);
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+
+    //test
+    ClusterService clusterService = new TestClusterService(resourceDef, clusterName, requestFactory, responseFactory, requestHandler);
+    assertSame(response, clusterService.updateCluster(httpHeaders, uriInfo, clusterName));
+
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+  }
+
+  @Test
+  public void testDeleteCluster() {
+    ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
+    Object serializedResult = new Object();
+    RequestFactory requestFactory = createStrictMock(RequestFactory.class);
+    ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
+    Request request = createNiceMock(Request.class);
+    RequestHandler requestHandler = createStrictMock(RequestHandler.class);
+    Result result = createStrictMock(Result.class);
+    Response response = createStrictMock(Response.class);
+    HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+
+    String clusterName = "clusterName";
+
+    // expectations
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.DELETE),
+        eq(resourceDef))).andReturn(request);
+
+    expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
+    expect(responseFactory.createResponse(serializedResult)).andReturn(response);
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+
+    //test
+    ClusterService clusterService = new TestClusterService(resourceDef, clusterName, requestFactory, responseFactory, requestHandler);
+    assertSame(response, clusterService.deleteCluster(httpHeaders, uriInfo, clusterName));
+
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 

+ 19 - 40
ambari-api/src/test/java/org/apache/ambari/api/services/ComponentServiceTest.java

@@ -1,26 +1,9 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.services;
 
 
 import org.apache.ambari.api.handlers.RequestHandler;
 import org.apache.ambari.api.resource.ResourceDefinition;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 import org.junit.Test;
 
 import javax.ws.rs.core.HttpHeaders;
@@ -33,16 +16,18 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertSame;
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 9/12/12
+ * Time: 11:45 AM
+ * To change this template use File | Settings | File Templates.
  */
 public class ComponentServiceTest {
 
   @Test
   public void testGetComponent() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -59,18 +44,16 @@ public class ComponentServiceTest {
     String componentName = "componentName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
 
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
@@ -78,16 +61,14 @@ public class ComponentServiceTest {
         requestFactory, responseFactory, requestHandler);
     assertSame(response, componentService.getComponent(httpHeaders, uriInfo, componentName));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 
   @Test
   public void testGetComponents() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -103,24 +84,22 @@ public class ComponentServiceTest {
     String serviceName = "serviceName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     ComponentService componentService = new TestComponentService(resourceDef, clusterName, serviceName, null, requestFactory, responseFactory, requestHandler);
     assertSame(response, componentService.getComponents(httpHeaders, uriInfo));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 

+ 18 - 41
ambari-api/src/test/java/org/apache/ambari/api/services/HostComponentServiceTest.java

@@ -1,25 +1,8 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.services;
 
 import org.apache.ambari.api.handlers.RequestHandler;
 import org.apache.ambari.api.resource.ResourceDefinition;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 import org.junit.Test;
 
 import javax.ws.rs.core.HttpHeaders;
@@ -32,15 +15,17 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertSame;
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 9/12/12
+ * Time: 11:56 AM
+ * To change this template use File | Settings | File Templates.
  */
 public class HostComponentServiceTest {
   @Test
   public void testGetHostComponent() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -57,18 +42,15 @@ public class HostComponentServiceTest {
     String hostComponentName = "hostComponentName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
@@ -76,16 +58,14 @@ public class HostComponentServiceTest {
         requestFactory, responseFactory, requestHandler);
     assertSame(response, hostComponentService.getHostComponent(httpHeaders, uriInfo, hostComponentName));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 
   @Test
   public void testGetHostComponents() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -101,18 +81,15 @@ public class HostComponentServiceTest {
     String hostName = "hostName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
@@ -120,7 +97,7 @@ public class HostComponentServiceTest {
         responseFactory, requestHandler);
     assertSame(response, componentService.getHostComponents(httpHeaders, uriInfo));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 

+ 18 - 41
ambari-api/src/test/java/org/apache/ambari/api/services/HostServiceTest.java

@@ -1,26 +1,9 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.services;
 
 
 import org.apache.ambari.api.handlers.RequestHandler;
 import org.apache.ambari.api.resource.ResourceDefinition;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 import org.junit.Test;
 
 import javax.ws.rs.core.HttpHeaders;
@@ -32,16 +15,18 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertSame;
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 6/21/12
+ * Time: 2:06 PM
+ * To change this template use File | Settings | File Templates.
  */
 public class HostServiceTest {
 
   @Test
   public void testGetHost() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -57,34 +42,29 @@ public class HostServiceTest {
     String hostName = "hostName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     HostService hostService = new TestHostService(resourceDef, clusterName, hostName, requestFactory, responseFactory, requestHandler);
     assertSame(response, hostService.getHost(httpHeaders, uriInfo, hostName));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 
   @Test
   public void testGetHosts() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -99,25 +79,22 @@ public class HostServiceTest {
     String clusterName = "clusterName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     HostService hostService = new TestHostService(resourceDef, clusterName, null, requestFactory, responseFactory, requestHandler);
     assertSame(response, hostService.getHosts(httpHeaders, uriInfo));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 

+ 133 - 42
ambari-api/src/test/java/org/apache/ambari/api/services/ServiceServiceTest.java

@@ -1,25 +1,8 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 package org.apache.ambari.api.services;
 
 import org.apache.ambari.api.handlers.RequestHandler;
 import org.apache.ambari.api.resource.ResourceDefinition;
-import org.apache.ambari.api.services.formatters.ResultFormatter;
+import org.apache.ambari.api.services.serializers.ResultSerializer;
 import org.junit.Test;
 
 import javax.ws.rs.core.HttpHeaders;
@@ -32,16 +15,18 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertSame;
 
 /**
- *
+ * Created with IntelliJ IDEA.
+ * User: john
+ * Date: 9/12/12
+ * Time: 11:30 AM
+ * To change this template use File | Settings | File Templates.
  */
 public class ServiceServiceTest {
 
   @Test
   public void testGetService() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -54,37 +39,32 @@ public class ServiceServiceTest {
     UriInfo uriInfo = createNiceMock(UriInfo.class);
 
     String clusterName = "clusterName";
-    String serviceName = "hostName";
+    String serviceName = "serviceName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     ServiceService hostService = new TestServiceService(resourceDef, clusterName, serviceName, requestFactory, responseFactory, requestHandler);
     assertSame(response, hostService.getService(httpHeaders, uriInfo, serviceName));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 
   @Test
   public void testGetServices() {
     ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
-    ResultFormatter resultFormatter = createStrictMock(ResultFormatter.class);
-    Object formattedResult = new Object();
-    Serializer serializer = createStrictMock(Serializer.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
     Object serializedResult = new Object();
     RequestFactory requestFactory = createStrictMock(RequestFactory.class);
     ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
@@ -99,25 +79,136 @@ public class ServiceServiceTest {
     String clusterName = "clusterName";
 
     // expectations
-    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.RequestType.GET),
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.GET),
         eq(resourceDef))).andReturn(request);
 
     expect(requestHandler.handleRequest(request)).andReturn(result);
-    expect(resourceDef.getResultFormatter()).andReturn(resultFormatter);
-    expect(resultFormatter.format(result, uriInfo)).andReturn(formattedResult);
-    expect(request.getSerializer()).andReturn(serializer);
-    expect(serializer.serialize(formattedResult)).andReturn(serializedResult);
-
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
     expect(responseFactory.createResponse(serializedResult)).andReturn(response);
 
-    replay(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
 
     //test
     ServiceService hostService = new TestServiceService(resourceDef, clusterName, null, requestFactory, responseFactory, requestHandler);
     assertSame(response, hostService.getServices(httpHeaders, uriInfo));
 
-    verify(resourceDef, resultFormatter, serializer, requestFactory, responseFactory, request, requestHandler,
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+  }
+
+  @Test
+  public void testCreateService() {
+    ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
+    Object serializedResult = new Object();
+    RequestFactory requestFactory = createStrictMock(RequestFactory.class);
+    ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
+    Request request = createNiceMock(Request.class);
+    RequestHandler requestHandler = createStrictMock(RequestHandler.class);
+    Result result = createStrictMock(Result.class);
+    Response response = createStrictMock(Response.class);
+
+    HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+
+    String clusterName = "clusterName";
+    String serviceName = "serviceName";
+
+    // expectations
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.PUT),
+        eq(resourceDef))).andReturn(request);
+
+    expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
+    expect(responseFactory.createResponse(serializedResult)).andReturn(response);
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+
+    //test
+    ServiceService hostService = new TestServiceService(resourceDef, clusterName, serviceName, requestFactory, responseFactory, requestHandler);
+    assertSame(response, hostService.createService(httpHeaders, uriInfo, serviceName));
+
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+  }
+
+  @Test
+  public void testUpdateService() {
+    ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
+    Object serializedResult = new Object();
+    RequestFactory requestFactory = createStrictMock(RequestFactory.class);
+    ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
+    Request request = createNiceMock(Request.class);
+    RequestHandler requestHandler = createStrictMock(RequestHandler.class);
+    Result result = createStrictMock(Result.class);
+    Response response = createStrictMock(Response.class);
+
+    HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+
+    String clusterName = "clusterName";
+    String serviceName = "serviceName";
+
+    // expectations
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.POST),
+        eq(resourceDef))).andReturn(request);
+
+    expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
+    expect(responseFactory.createResponse(serializedResult)).andReturn(response);
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+
+    //test
+    ServiceService hostService = new TestServiceService(resourceDef, clusterName, serviceName, requestFactory, responseFactory, requestHandler);
+    assertSame(response, hostService.updateService(httpHeaders, uriInfo, serviceName));
+
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+  }
+
+  @Test
+  public void testDeleteService() {
+    ResourceDefinition resourceDef = createStrictMock(ResourceDefinition.class);
+    ResultSerializer resultSerializer = createStrictMock(ResultSerializer.class);
+    Object serializedResult = new Object();
+    RequestFactory requestFactory = createStrictMock(RequestFactory.class);
+    ResponseFactory responseFactory = createStrictMock(ResponseFactory.class);
+    Request request = createNiceMock(Request.class);
+    RequestHandler requestHandler = createStrictMock(RequestHandler.class);
+    Result result = createStrictMock(Result.class);
+    Response response = createStrictMock(Response.class);
+
+    HttpHeaders httpHeaders = createNiceMock(HttpHeaders.class);
+    UriInfo uriInfo = createNiceMock(UriInfo.class);
+
+    String clusterName = "clusterName";
+    String serviceName = "serviceName";
+
+    // expectations
+    expect(requestFactory.createRequest(eq(httpHeaders), eq(uriInfo), eq(Request.Type.DELETE),
+        eq(resourceDef))).andReturn(request);
+
+    expect(requestHandler.handleRequest(request)).andReturn(result);
+    expect(request.getResultSerializer()).andReturn(resultSerializer);
+    expect(resultSerializer.serialize(result, uriInfo)).andReturn(serializedResult);
+    expect(responseFactory.createResponse(serializedResult)).andReturn(response);
+
+    replay(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
+        result, response, httpHeaders, uriInfo);
+
+    //test
+    ServiceService hostService = new TestServiceService(resourceDef, clusterName, serviceName, requestFactory, responseFactory, requestHandler);
+    assertSame(response, hostService.deleteService(httpHeaders, uriInfo, serviceName));
+
+    verify(resourceDef, resultSerializer, requestFactory, responseFactory, request, requestHandler,
         result, response, httpHeaders, uriInfo);
   }
 

BIN
ambari-api/src/test/resources/data.db


+ 38 - 13
ambari-project/pom.xml

@@ -98,6 +98,11 @@
         <groupId>com.google.inject.extensions</groupId>
         <artifactId>guice-servlet</artifactId>
         <version>3.0</version>
+      </dependency>
+       <dependency>
+        <groupId>org.codehaus.jettison</groupId>
+        <artifactId>jettison</artifactId>
+        <version>1.1</version>
       </dependency>
       <dependency>
         <groupId>com.google.inject</groupId>
@@ -204,17 +209,37 @@
         <artifactId>servlet-api</artifactId>
         <version>2.5</version>
       </dependency>
-       <dependency>
+      <dependency>
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-core</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
-       <dependency>
+      <dependency>
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-grizzly</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
-       <dependency>
+      <dependency>
+        <groupId>org.codehaus.jackson</groupId>
+        <artifactId>jackson-core-asl</artifactId>
+        <version>1.9.9</version>
+      </dependency>
+      <dependency>
+        <groupId>org.codehaus.jackson</groupId>
+        <artifactId>jackson-jaxrs</artifactId>
+        <version>1.9.9</version>
+      </dependency>
+      <dependency>
+        <groupId>org.codehaus.jackson</groupId>
+        <artifactId>jackson-xc</artifactId>
+        <version>1.9.9</version>
+      </dependency>
+      <dependency>
+        <groupId>org.codehaus.jackson</groupId>
+        <artifactId>jackson-mapper-asl</artifactId>
+        <version>1.9.9</version>
+      </dependency>
+      <dependency>
         <groupId>com.sun.grizzly</groupId>
         <artifactId>grizzly-comet-webserver</artifactId>
         <version>1.9.36</version>
@@ -222,43 +247,43 @@
       <dependency>
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-bundle</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-json</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-server</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-client</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey.contribs</groupId>
         <artifactId>jersey-multipart</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey.jersey-test-framework</groupId>
         <artifactId>jersey-test-framework-core</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey.jersey-test-framework</groupId>
         <artifactId>jersey-test-framework-grizzly2</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey.contribs</groupId>
         <artifactId>jersey-guice</artifactId>
-        <version>1.8</version>
+        <version>1.11</version>
       </dependency>
       <dependency>
         <groupId>log4j</groupId>

+ 35 - 7
ambari-server/pom.xml

@@ -9,7 +9,8 @@
   License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
   OF ANY KIND, either express or implied. See the License for the specific 
   language governing permissions and limitations under the License. -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <parent>
     <groupId>org.apache.ambari</groupId>
     <artifactId>ambari-project</artifactId>
@@ -56,10 +57,6 @@
   <profiles>
   </profiles>
   <dependencies>
-    <dependency>
-      <groupId>org.apache.ambari</groupId>
-      <artifactId>ambari-api</artifactId>
-    </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
@@ -117,8 +114,8 @@
       <artifactId>slf4j-log4j12</artifactId>
     </dependency>
     <dependency>
-    <groupId>log4j</groupId>
-    <artifactId>log4j</artifactId>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
     </dependency>
     <dependency>
       <groupId>org.eclipse.persistence</groupId>
@@ -160,6 +157,20 @@
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jettison</groupId>
+          <artifactId>jettison</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
@@ -177,6 +188,18 @@
       <groupId>com.sun.jersey.contribs</groupId>
       <artifactId>jersey-guice</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-jaxrs</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-xc</artifactId>
+    </dependency>
     <dependency>
       <groupId>com.sun.jersey.jersey-test-framework</groupId>
       <artifactId>jersey-test-framework-core</artifactId>
@@ -187,6 +210,11 @@
       <artifactId>jersey-test-framework-grizzly2</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.jettison</groupId>
+      <artifactId>jettison</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>

+ 7 - 3
ambari-server/src/main/assemblies/server.xml

@@ -26,7 +26,11 @@
   <files>
     <file>
       <source>${project.build.directory}/${artifact.artifactId}-${artifact.version}.jar</source>
-      <outputDirectory>ambari-server-${project.version}/lib</outputDirectory>
+      <outputDirectory>ambari-server-${project.version}/lib/ambari-server</outputDirectory>
+    </file>
+    <file>
+      <source>${basedir}/src/main/python/ambari-server.py</source>
+      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
     </file>
   </files>
   <fileSets>
@@ -59,7 +63,7 @@
     </fileSet>
     <fileSet>
       <directory>src/main/conf</directory>
-      <outputDirectory>/ambari-server-${project.version}/conf</outputDirectory>
+      <outputDirectory>/ambari-server-${project.version}/etc/ambari-server/conf</outputDirectory>
     </fileSet>
     <fileSet>
       <directory>src/main/assemblies</directory>
@@ -71,7 +75,7 @@
   </fileSets>
   <dependencySets>
     <dependencySet>
-      <outputDirectory>ambari-server-${project.version}/lib</outputDirectory>
+      <outputDirectory>ambari-server-${project.version}/lib/ambari-server</outputDirectory>
       <unpack>false</unpack>
       <scope>compile</scope>
     </dependencySet>

+ 3 - 2
ambari-server/src/main/java/org/apache/ambari/server/ServiceComponentHostNotFoundException.java

@@ -20,9 +20,10 @@ package org.apache.ambari.server;
 
 public class ServiceComponentHostNotFoundException extends AmbariException {
 
-  public ServiceComponentHostNotFoundException(String serviceName,
-      String serviceComponentName, String hostName) {
+  public ServiceComponentHostNotFoundException(String clusterName,
+      String serviceName, String serviceComponentName, String hostName) {
     super("ServiceComponentHost not found"
+        + ", clusterName=" + clusterName
         + ", serviceName=" + serviceName
         + ", serviceComponentName=" + serviceComponentName
         + ", hostName=" + hostName);

+ 31 - 0
ambari-server/src/main/java/org/apache/ambari/server/ServiceComponentNotFoundException.java

@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server;
+
+public class ServiceComponentNotFoundException extends AmbariException {
+
+  public ServiceComponentNotFoundException(String clusterName,
+      String serviceName, String serviceComponentName) {
+    super("ServiceComponent not found"
+        + ", clusterName=" + clusterName
+        + ", serviceName=" + serviceName
+        + ", serviceComponentName=" + serviceComponentName);
+  }
+
+}

+ 28 - 0
ambari-server/src/main/java/org/apache/ambari/server/ServiceNotFoundException.java

@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server;
+
+public class ServiceNotFoundException extends AmbariException {
+
+  public ServiceNotFoundException(String clusterName, String serviceName) {
+    super("Service not found"
+        + ", clusterName=" + clusterName
+        + ", serviceName=" + serviceName);
+  }
+}

+ 3 - 0
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java

@@ -22,6 +22,9 @@ import java.util.List;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.agent.CommandReport;
 
+import com.google.inject.Singleton;
+
+@Singleton
 public class ActionDBAccessorImpl implements ActionDBAccessor {
 
   private long stageId = 0;

+ 14 - 9
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBInMemoryImpl.java

@@ -24,6 +24,9 @@ import org.apache.ambari.server.Role;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.utils.StageUtils;
 
+import com.google.inject.Singleton;
+
+@Singleton
 public class ActionDBInMemoryImpl implements ActionDBAccessor {
 
   List<Stage> stageList = new ArrayList<Stage>();
@@ -81,17 +84,19 @@ public class ActionDBInMemoryImpl implements ActionDBAccessor {
         l.add(s);
       }
     }
-    //TODO: Remove this code
-    //HACK to add a stage so that something is sent to the agent
-    long requestId = 1;
-    long stageId = 1;
+    // TODO: Remove this code
+    // HACK to add a stage so that something is sent to the agent
     if (l.isEmpty()) {
-      requestId = stageList.get(stageList.size() - 1).getRequestId() + 1;
-      stageId = stageList.get(stageList.size() - 1).getStageId() + 1;
+      long requestId = 1;
+      long stageId = 1;
+      if (!stageList.isEmpty()) {
+        requestId = stageList.get(stageList.size() - 1).getRequestId() + 1;
+        stageId = stageList.get(stageList.size() - 1).getStageId() + 1;
+      }
+      Stage s = StageUtils.getATestStage(requestId, stageId);
+      stageList.add(s);
+      l.add(s);
     }
-    Stage s = StageUtils.getATestStage(requestId, stageId);
-    stageList.add(s);
-    l.add(s);
     return l;
   }
 

+ 1 - 3
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java

@@ -24,8 +24,6 @@ import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.agent.rest.AgentResource;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.utils.StageUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,7 +54,7 @@ public class ActionManager {
     this.fsm = fsm;
   }
 
-  public void initialize() {
+  public void start() {
     scheduler.start();
   }
 

Kaikkia tiedostoja ei voida näyttää, sillä liian monta tiedostoa muuttui tässä diffissä