From 7ce1bdf6dc53500f4c9792168afe34f7d4649503 Mon Sep 17 00:00:00 2001
From: Sumit Jamgade
Date: Mon, 16 Oct 2017 10:58:48 +0200
Subject: [PATCH] Combine batches of successive roles for the same nodes

We can speed up the application of the (n+1)th role if both the nth and
(n+1)th roles are applied on the same node. This speeds up the
deployment of ceilometer by at least 1m20s (measured: 90s) and swift by
~20s.

E.g. in our 2-node deployment, ceilometer{server,central} are always
applied on the same node. Given that they have different priorities,
they are applied one after the other.

This does not violate any ordering constraints, as the application
procedure of the (n+1)th role is transparent to the nth role.
---
 .../app/models/service_object.rb | 31 +++++++++++++++++++
 1 file changed, 31 insertions(+)

diff --git a/crowbar_framework/app/models/service_object.rb b/crowbar_framework/app/models/service_object.rb
index ae1dcdca05..3fe04adba0 100644
--- a/crowbar_framework/app/models/service_object.rb
+++ b/crowbar_framework/app/models/service_object.rb
@@ -919,6 +919,35 @@ def self.proposal_to_role(proposal, bc_name)
     RoleObject.new role
   end
 
+  # We can speed up the application of the (n+1)th role if both the
+  # nth and (n+1)th roles are applied on the same node.
+  #
+  # E.g. in our 2-node deployment, ceilometer{server,central} are always
+  # applied on the same node. Given that they have different priorities,
+  # they are applied one after the other.
+  #
+  # In other words: this reduces the number of times chef-client is run,
+  # rather than speeding up the execution of any single run, by merging
+  # consecutive batches together.
+  #
+  # A batch is [roles, nodes].
+  def merge_batches(batches)
+    merged_batches = []
+    unless batches.empty?
+      current_batch = batches[0]
+      batches[1..-1].each do |next_batch|
+        if next_batch[1] == current_batch[1] && !current_batch[0].nil?
+          current_batch[0].concat(next_batch[0])
+          next
+        end
+        merged_batches << current_batch
+        current_batch = next_batch
+      end
+      merged_batches << current_batch
+    end
+    merged_batches
+  end
+
   #
   # After validation, this is where the role is applied to the system The old
   # instance (if one exists) is compared with the new instance. roles are
@@ -1171,6 +1200,8 @@ def apply_role(role, inst, in_queue, bootstrap = false)
       batches << [roles, nodes_in_batch] unless nodes_in_batch.empty?
     end
+
+    batches = merge_batches(batches)
 
     Rails.logger.debug "batches: #{batches.inspect}"
 
     # Cache attributes that are useful later on
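
A quick illustration (not part of the patch) of what the merged batches look like, assuming each batch is a [roles, nodes] pair as built in apply_role; the role and node names below are made up for the example:

    batches = [
      [["ceilometer-server"], ["node1"]],
      [["ceilometer-central"], ["node1"]],
      [["swift-storage"], ["node1", "node2"]]
    ]

    merge_batches(batches)
    # => [[["ceilometer-server", "ceilometer-central"], ["node1"]],
    #     [["swift-storage"], ["node1", "node2"]]]

Only adjacent batches with an identical node list are merged, so the relative order in which roles reach each node is unchanged; those nodes simply get one combined chef-client run instead of two.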