diff --git a/app/controllers/apps_controller.rb b/app/controllers/apps_controller.rb index e9c66cc4..c172189c 100644 --- a/app/controllers/apps_controller.rb +++ b/app/controllers/apps_controller.rb @@ -15,6 +15,7 @@ def create barito_app_params[:name], barito_app_params[:tps_config].downcase, barito_app_params[:app_group].downcase, + Rails.env, ) if @app.valid? return redirect_to root_path diff --git a/app/models/barito_app.rb b/app/models/barito_app.rb index 56363b20..07eee31b 100644 --- a/app/models/barito_app.rb +++ b/app/models/barito_app.rb @@ -1,4 +1,5 @@ class BaritoApp < ActiveRecord::Base + CLUSTER_NAME_PADDING = 1000 validates :name, :tps_config, :app_group, :secret_key, :cluster_name, :setup_status, :app_status, presence: true validates :app_group, inclusion: { in: Figaro.env.app_groups.split(',').map(&:downcase) } @@ -14,14 +15,14 @@ class BaritoApp < ActiveRecord::Base blueprint_creation_error: 'BLUEPRINT_CREATION_ERROR', blueprint_executed: 'BLUEPRINT_EXECUTED', blueprint_executed_error: 'BLUEPRINT_EXECUTED_ERROR', - provisioning_stated: 'PROVISIONING_STARTED', + provisioning_started: 'PROVISIONING_STARTED', provisioning_error: 'PROVISIONING_ERROR', chef_bootstrap_started: 'CHEF_BOOTSTRAP_STARTED', chef_bootstrap_error: 'CHEF_BOOTSTRAP_ERROR', finished: 'FINISHED', } - def self.setup(name, tps_config, app_group) + def self.setup(name, tps_config, app_group, env) barito_app = BaritoApp.new( name: name, tps_config: tps_config, @@ -33,8 +34,8 @@ def self.setup(name, tps_config, app_group) ) if barito_app.valid? 
barito_app.save - blueprint = Blueprint.new(barito_app, Rails.env) - blueprint_path = blueprint.generate_file(barito_app) + blueprint = Blueprint.new(barito_app, env) + blueprint_path = blueprint.generate_file BlueprintWorker.perform_async(blueprint_path) end barito_app @@ -67,7 +68,7 @@ def increase_log_count(new_count) end def self.generate_cluster_index - BaritoApp.all.size + 1 + BaritoApp.all.size + CLUSTER_NAME_PADDING end def self.secret_key_valid?(token) diff --git a/app/models/blueprint.rb b/app/models/blueprint.rb index c89e7361..7277bb33 100644 --- a/app/models/blueprint.rb +++ b/app/models/blueprint.rb @@ -10,23 +10,26 @@ def initialize(application, env) } config = YAML.load_file("#{Rails.root}/config/tps_config.yml") @app = application - @tps_config = config[env][@app.tps_config] + @env = env + @tps_config = config[@env][@app.tps_config] end - def generate_file(env) - nodes = generate_nodes(env) + def generate_file + nodes = generate_nodes blueprint = { - application_id: @app.id, cluster_name: @app.cluster_name, environment: env, nodes: nodes + application_id: @app.id, cluster_name: @app.cluster_name, environment: @env, nodes: nodes } - File.open("#{Rails.root}/blueprints/jobs/#{filename}", 'w+') do |f| + filepath = "#{Rails.root}/blueprints/jobs/#{filename}.json" + File.open(filepath, 'w+') do |f| f.write(blueprint.to_json) end + filepath end - def generate_nodes(env) + def generate_nodes nodes = [] @tps_config['instances'].each do |type, count| - nodes += (1..count).map { |number| node_hash(env, type, number) } + nodes += (1..count).map { |number| node_hash(type, number) } end nodes end @@ -37,8 +40,8 @@ def filename private - def node_hash(env, type, count) - name = "#{@env_prefix[env.to_s]}-#{@app.cluster_name}-#{type}-#{format('%02d', count.to_i)}" + def node_hash(type, count) + name = "#{@env_prefix[@env.to_sym]}-#{@app.cluster_name}-#{type}-#{format('%02d', count.to_i)}" { name: name, type: type } end end diff --git 
a/config/tps_config.yml.example b/config/tps_config.yml.example index 20a86504..83d841b9 100644 --- a/config/tps_config.yml.example +++ b/config/tps_config.yml.example @@ -5,11 +5,11 @@ development: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 1 kafka: 1 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 medium: @@ -18,11 +18,11 @@ development: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 large: @@ -30,12 +30,12 @@ development: tps_limit: 1000 instances: yggdrasil: 1 - consul: 1 - barito-flow-producer: 1 + consul: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 test: @@ -45,11 +45,11 @@ test: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 1 kafka: 1 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 medium: @@ -58,11 +58,11 @@ test: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 large: @@ -71,11 +71,11 @@ test: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 staging: @@ -85,11 +85,11 @@ staging: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 1 kafka: 1 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 medium: @@ -98,11 +98,11 @@ staging: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 large: @@ -111,11 
+111,11 @@ staging: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 production: @@ -125,11 +125,11 @@ production: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 1 kafka: 1 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 medium: @@ -138,11 +138,11 @@ production: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 large: @@ -151,9 +151,9 @@ production: instances: yggdrasil: 1 consul: 1 - barito-flow-producer: 1 zookeeper: 3 kafka: 3 - barito-flow-consumer: 1 elasticsearch: 1 + barito-flow-producer: 1 + barito-flow-consumer: 1 kibana: 1 diff --git a/spec/models/barito_app_spec.rb b/spec/models/barito_app_spec.rb index 2479d3ab..e1c1b1ef 100644 --- a/spec/models/barito_app_spec.rb +++ b/spec/models/barito_app_spec.rb @@ -18,6 +18,7 @@ barito_app_props.name, barito_app_props.tps_config, barito_app_props.app_group, + Rails.env, ) expect(barito_app.persisted?).to eq(true) expect(barito_app.setup_status).to eq(BaritoApp.setup_statuses[:pending]) @@ -29,6 +30,7 @@ barito_app_props.name, barito_app_props.tps_config, 'invalid_group', + Rails.env, ) expect(barito_app.persisted?).to eq(false) expect(barito_app.valid?).to eq(false) @@ -39,6 +41,7 @@ barito_app_props.name, 'invalid_config', barito_app_props.app_group, + Rails.env, ) expect(barito_app.persisted?).to eq(false) expect(barito_app.valid?).to eq(false) @@ -49,6 +52,7 @@ barito_app_props.name, barito_app_props.tps_config, barito_app_props.app_group, + Rails.env, ) expect(barito_app.cluster_name).to eq( Rufus::Mnemo.from_i(BaritoApp.generate_cluster_index), @@ -60,6 +64,7 @@ barito_app_props.name, barito_app_props.tps_config, barito_app_props.app_group, + Rails.env, ) 
expect(barito_app.secret_key).to eq(barito_app_props.secret_key) end @@ -69,6 +74,18 @@ barito_app.increase_log_count(1) expect(barito_app.log_count).to eq 1 end + + it 'should generate blueprint file' do + barito_app = BaritoApp.setup( + barito_app_props.name, + barito_app_props.tps_config, + barito_app_props.app_group, + Rails.env, + ) + blueprint = Blueprint.new(barito_app, Rails.env) + @file_path = "#{Rails.root}/blueprints/jobs/#{blueprint.filename}.json" + expect(File.exist?(@file_path)).to eq(true) + end end context 'App Status Update' do @@ -106,7 +123,7 @@ context 'It should get the next cluster index' do let(:barito_app) { create(:barito_app) } it 'should get the the next cluster index' do - expect(BaritoApp.generate_cluster_index).to eq(BaritoApp.all.size + 1) + expect(BaritoApp.generate_cluster_index).to eq(BaritoApp.all.size + 1000) end end diff --git a/spec/models/blueprint_spec.rb b/spec/models/blueprint_spec.rb index cb13c6c0..a0776c2c 100644 --- a/spec/models/blueprint_spec.rb +++ b/spec/models/blueprint_spec.rb @@ -21,12 +21,12 @@ blueprint = Blueprint.new(app, env) tps_config = config[env][app.tps_config] node_count = tps_config['instances'].values.inject(:+) - expect(node_count).to eq(blueprint.generate_nodes(env).count) + expect(node_count).to eq(blueprint.generate_nodes.count) end it 'should validate node hash' do blueprint = Blueprint.new(app, env) - nodes = blueprint.generate_nodes(env) + nodes = blueprint.generate_nodes nodes.each do |node| expect(node.key?(:name) && node.key?(:type)).to eq(true) end @@ -35,11 +35,11 @@ it 'should validate node name' do blueprint = Blueprint.new(app, env) tps_config = config[env][app.tps_config] - nodes = blueprint.generate_nodes(env) + nodes = blueprint.generate_nodes names = [] tps_config['instances'].each do |type, count| (1..count).each do |number| - names << "#{blueprint.env_prefix[env.to_s]}-#{app.cluster_name}-#{type}-" + + names << "#{blueprint.env_prefix[env.to_sym]}-#{app.cluster_name}-#{type}-" + 
format('%02d', number) end end @@ -60,19 +60,19 @@ around(:each) do |example| @blueprint = Blueprint.new(app, env) - @file_path = "#{Rails.root}/blueprints/jobs/#{@blueprint.filename}" + @file_path = "#{Rails.root}/blueprints/jobs/#{@blueprint.filename}.json" example.run File.delete(@file_path) if File.exist?(@file_path) end it 'should create blueprint file' do - @blueprint.generate_file(env) + @blueprint.generate_file expect(File.exist?(@file_path)).to eq(true) end it 'should validate content of blueprint file' do - nodes = @blueprint.generate_nodes(env) - @blueprint.generate_file(env) + nodes = @blueprint.generate_nodes + @blueprint.generate_file content = File.read(@file_path) blueprint_content = { application_id: app.id, @@ -82,5 +82,9 @@ } expect(content).to eq(blueprint_content.to_json) end + + it 'should return file_path' do + expect(@blueprint.generate_file).to eq(@file_path) + end end end