Remove obsolete CD-ROM remounting code
- Previous CPI versions modified `env.json` and re-attached `env.iso` to
pass along disk metadata to the BOSH agent during `attach_disk` calls.
- After updating the vSphere CPI to use v2 of the interface, we now pass
disk metadata through the BOSH director by returning it from the
`attach_disk` call, so re-attaching the CD-ROM is no longer necessary
(see the sketch below).
- This commit removes the obsolete code.
- Note that the CD-ROM mounting code is still required at VM creation
time so that the BOSH agent can learn the IP address of the VM.
- Note also that `env.json` is still written out to the datastore even
though we no longer need it. We decided to leave it in case customers use
it for debugging or script automation.

[#179679846](https://www.pivotaltracker.com/story/show/179679846)
julian-hj committed Sep 24, 2021
1 parent 4d8952b commit d60da74
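
To make the interface change concrete, here is a minimal sketch of the two flows in Ruby. It is illustrative only: `attach_disk_v1`, `attach_disk_v2`, and the `agent_env`/`disk_spec` arguments are hypothetical stand-ins rather than the CPI's real classes; only `unit_number`, `disk_uuid_is_enabled?`, and the `{ 'id' => uuid }` hint shape are taken from the diff below.

    # CPI v1 flow (removed by this commit): mutate the agent env on the VM by
    # rewriting env.json and re-attaching env.iso; nothing is returned, and
    # the agent discovers the new disk by re-reading the CD-ROM.
    def attach_disk_v1(agent_env, vm, director_disk_cid, disk_hint)
      env = agent_env.get_current_env(vm)   # read env.json back from the datastore
      env['disks']['persistent'][director_disk_cid] = disk_hint
      agent_env.set_env(vm, env)            # rewrite env.json and remount env.iso
      nil
    end

    # CPI v2 flow (current): compute the hint and return it from attach_disk;
    # the BOSH director relays it to the agent, so no CD-ROM remount is needed.
    def attach_disk_v2(vm, disk_spec)
      if vm.disk_uuid_is_enabled?
        { 'id' => disk_spec.backing.uuid.downcase }  # disk.enableUUID TRUE: stable UUID hint
      else
        disk_spec.device.unit_number.to_s            # disk.enableUUID FALSE: relative device number
      end
    end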
Showing 4 changed files with 18 additions and 170 deletions.
22 changes: 4 additions & 18 deletions src/vsphere_cpi/lib/cloud/vsphere/agent_env.rb
@@ -10,31 +10,17 @@ def initialize(client:, file_provider:)
@file_provider = file_provider
end

def get_current_env(vm, datacenter_name)
logger.info("Getting current agent env from vm '#{vm.name}' in datacenter '#{datacenter_name}'")
cdrom = @client.get_cdrom_device(vm)
env_iso_folder = env_iso_folder(cdrom)
return unless env_iso_folder

datastore = cdrom.backing.datastore
datastore_pattern = Regexp.escape(datastore.name)
result = env_iso_folder.match(/\[#{datastore_pattern}\] (.*)/)
raise Bosh::Clouds::CloudError.new("Could not find matching datastore name '#{datastore.name}'") unless result
env_path = result[1]
ds_hosts = get_vm_vsphere_cluster_hosts(vm)
contents = @file_provider.fetch_file_from_datastore(datacenter_name, datastore, "#{env_path}/env.json", ds_hosts)
raise Bosh::Clouds::CloudError.new("Unable to load env.json from '#{env_path}/env.json'") unless contents

JSON.load(contents)
end

def set_env(vm, location, env)
logger.info("Updating current agent env from vm '#{vm.name}' in datacenter '#{location[:datacenter]}'")
env_json = JSON.dump(env)

disconnect_cdrom(vm)
clean_env(vm)
ds_hosts = get_vm_vsphere_cluster_hosts(vm)
# NOTE: env.json used to be read back by a 'get_current_env' method to fetch the existing env state of the
# VM and update it. We no longer update the agent env, since disk information is now passed via the CPI v2
# `disk_hint`, so 'get_current_env' has been removed and 'env.json' is written out purely for consistency
# and debugging purposes.
@file_provider.upload_file_to_datastore(location[:datacenter],
location[:datastore].mob,
"#{location[:vm]}/env.json",
29 changes: 0 additions & 29 deletions src/vsphere_cpi/lib/cloud/vsphere/cloud.rb
@@ -663,10 +663,6 @@ def attach_disk(vm_cid, raw_director_disk_cid)
disk_hint = disk_spec.device.unit_number.to_s
logger.info("adding disk to env, disk.enableUUID is FALSE, using relative device number #{disk_hint} for mounting")
end

# Overwrite cid with the director cid
# Since director sends messages with "director cid" to agent, the agent needs that ID in its env, not the clean_cid
add_disk_to_agent_env(vm, director_disk_cid, disk_hint)
disk_hint
end
end
@@ -682,11 +678,6 @@ def detach_disk(vm_cid, raw_director_disk_cid)
raise Bosh::Clouds::DiskNotAttached.new(true), "Disk '#{director_disk_cid.value}' is not attached to VM '#{vm_cid}'" if disk.nil?

vm.detach_disks([disk], @datacenter.disk_path)
begin
delete_disk_from_agent_env(vm, director_disk_cid)
rescue => e
logger.warn("Cannot delete disk from agent env: #{e}")
end
end
end

@@ -962,26 +953,6 @@ def get_vm_env_datastore(vm)
vm.accessible_datastores[cdrom.backing.datastore.name]
end

def add_disk_to_agent_env(vm, director_disk_cid, disk_hint)
env = @agent_env.get_current_env(vm.mob, @datacenter.name)

env['disks']['persistent'][director_disk_cid.raw] = disk_hint

location = { datacenter: @datacenter.name, datastore: get_vm_env_datastore(vm), vm: vm.cid }
@agent_env.set_env(vm.mob, location, env)
end

def delete_disk_from_agent_env(vm, director_disk_cid)
env = @agent_env.get_current_env(vm.mob, @datacenter.name)
location = { datacenter: @datacenter.name, datastore: get_vm_env_datastore(vm), vm: vm.cid }

if env['disks']['persistent'][director_disk_cid.raw]
env['disks']['persistent'].delete(director_disk_cid.raw)

@agent_env.set_env(vm.mob, location, env)
end
end

def verify_props(type, required_properties, properties)
for prop in required_properties
if properties[prop].nil?
52 changes: 0 additions & 52 deletions src/vsphere_cpi/spec/unit/cloud/vsphere/agent_env_spec.rb
@@ -22,58 +22,6 @@ module VSphereCloud
}
end

describe '#get_current_env' do
let(:vm) { instance_double('VimSdk::Vim::VirtualMachine', name: 'fake-vm-name') }

before do
allow(client).to receive(:get_cdrom_device).with(vm).and_return(cdrom_device)
allow(subject).to receive(:get_vm_vsphere_cluster_hosts).with(vm).and_return([])
end

let(:cdrom_device) do
instance_double(
'VimSdk::Vim::Vm::Device::VirtualCdrom',
backing: cdrom_backing
)
end

before do
allow(cdrom_device).to receive(:kind_of?).
with(VimSdk::Vim::Vm::Device::VirtualCdrom).and_return(true)
end

let(:cdrom_backing) do
instance_double('VimSdk::Vim::Vm::Device::VirtualCdrom::IsoBackingInfo',
datastore: cdrom_datastore,
file_name: '[fake-datastore-name 1] fake-vm-name/env.iso'
)
end

let(:cdrom_datastore) { instance_double('VimSdk::Vim::Datastore', name: 'fake-datastore-name 1') }

it 'gets current agent environment from fetched file' do
expect(file_provider).to receive(:fetch_file_from_datastore).with(
'fake-datacenter-name 1',
cdrom_datastore,
'fake-vm-name/env.json', []
).and_return('{"fake-response-json" : "some-value"}')

expect(subject.get_current_env(vm, 'fake-datacenter-name 1')).to eq({'fake-response-json' => 'some-value'})
end

it 'raises if env.json is empty' do
allow(file_provider).to receive(:fetch_file_from_datastore).with(
'fake-datacenter-name 1',
cdrom_datastore,
'fake-vm-name/env.json', []
).and_return(nil)

expect {
subject.get_current_env(vm, 'fake-datacenter-name 1')
}.to raise_error(Bosh::Clouds::CloudError)
end
end

describe '#clean_env' do
let(:vm) do
instance_double('VimSdk::Vim::VirtualMachine',
85 changes: 14 additions & 71 deletions src/vsphere_cpi/spec/unit/cloud/vsphere/cloud_spec.rb
@@ -1428,7 +1428,6 @@ module VSphereCloud
before do
allow(ds_mob).to receive_message_chain('summary.maintenance_mode').and_return("normal")
end
let(:agent_env_hash) { { 'disks' => { 'persistent' => { 'disk-cid' => 'fake-device-number' } } } }
let(:host_runtime_info) { instance_double(VimSdk::Vim::Host::RuntimeInfo, in_maintenance_mode: false) }
let(:host_system) {instance_double(VimSdk::Vim::HostSystem, runtime: host_runtime_info)}
let(:datastore_host_mount) { [instance_double('VimSdk::Vim::Datastore::HostMount', key: host_system)]}
@@ -1452,7 +1451,6 @@ module VSphereCloud
allow(datacenter).to receive(:persistent_pattern).and_return(/datastore\-.*/)
allow(datacenter).to receive(:persistent_datastore_cluster_pattern).and_return('')
allow(vm_provider).to receive(:find).with('fake-vm-cid').and_return(vm)
allow(agent_env).to receive(:get_current_env).and_return(agent_env_hash)
allow(cdrom).to receive_message_chain(:backing, :datastore, :name) { 'datastore-with-disk' }
allow(vcenter_client).to receive(:get_cdrom_device).with(vm_mob).and_return(cdrom)
end
@@ -1470,11 +1468,6 @@ module VSphereCloud
expect(disk.cid).to eq('disk-cid')
OpenStruct.new(device: OpenStruct.new(unit_number: 2))
end
expect(agent_env).to receive(:set_env) do|env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent']['disk-cid']).to eq('2')
end
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
expect(disk_hint).to match('2')
end
@@ -1494,12 +1487,8 @@ module VSphereCloud
OpenStruct.new(backing: OpenStruct.new(uuid: 'SOME-UUID'))
end

expect(agent_env).to receive(:set_env) do|env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent']['disk-cid']).to eq({ 'id' => 'some-uuid'})
end
vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
expect(disk_hint).to eq({ 'id' => 'some-uuid'})
end

it 'attaches the existing persistent disk with encoded metadata and without uuid' do
@@ -1519,13 +1508,8 @@ module VSphereCloud
end
expect(vm).to receive(:disk_uuid_is_enabled?).and_return(false)

expect(agent_env).to receive(:set_env) do |env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent'][disk_cid_with_metadata]).to eq('some-unit-number')
end

vsphere_cloud.attach_disk('fake-vm-cid', disk_cid_with_metadata)
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', disk_cid_with_metadata)
expect(disk_hint).to eq('some-unit-number')
end

it 'attaches the existing persistent disk with encoded metadata and with uuid' do
@@ -1550,14 +1534,8 @@ module VSphereCloud
OpenStruct.new(backing: OpenStruct.new(uuid: 'SOME-UUID'))
end

expect(agent_env).to receive(:set_env) do |env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent'][disk_cid_with_metadata]).to eq({'id' => 'some-uuid'})
end

ret = vsphere_cloud.attach_disk('fake-vm-cid', disk_cid_with_metadata)
expect(ret).to eq({'id' => 'some-uuid'})
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', disk_cid_with_metadata)
expect(disk_hint).to eq({'id' => 'some-uuid'})
end
end

@@ -1597,13 +1575,8 @@ module VSphereCloud

expect(vm).to receive(:disk_uuid_is_enabled?).and_return(false)

expect(agent_env).to receive(:set_env) do|env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent']['disk-cid']).to eq('some-unit-number')
end

vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
expect(disk_hint).to eq('some-unit-number')
end

it 'moves the UUID disk to an accessible datastore and attaches it' do
@@ -1622,13 +1595,8 @@ module VSphereCloud
OpenStruct.new(backing: OpenStruct.new(uuid: 'SOME-UUID'))
end

expect(agent_env).to receive(:set_env) do|env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent']['disk-cid']).to eq({'id' => 'some-uuid'})
end

vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
expect(disk_hint).to eq({'id' => 'some-uuid'})
end
end

@@ -1657,13 +1625,8 @@ module VSphereCloud

expect(vm).to receive(:disk_uuid_is_enabled?).and_return(false)

expect(agent_env).to receive(:set_env) do|env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent']['disk-cid']).to eq('some-unit-number')
end

vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
expect(disk_hint).to eq('some-unit-number')
end

it 'moves the UUID disk to a persistent datastore and attaches it' do
@@ -1683,13 +1646,8 @@ module VSphereCloud
OpenStruct.new(backing: OpenStruct.new(uuid: 'SOME-UUID'))
end

expect(agent_env).to receive(:set_env) do|env_vm, env_location, env|
expect(env_vm).to eq(vm_mob)
expect(env_location).to eq(vm_location)
expect(env['disks']['persistent']['disk-cid']).to eq({'id' => 'some-uuid'})
end

vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
disk_hint = vsphere_cloud.attach_disk('fake-vm-cid', 'disk-cid')
expect(disk_hint).to eq({'id' => 'some-uuid'})
end
end

@@ -1892,25 +1850,15 @@ module VSphereCloud
}
end
let(:cdrom) { instance_double(VimSdk::Vim::Vm::Device::VirtualCdrom) }
let(:env) do
{'disks' => {'persistent' => {'disk-cid' => 'fake-data'}}}
end

before do
allow(datacenter).to receive(:disk_path).and_return("fake-disk-path")
allow(cdrom).to receive_message_chain(:backing, :datastore, :name) { 'fake-datastore-name' }
allow(vcenter_client).to receive(:get_cdrom_device).with(vm_mob).and_return(cdrom)
allow(agent_env).to receive(:get_current_env).with(vm_mob, 'fake-datacenter').and_return(env)
allow(vm).to receive(:disk_by_cid).with('disk-cid').and_return(attached_disk)
allow(vm).to receive(:accessible_datastores).and_return({'fake-datastore-name'=>fake_datastore})
end

it 'updates VM with new settings' do
expect(agent_env).to receive(:set_env).with(
vm_mob,
vm_location,
{'disks' => {'persistent' => {}}}
)
expect(vm).to receive(:detach_disks).with([attached_disk], 'fake-disk-path')
vsphere_cloud.detach_disk('vm-id', 'disk-cid')
end
@@ -1938,11 +1886,6 @@ module VSphereCloud
end

it 'extracts the vSphere cid from the director disk cid and uses it' do
expect(agent_env).to receive(:set_env).with(
vm_mob,
vm_location,
{'disks' => {'persistent' => {}}}
)
allow(vm).to receive(:disk_by_cid).and_return(attached_disk)
allow(vm).to receive(:detach_disks)

