Staging buffer deletion causes mesh to not appear on Intel graphics

I am using a staging buffer to send vertex and index data to another buffer for mesh rendering. On Intel graphics, deleting the staging buffer causes the mesh to not appear, even though the CopyBuffer function ends with a call to vkQueueWaitIdle(), which should complete the transfer and make it safe to delete the staging buffer.

This only happens on Intel graphics; Nvidia works fine. I have the LunarG validation layers enabled and nothing is triggered.

Can you see what I am doing wrong?

void VkuMesh::SetData(const std::vector<VkuVertex> vertices, const std::vector<uint32_t> indices)
{
	VkBuffer stagingBuffer;
	VmaAllocation stagingallocation;
	VkDeviceSize bufferSize;
	void* data;

	//Staging buffer
	environment->CreateBuffer(max(sizeof(indices[0]) * indices.size(), sizeof(vertices[0]) * vertices.size()), VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, stagingBuffer, VMA_MEMORY_USAGE_CPU_ONLY, stagingallocation);

	// Indice buffer
	bufferSize = sizeof(indices[0]) * indices.size();
	vmaMapMemory(environment->allocator, stagingallocation, &data);
	memcpy(data, indices.data(), (size_t)bufferSize);
	vmaUnmapMemory(environment->allocator, stagingallocation);

	environment->CreateBuffer(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, this->indexBuffer, VMA_MEMORY_USAGE_GPU_ONLY, this->indexallocation);
	environment->CopyBuffer(stagingBuffer, this->indexBuffer, bufferSize);

	// Vertex buffer
	bufferSize = sizeof(vertices[0]) * vertices.size();
	vmaMapMemory(environment->allocator, stagingallocation, &data);
	memcpy(data, vertices.data(), (size_t)bufferSize);
	vmaUnmapMemory(environment->allocator, stagingallocation);

	environment->CreateBuffer(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, this->vertexBuffer, VMA_MEMORY_USAGE_GPU_ONLY, this->allocation);
	environment->CopyBuffer(stagingBuffer, this->vertexBuffer, bufferSize);
	
	vmaDestroyBuffer(environment->allocator, stagingBuffer, stagingallocation);
}

// Records and submits a one-shot GPU copy of `size` bytes from srcBuffer to
// dstBuffer (both at offset 0), then blocks until the queue is idle so the
// caller may immediately destroy srcBuffer.
void Vku::CopyBuffer(VkBuffer srcBuffer, VkBuffer dstBuffer, VkDeviceSize size)
{
	VkCommandBuffer commandbuffer;
	VkFence fence;
	// presumably the manager recycles this command buffer once `fence`
	// signals — TODO confirm against GetManagedCommandBuffer's contract
	commandbuffermanager->GetManagedCommandBuffer(commandbuffer, fence);

	// One-time-submit: the command buffer is recorded, submitted once, and
	// then handed back to the manager.
	VkCommandBufferBeginInfo beginInfo = {};
	beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
	beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
	vkBeginCommandBuffer(commandbuffer, &beginInfo);

	// srcOffset/dstOffset are zero-initialized by the aggregate init, so this
	// copies the first `size` bytes of each buffer.
	VkBufferCopy copyRegion = {};
	copyRegion.size = size;
	vkCmdCopyBuffer(commandbuffer, srcBuffer, dstBuffer, 1, &copyRegion);

	vkEndCommandBuffer(commandbuffer);

	VkSubmitInfo submitInfo = {};
	submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submitInfo.commandBufferCount = 1;
	submitInfo.pCommandBuffers = &commandbuffer;

	// The submission signals `fence` on completion (used by the manager, per
	// the note above), but this function itself synchronizes via queue idle.
	VkResult res = vkQueueSubmit(devicequeue[0], 1, &submitInfo, fence);
	assert(VK_SUCCESS == res);

	// NOTE(review): vkQueueWaitIdle drains the whole queue; waiting on
	// `fence` with vkWaitForFences would block only on this submission.
	res = vkQueueWaitIdle(devicequeue[0]);// have to wait because the buffer will be deleted
	assert(VK_SUCCESS == res);
}

It turns out that in my internal buffer creation function, bufferInfo.size was hard-coded to 65536 instead of using the size argument. I'm not sure how that happened, but once I fixed it the problem went away.

void Vku::CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VkMemoryPropertyFlags properties, VkBuffer& buffer, const VmaMemoryUsage vmausage, VmaAllocation& allocation)
{
	VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
	bufferInfo.size = 65536;
	bufferInfo.usage = usage;
	VmaAllocationCreateInfo allocInfo = {};
	allocInfo.usage = vmausage;
	VmaAllocationInfo info;
	VkResult res = vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
}