Problem writing to alpha channel of frame buffer

I’m trying to write data to the alpha channel of a framebuffer, but the alpha value I read back is always 0.

Here are the relevant parts of the framebuffer setup:

const static GLenum g_DrawBuffers[4] = {GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1, GL_COLOR_ATTACHMENT2, GL_COLOR_ATTACHMENT3};

	// One GL_RGBA16 array texture; each of its four layers gets attached
	// below as a separate color attachment of the same FBO.
	glBindTexture(GL_TEXTURE_2D_ARRAY, m_textures[IndexTexture]);
	glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_RGBA16, width, height, 4, 0, GL_RGBA, GL_UNSIGNED_SHORT, nullptr);
	
	glGenFramebuffers(2, m_fbo);
	glBindFramebuffer(GL_FRAMEBUFFER, m_fbo[0]);
	glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, m_textures[IndexTexture], 0, 0);
	glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT1, m_textures[IndexTexture], 0, 1);
	glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT2, m_textures[IndexTexture], 0, 2);
	glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT3, m_textures[IndexTexture], 0, 3);
	glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_stencil);
	glDrawBuffers(4, g_DrawBuffers);
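
Right after those calls it’s worth sanity-checking that the FBO is actually complete; a minimal sketch, assuming m_fbo[0] is still bound:

	GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
	if (status != GL_FRAMEBUFFER_COMPLETE)
		printf("FBO incomplete: 0x%04x\n", status);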

I set the blending state like this:

	glEnable(GL_BLEND);
	glBlendEquation(GL_MAX); // keeps max(src, dst) per channel; blend factors are ignored

	glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
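
With the buffers cleared to zero, GL_MAX blending means whatever the shader outputs should survive into the attachments. To see what actually lands there, a single pixel can be read straight back after the draw (a sketch; px_x and px_y are hypothetical probe coordinates):

	GLushort px[4];
	glBindFramebuffer(GL_READ_FRAMEBUFFER, m_fbo[0]);
	glReadBuffer(GL_COLOR_ATTACHMENT3);
	glReadPixels(px_x, px_y, 1, 1, GL_RGBA, GL_UNSIGNED_SHORT, px);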

And this is the fragment shader for that buffer:

		uniform int u_layer;
		flat in int v_id;

		layout (location = 0) out vec4 frag0;
		layout (location = 1) out vec4 frag1;
		layout (location = 2) out vec4 frag2;
		layout (location = 4) out vec4 frag3;

		void main()
		{
			// The layer ID relates to the stencil buffer settings; that's
			// all that really matters about it.
			ivec4 mask = ivec4(~u_layer & 1, ~u_layer & 1, u_layer & 1, u_layer & 1);
			int layer = u_layer / 2;

			vec4 flags = vec4(v_id, 65535 - v_id, v_id, 65535 - v_id) / float(65536);

			frag0  = flags * (mask * int(layer == 0));
			frag1  = flags * (mask * int(layer == 1));
			frag2  = flags * (mask * int(layer == 2));
			frag3  = flags * (mask * int(layer == 3));
		}
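
To spell out what that mask arithmetic selects, here are the same expressions evaluated on the CPU: even u_layer values route data into red/green, odd values into blue/alpha, and u_layer / 2 picks which of the four outputs is written.

	#include <cstdio>

	int main()
	{
		// Same expressions as in the shader, evaluated for every u_layer.
		for (int u_layer = 0; u_layer < 8; ++u_layer) {
			int lo = ~u_layer & 1;   // red/green mask bit
			int hi =  u_layer & 1;   // blue/alpha mask bit
			int layer = u_layer / 2; // which fragN receives the data
			printf("u_layer=%d -> mask=(%d,%d,%d,%d) -> frag%d\n",
			       u_layer, lo, lo, hi, hi, layer);
		}
	}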

Then during rendering:

			// Decode the normalized 16-bit channels back to integer IDs.
			ivec4 indices = ivec4(texelFetch(l_indices, ivec3(index, i), 0) * 65536 + .5);

			// Undo the "65535 - v_id" encoding; a texel that was never written stays 0.
			indices[1] = (65535 - indices[1]) * int(indices[1] != 0);

			// Debug output: flag which channels came back non-zero.
			frag_color = vec4(
					float(indices[2] != 0),
					float(indices[3] != 0),
					float(indices[2] != (65535 - indices[3])),
					1.f);

The output always shows that indices[3] is 0. I’ve tried NVIDIA and AMD cards, on both Ubuntu and Windows; the result is the same everywhere, and I don’t understand why.
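
To rule out a sampling problem, the texture contents can also be pulled back to the CPU and inspected directly (a sketch using the names from the setup code above; on a GL_TEXTURE_2D_ARRAY, glGetTexImage returns all four layers at once):

	// width * height texels, 4 layers, 4 channels of GL_UNSIGNED_SHORT.
	std::vector<GLushort> texels(width * height * 4 * 4);
	glBindTexture(GL_TEXTURE_2D_ARRAY, m_textures[IndexTexture]);
	glGetTexImage(GL_TEXTURE_2D_ARRAY, 0, GL_RGBA, GL_UNSIGNED_SHORT, texels.data());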