Render to texture with multipass rendering issue

Hi,
I’m writing a rendering engine using OpenGL in LWJGL (i.e. in Java). I render my scene using multipass rendering, one pass per light.
Recently I’ve implemented rendering to texture using an FBO. I first render the scene to a texture and then render a quad with this texture on it. When I use only one light, for example an ambient light, everything works well; but when there is more than one light and I use blending to join the results of the multiple passes, the rendered texture looks strange, while the scene I see directly on screen still looks fine.
Here are the fragments of code:

RenderToTextureCamera class


        public void init()
	{
		...
		// Create the offscreen framebuffer this camera renders into and make
		// it the active FBO for the attachment calls below.
		framebuffer = glGenFramebuffers();
		glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
		
		// Allocate backing storage for the color texture (RGB8, no mipmaps).
		// Nearest filtering so the fullscreen quad samples texels 1:1.
		renderTexture.bind();
		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, getWidth(), getHeight(), 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		
		// Depth renderbuffer: the multipass lighting relies on GL_EQUAL depth
		// tests after the ambient pass, so the FBO needs its own depth buffer.
		depthRenderbuffer = glGenRenderbuffers();
		glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
		glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, getWidth(), getHeight());
		glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);
		
		// Attach the color texture (mip level 0) and route fragment output 0 to it.
		glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, renderTexture.getTextureId(), 0);
		glDrawBuffers(new int[] { GL_COLOR_ATTACHMENT0 });
		
		// Fail fast if the driver rejects this attachment combination.
		// NOTE(review): the FBO is left bound on exit — presumably intentional
		// (render() rebinds it anyway), but consider glBindFramebuffer(GL_FRAMEBUFFER, 0)
		// here so later on-screen draws are not silently redirected; verify callers.
		if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
			throw new RuntimeException("Cannot initialize framebuffer for camera.");
	}
	
	...
	public void render()
	{
		// Render this camera's view into its offscreen framebuffer.
		glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
		// BUG FIX: the FBO color attachment starts at (0, 0) and is exactly
		// getWidth() x getHeight() texels (see init()). Screen offsets only make
		// sense for the default framebuffer; applying them here shifts the scene
		// partially (or entirely) off the texture whenever they are nonzero.
		glViewport(0, 0, getWidth(), getHeight());
		ForwardRendering.renderCamera(this);
	}

ForwardRendering class

public static void renderCamera(Camera camera)
	{
		...
		else
		{
			// Base pass: ambient lighting. This pass also lays down the depth
			// buffer that the subsequent light passes test against.
			renderShader(ambientShader, ambientLight, camera);
			
			// Accumulate every additional light additively on top of the
			// ambient result. Depth writes are disabled and the test switched
			// to GL_EQUAL so each pass re-shades exactly the surfaces the
			// ambient pass resolved, without z-fighting.
			// NOTE(review): with GL_ONE/GL_ONE blending, every fragment a light
			// shader emits is blended in — shaders in these passes must always
			// write their color output (an early return leaves it undefined).
			glEnable(GL_BLEND);
			glBlendFunc(GL_ONE, GL_ONE);
			glDepthMask(false);
			glDepthFunc(GL_EQUAL);
			
			directionalLights.forEach(light -> { if(light.isEnabled()) renderShader(directionalShader, light, camera); });
			pointLights.forEach(light -> { if(light.isEnabled()) renderShader(pointShader, light, camera); });
			spotLights.forEach(light -> { if(light.isEnabled()) renderShader(spotShader, light, camera); });
			
			// Restore default depth/blend state for whatever renders next.
			glDepthFunc(GL_LESS);
			glDepthMask(true);
			glDisable(GL_BLEND);
		}
	}
	
	public static void renderShader(Shader shader, Light light, Camera camera)
	{
		// One lighting pass: draw every registered mesh with this shader/light pair.
		meshRenderers.forEach(renderer -> {
			renderer.render(shader, light, camera);
		});
	}

Depending on the total number of lights, “deferred rendering” could be a useful alternative.
I would try to split the different lighting parts into different textures and, for testing purposes, display only one texture at a time:

render ambient into texture 1
render diffuse into texture 2
render specular into texture 3

if (KEY_1 pressed) show texture 1;
if (KEY_2 pressed) show texture 2;
if (KEY_3 pressed) show texture 3;

first try 1 light source, then 2, then …
what do(es) your fragment shader(s) look like ?

I will probably implement deferred rendering in the future, but for now I would like to solve my render-to-texture problem with the rendering system I already have. I don’t know why all fragments affected by the point light are white on the rendered texture. I think it could be caused by the blending in the renderCamera function, but I don’t know how to solve it.

It’s one of my fragment shaders, used for point light:

#version 330

in mat3 TBN;
in vec3 pos;
in vec2 uv;

uniform vec3 cameraPos;
uniform vec3 diffuseColor;
uniform sampler2D diffuseTexture;
uniform vec3 specularColor;
uniform sampler2D specularTexture;
uniform float normalMapIntensity;
uniform sampler2D normalMap;
uniform vec3 lightColor;
uniform float lightIntensity;
uniform vec3 lightPos;
uniform float lightAttenLinear;
uniform float lightAttenQuadratic;
uniform float lightRange;

layout(location = 0) out vec3 fragColor;

// Point-light pass: attenuated diffuse + Phong specular, computed in tangent
// space (all direction vectors are transformed by TBN). The result is blended
// additively (glBlendFunc(GL_ONE, GL_ONE)) on top of the ambient pass.
void main()
{
	vec3 lightDirection = TBN * (pos - lightPos);
	float lightDistance = length(lightDirection);

	// BUG FIX: returning without writing fragColor leaves the output UNDEFINED,
	// and that undefined value is still blended into the framebuffer — this is
	// what produced the white fragments in the render-to-texture path.
	// Out-of-range fragments must contribute nothing, so emit black
	// (the additive-blending identity) before returning.
	if(lightDistance > lightRange)
	{
		fragColor = vec3(0.0);
		return;
	}

	lightDirection = normalize(lightDirection);
	vec3 cameraDirection = TBN * normalize(cameraPos - pos);
	// Blend the flat tangent-space normal (0,0,1) toward the sampled normal map.
	// (texture2D() is removed from GLSL 330 core; use the generic texture().)
	vec3 normal = normalize(mix(vec3(0, 0, 1), texture(normalMap, uv).rgb * 2.0 - 1.0, normalMapIntensity));

	// Lambert diffuse term.
	vec3 diffuseMaterial = texture(diffuseTexture, uv).rgb * diffuseColor;
	float diffuseFactor = clamp(dot(normal, -lightDirection), 0.0, 1.0);
	vec3 diffuseLight = lightColor * lightIntensity * diffuseFactor;
	vec3 diffuse = diffuseMaterial * diffuseLight;

	// Phong specular; the specular map's alpha channel encodes shininess (1..11).
	// Local renamed from `specularTexture` so it no longer shadows the sampler uniform.
	vec4 specularSample = texture(specularTexture, uv);
	float specularPower = specularSample.a * 10.0 + 1.0;
	vec3 specularMaterial = specularSample.rgb * specularColor;
	vec3 reflection = reflect(lightDirection, normal);
	float specularFactor = clamp(dot(cameraDirection, reflection), 0.0, 1.0);
	specularFactor = pow(specularFactor, specularPower);
	vec3 specularLight = lightColor * lightIntensity * specularFactor;
	vec3 specular = specularMaterial * specularLight;

	// Standard 1 / (1 + k1*d + k2*d^2) distance attenuation
	// (d*d instead of pow(d, 2) — cheaper and exact).
	float lightAtten = 1.0 / (1.0 + lightAttenLinear * lightDistance +
							  lightAttenQuadratic * lightDistance * lightDistance);

	fragColor = (diffuse + specular) * lightAtten;
}