Same input data, same shader, different results

Hey there, I have a compute shader that I always run with the same input, but it produces different results each time I run it. Here is the code:

#version 450 core

const int localSizeX = 8;
const int localSizeY = 8;
const int localSizeZ = 8;
layout(local_size_x = localSizeX, local_size_y = localSizeY, local_size_z = localSizeZ) in;

uniform uint sizeX;
uniform uint sizeY;
uniform uint sizeZ;

// Input scalar field, sizeX * sizeY * sizeZ density samples
layout(std430, binding = 0) buffer scalarFieldBuffer
{
    float density[];
} scalarField;

// Per-edge output: the slot index assigned to each sign-change edge
layout(std430, binding = 1) buffer SignChangeEdgesXBuffer
{
    int height[];
} signChangeEdgesX;

// Running offsets per (y, z) column for edges with a positive base sample
layout(std430, binding = 2) buffer signChangeEdgesIndexOffsetsBasePosBuffer
{
    int indexOffset[];
} signChangeEdgesIndexOffsetsXBasePos;

// Running offsets per (y, z) column for edges with a negative base sample
layout(std430, binding = 3) buffer signChangeEdgesIndexOffsetsBaseNegBuffer
{
    int indexOffset[];
} signChangeEdgesIndexOffsetsXBaseNeg;

// Debug counters for sanity-checking the two branches
layout(binding = 4) uniform atomic_uint testBaseNeg;
layout(binding = 5) uniform atomic_uint testBasePos;
layout(binding = 6) uniform atomic_uint testBaseNegIf;
layout(binding = 7) uniform atomic_uint testBasePosIf;

// Flattened index into the 3D scalar field (x fastest, then y, then z)
uint getScalarIndex(uint x, uint y, uint z)
{
    return z * sizeX * sizeY + y * sizeX + x;
}

// Flattened heightmap index (currently unused in this shader)
uint getHeightmapIndex(uint widthIndex, uint heightIndex, uint depthIndex, uint width, uint depth)
{
    return heightIndex * width * depth + widthIndex * depth + depthIndex;
}

void main()
{
    // Global voxel coordinate; invocations outside the field exit early
    uint currentXIndex = gl_LocalInvocationID.x + (gl_WorkGroupID.x * localSizeX);
    if (currentXIndex >= sizeX - 1)
    {
        return;
    }

    uint currentYIndex = gl_LocalInvocationID.y + (gl_WorkGroupID.y * localSizeY);
    if (currentYIndex >= sizeY)
    {
        return;
    }

    uint currentZIndex = gl_LocalInvocationID.z + (gl_WorkGroupID.z * localSizeZ);
    if (currentZIndex >= sizeZ)
    {
        return;
    }

    // Index into the per-(y, z) offset buffers
    uint heightmapIndexOffsetIndex = currentYIndex * sizeZ + currentZIndex;

    uint scalarIndex = getScalarIndex(currentXIndex, currentYIndex, currentZIndex);
    float scalar1 = scalarField.density[scalarIndex];
    float scalar2 = scalarField.density[getScalarIndex(currentXIndex + 1, currentYIndex, currentZIndex)];

    // Sign change along the +X edge, negative base sample
    if (scalar1 < 0 && scalar2 >= 0)
    {
        // Atomically reserve the next slot in this (y, z) column
        int currentHeightmapIndexOffset = atomicAdd(signChangeEdgesIndexOffsetsXBaseNeg.indexOffset[heightmapIndexOffsetIndex], 1);
        atomicCounterIncrement(testBaseNeg);
        if (currentHeightmapIndexOffset > 5)
        {
            atomicCounterIncrement(testBaseNegIf);
        }
        signChangeEdgesX.height[scalarIndex] = currentHeightmapIndexOffset;
    }
    else if (scalar1 >= 0 && scalar2 < 0) // sign change along the +X edge, positive base sample
    {
        // Atomically reserve the next slot in this (y, z) column
        int currentHeightmapIndexOffset = atomicAdd(signChangeEdgesIndexOffsetsXBasePos.indexOffset[heightmapIndexOffsetIndex], 1);
        atomicCounterIncrement(testBasePos);
        if (currentHeightmapIndexOffset > 5)
        {
            atomicCounterIncrement(testBasePosIf);
        }
        signChangeEdgesX.height[scalarIndex] = currentHeightmapIndexOffset;
    }
}

testBasePos and testBaseNeg always come out the same, but testBasePosIf and testBaseNegIf come out different on every run. I reset all the buffers and counters before each run, so that shouldn't be the issue.
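In case it matters, the reset and dispatch between runs is essentially the following. This is a simplified sketch, not my exact code: the handle names are placeholders, and I assume here that one atomic counter buffer backs bindings 4 through 7.

// Placeholder handles; created and filled elsewhere
GLuint heightBuf, offsetBufPos, offsetBufNeg, counterBuf;
GLuint program;
GLuint sizeX, sizeY, sizeZ; // field dimensions, also set as uniforms
GLuint zero = 0;

// Zero every buffer the shader writes to (glClearBufferData needs GL 4.3+)
GLuint writableBufs[] = { heightBuf, offsetBufPos, offsetBufNeg };
for (GLuint buf : writableBufs)
{
    glBindBuffer(GL_SHADER_STORAGE_BUFFER, buf);
    glClearBufferData(GL_SHADER_STORAGE_BUFFER, GL_R32UI, GL_RED_INTEGER, GL_UNSIGNED_INT, &zero);
}

// Zero the atomic counters
glBindBuffer(GL_ATOMIC_COUNTER_BUFFER, counterBuf);
glClearBufferData(GL_ATOMIC_COUNTER_BUFFER, GL_R32UI, GL_RED_INTEGER, GL_UNSIGNED_INT, &zero);

// Make the clears visible to the compute shader before dispatching
glMemoryBarrier(GL_SHADER_STORAGE_BARRIER_BIT | GL_ATOMIC_COUNTER_BARRIER_BIT);

// One thread per voxel, 8x8x8 local size as in the shader
glUseProgram(program);
glDispatchCompute((sizeX + 7) / 8, (sizeY + 7) / 8, (sizeZ + 7) / 8);

// Wait for the shader's writes before reading the counters back
glMemoryBarrier(GL_BUFFER_UPDATE_BARRIER_BIT);

// Read the four counters (assumes tightly packed uints at bindings 4-7)
GLuint counters[4];
glBindBuffer(GL_ATOMIC_COUNTER_BUFFER, counterBuf);
glGetBufferSubData(GL_ATOMIC_COUNTER_BUFFER, 0, sizeof(counters), counters);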

Does anyone have an idea what might be going wrong here?