fix: encapsulate OpenGL types in VertexAttribute to eliminate raw GL API usage in tests

- Add VertexAttributeType and VertexAttributeNormalized enums in OpenGLVertexArray.h
- Add ToGLAttributeType() converter in OpenGLVertexArray.cpp
- Remove glActiveTexture() call from quad test (already handled by texture.Bind())
- Remove #include <glad/glad.h> from triangle test
- Update unit tests to use encapsulated enums

All three OpenGL integration tests (minimal, triangle, quad) pass with 0% pixel difference.
This commit is contained in:
2026-03-22 14:33:57 +08:00
parent 1f129ed20f
commit 1797e7fe17
10 changed files with 61 additions and 112 deletions

View File

@@ -127,8 +127,8 @@ int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine
VertexAttribute posAttr = {};
posAttr.index = 0;
posAttr.count = 4;
-posAttr.type = GL_FLOAT;
-posAttr.normalized = GL_FALSE;
+posAttr.type = VertexAttributeType::Float;
+posAttr.normalized = VertexAttributeNormalized::False;
posAttr.stride = sizeof(Vertex);
posAttr.offset = 0;
vertexArray.AddVertexBuffer(vertexBuffer.GetID(), posAttr);
@@ -136,8 +136,8 @@ int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine
VertexAttribute texAttr = {};
texAttr.index = 1;
texAttr.count = 2;
-texAttr.type = GL_FLOAT;
-texAttr.normalized = GL_FALSE;
+texAttr.type = VertexAttributeType::Float;
+texAttr.normalized = VertexAttributeNormalized::False;
texAttr.stride = sizeof(Vertex);
texAttr.offset = sizeof(float) * 4;
vertexArray.AddVertexBuffer(vertexBuffer.GetID(), texAttr);
@@ -200,7 +200,6 @@ int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine
pipelineState.Bind();
vertexArray.Bind();
-glActiveTexture(GL_TEXTURE0);
texture.Bind(0);
sampler.Bind(0);