
gpu: normalize 8-bit integer data in 0..1 by default.

Sam Hocevar (sam) committed 12 years ago on branch legacy
Commit 9960f73351
1 changed file with 6 additions and 1 deletion:
    src/gpu/vertexbuffer.cpp (+6, -1)

--- a/src/gpu/vertexbuffer.cpp
+++ b/src/gpu/vertexbuffer.cpp

@@ -294,8 +294,13 @@ void VertexDeclaration::SetStream(VertexBuffer *vb, ShaderAttrib attr1,
     if (type_index < 0 || type_index >= sizeof(tlut) / sizeof(*tlut))
         type_index = 0;
 
+    /* Normalize unsigned bytes by default, because it's usually
+     * some color information. */
+    GLboolean normalize = (tlut[type_index].type == GL_UNSIGNED_BYTE)
+                       || (tlut[type_index].type == GL_BYTE);
+
     glVertexAttribPointer((GLint)reg, tlut[type_index].size,
-                          tlut[type_index].type, GL_FALSE,
+                          tlut[type_index].type, normalize,
                           stride, (GLvoid const *)(uintptr_t)offset);
 }
 #endif
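
For context: when the "normalized" argument of glVertexAttribPointer is GL_TRUE, the driver converts integer components to floating point on fetch, mapping GL_UNSIGNED_BYTE data from 0..255 to 0..1 (signed GL_BYTE data maps to -1..1). A minimal sketch in plain OpenGL, not this engine's VertexDeclaration API; the attribute location and the color data below are invented for illustration:

/* Minimal sketch, assuming a GL context and a function loader (GLEW
 * here) are already set up. Attribute location 1 is an assumption. */
#include <GL/glew.h>
#include <stdint.h>

void setup_color_attrib(GLuint vbo)
{
    /* Four RGBA colors packed as unsigned bytes, 0..255 per channel. */
    static uint8_t const colors[4][4] = {
        { 255,   0,   0, 255 },  /* red */
        {   0, 255,   0, 255 },  /* green */
        {   0,   0, 255, 255 },  /* blue */
        { 255, 255, 255, 255 },  /* white */
    };

    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);

    /* GL_TRUE for "normalized" makes the driver divide each component
     * by 255 on fetch, so the vertex shader sees floats in 0..1; this
     * is the behavior the commit enables by default for GL_UNSIGNED_BYTE
     * and GL_BYTE streams. */
    glVertexAttribPointer(1, 4, GL_UNSIGNED_BYTE, GL_TRUE,
                          0, (GLvoid const *)0);
    glEnableVertexAttribArray(1);
}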

