gl-renderer: Use GL_UNSIGNED_SHORT for index array type

GL_UNSIGNED_INT is only supported when GL_OES_element_index_uint is
available (Mesa implements that extension).  We don't need 32-bit
indices, so just use GL_UNSIGNED_SHORT.
diff --git a/src/gl-renderer.c b/src/gl-renderer.c
index e321211..af8011b 100644
--- a/src/gl-renderer.c
+++ b/src/gl-renderer.c
@@ -856,7 +856,7 @@
 	struct weston_compositor *ec = output->compositor;
 	struct gl_renderer *gr = get_renderer(ec);
 	GLfloat *d;
-	unsigned int *p;
+	unsigned short *p;
 	int i, j, k, n;
 	GLfloat x[4], y[4], u[4], v[4];
 
@@ -956,7 +956,7 @@
 	glEnableVertexAttribArray(1);
 
 	glDrawElements(GL_TRIANGLES, n * 6,
-		       GL_UNSIGNED_INT, gr->indices.data);
+		       GL_UNSIGNED_SHORT, gr->indices.data);
 
 	glDisableVertexAttribArray(1);
 	glDisableVertexAttribArray(0);