I want to draw an earth globe on Android, and at this point I need help with the UV texture coordinates. I'm using this earth texture (kibotu.net/earth.jpg). Currently the front side looks like this (kibotu.net/earthsphere.png), but rotated 90° it looks like this (kibotu.net/earthsphere2.png).
Since OpenGL ES doesn't support quadrics and has no native GLUT library, I find this rather difficult. Maybe someone has come across the same problem and can help me.
My first approach was to model the sphere in Blender, export it as an OBJ file and load it into my application. However, there are two side effects: totally weird-looking normals (kibotu.net/sphere.png) and, most importantly, no texture coordinates.
(I used these Blender export options: kibotu.net/blenderobjoptions.png.)
My second attempt was to use the freeglut library to do the job. Now I've got a nice-looking sphere (kibotu.net/sphere5.png), but there are no texture coordinates either. Since its last version was released on 27 November 2009, I very much doubt there will be an update any time soon.
After that I tried to apply the wiki approach to calculating the sphere UVs; however, it looked like this: kibotu.net/sphere2.png. I searched every single Stack Overflow thread on this problem and came across this UV approach, but there is no final solution there either. I've applied it to the freeglut code:
static private FloatBuffer sphereVertex;
static private FloatBuffer sphereNormal;
static private FloatBuffer sphereTexture;
static float sphere_parms[] = new float[3];

private static void plotSpherePoints(float radius, int stacks, int slices)
{
    sphereVertex  = OpenGLUtils.allocateFloatBuffer(4 * 6 * stacks * (slices + 1));
    sphereNormal  = OpenGLUtils.allocateFloatBuffer(4 * 6 * stacks * (slices + 1));
    sphereTexture = OpenGLUtils.allocateFloatBuffer(4 * 4 * stacks * (slices + 1));

    int i, j;
    float slicestep, stackstep;

    stackstep = ((float) Math.PI) / stacks;
    slicestep = 2.0f * ((float) Math.PI) / slices;

    for (i = 0; i < stacks; ++i) {
        float a = i * stackstep;
        float b = a + stackstep;

        float s0 = (float) Math.sin(a);
        float s1 = (float) Math.sin(b);
        float c0 = (float) Math.cos(a);
        float c1 = (float) Math.cos(b);

        float nv, u, v, dx, dy, dz;

        for (j = 0; j <= slices; ++j) {
            float c = j * slicestep;
            float x = (float) Math.cos(c);
            float y = (float) Math.sin(c);

            nv = x * s0;
            sphereNormal.put(nv);
            sphereVertex.put(dx = nv * radius);

            nv = y * s0;
            sphereNormal.put(nv);
            sphereVertex.put(dy = nv * radius);

            nv = c0;
            sphereNormal.put(nv);
            sphereVertex.put(dz = nv * radius);

            // uv 1
            if (dz < 0)
                u = (float) (1 + dx / Math.sqrt(dx * dx + dy * dy + dz * dz) / 4);
            else
                u = (float) (1 - (1 + dx / Math.sqrt(dx * dx + dy * dy + dz * dz)) / 4);
            v = (float) (0.5 + (-dy / Math.sqrt(dx * dx + dy * dy + dz * dz)) / 2);

            // u = (float) (dx / Math.sqrt(dx*dx + dy*dy + dz*dz));
            // v = (float) (dy / Math.sqrt(dx*dx + dy*dy + dz*dz));

            sphereTexture.put(u);
            sphereTexture.put(v);

            nv = x * s1;
            sphereNormal.put(nv);
            sphereVertex.put(dx = nv * radius);

            nv = y * s1;
            sphereNormal.put(nv);
            sphereVertex.put(dy = nv * radius);

            nv = c1;
            sphereNormal.put(nv);
            sphereVertex.put(dz = nv * radius);

            // uv 2
            if (dz < 0)
                u = (float) (1 + dx / Math.sqrt(dx * dx + dy * dy + dz * dz) / 4);
            else
                u = (float) (1 - (1 + dx / Math.sqrt(dx * dx + dy * dy + dz * dz)) / 4);
            v = (float) (0.5 + (-dy / Math.sqrt(dx * dx + dy * dy + dz * dz)) / 2);

            sphereTexture.put(u);
            sphereTexture.put(v);
        }
    }

    sphereNormal.position(0);
    sphereVertex.position(0);
    sphereTexture.position(0);
}
And the drawing algorithm:
public static class SolidSphere {

    public static void draw(GL10 gl, float radius, int slices, int stacks)
    {
        int i, triangles;

        if (sphereVertex != null) {
            if (sphere_parms[0] != radius || sphere_parms[1] != slices || sphere_parms[2] != stacks) {
                sphereVertex = null;
                sphereNormal = null;
                sphereTexture = null;

                gl.glVertexPointer(3, GL10.GL_FLOAT, 0, OpenGLUtils.allocateFloatBuffer(0));
                gl.glNormalPointer(GL10.GL_FLOAT, 0, OpenGLUtils.allocateFloatBuffer(0));
                gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, OpenGLUtils.allocateFloatBuffer(0));
            }
        }

        if (sphereVertex == null) {
            sphere_parms[0] = radius;
            sphere_parms[1] = (float) slices;
            sphere_parms[2] = (float) stacks;
            plotSpherePoints(radius, stacks, slices);
        }

        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, sphereVertex);
        gl.glNormalPointer(GL10.GL_FLOAT, 0, sphereNormal);
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, sphereTexture);

        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_NORMAL_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        triangles = (slices + 1) * 2;
        for (i = 0; i < stacks; i++)
            gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, i * triangles, triangles);

        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_NORMAL_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    }
}
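For completeness, this is roughly how I call it from my renderer (mEarthTextureId and mAngle are just fields of my own renderer, and the texture itself is loaded elsewhere with GLUtils.texImage2D):

public void onDrawFrame(GL10 gl)
{
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
    gl.glLoadIdentity();
    gl.glTranslatef(0f, 0f, -5f);         // move the globe into view
    gl.glRotatef(mAngle, 0f, 1f, 0f);     // spin it so I can look at all sides
    gl.glEnable(GL10.GL_TEXTURE_2D);
    gl.glBindTexture(GL10.GL_TEXTURE_2D, mEarthTextureId);
    SolidSphere.draw(gl, 1.0f, 32, 32);   // radius, slices, stacks
}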
Can anyone help me figure this out, please?
You should be able to take any triangle mesh for a (unit) sphere and apply a mapping from vertex (X,Y,Z) to (UV).
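Off the top of my head, the plain latitude/longitude version of that mapping looks something like this for a unit sphere with the z axis through the poles (just a sketch, so adapt the axes to however your mesh is built):

static float[] sphereUV(float x, float y, float z)
{
    // for a non-unit sphere, divide x, y, z by the radius first
    float u = 0.5f + (float) (Math.atan2(y, x) / (2.0 * Math.PI)); // longitude, wrapped into [0, 1]
    float v = (float) (Math.acos(z) / Math.PI);                    // latitude: 0 at the +z pole, 1 at the -z pole
    return new float[] { u, v };
}

One caveat with any atan2-based mapping: the vertices on the seam where the longitude wraps around have to be duplicated (one copy with u = 0, one with u = 1), otherwise the last strip of triangles smears the whole texture across the seam.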
I'm too lazy / busy (delete whichever you wish) to go through your code, but you might find the answer in chapter 6 of Watt & Watt's "Advanced Animation and Rendering Techniques". It gives some simple approaches to generating suitable UV coords for spheres.
IIRC, to avoid too much distortion at the poles, their mapping uses sine to squeeze/stretch the latitude mapping.
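I don't have the book in front of me, but as I remember it the idea is roughly to map the sine of the latitude linearly instead of the latitude itself, i.e. something like this (my recollection, not a quote from the book):

    // on a unit sphere with z through the poles, sin(latitude) is just z,
    // so the "squeezed" v reduces to a linear mapping of z
    float v = 0.5f - 0.5f * z;   // still 0 at the +z pole, 1 at the -z pole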