i’m trying to draw 3d model using a WavefrontObjectLoader class.
if i draw one obj it’s work, but if i try to draw 2 - only the last one that i draw work…
this is the class i use:
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.media.opengl.GL2;
import com.jogamp.common.nio.Buffers;
public class WavefrontObjectLoader {
private String OBJModelPath; //the path to the model file
public ArrayList<float[]> vData = new ArrayList<float[]>(); //list of vertex coordinates
private ArrayList<float[]> vtData = new ArrayList<float[]>(); //list of texture coordinates
private ArrayList<float[]> vnData = new ArrayList<float[]>(); //list of normal coordinates
private ArrayList<int[]> fv = new ArrayList<int[]>(); //face vertex indices
private ArrayList<int[]> ft = new ArrayList<int[]>(); //face texture indices
private ArrayList<int[]> fn = new ArrayList<int[]>(); //face normal indices
private FloatBuffer modeldata; //buffer which will contain vertice data
private int FaceFormat; //format of the faces triangles or quads
private int FaceMultiplier; //number of possible coordinates per face
private int PolyCount = 0; //the model polygon count
private boolean init = true;
public WavefrontObjectLoader(String inModelPath) {
OBJModelPath = inModelPath;
LoadOBJModel(OBJModelPath);
SetFaceRenderType();
}
private void LoadOBJModel(String ModelPath) {
try {
BufferedReader br = null;
if (ModelPath.endsWith(".zip")) {
ZipInputStream tZipInputStream = new ZipInputStream(new BufferedInputStream((new Object()).getClass().getResourceAsStream(ModelPath)));
ZipEntry tZipEntry;
tZipEntry = tZipInputStream.getNextEntry();
String inZipEntryName = tZipEntry.getName();
if (!tZipEntry.isDirectory()) {
br = new BufferedReader(new InputStreamReader(tZipInputStream));
}
} else {
Object obj = new Object();
InputStream myis = this.getClass().getResourceAsStream(ModelPath);
InputStreamReader isr = new InputStreamReader(myis);
br = new BufferedReader(isr);
}
String line = null;
while ((line = br.readLine()) != null) {
if (line.startsWith("#")) { //read any descriptor data in the file
// Zzzz ...
} else if (line.equals("")) {
// Ignore whitespace data
} else if (line.startsWith("v ")) { //read in vertex data
vData.add(ProcessData(line));
} else if (line.startsWith("vt ")) { //read texture coordinates
vtData.add(ProcessData(line));
} else if (line.startsWith("vn ")) { //read normal coordinates
vnData.add(ProcessData(line));
} else if (line.startsWith("f ")) { //read face data
ProcessfData(line);
}
}
br.close();
} catch (IOException e) {
}
}
private float[] ProcessData(String read) {
final String s[] = read.split("\\s+");
return (ProcessFloatData(s)); //returns an array of processed float data
}
private float[] ProcessFloatData(String sdata[]) {
float data[] = new float[sdata.length - 1];
for (int loop = 0; loop < data.length; loop++) {
data[loop] = Float.parseFloat(sdata[loop + 1]);
}
return data; //return an array of floats
}
private void ProcessfData(String fread) {
PolyCount++;
String s[] = fread.split("\\s+");
if (fread.contains("//")) { //pattern is present if obj has only v and vn in face data
for (int loop = 1; loop < s.length; loop++) {
s[loop] = s[loop].replaceAll("//", "/0/"); //insert a zero for missing vt data
}
}
ProcessfIntData(s); //pass in face data
}
private void ProcessfIntData(String sdata[]) {
int vdata[] = new int[sdata.length - 1];
int vtdata[] = new int[sdata.length - 1];
int vndata[] = new int[sdata.length - 1];
for (int loop = 1; loop < sdata.length; loop++) {
String s = sdata[loop];
String[] temp = s.split("/");
vdata[loop - 1] = Integer.valueOf(temp[0]); //always add vertex indices
if (temp.length > 1) { //we have v and vt data
vtdata[loop - 1] = Integer.valueOf(temp[1]); //add in vt indices
} else {
vtdata[loop - 1] = 0; //if no vt data is present fill in zeros
}
if (temp.length > 2) { //we have v, vt, and vn data
vndata[loop - 1] = Integer.valueOf(temp[2]); //add in vn indices
} else {
vndata[loop - 1] = 0; //if no vn data is present fill in zeros
}
}
fv.add(vdata);
ft.add(vtdata);
fn.add(vndata);
}
private void SetFaceRenderType() {
final int temp[] = (int[]) fv.get(0);
if (temp.length == 3) {
FaceFormat = GL2.GL_TRIANGLES; //the faces come in sets of 3 so we have triangular faces
FaceMultiplier = 3;
} else if (temp.length == 4) {
FaceFormat = GL2.GL_QUADS; //the faces come in sets of 4 so we have quadrilateral faces
FaceMultiplier = 4;
} else {
FaceFormat = GL2.GL_POLYGON; //fall back to render as free form polygons
}
}
private void ConstructInterleavedArray(GL2 inGL) {
final int tv[] = (int[]) fv.get(0);
final int tt[] = (int[]) ft.get(0);
final int tn[] = (int[]) fn.get(0);
//if a value of zero is found that it tells us we don't have that type of data
if ((tv[0] != 0) && (tt[0] != 0) && (tn[0] != 0)) {
ConstructTNV(); //we have vertex, 2D texture, and normal Data
inGL.glInterleavedArrays(GL2.GL_T2F_N3F_V3F, 0, modeldata);
} else if ((tv[0] != 0) && (tt[0] != 0) && (tn[0] == 0)) {
ConstructTV(); //we have just vertex and 2D texture Data
inGL.glInterleavedArrays(GL2.GL_T2F_V3F, 0, modeldata);
} else if ((tv[0] != 0) && (tt[0] == 0) && (tn[0] != 0)) {
ConstructNV(); //we have just vertex and normal Data
inGL.glInterleavedArrays(GL2.GL_N3F_V3F, 0, modeldata);
} else if ((tv[0] != 0) && (tt[0] == 0) && (tn[0] == 0)) {
ConstructV();
inGL.glInterleavedArrays(GL2.GL_V3F, 0, modeldata);
}
}
private void ConstructTNV() {
int[] v, t, n;
float tcoords[] = new float[2]; //only T2F is supported in interLeavedArrays!!
float coords[] = new float[3];
int fbSize = PolyCount * (FaceMultiplier * 8); //3v per poly, 2vt per poly, 3vn per poly
modeldata = Buffers.newDirectFloatBuffer(fbSize);
modeldata.position(0);
for (int oloop = 0; oloop < fv.size(); oloop++) {
v = (int[]) (fv.get(oloop));
t = (int[]) (ft.get(oloop));
n = (int[]) (fn.get(oloop));
for (int iloop = 0; iloop < v.length; iloop++) {
//fill in the texture coordinate data
for (int tloop = 0; tloop < tcoords.length; tloop++)
//only T2F is supported in interleavedarrays!!
tcoords[tloop] = ((float[]) vtData.get(t[iloop] - 1))[tloop];
modeldata.put(tcoords);
//fill in the normal coordinate data
for (int vnloop = 0; vnloop < coords.length; vnloop++)
coords[vnloop] = ((float[]) vnData.get(n[iloop] - 1))[vnloop];
modeldata.put(coords);
//fill in the vertex coordinate data
for (int vloop = 0; vloop < coords.length; vloop++)
coords[vloop] = ((float[]) vData.get(v[iloop] - 1))[vloop];
modeldata.put(coords);
}
}
modeldata.position(0);
}
private void ConstructTV() {
int[] v, t;
float tcoords[] = new float[2]; //only T2F is supported in interLeavedArrays!!
float coords[] = new float[3];
int fbSize = PolyCount * (FaceMultiplier * 5); //3v per poly, 2vt per poly
modeldata = Buffers.newDirectFloatBuffer(fbSize);
modeldata.position(0);
for (int oloop = 0; oloop < fv.size(); oloop++) {
v = (int[]) (fv.get(oloop));
t = (int[]) (ft.get(oloop));
for (int iloop = 0; iloop < v.length; iloop++) {
//fill in the texture coordinate data
for (int tloop = 0; tloop < tcoords.length; tloop++)
//only T2F is supported in interleavedarrays!!
tcoords[tloop] = ((float[]) vtData.get(t[iloop] - 1))[tloop];
modeldata.put(tcoords);
//fill in the vertex coordinate data
for (int vloop = 0; vloop < coords.length; vloop++)
coords[vloop] = ((float[]) vData.get(v[iloop] - 1))[vloop];
modeldata.put(coords);
}
}
modeldata.position(0);
}
private void ConstructNV() {
int[] v, n;
float coords[] = new float[3];
int fbSize = PolyCount * (FaceMultiplier * 6); //3v per poly, 3vn per poly
modeldata = Buffers.newDirectFloatBuffer(fbSize);
modeldata.position(0);
for (int oloop = 0; oloop < fv.size(); oloop++) {
v = (int[]) (fv.get(oloop));
n = (int[]) (fn.get(oloop));
for (int iloop = 0; iloop < v.length; iloop++) {
//fill in the normal coordinate data
for (int vnloop = 0; vnloop < coords.length; vnloop++)
coords[vnloop] = ((float[]) vnData.get(n[iloop] - 1))[vnloop];
modeldata.put(coords);
//fill in the vertex coordinate data
for (int vloop = 0; vloop < coords.length; vloop++)
coords[vloop] = ((float[]) vData.get(v[iloop] - 1))[vloop];
modeldata.put(coords);
}
}
modeldata.position(0);
}
private void ConstructV() {
int[] v;
float coords[] = new float[3];
int fbSize = PolyCount * (FaceMultiplier * 3); //3v per poly
modeldata = Buffers.newDirectFloatBuffer(fbSize);
modeldata.position(0);
for (int oloop = 0; oloop < fv.size(); oloop++) {
v = (int[]) (fv.get(oloop));
for (int iloop = 0; iloop < v.length; iloop++) {
//fill in the vertex coordinate data
for (int vloop = 0; vloop < coords.length; vloop++)
coords[vloop] = ((float[]) vData.get(v[iloop] - 1))[vloop];
modeldata.put(coords);
}
}
modeldata.position(0);
}
public void drawModel(GL2 inGL) {
if (init) {
ConstructInterleavedArray(inGL);
cleanup();
init = false;
}
inGL.glDrawArrays(FaceFormat, 0, PolyCount * FaceMultiplier);
}
private void cleanup() {
vData.clear();
vtData.clear();
vnData.clear();
fv.clear();
ft.clear();
fn.clear();
modeldata.clear();
}
public static int loadWavefrontObjectAsDisplayList(GL2 inGL,String inFileName) {
int tDisplayListID = inGL.glGenLists(1);
WavefrontObjectLoader tWaveFrontObjectModel = new WavefrontObjectLoader(inFileName);
inGL.glNewList(tDisplayListID,GL2.GL_COMPILE);
tWaveFrontObjectModel.drawModel(inGL);
inGL.glEndList();
return tDisplayListID;
}
}
and i want to use it here:
import com.jogamp.opengl.util.gl2.GLUT;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureIO;
import java.io.File;
import java.io.IOException;
import javax.media.opengl.GL;
import javax.media.opengl.GL2;
/**
* MazeObject, class representing an object in the maze
*
*/
public class MazeCellObj extends MazeObject {
private float yrot = 30;
public enum ObjType {
START,
HEAL,
DAMAGE,
ENDKEY,
END,
NONE
}
private Texture obj_texture;
private WavefrontObjectLoader obj;
//The variable that decides if it's a start point or an end point
private ObjType type;
public MazeCellObj(ObjType type) {
this.type = type;
setObjects(type);
}
public ObjType getObjType() {
return type;
}
//The function to check if it's the start position
public boolean isStart() {
if (type == ObjType.START) return true;
return false;
}
public void draw(GL2 gl) {
GLUT glut = new GLUT();
//Transform the to-be-placed geometric shapes
gl.glPushMatrix();
gl.glRotatef(90, -1.0f, 0.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, -0.35f);
//Check which object this and draw accordingly
switch(type) {
case START:
//Place a glowing yellow cone
//Change the colouring for this particular object material
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, new float[]{0.8f, 0.8f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_DIFFUSE, new float[]{1.0f, 1.0f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_EMISSION, new float[]{0.5f, 0.5f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SHININESS, new float[]{60.0f}, 0);
gl.glColor3f(1.0f, 1.0f, 0.0f);
glut.glutSolidCone(0.25f, 0.5f, 10, 10);
break;
case END:
//Place a shiny green torus
//Change the colouring for this particular object material
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, new float[]{0.0f, 0.2f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_DIFFUSE, new float[]{0.0f, 1.0f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_EMISSION, new float[]{0.0f, 0.1f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SHININESS, new float[]{20.0f}, 0);
gl.glColor3f(0.0f, 1.0f, 0.0f);
glut.glutSolidTorus(0.125f, 0.25f, 10, 10);
break;
case HEAL:
//Change the colouring for this particular object material
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, new float[]{0.0f, 0.2f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_DIFFUSE, new float[]{0.0f, 1.0f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_EMISSION, new float[]{0.0f, 0.1f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SHININESS, new float[]{20.0f}, 0);
gl.glRotatef(yrot, 0.0f, 0.0f, 1.0f);
yrot += 0.05f;
gl.glColor3f(15.0f, 0.0f, 20.0f);
glut.glutSolidCube(0.23f);
break;
case DAMAGE:
//Change the colouring for this particular object material
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_DIFFUSE, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_EMISSION, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SHININESS, new float[]{40.0f}, 0);
gl.glColor3f(1.0f, 1.0f, 1.0f);
//gl.glRotatef(yrot, 1.0f, 0.0f, 0.0f);
//yrot = yrot + 0.5f;
//set the texture for the object
gl.glActiveTexture(GL2.GL_TEXTURE0);
obj_texture.enable(gl);
obj_texture.bind(gl);
gl.glScalef(50, 50, 50);
obj.drawModel(gl);
//obj_texture.disable(gl);
/*gl.glColor3f(1.0f, 1.0f, 0.0f);
glut.glutSolidSphere(0.125f, 5, 5);*/
break;
case ENDKEY:
//Change the colouring for this particular object material
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_DIFFUSE, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_EMISSION, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, new float[]{1.0f, 1.0f, 1.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SHININESS, new float[]{20.0f}, 0);
gl.glColor3f(1.0f, 1.0f, 1.0f);
//set the texture for the object
gl.glActiveTexture(GL2.GL_TEXTURE0);
obj_texture.enable(gl);
obj_texture.bind(gl);
yrot += 2.5f;
if (yrot > 390) {
yrot = 30;
}
gl.glRotatef(-70.0f, 0.0f, 1.0f, 0.0f);
gl.glRotatef(yrot, 1.0f, 0.0f, 0.0f);
gl.glScalef(0.02f, 0.02f, 0.02f);
obj.drawModel(gl);
//obj_texture.disable(gl);
//gl.glRotatef(xrot, 1.0f, 0.0f, 0.0f);
/*gl.glRotatef(yrot, 0.0f, 1.0f, 0.0f);
yrot = yrot + 0.5f;
gl.glColor3f(1.0f, 1.0f, 20.0f);
glut.glutSolidCylinder(0.125f, 0.125f, 10, 10);*/
break;
default:
break;
}
//Put the material properties back to default
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_AMBIENT, new float[]{0.8f, 0.8f, 0.8f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_DIFFUSE, new float[]{0.8f, 0.8f, 0.8f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_EMISSION, new float[]{0.0f, 0.0f, 0.0f, 1.0f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SPECULAR, new float[]{0.2f, 0.2f, 0.2f, 0.2f}, 0);
gl.glMaterialfv(GL.GL_FRONT, GL2.GL_SHININESS, new float[]{50.0f}, 0);
gl.glPopMatrix();
}
private void setObjects(ObjType type) {
switch(type) {
case HEAL:
break;
case DAMAGE:
try {
obj_texture=TextureIO.newTexture(new File( "textures/axe.jpg" ),true);
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
obj = new WavefrontObjectLoader("resources/axe_v1.obj");
break;
case ENDKEY:
try {
obj_texture=TextureIO.newTexture(new File( "textures/keyB_tx.bmp" ),true);
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
obj = new WavefrontObjectLoader("resources/Key_B_02.obj");
break;
default:
break;
}
}
}
if i try the dmg obj alone or the key object alone it’s work…
this draw func call inside this class:
import com.jogamp.opengl.util.texture.Texture;
import java.util.Vector;
import javax.media.opengl.GL2;
/**
* Cell, class representing a cell in the maze
*/
public class Cell {
//Define the sets of coordinates that will make up the walls, floors and ceiling
private float[][] wall1 = {{-0.5f, 0.5f, 0.5f},
{-0.5f, -0.5f, 0.5f},
{-0.5f, -0.5f, -0.5f},
{-0.5f, 0.5f, -0.5f}};
private float[][] wall2 = {{0.5f, 0.5f, 0.5f},
{0.5f, 0.5f, -0.5f},
{0.5f, -0.5f, -0.5f},
{0.5f, -0.5f, 0.5f}};
private float[][] wall3 = {{0.5f, 0.5f, -0.5f},
{-0.5f, 0.5f, -0.5f},
{-0.5f, -0.5f, -0.5f},
{0.5f, -0.5f, -0.5f}};
private float[][] wall4 = {{0.5f, 0.5f, 0.5f},
{0.5f, -0.5f, 0.5f},
{-0.5f, -0.5f, 0.5f},
{-0.5f, 0.5f, 0.5f}};
private float[][] floor = {{0.5f, -0.5f, 0.5f},
{0.5f, -0.5f, -0.5f},
{-0.5f, -0.5f, -0.5f},
{-0.5f, -0.5f, 0.5f}};
private float[][] ceiling = {{0.5f, 0.5f, 0.5f},
{-0.5f, 0.5f, 0.5f},
{-0.5f, 0.5f, -0.5f},
{0.5f, 0.5f, -0.5f}};
//The list of booleans that determine the existence of the walls
boolean[] walls;
//The X and Y index of the maze cell
private float i;
private float j;
//The maze object containing the start/end properties of the maze
private MazeCellObj obj;
//The drawlist variable
private int rectList = -1;
///normal for collision
Vector<Float> axisXNormal = new Vector<Float>(3);
Vector<Float> axisZNormal = new Vector<Float>(3);
public Cell(boolean[] walls, float i, float j) {
// This constructor gets passed three parameters
// The first is a list of booleans listing whether
// the cell has walls. The order of the walls is
// +x, -x, +z, -z . So, if your cell is 1x1x1,
// and the middle of your cell is at
// (0.5, 0.5, 0.5) the +x wall is the wall
// whose centre is at (1.0, 0.5, 0.5) and the
// -x wall is the wall whose centre is at
// (0.0, 0.5, 0.5)
// The i and j values are the cell index in the maze
this.walls = walls;
this.i = i;
this.j = j;
// Take the rectangle render list for drawing walls etc...
this.rectList = ViewRenderer.rectList;
//set the normals for the collision
axisXNormal.add(1f);
axisXNormal.add(0f);
axisXNormal.add(0f);
axisZNormal.add(0f);
axisZNormal.add(0f);
axisZNormal.add(1f);
}
public void draw(Texture[] textures, GL2 gl) {
gl.glPushMatrix();
//Move the current cell of the maze according to its relative position
gl.glTranslatef(i, 0.0f, j);
//Set up the lighting point position
gl.glLightfv(GL2.GL_LIGHT0, GL2.GL_POSITION, new float[]{-0.25f, 0.4f, 0.0f, 1.0f}, 0);
gl.glLightfv(GL2.GL_LIGHT1, GL2.GL_POSITION, new float[]{0.25f, 0.4f, 0.0f, 1.0f}, 0);
//gl.glLightfv(GL2.GL_LIGHT0, GL2.GL_SPOT_DIRECTION, new float [] { 0.0f, -0.5f, 1.0f, 0.0f}, 0);
//Enable and bind the textures to be used for mapping onto the faces
gl.glActiveTexture(GL2.GL_TEXTURE0);
textures[0].enable(gl);
textures[0].bind(gl);
gl.glColor3f(1.0f, 1.0f, 1.0f);
//If a wall of the current maze cell exists, draw it. Repeat for all 4 walls.
//right wall
if (walls[0]) {
gl.glNormal3f(1.0f, 0.0f, 0.0f);
gl.glCallList(rectList);
}
//left wall
if (walls[1]) {
gl.glPushMatrix();
gl.glRotatef(180, 0.0f, 1.0f, 0.0f);
gl.glNormal3f(1.0f, 0.0f, 0.0f);
gl.glCallList(rectList);
gl.glPopMatrix();
}
//back wall
if (walls[2]) {
gl.glPushMatrix();
gl.glRotatef(270, 0.0f, 1.0f, 0.0f);
gl.glNormal3f(1.0f, 0.0f, 0.0f);
gl.glCallList(rectList);
gl.glPopMatrix();
}
//front wall
if (walls[3]) {
gl.glPushMatrix();
gl.glRotatef(90, 0.0f, 1.0f, 0.0f);
gl.glNormal3f(1.0f, 0.0f, 0.0f);
gl.glCallList(rectList);
gl.glPopMatrix();
}
//Change the textures to be used for next mapping
textures[0].disable(gl);
textures[1].enable(gl);
textures[1].bind(gl);
//The floor
gl.glBegin(GL2.GL_POLYGON);
//setNormal(floor[0], floor[1], floor[2], gl);
gl.glMultiTexCoord2f(GL2.GL_TEXTURE0, 0, 0);
gl.glVertex3fv(floor[0], 0);
gl.glMultiTexCoord2f(GL2.GL_TEXTURE0, 0, 1);
gl.glVertex3fv(floor[1], 0);
gl.glMultiTexCoord2f(GL2.GL_TEXTURE0, 1, 1);
gl.glVertex3fv(floor[2], 0);
gl.glMultiTexCoord2f(GL2.GL_TEXTURE0, 1, 0);
gl.glVertex3fv(floor[3], 0);
gl.glEnd();
//Change the textures to be used for next mapping
textures[1].disable(gl);
textures[2].enable(gl);
textures[2].bind(gl);
//The ceiling
gl.glPushMatrix();
gl.glRotatef(90, 0.0f, 0.0f, -1.0f);
gl.glNormal3f(1.0f, 0.0f, 0.0f);
gl.glCallList(rectList);
gl.glPopMatrix();
textures[2].disable(gl);
//If the maze's end-point object exists, invoke its draw method
if (obj != null) obj.draw(gl);
gl.glPopMatrix();
}
}
thanks for any help!!