Merge branch 'feature/example-refactor' into develop

Author: David Williams
Date:   2015-02-24 22:23:57 +01:00
Commit: fbb7ff4728
20 changed files with 798 additions and 585 deletions

View File

@@ -67,7 +67,7 @@ IF(ENABLE_EXAMPLES AND Qt5OpenGL_FOUND)
ADD_SUBDIRECTORY(examples/Paging)
ADD_SUBDIRECTORY(examples/OpenGL)
ADD_SUBDIRECTORY(examples/SmoothLOD)
#ADD_SUBDIRECTORY(examples/DecodeOnGPU)
ADD_SUBDIRECTORY(examples/DecodeOnGPU)
ADD_SUBDIRECTORY(examples/Python)
SET(BUILD_EXAMPLES ON)
ELSE()

View File

@@ -24,12 +24,14 @@ PROJECT(BasicExample)
#Project's source files
SET(SRC_FILES
main.cpp
../common/OpenGLWidget.cpp
../common/PolyVoxExample.cpp
)
#Project's header files
SET(INC_FILES
../common/OpenGLWidget.h
../common/OpenGLWidget.inl
../common/PolyVoxExample.h
)
#"Sources" and "Headers" are the group names in Visual Studio.

View File

@@ -21,7 +21,7 @@ freely, subject to the following restrictions:
distribution.
*******************************************************************************/
#include "OpenGLWidget.h"
#include "PolyVoxExample.h"
#include "PolyVox/CubicSurfaceExtractor.h"
#include "PolyVox/MarchingCubesSurfaceExtractor.h"
@@ -66,30 +66,43 @@ void createSphereInVolume(PagedVolume<uint8_t>& volData, float fRadius)
}
}
class BasicExample : public PolyVoxExample
{
public:
BasicExample(QWidget *parent)
:PolyVoxExample(parent)
{
}
protected:
void initializeExample() override
{
// Create an empty volume and then place a sphere in it
PagedVolume<uint8_t> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(63, 63, 63)));
createSphereInVolume(volData, 30);
// Extract the surface for the specified region of the volume. Uncomment the line for the kind of surface extraction you want to see.
auto mesh = extractCubicMesh(&volData, volData.getEnclosingRegion());
//auto mesh = extractMarchingCubesMesh(&volData, volData.getEnclosingRegion());
// The surface extractor outputs the mesh in an efficient compressed format which is not directly suitable for rendering. The easiest approach is to
// decode this on the CPU as shown below, though more advanced applications can upload the compressed mesh to the GPU and decompress in shader code.
auto decodedMesh = decodeMesh(mesh);
//Pass the surface to the OpenGL window
addMesh(decodedMesh);
setCameraTransform(QVector3D(100.0f, 100.0f, 100.0f), -(PI / 4.0f), PI + (PI / 4.0f));
}
};
int main(int argc, char *argv[])
{
//Create and show the Qt OpenGL window
QApplication app(argc, argv);
OpenGLWidget openGLWidget(0);
BasicExample openGLWidget(0);
openGLWidget.show();
//Create an empty volume and then place a sphere in it
PagedVolume<uint8_t> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(63, 63, 63)));
createSphereInVolume(volData, 30);
// Extract the surface for the specified region of the volume. Uncomment the line for the kind of surface extraction you want to see.
auto mesh = extractCubicMesh(&volData, volData.getEnclosingRegion());
//auto mesh = extractMarchingCubesMesh(&volData, volData.getEnclosingRegion());
// The surface extractor outputs the mesh in an efficient compressed format which is not directly suitable for rendering. The easiest approach is to
// decode this on the CPU as shown below, though more advanced applications can upload the compressed mesh to the GPU and decompress in shader code.
auto decodedMesh = decodeMesh(mesh);
//Pass the surface to the OpenGL window
openGLWidget.addMesh(decodedMesh);
//openGLWidget.addMesh(mesh2);
openGLWidget.setViewableRegion(volData.getEnclosingRegion());
//Run the message pump.
return app.exec();
}
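The override above replaces the body of the old main(); how it gets called is not visible in this hunk. Judging from the new OpenGLWidget header further down (which declares a virtual initialize() hook invoked from initializeGL()), the wiring inside PolyVoxExample presumably looks something like the sketch below; the exact signature and body are assumptions, not part of this commit's visible diff.

// Sketch only: assumed glue between OpenGLWidget's initialize() hook and the
// per-example initializeExample() override used by BasicExample above.
class PolyVoxExample : public OpenGLWidget<QOpenGLFunctions_3_1>
{
protected:
    void initialize() override
    {
        // Compile and link a default shader here (the old OpenGLWidget::initializeGL()
        // did this with ":/example.vert" and ":/example.frag"), then let the
        // concrete example build its volume and meshes.
        initializeExample();
    }

    virtual void initializeExample() = 0; // assumed pure virtual
};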

View File

@@ -24,12 +24,14 @@ PROJECT(DecodeOnGPUExample)
#Project's source files
SET(SRC_FILES
main.cpp
../common/OpenGLWidget.cpp
../common/PolyVoxExample.cpp
)
#Project's header files
SET(INC_FILES
../common/OpenGLWidget.h
../common/OpenGLWidget.inl
../common/PolyVoxExample.h
)
#"Sources" and "Headers" are the group names in Visual Studio.

View File

@@ -4,9 +4,9 @@ in uvec4 position; // This will be the position of the vertex in model-space
in uint normal;
// The usual matrices are provided
uniform mat4 cameraToClipMatrix;
uniform mat4 worldToCameraMatrix;
uniform mat4 modelToWorldMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
// This will be used by the fragment shader to calculate flat-shaded normals. This is an unconventional approach
// but we use it in this example framework because not all surface extractors generate surface normals.
@@ -40,7 +40,7 @@ void main()
worldNormal.w = 1.0;
// Standard sequence of OpenGL transformations.
worldPosition = modelToWorldMatrix * decodedPosition;
vec4 cameraPosition = worldToCameraMatrix * worldPosition;
gl_Position = cameraToClipMatrix * cameraPosition;
worldPosition = modelMatrix * decodedPosition;
vec4 cameraPosition = viewMatrix * worldPosition;
gl_Position = projectionMatrix * cameraPosition;
}

View File

@@ -21,7 +21,7 @@ freely, subject to the following restrictions:
distribution.
*******************************************************************************/
#include "OpenGLWidget.h"
#include "PolyVoxExample.h"
#include "PolyVox/CubicSurfaceExtractor.h"
#include "PolyVox/MarchingCubesSurfaceExtractor.h"
@@ -66,102 +66,116 @@ void createSphereInVolume(PagedVolume<uint8_t>& volData, float fRadius)
}
}
OpenGLMeshData buildOpenGLMeshData(const PolyVox::Mesh< PolyVox::MarchingCubesVertex< uint8_t > >& surfaceMesh, const PolyVox::Vector3DInt32& translation = PolyVox::Vector3DInt32(0, 0, 0), float scale = 1.0f)
class DecodeOnGPUExample : public PolyVoxExample
{
// Convenient access to the vertices and indices
const auto& vecIndices = surfaceMesh.getIndices();
const auto& vecVertices = surfaceMesh.getVertices();
public:
DecodeOnGPUExample(QWidget *parent)
:PolyVoxExample(parent)
{
}
// This struct holds the OpenGL properties (buffer handles, etc) which will be used
// to render our mesh. We copy the data from the PolyVox mesh into this structure.
OpenGLMeshData meshData;
protected:
void initializeExample() override
{
QSharedPointer<QGLShaderProgram> shader(new QGLShaderProgram);
// Create the VAO for the mesh
glGenVertexArrays(1, &(meshData.vertexArrayObject));
glBindVertexArray(meshData.vertexArrayObject);
if (!shader->addShaderFromSourceFile(QGLShader::Vertex, ":/decode.vert"))
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// The GL_ARRAY_BUFFER will contain the list of vertex positions
glGenBuffers(1, &(meshData.vertexBuffer));
glBindBuffer(GL_ARRAY_BUFFER, meshData.vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vecVertices.size() * sizeof(MarchingCubesVertex< uint8_t >), vecVertices.data(), GL_STATIC_DRAW);
if (!shader->addShaderFromSourceFile(QGLShader::Fragment, ":/decode.frag"))
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// and GL_ELEMENT_ARRAY_BUFFER will contain the indices
glGenBuffers(1, &(meshData.indexBuffer));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, meshData.indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vecIndices.size() * sizeof(uint32_t), vecIndices.data(), GL_STATIC_DRAW);
setShader(shader);
// Every surface extractor outputs valid positions for the vertices, so tell OpenGL how these are laid out
glEnableVertexAttribArray(0); // Attrib '0' is the vertex positions
glVertexAttribIPointer(0, 3, GL_UNSIGNED_SHORT, sizeof(MarchingCubesVertex< uint8_t >), (GLvoid*)(offsetof(MarchingCubesVertex< uint8_t >, encodedPosition))); //take the first 3 unsigned shorts (the encoded position) from every vertex
//Create an empty volume and then place a sphere in it
PagedVolume<uint8_t> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(63, 63, 63)));
createSphereInVolume(volData, 30);
// Some surface extractors also generate normals, so tell OpenGL how these are laid out. If a surface extractor
// does not generate normals then nonsense values are written into the buffer here and should be ignored by the
// shader. This is mostly just to simplify this example code - in a real application you will know whether your
// chosen surface extractor generates normals and can skip uploading them if not.
glEnableVertexAttribArray(1); // Attrib '1' is the vertex normals.
glVertexAttribIPointer(1, 1, GL_UNSIGNED_SHORT, sizeof(MarchingCubesVertex< uint8_t >), (GLvoid*)(offsetof(MarchingCubesVertex< uint8_t >, encodedNormal)));
// Extract the surface for the specified region of the volume. Uncomment the line for the kind of surface extraction you want to see.
//auto mesh = extractCubicMesh(&volData, volData.getEnclosingRegion());
auto mesh = extractMarchingCubesMesh(&volData, volData.getEnclosingRegion());
// Finally a surface extractor will probably output additional data. This is highly application dependent. For this example code
// we're just uploading it as a set of bytes which we can read individually, but real code will want to do something specialised here.
glEnableVertexAttribArray(2); //We're talking about shader attribute '2'
GLint size = (std::min)(sizeof(uint8_t), size_t(4)); // Can't upload more than 4 components (vec4 is GLSL's biggest type)
glVertexAttribIPointer(2, size, GL_UNSIGNED_BYTE, sizeof(MarchingCubesVertex< uint8_t >), (GLvoid*)(offsetof(MarchingCubesVertex< uint8_t >, data)));
// The surface extractor outputs the mesh in an efficient compressed format which is not directly suitable for rendering. The easiest approach is to
// decode this on the CPU as shown below, though more advanced applications can upload the compressed mesh to the GPU and decompress in shader code.
//auto decodedMesh = decodeMesh(mesh);
// We're done uploading and can now unbind.
glBindVertexArray(0);
//Pass the surface to the OpenGL window
OpenGLMeshData meshData = buildOpenGLMeshData(mesh);
addMeshData(meshData);
// A few additional properties can be copied across for use during rendering.
meshData.noOfIndices = vecIndices.size();
meshData.translation = QVector3D(translation.getX(), translation.getY(), translation.getZ());
meshData.scale = scale;
setCameraTransform(QVector3D(100.0f, 100.0f, 100.0f), -(PI / 4.0f), PI + (PI / 4.0f));
}
// Set 16 or 32-bit index buffer size.
meshData.indexType = sizeof(PolyVox::Mesh< PolyVox::MarchingCubesVertex< uint8_t > >::IndexType) == 2 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
private:
OpenGLMeshData buildOpenGLMeshData(const PolyVox::Mesh< PolyVox::MarchingCubesVertex< uint8_t > >& surfaceMesh, const PolyVox::Vector3DInt32& translation = PolyVox::Vector3DInt32(0, 0, 0), float scale = 1.0f)
{
// Convenient access to the vertices and indices
const auto& vecIndices = surfaceMesh.getIndices();
const auto& vecVertices = surfaceMesh.getVertices();
return meshData;
}
// This struct holds the OpenGL properties (buffer handles, etc) which will be used
// to render our mesh. We copy the data from the PolyVox mesh into this structure.
OpenGLMeshData meshData;
// Create the VAO for the mesh
glGenVertexArrays(1, &(meshData.vertexArrayObject));
glBindVertexArray(meshData.vertexArrayObject);
// The GL_ARRAY_BUFFER will contain the list of vertex positions
glGenBuffers(1, &(meshData.vertexBuffer));
glBindBuffer(GL_ARRAY_BUFFER, meshData.vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vecVertices.size() * sizeof(MarchingCubesVertex< uint8_t >), vecVertices.data(), GL_STATIC_DRAW);
// and GL_ELEMENT_ARRAY_BUFFER will contain the indices
glGenBuffers(1, &(meshData.indexBuffer));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, meshData.indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vecIndices.size() * sizeof(uint32_t), vecIndices.data(), GL_STATIC_DRAW);
// Every surface extractor outputs valid positions for the vertices, so tell OpenGL how these are laid out
glEnableVertexAttribArray(0); // Attrib '0' is the vertex positions
glVertexAttribIPointer(0, 3, GL_UNSIGNED_SHORT, sizeof(MarchingCubesVertex< uint8_t >), (GLvoid*)(offsetof(MarchingCubesVertex< uint8_t >, encodedPosition))); //take the first 3 unsigned shorts (the encoded position) from every vertex
// Some surface extractors also generate normals, so tell OpenGL how these are laid out. If a surface extractor
// does not generate normals then nonsense values are written into the buffer here and should be ignored by the
// shader. This is mostly just to simplify this example code - in a real application you will know whether your
// chosen surface extractor generates normals and can skip uploading them if not.
glEnableVertexAttribArray(1); // Attrib '1' is the vertex normals.
glVertexAttribIPointer(1, 1, GL_UNSIGNED_SHORT, sizeof(MarchingCubesVertex< uint8_t >), (GLvoid*)(offsetof(MarchingCubesVertex< uint8_t >, encodedNormal)));
// Finally a surface extractor will probably output additional data. This is highly application dependent. For this example code
// we're just uploading it as a set of bytes which we can read individually, but real code will want to do something specialised here.
glEnableVertexAttribArray(2); //We're talking about shader attribute '2'
GLint size = (std::min)(sizeof(uint8_t), size_t(4)); // Can't upload more than 4 components (vec4 is GLSL's biggest type)
glVertexAttribIPointer(2, size, GL_UNSIGNED_BYTE, sizeof(MarchingCubesVertex< uint8_t >), (GLvoid*)(offsetof(MarchingCubesVertex< uint8_t >, data)));
// We're done uploading and can now unbind.
glBindVertexArray(0);
// A few additional properties can be copied across for use during rendering.
meshData.noOfIndices = vecIndices.size();
meshData.translation = QVector3D(translation.getX(), translation.getY(), translation.getZ());
meshData.scale = scale;
// Set 16 or 32-bit index buffer size.
meshData.indexType = sizeof(PolyVox::Mesh< PolyVox::MarchingCubesVertex< uint8_t > >::IndexType) == 2 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
return meshData;
}
};
int main(int argc, char *argv[])
{
//Create and show the Qt OpenGL window
QApplication app(argc, argv);
OpenGLWidget openGLWidget(0);
DecodeOnGPUExample openGLWidget(0);
openGLWidget.show();
QSharedPointer<QGLShaderProgram> shader(new QGLShaderProgram);
if (!shader->addShaderFromSourceFile(QGLShader::Vertex, ":/decode.vert"))
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
if (!shader->addShaderFromSourceFile(QGLShader::Fragment, ":/decode.frag"))
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
openGLWidget.setShader(shader);
//Create an empty volume and then place a sphere in it
PagedVolume<uint8_t> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(63, 63, 63)));
createSphereInVolume(volData, 30);
// Extract the surface for the specified region of the volume. Uncomment the line for the kind of surface extraction you want to see.
//auto mesh = extractCubicMesh(&volData, volData.getEnclosingRegion());
auto mesh = extractMarchingCubesMesh(&volData, volData.getEnclosingRegion());
// The surface extractor outputs the mesh in an efficient compressed format which is not directly suitable for rendering. The easiest approach is to
// decode this on the CPU as shown below, though more advanced applications can upload the compressed mesh to the GPU and decompress in shader code.
//auto decodedMesh = decodeMesh(mesh);
//Pass the surface to the OpenGL window
OpenGLMeshData meshData = buildOpenGLMeshData(mesh);
openGLWidget.addMeshData(meshData);
openGLWidget.setViewableRegion(volData.getEnclosingRegion());
//Run the message pump.
return app.exec();
}
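The essential difference from BasicExample is which side performs the decode. Side by side, using only names that appear in the diffs above (a sketch, not code from the commit):

// CPU decode (BasicExample): expand the compressed mesh before upload.
auto decodedMesh = decodeMesh(mesh);    // plain positions/normals
addMesh(decodedMesh);                   // generic upload via the templated addMesh()

// GPU decode (this example): upload the encoded vertices untouched.
OpenGLMeshData meshData = buildOpenGLMeshData(mesh); // raw uint16/uint8 attributes
addMeshData(meshData);
// ...and ":/decode.vert" reverses the encoding per vertex in the shader instead.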

View File

@@ -29,7 +29,7 @@ SET(SRC_FILES
#OpenGLImmediateModeSupport.cpp
#OpenGLSupport.cpp
#OpenGLVertexBufferObjectSupport.cpp
../common/OpenGLWidget.cpp
../common/PolyVoxExample.cpp
Shapes.cpp
)
@@ -39,6 +39,8 @@ SET(INC_FILES
#OpenGLSupport.h
#OpenGLVertexBufferObjectSupport.h
../common/OpenGLWidget.h
../common/OpenGLWidget.inl
../common/PolyVoxExample.h
Shapes.h
)

View File

@@ -31,7 +31,7 @@ freely, subject to the following restrictions:
#include "Shapes.h"
#include "OpenGLWidget.h"
#include "PolyVoxExample.h"
#ifdef WIN32
#include <windows.h> // Standard Header For Most Programs
@@ -47,100 +47,110 @@ using namespace std;
const int32_t g_uVolumeSideLength = 128;
int main(int argc, char *argv[])
class OpenGLExample : public PolyVoxExample
{
FilePager<MaterialDensityPair88>* pager = new FilePager<MaterialDensityPair88>(".");
PagedVolume<MaterialDensityPair88> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(g_uVolumeSideLength - 1, g_uVolumeSideLength - 1, g_uVolumeSideLength - 1)), pager);
//Make our volume contain a sphere in the center.
int32_t minPos = 0;
int32_t midPos = g_uVolumeSideLength / 2;
int32_t maxPos = g_uVolumeSideLength - 1;
cout << "Creating sphere 1" << std::endl;
createSphereInVolume(volData, 60.0f, 5);
cout << "Creating sphere 2" << std::endl;
createSphereInVolume(volData, 50.0f, 4);
cout << "Creating sphere 3" << std::endl;
createSphereInVolume(volData, 40.0f, 3);
cout << "Creating sphere 4" << std::endl;
createSphereInVolume(volData, 30.0f, 2);
cout << "Creating sphere 5" << std::endl;
createSphereInVolume(volData, 20.0f, 1);
cout << "Creating cubes" << std::endl;
createCubeInVolume(volData, Vector3DInt32(minPos, minPos, minPos), Vector3DInt32(midPos-1, midPos-1, midPos-1), 0);
createCubeInVolume(volData, Vector3DInt32(midPos+1, midPos+1, minPos), Vector3DInt32(maxPos, maxPos, midPos-1), 0);
createCubeInVolume(volData, Vector3DInt32(midPos+1, minPos, midPos+1), Vector3DInt32(maxPos, midPos-1, maxPos), 0);
createCubeInVolume(volData, Vector3DInt32(minPos, midPos+1, midPos+1), Vector3DInt32(midPos-1, maxPos, maxPos), 0);
createCubeInVolume(volData, Vector3DInt32(1, midPos-10, midPos-10), Vector3DInt32(maxPos-1, midPos+10, midPos+10), MaterialDensityPair44::getMaxDensity());
createCubeInVolume(volData, Vector3DInt32(midPos-10, 1, midPos-10), Vector3DInt32(midPos+10, maxPos-1, midPos+10), MaterialDensityPair44::getMaxDensity());
createCubeInVolume(volData, Vector3DInt32(midPos-10, midPos-10 ,1), Vector3DInt32(midPos+10, midPos+10, maxPos-1), MaterialDensityPair44::getMaxDensity());
QApplication app(argc, argv);
OpenGLWidget openGLWidget(0);
openGLWidget.show();
QSharedPointer<QGLShaderProgram> shader(new QGLShaderProgram);
if (!shader->addShaderFromSourceFile(QGLShader::Vertex, ":/openglexample.vert"))
public:
OpenGLExample(QWidget *parent)
:PolyVoxExample(parent)
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
if (!shader->addShaderFromSourceFile(QGLShader::Fragment, ":/openglexample.frag"))
protected:
void initializeExample() override
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
FilePager<MaterialDensityPair88>* pager = new FilePager<MaterialDensityPair88>(".");
PagedVolume<MaterialDensityPair88> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(g_uVolumeSideLength - 1, g_uVolumeSideLength - 1, g_uVolumeSideLength - 1)), pager);
openGLWidget.setShader(shader);
//Make our volume contain a sphere in the center.
int32_t minPos = 0;
int32_t midPos = g_uVolumeSideLength / 2;
int32_t maxPos = g_uVolumeSideLength - 1;
QTime time;
time.start();
//openGLWidget.setVolume(&volData);
cout << endl << "Time taken = " << time.elapsed() / 1000.0f << "s" << endl << endl;
cout << "Creating sphere 1" << std::endl;
createSphereInVolume(volData, 60.0f, 5);
cout << "Creating sphere 2" << std::endl;
createSphereInVolume(volData, 50.0f, 4);
cout << "Creating sphere 3" << std::endl;
createSphereInVolume(volData, 40.0f, 3);
cout << "Creating sphere 4" << std::endl;
createSphereInVolume(volData, 30.0f, 2);
cout << "Creating sphere 5" << std::endl;
createSphereInVolume(volData, 20.0f, 1);
const int32_t extractedRegionSize = 32;
int meshCounter = 0;
cout << "Creating cubes" << std::endl;
createCubeInVolume(volData, Vector3DInt32(minPos, minPos, midPos + 1), Vector3DInt32(midPos - 1, midPos - 1, maxPos), 0);
createCubeInVolume(volData, Vector3DInt32(midPos + 1, midPos + 1, midPos + 1), Vector3DInt32(maxPos, maxPos, maxPos), 0);
createCubeInVolume(volData, Vector3DInt32(minPos, midPos + 1, minPos), Vector3DInt32(midPos - 1, maxPos, midPos - 1), 0);
createCubeInVolume(volData, Vector3DInt32(midPos + 1, minPos, minPos), Vector3DInt32(maxPos, midPos - 1, midPos - 1), 0);
for (int32_t z = 0; z < volData.getDepth(); z += extractedRegionSize)
{
for (int32_t y = 0; y < volData.getHeight(); y += extractedRegionSize)
createCubeInVolume(volData, Vector3DInt32(1, midPos - 10, midPos - 10), Vector3DInt32(maxPos - 1, midPos + 10, midPos + 10), MaterialDensityPair44::getMaxDensity());
createCubeInVolume(volData, Vector3DInt32(midPos - 10, 1, midPos - 10), Vector3DInt32(midPos + 10, maxPos - 1, midPos + 10), MaterialDensityPair44::getMaxDensity());
createCubeInVolume(volData, Vector3DInt32(midPos - 10, midPos - 10, 1), Vector3DInt32(midPos + 10, midPos + 10, maxPos - 1), MaterialDensityPair44::getMaxDensity());
QSharedPointer<QGLShaderProgram> shader(new QGLShaderProgram);
if (!shader->addShaderFromSourceFile(QGLShader::Vertex, ":/openglexample.vert"))
{
for (int32_t x = 0; x < volData.getWidth(); x += extractedRegionSize)
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
if (!shader->addShaderFromSourceFile(QGLShader::Fragment, ":/openglexample.frag"))
{
std::cerr << shader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
setShader(shader);
QTime time;
time.start();
//openGLWidget.setVolume(&volData);
cout << endl << "Time taken = " << time.elapsed() / 1000.0f << "s" << endl << endl;
const int32_t extractedRegionSize = 32;
int meshCounter = 0;
for (int32_t z = 0; z < volData.getDepth(); z += extractedRegionSize)
{
for (int32_t y = 0; y < volData.getHeight(); y += extractedRegionSize)
{
// Specify the region to extract based on a starting position and the desired region size.
PolyVox::Region regToExtract(x, y, z, x + extractedRegionSize, y + extractedRegionSize, z + extractedRegionSize);
for (int32_t x = 0; x < volData.getWidth(); x += extractedRegionSize)
{
// Specify the region to extract based on a starting position and the desired region size.
PolyVox::Region regToExtract(x, y, z, x + extractedRegionSize, y + extractedRegionSize, z + extractedRegionSize);
// If you uncomment this line you will be able to see that the volume is rendered as multiple separate meshes.
//regToExtract.shrink(1);
// If you uncomment this line you will be able to see that the volume is rendered as multiple separate meshes.
//regToExtract.shrink(1);
// Perform the extraction for this region of the volume
auto mesh = extractMarchingCubesMesh(&volData, regToExtract);
// Perform the extraction for this region of the volume
auto mesh = extractMarchingCubesMesh(&volData, regToExtract);
// The returned mesh needs to be decoded to be appropriate for GPU rendering.
auto decodedMesh = decodeMesh(mesh);
// The returned mesh needs to be decoded to be appropriate for GPU rendering.
auto decodedMesh = decodeMesh(mesh);
// Pass the surface to the OpenGL window. Note that we are also passing an offset in this multi-mesh example. This is because
// the surface extractors return a mesh with 'local space' positions to reduce storage requirements and precision problems.
openGLWidget.addMesh(decodedMesh, decodedMesh.getOffset());
// Pass the surface to the OpenGL window. Note that we are also passing an offset in this multi-mesh example. This is because
// the surface extractors return a mesh with 'local space' positions to reduce storage requirements and precision problems.
addMesh(decodedMesh, decodedMesh.getOffset());
meshCounter++;
meshCounter++;
}
}
}
cout << "Rendering volume as " << meshCounter << " seperate meshes" << endl;
setCameraTransform(QVector3D(150.0f, 150.0f, 150.0f), -(PI / 4.0f), PI + (PI / 4.0f));
}
};
cout << "Rendering volume as " << meshCounter << " seperate meshes" << endl;
openGLWidget.setViewableRegion(volData.getEnclosingRegion());
int main(int argc, char *argv[])
{
//Create and show the Qt OpenGL window
QApplication app(argc, argv);
OpenGLExample openGLWidget(0);
openGLWidget.show();
//Run the message pump.
return app.exec();
}
}
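A quick sanity check on the extraction loop above: with g_uVolumeSideLength = 128 and extractedRegionSize = 32, each of the three nested loops runs 128 / 32 = 4 times, so the example should extract and report 4 × 4 × 4 = 64 separate meshes.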

View File

@@ -4,9 +4,9 @@ in vec4 position; // This will be the position of the vertex in model-space
in vec4 normal; // The normal data may not have been set
in ivec2 material;
uniform mat4 cameraToClipMatrix;
uniform mat4 worldToCameraMatrix;
uniform mat4 modelToWorldMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
out vec4 worldPosition; //This is being passed to the fragment shader to calculate the normals
out vec3 normalFromVS;
@@ -15,7 +15,7 @@ flat out ivec2 materialFromVS;
void main()
{
// Compute the usual OpenGL transformation to clip space.
gl_Position = cameraToClipMatrix * worldToCameraMatrix * modelToWorldMatrix * position;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * position;
// This example is demonstrating the marching cubes mesh, which does have per-vertex normals. We can
// just pass them through, though real code might want to deal with transforming normals appropriately.

View File

@@ -24,14 +24,16 @@ PROJECT(PagingExample)
#Project's source files
SET(SRC_FILES
main.cpp
../common/OpenGLWidget.cpp
Perlin.cpp
../common/PolyVoxExample.cpp
)
#Project's header files
SET(INC_FILES
../common/OpenGLWidget.h
../common/OpenGLWidget.inl
Perlin.h
../common/PolyVoxExample.h
)
#"Sources" and "Headers" are the group names in Visual Studio.

View File

@@ -21,7 +21,7 @@ freely, subject to the following restrictions:
distribution.
*******************************************************************************/
#include "OpenGLWidget.h"
#include "PolyVoxExample.h"
#include "Perlin.h"
#include "PolyVox/MaterialDensityPair.h"
@@ -139,48 +139,61 @@ public:
}
};
class PagingExample : public PolyVoxExample
{
public:
PagingExample(QWidget *parent)
:PolyVoxExample(parent)
{
}
protected:
void initializeExample() override
{
PerlinNoisePager* pager = new PerlinNoisePager();
PagedVolume<MaterialDensityPair44> volData(PolyVox::Region::MaxRegion(), pager, 64);
volData.setMemoryUsageLimit(8 * 1024 * 1024); // 8Mb
//createSphereInVolume(volData, 30);
//createPerlinTerrain(volData);
//createPerlinVolumeSlow(volData);
std::cout << "Memory usage: " << (volData.calculateSizeInBytes() / 1024.0 / 1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
PolyVox::Region reg(Vector3DInt32(-255, 0, 0), Vector3DInt32(255, 255, 255));
std::cout << "Prefetching region: " << reg.getLowerCorner() << " -> " << reg.getUpperCorner() << std::endl;
volData.prefetch(reg);
std::cout << "Memory usage: " << (volData.calculateSizeInBytes() / 1024.0 / 1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
PolyVox::Region reg2(Vector3DInt32(0, 0, 0), Vector3DInt32(255, 255, 255));
std::cout << "Flushing region: " << reg2.getLowerCorner() << " -> " << reg2.getUpperCorner() << std::endl;
volData.flush(reg2);
std::cout << "Memory usage: " << (volData.calculateSizeInBytes() / 1024.0 / 1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
std::cout << "Flushing entire volume" << std::endl;
volData.flushAll();
std::cout << "Memory usage: " << (volData.calculateSizeInBytes() / 1024.0 / 1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
//Extract the surface
auto mesh = extractCubicMesh(&volData, reg2);
std::cout << "#vertices: " << mesh.getNoOfVertices() << std::endl;
auto decodedMesh = decodeMesh(mesh);
//Pass the surface to the OpenGL window
addMesh(decodedMesh);
setCameraTransform(QVector3D(300.0f, 300.0f, 300.0f), -(PI / 4.0f), PI + (PI / 4.0f));
}
};
int main(int argc, char *argv[])
{
//Create and show the Qt OpenGL window
QApplication app(argc, argv);
OpenGLWidget openGLWidget(0);
PagingExample openGLWidget(0);
openGLWidget.show();
PerlinNoisePager* pager = new PerlinNoisePager();
PagedVolume<MaterialDensityPair44> volData(PolyVox::Region::MaxRegion(), pager, 64);
volData.setMemoryUsageLimit(8 * 1024 * 1024); // 8Mb
//createSphereInVolume(volData, 30);
//createPerlinTerrain(volData);
//createPerlinVolumeSlow(volData);
std::cout << "Memory usage: " << (volData.calculateSizeInBytes()/1024.0/1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
PolyVox::Region reg(Vector3DInt32(-255,0,0), Vector3DInt32(255,255,255));
std::cout << "Prefetching region: " << reg.getLowerCorner() << " -> " << reg.getUpperCorner() << std::endl;
volData.prefetch(reg);
std::cout << "Memory usage: " << (volData.calculateSizeInBytes()/1024.0/1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
PolyVox::Region reg2(Vector3DInt32(0,0,0), Vector3DInt32(255,255,255));
std::cout << "Flushing region: " << reg2.getLowerCorner() << " -> " << reg2.getUpperCorner() << std::endl;
volData.flush(reg2);
std::cout << "Memory usage: " << (volData.calculateSizeInBytes()/1024.0/1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
std::cout << "Flushing entire volume" << std::endl;
volData.flushAll();
std::cout << "Memory usage: " << (volData.calculateSizeInBytes()/1024.0/1024.0) << "MB" << std::endl;
//std::cout << "Compression ratio: 1 to " << (1.0/(volData.calculateCompressionRatio())) << std::endl;
//Extract the surface
auto mesh = extractCubicMesh(&volData, reg2);
std::cout << "#vertices: " << mesh.getNoOfVertices() << std::endl;
auto decodedMesh = decodeMesh(mesh);
//Pass the surface to the OpenGL window
openGLWidget.addMesh(decodedMesh);
openGLWidget.setViewableRegion(reg2);
//Run the message pump.
return app.exec();
}

View File

@@ -127,17 +127,17 @@ def run():
in vec4 position;
in vec4 normal;
uniform mat4 cameraToClipMatrix;
uniform mat4 worldToCameraMatrix;
uniform mat4 modelToWorldMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
flat out float theColor;
void main()
{
vec4 temp = modelToWorldMatrix * position;
temp = worldToCameraMatrix * temp;
gl_Position = cameraToClipMatrix * temp;
vec4 temp = modelMatrix * position;
temp = viewMatrix * temp;
gl_Position = projectionMatrix * temp;
theColor = clamp(abs(dot(normalize(normal.xyz), normalize(vec3(0.9,0.1,0.5)))), 0, 1);
}
@@ -183,13 +183,13 @@ def run():
glDisableVertexAttribArray(0)
#Now grab our transformation matrix locations
modelToWorldMatrixUnif = glGetUniformLocation(shader, b"modelToWorldMatrix")
worldToCameraMatrixUnif = glGetUniformLocation(shader, b"worldToCameraMatrix")
cameraToClipMatrixUnif = glGetUniformLocation(shader, b"cameraToClipMatrix")
modelMatrixUnif = glGetUniformLocation(shader, b"modelMatrix")
viewMatrixUnif = glGetUniformLocation(shader, b"viewMatrix")
projectionMatrixUnif = glGetUniformLocation(shader, b"projectionMatrix")
modelToWorldMatrix = np.array([[1.0,0.0,0.0,-32.0],[0.0,1.0,0.0,-32.0],[0.0,0.0,1.0,-32.0],[0.0,0.0,0.0,1.0]], dtype='f')
worldToCameraMatrix = np.array([[1.0,0.0,0.0,0.0],[0.0,1.0,0.0,0.0],[0.0,0.0,1.0,-50.0],[0.0,0.0,0.0,1.0]], dtype='f')
cameraToClipMatrix = np.array([[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0]], dtype='f')
modelMatrix = np.array([[1.0,0.0,0.0,-32.0],[0.0,1.0,0.0,-32.0],[0.0,0.0,1.0,-32.0],[0.0,0.0,0.0,1.0]], dtype='f')
viewMatrix = np.array([[1.0,0.0,0.0,0.0],[0.0,1.0,0.0,0.0],[0.0,0.0,1.0,-50.0],[0.0,0.0,0.0,1.0]], dtype='f')
projectionMatrix = np.array([[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0]], dtype='f')
#These next few lines just set up our camera frustum
fovDeg = 45.0
@@ -198,16 +198,16 @@ def run():
zNear = 1.0
zFar = 1000.0
cameraToClipMatrix[0][0] = frustumScale
cameraToClipMatrix[1][1] = frustumScale
cameraToClipMatrix[2][2] = (zFar + zNear) / (zNear - zFar)
cameraToClipMatrix[2][3] = -1.0
cameraToClipMatrix[3][2] = (2 * zFar * zNear) / (zNear - zFar)
projectionMatrix[0][0] = frustumScale
projectionMatrix[1][1] = frustumScale
projectionMatrix[2][2] = (zFar + zNear) / (zNear - zFar)
projectionMatrix[2][3] = -1.0
projectionMatrix[3][2] = (2 * zFar * zNear) / (zNear - zFar)
#worldToCameraMatrix and cameraToClipMatrix don't change ever so just set them once here
#viewMatrix and projectionMatrix don't change ever so just set them once here
with shader:
glUniformMatrix4fv(cameraToClipMatrixUnif, 1, GL_TRUE, cameraToClipMatrix)
glUniformMatrix4fv(worldToCameraMatrixUnif, 1, GL_TRUE, worldToCameraMatrix)
glUniformMatrix4fv(projectionMatrixUnif, 1, GL_TRUE, projectionMatrix)
glUniformMatrix4fv(viewMatrixUnif, 1, GL_TRUE, viewMatrix)
#These are used to track the rotation of the volume
LastFrameMousePos = (0,0)
@@ -240,10 +240,10 @@ def run():
rotateAroundX = np.array([[1.0,0.0,0.0,0.0],[0.0,cos(radians(yRotation)),-sin(radians(yRotation)),0.0],[0.0,sin(radians(yRotation)),cos(radians(yRotation)),0.0],[0.0,0.0,0.0,1.0]], dtype='f')
rotateAroundY = np.array([[cos(radians(xRotation)),0.0,sin(radians(xRotation)),0.0],[0.0,1.0,0.0,0.0],[-sin(radians(xRotation)),0.0,cos(radians(xRotation)),0.0],[0.0,0.0,0.0,1.0]], dtype='f')
modelToWorldMatrix = rotateAroundY.dot(rotateAroundX.dot(moveToOrigin))
modelMatrix = rotateAroundY.dot(rotateAroundX.dot(moveToOrigin))
with shader:
glUniformMatrix4fv(modelToWorldMatrixUnif, 1, GL_TRUE, modelToWorldMatrix)
glUniformMatrix4fv(modelMatrixUnif, 1, GL_TRUE, modelMatrix)
glBindVertexArray(vertexArrayObject)
glDrawElements(GL_TRIANGLES, len(indices), GL_UNSIGNED_INT, None)

View File

@@ -24,12 +24,14 @@ PROJECT(SmoothLODExample)
#Project's source files
SET(SRC_FILES
main.cpp
../common/OpenGLWidget.cpp
../common/PolyVoxExample.cpp
)
#Project's header files
SET(INC_FILES
../common/OpenGLWidget.h
../common/OpenGLWidget.inl
../common/PolyVoxExample.h
)
#"Sources" and "Headers" are the group names in Visual Studio.

View File

@@ -21,7 +21,7 @@ freely, subject to the following restrictions:
distribution.
*******************************************************************************/
#include "OpenGLWidget.h"
#include "PolyVoxExample.h"
#include "PolyVox/Density.h"
#include "PolyVox/MarchingCubesSurfaceExtractor.h"
@@ -68,43 +68,56 @@ void createSphereInVolume(PagedVolume<uint8_t>& volData, float fRadius)
}
}
class SmoothLODExample : public PolyVoxExample
{
public:
SmoothLODExample(QWidget *parent)
:PolyVoxExample(parent)
{
}
protected:
void initializeExample() override
{
//Create an empty volume and then place a sphere in it
PagedVolume<uint8_t> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(63, 63, 63)));
createSphereInVolume(volData, 28);
//Smooth the data - should reimplement this using LowPassFilter
//smoothRegion<PagedVolume, Density8>(volData, volData.getEnclosingRegion());
//smoothRegion<PagedVolume, Density8>(volData, volData.getEnclosingRegion());
//smoothRegion<PagedVolume, Density8>(volData, volData.getEnclosingRegion());
RawVolume<uint8_t> volDataLowLOD(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(15, 31, 31)));
VolumeResampler< PagedVolume<uint8_t>, RawVolume<uint8_t> > volumeResampler(&volData, PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(31, 63, 63)), &volDataLowLOD, volDataLowLOD.getEnclosingRegion());
volumeResampler.execute();
//Extract the surface
auto meshLowLOD = extractMarchingCubesMesh(&volDataLowLOD, volDataLowLOD.getEnclosingRegion());
// The returned mesh needs to be decoded to be appropriate for GPU rendering.
auto decodedMeshLowLOD = decodeMesh(meshLowLOD);
//Extract the surface
auto meshHighLOD = extractMarchingCubesMesh(&volData, PolyVox::Region(Vector3DInt32(30, 0, 0), Vector3DInt32(63, 63, 63)));
// The returned mesh needs to be decoded to be appropriate for GPU rendering.
auto decodedMeshHighLOD = decodeMesh(meshHighLOD);
//Pass the surface to the OpenGL window
addMesh(decodedMeshHighLOD, Vector3DInt32(30, 0, 0));
addMesh(decodedMeshLowLOD, Vector3DInt32(0, 0, 0), 63.0f / 31.0f);
setCameraTransform(QVector3D(100.0f, 100.0f, 100.0f), -(PI / 4.0f), PI + (PI / 4.0f));
}
};
int main(int argc, char *argv[])
{
//Create and show the Qt OpenGL window
QApplication app(argc, argv);
OpenGLWidget openGLWidget(0);
SmoothLODExample openGLWidget(0);
openGLWidget.show();
//Create an empty volume and then place a sphere in it
PagedVolume<uint8_t> volData(PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(63, 63, 63)));
createSphereInVolume(volData, 28);
//Smooth the data - should reimplement this using LowPassFilter
//smoothRegion<PagedVolume, Density8>(volData, volData.getEnclosingRegion());
//smoothRegion<PagedVolume, Density8>(volData, volData.getEnclosingRegion());
//smoothRegion<PagedVolume, Density8>(volData, volData.getEnclosingRegion());
RawVolume<uint8_t> volDataLowLOD(PolyVox::Region(Vector3DInt32(0,0,0), Vector3DInt32(15, 31, 31)));
VolumeResampler< PagedVolume<uint8_t>, RawVolume<uint8_t> > volumeResampler(&volData, PolyVox::Region(Vector3DInt32(0, 0, 0), Vector3DInt32(31, 63, 63)), &volDataLowLOD, volDataLowLOD.getEnclosingRegion());
volumeResampler.execute();
//Extract the surface
auto meshLowLOD = extractMarchingCubesMesh(&volDataLowLOD, volDataLowLOD.getEnclosingRegion());
// The returned mesh needs to be decoded to be appropriate for GPU rendering.
auto decodedMeshLowLOD = decodeMesh(meshLowLOD);
//Extract the surface
auto meshHighLOD = extractMarchingCubesMesh(&volData, PolyVox::Region(Vector3DInt32(30, 0, 0), Vector3DInt32(63, 63, 63)));
// The returned mesh needs to be decoded to be appropriate for GPU rendering.
auto decodedMeshHighLOD = decodeMesh(meshHighLOD);
//Pass the surface to the OpenGL window
openGLWidget.addMesh(decodedMeshHighLOD, Vector3DInt32(30, 0, 0));
openGLWidget.addMesh(decodedMeshLowLOD, Vector3DInt32(0, 0, 0), 63.0f / 31.0f);
openGLWidget.setViewableRegion(volData.getEnclosingRegion());
//Run the message pump.
return app.exec();
}
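A note on the numbers in the two addMesh() calls above: the low-LOD volume spans (0,0,0) to (15,31,31) and is resampled from the region (0,0,0) to (31,63,63) of the full volume, so each low-LOD voxel covers two full-resolution voxels per axis. Scaling its mesh by 63.0f / 31.0f (roughly 2.03) stretches the 0..31 span in y and z back out to 0..63, and the 0..15 span in x out to about 30, which is where the full-resolution mesh, translated by (30, 0, 0), takes over.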

View File

@@ -1,195 +0,0 @@
#include "OpenGLWidget.h"
#include <QMouseEvent>
#include <QMatrix4x4>
#include <QCoreApplication>
//#include <QtMath>
using namespace PolyVox;
using namespace std;
////////////////////////////////////////////////////////////////////////////////
// Public functions
////////////////////////////////////////////////////////////////////////////////
OpenGLWidget::OpenGLWidget(QWidget *parent)
:QGLWidget(parent)
,m_viewableRegion(PolyVox::Region(0, 0, 0, 255, 255, 255))
,m_xRotation(0)
,m_yRotation(0)
{
}
void OpenGLWidget::setShader(QSharedPointer<QGLShaderProgram> shader)
{
mShader = shader;
}
void OpenGLWidget::setViewableRegion(PolyVox::Region viewableRegion)
{
m_viewableRegion = viewableRegion;
// The user has specified a new viewable region
// so we need to regenerate our camera matrix.
setupWorldToCameraMatrix();
}
void OpenGLWidget::mousePressEvent(QMouseEvent* event)
{
// Initialise these variables which will be used when the mouse actually moves.
m_CurrentMousePos = event->pos();
m_LastFrameMousePos = m_CurrentMousePos;
}
void OpenGLWidget::mouseMoveEvent(QMouseEvent* event)
{
// Update the x and y rotations based on the mouse movement.
m_CurrentMousePos = event->pos();
QPoint diff = m_CurrentMousePos - m_LastFrameMousePos;
m_xRotation += diff.x();
m_yRotation += diff.y();
m_LastFrameMousePos = m_CurrentMousePos;
// The camera rotation has changed so we need to regenerate the matrix.
setupWorldToCameraMatrix();
// Re-render.
update();
}
////////////////////////////////////////////////////////////////////////////////
// Protected functions
////////////////////////////////////////////////////////////////////////////////
void OpenGLWidget::initializeGL()
{
if (!initializeOpenGLFunctions())
{
std::cerr << "Could not initialize OpenGL functions" << std::endl;
exit(EXIT_FAILURE);
}
//Print out some information about the OpenGL implementation.
std::cout << "OpenGL Implementation Details:" << std::endl;
if(glGetString(GL_VENDOR))
std::cout << "\tGL_VENDOR: " << glGetString(GL_VENDOR) << std::endl;
if(glGetString(GL_RENDERER))
std::cout << "\tGL_RENDERER: " << glGetString(GL_RENDERER) << std::endl;
if(glGetString(GL_VERSION))
std::cout << "\tGL_VERSION: " << glGetString(GL_VERSION) << std::endl;
if(glGetString(GL_SHADING_LANGUAGE_VERSION))
std::cout << "\tGL_SHADING_LANGUAGE_VERSION: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
//Set up the clear colour
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClearDepth(1.0f);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glDepthFunc(GL_LEQUAL);
glDepthRange(0.0, 1.0);
mShader = QSharedPointer<QGLShaderProgram>(new QGLShaderProgram);
// This is basically a simple fallback vertex shader which does the most basic rendering possible.
// PolyVox examples are able to provide their own shaders to demonstrate certain effects if desired.
if (!mShader->addShaderFromSourceFile(QGLShader::Vertex, ":/example.vert"))
{
std::cerr << mShader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// This is basically a simple fallback fragment shader which does the most basic rendering possible.
// PolyVox examples are able to provide their own shaders to demonstrate certain effects if desired.
if (!mShader->addShaderFromSourceFile(QGLShader::Fragment, ":/example.frag"))
{
std::cerr << mShader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// Bind the position semantic - this is defined in the vertex shader above.
mShader->bindAttributeLocation("position", 0);
// Bind the other semantics. Note that these don't actually exist in our example shader above! However, other
// example shaders may choose to provide them and having the binding code here does not seem to cause any problems.
mShader->bindAttributeLocation("normal", 1);
mShader->bindAttributeLocation("material", 2);
if (!mShader->link())
{
std::cerr << mShader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// Initial setup of camera.
setupWorldToCameraMatrix();
}
void OpenGLWidget::resizeGL(int w, int h)
{
//Setup the viewport
glViewport(0, 0, w, h);
auto aspectRatio = w / (float)h;
float zNear = 1.0;
float zFar = 1000.0;
cameraToClipMatrix.setToIdentity();
cameraToClipMatrix.frustum(-aspectRatio, aspectRatio, -1, 1, zNear, zFar);
}
void OpenGLWidget::paintGL()
{
//Clear the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Our example framework only uses a single shader for the scene (for all meshes).
mShader->bind();
// These two matrices are constant for all meshes.
mShader->setUniformValue("worldToCameraMatrix", worldToCameraMatrix);
mShader->setUniformValue("cameraToClipMatrix", cameraToClipMatrix);
// Iterate over each mesh which the user added to our list, and render it.
for (OpenGLMeshData meshData : mMeshData)
{
//Set up the model matrix based on provided translation and scale.
QMatrix4x4 modelToWorldMatrix;
modelToWorldMatrix.translate(meshData.translation);
modelToWorldMatrix.scale(meshData.scale);
mShader->setUniformValue("modelToWorldMatrix", modelToWorldMatrix);
// Bind the vertex array for the current mesh
glBindVertexArray(meshData.vertexArrayObject);
// Draw the mesh
glDrawElements(GL_TRIANGLES, meshData.noOfIndices, meshData.indexType, 0);
// Unbind the vertex array.
glBindVertexArray(0);
}
// We're done with the shader for this frame.
mShader->release();
// Check for errors.
GLenum errCode = glGetError();
if(errCode != GL_NO_ERROR)
{
std::cerr << "OpenGL Error: " << errCode << std::endl;
}
}
////////////////////////////////////////////////////////////////////////////////
// Private functions
////////////////////////////////////////////////////////////////////////////////
void OpenGLWidget::setupWorldToCameraMatrix()
{
QVector3D lowerCorner(m_viewableRegion.getLowerX(), m_viewableRegion.getLowerY(), m_viewableRegion.getLowerZ());
QVector3D upperCorner(m_viewableRegion.getUpperX(), m_viewableRegion.getUpperY(), m_viewableRegion.getUpperZ());
QVector3D centerPoint = (lowerCorner + upperCorner) * 0.5;
float fDiagonalLength = (upperCorner - lowerCorner).length();
worldToCameraMatrix.setToIdentity();
worldToCameraMatrix.translate(0, 0, -fDiagonalLength / 2.0f); //Move the camera back by the required amount
worldToCameraMatrix.rotate(m_xRotation, 0, 1, 0); //rotate around y-axis
worldToCameraMatrix.rotate(m_yRotation, 1, 0, 0); //rotate around x-axis
worldToCameraMatrix.translate(-centerPoint); //centre the model on the origin
}

View File

@@ -28,135 +28,73 @@ distribution.
#include <QOpenGLFunctions_3_1>
#include <QElapsedTimer>
#include <QGLWidget>
#include <QGLShaderProgram>
#include <QOpenGLVertexArrayObject>
#include <QOpenGLBuffer>
// This structure holds all the data required
// to render one of our meshes through OpenGL.
struct OpenGLMeshData
// This is a very basic class for getting an OpenGL example up and running with Qt5. It simply displays
// an OpenGL widget and implements an FPS-style camera as well as other very basic functionality. User
// code can derive from this and override the provided virtual functions to implement functionality.
// The class is templatized so users can specify the OpenGL version via the appropriate QOpenGLFunctions.
template <typename QOpenGLFunctionsType>
class OpenGLWidget : public QGLWidget, protected QOpenGLFunctionsType
{
GLuint noOfIndices;
GLenum indexType;
GLuint indexBuffer;
GLuint vertexBuffer;
GLuint vertexArrayObject;
QVector3D translation;
float scale;
};
// Our OpenGLWidget is used by all the examples to render the extracted meshes. It is
// fairly specific to our needs (you probably won't want to use it in your own project)
// but should provide a useful illustration of how PolyVox meshes can be rendered.
class OpenGLWidget : public QGLWidget, protected QOpenGLFunctions_3_1
{
public:
// Constructor
protected:
// Protected constructor because this widget should not be created directly - it should only be subclassed.
OpenGLWidget(QWidget *parent);
// Convert a PolyVox mesh to OpenGL index/vertex buffers. Inlined because it's templatised.
template <typename MeshType>
void addMesh(const MeshType& surfaceMesh, const PolyVox::Vector3DInt32& translation = PolyVox::Vector3DInt32(0, 0, 0), float scale = 1.0f)
{
// Convenient access to the vertices and indices
const auto& vecIndices = surfaceMesh.getIndices();
const auto& vecVertices = surfaceMesh.getVertices();
// Derived classes should override these to provide functionality.
virtual void initialize() {}
virtual void renderOneFrame() {}
// This struct holds the OpenGL properties (buffer handles, etc) which will be used
// to render our mesh. We copy the data from the PolyVox mesh into this structure.
OpenGLMeshData meshData;
// Getters for properties defined by this widget.
const QMatrix4x4& viewMatrix();
const QMatrix4x4& projectionMatrix();
// Create the VAO for the mesh
glGenVertexArrays(1, &(meshData.vertexArrayObject));
glBindVertexArray(meshData.vertexArrayObject);
// Setters for properties defined by this widget.
void setCameraTransform(QVector3D position, float pitch, float yaw);
// The GL_ARRAY_BUFFER will contain the list of vertex positions
glGenBuffers(1, &(meshData.vertexBuffer));
glBindBuffer(GL_ARRAY_BUFFER, meshData.vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vecVertices.size() * sizeof(typename MeshType::VertexType), vecVertices.data(), GL_STATIC_DRAW);
// and GL_ELEMENT_ARRAY_BUFFER will contain the indices
glGenBuffers(1, &(meshData.indexBuffer));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, meshData.indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vecIndices.size() * sizeof(typename MeshType::IndexType), vecIndices.data(), GL_STATIC_DRAW);
// Every surface extractor outputs valid positions for the vertices, so tell OpenGL how these are laid out
glEnableVertexAttribArray(0); // Attrib '0' is the vertex positions
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(typename MeshType::VertexType), (GLvoid*)(offsetof(typename MeshType::VertexType, position))); //take the first 3 floats from every sizeof(decltype(vecVertices)::value_type)
// Some surface extractors also generate normals, so tell OpenGL how these are laid out. If a surface extractor
// does not generate normals then nonsense values are written into the buffer here and should be ignored by the
// shader. This is mostly just to simplify this example code - in a real application you will know whether your
// chosen surface extractor generates normals and can skip uploading them if not.
glEnableVertexAttribArray(1); // Attrib '1' is the vertex normals.
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(typename MeshType::VertexType), (GLvoid*)(offsetof(typename MeshType::VertexType, normal)));
// Finally a surface extractor will probably output additional data. This is highly application dependent. For this example code
// we're just uploading it as a set of bytes which we can read individually, but real code will want to do something specialised here.
glEnableVertexAttribArray(2); //We're talking about shader attribute '2'
GLint size = (std::min)(sizeof(typename MeshType::VertexType::DataType), size_t(4)); // Can't upload more than 4 components (vec4 is GLSL's biggest type)
glVertexAttribIPointer(2, size, GL_UNSIGNED_BYTE, sizeof(typename MeshType::VertexType), (GLvoid*)(offsetof(typename MeshType::VertexType, data)));
// We're done uploading and can now unbind.
glBindVertexArray(0);
// A few additional properties can be copied across for use during rendering.
meshData.noOfIndices = vecIndices.size();
meshData.translation = QVector3D(translation.getX(), translation.getY(), translation.getZ());
meshData.scale = scale;
// Set 16 or 32-bit index buffer size.
meshData.indexType = sizeof(typename MeshType::IndexType) == 2 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
// Now add the mesh to the list of meshes to render.
addMeshData(meshData);
}
void addMeshData(OpenGLMeshData meshData)
{
mMeshData.push_back(meshData);
}
// For our purposes we use a single shader for the whole volume, and
// this example framework is only meant to show a single volume at a time
void setShader(QSharedPointer<QGLShaderProgram> shader);
// The viewable region can be adjusted so that this example framework can be used for different volume sizes.
void setViewableRegion(PolyVox::Region viewableRegion);
// Mouse handling
void mouseMoveEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
protected:
private:
// Qt OpenGL functions
void initializeGL();
void resizeGL(int w, int h);
void paintGL();
private:
// Mouse handling
void mouseMoveEvent(QMouseEvent* event);
void mousePressEvent(QMouseEvent* event);
void setupWorldToCameraMatrix();
// Index/vertex buffer data
std::vector<OpenGLMeshData> mMeshData;
QSharedPointer<QGLShaderProgram> mShader;
// Keyboard handling
void keyPressEvent(QKeyEvent* event);
void keyReleaseEvent(QKeyEvent* event);
// Matrices
QMatrix4x4 worldToCameraMatrix;
QMatrix4x4 cameraToClipMatrix;
QMatrix4x4 mViewMatrix;
QMatrix4x4 mProjectionMatrix;
// Mouse data
QPoint m_LastFrameMousePos;
QPoint m_CurrentMousePos;
// Camera setup
PolyVox::Region m_viewableRegion;
int m_xRotation;
int m_yRotation;
// Keyboard data
QList<int> mPressedKeys;
// For input handling and movement
float mCameraMoveSpeed = 50.0f;
float mCameraRotateSpeed = 0.005f;
// Camera properties
QVector3D mCameraPosition = QVector3D(0, 0, -100);
float mCameraYaw = 0.0f;
float mCameraPitch = 0.0f;
float mCameraFOV = 60.0f;
QElapsedTimer mElapsedTimer;
};
#include "OpenGLWidget.inl"
#endif //__BasicExample_OpenGLWidget_H__

View File

@@ -0,0 +1,194 @@
#include "OpenGLWidget.h"
#include <QMouseEvent>
#include <QMatrix4x4>
#include <QCoreApplication>
#include <QTimer>
using namespace PolyVox;
using namespace std;
////////////////////////////////////////////////////////////////////////////////
// Protected functions
////////////////////////////////////////////////////////////////////////////////
template <typename QOpenGLFunctionsType>
OpenGLWidget<QOpenGLFunctionsType>::OpenGLWidget(QWidget *parent)
:QGLWidget(parent)
{
}
template <typename QOpenGLFunctionsType>
const QMatrix4x4& OpenGLWidget<QOpenGLFunctionsType>::viewMatrix()
{
return mViewMatrix;
}
template <typename QOpenGLFunctionsType>
const QMatrix4x4& OpenGLWidget<QOpenGLFunctionsType>::projectionMatrix()
{
return mProjectionMatrix;
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::setCameraTransform(QVector3D position, float pitch, float yaw)
{
mCameraPosition = position;
mCameraYaw = yaw;
mCameraPitch = pitch;
}
////////////////////////////////////////////////////////////////////////////////
// Private functions
////////////////////////////////////////////////////////////////////////////////
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::initializeGL()
{
if (!initializeOpenGLFunctions())
{
std::cerr << "Could not initialize OpenGL functions" << std::endl;
exit(EXIT_FAILURE);
}
//Print out some information about the OpenGL implementation.
std::cout << "OpenGL Implementation Details:" << std::endl;
if(glGetString(GL_VENDOR))
std::cout << "\tGL_VENDOR: " << glGetString(GL_VENDOR) << std::endl;
if(glGetString(GL_RENDERER))
std::cout << "\tGL_RENDERER: " << glGetString(GL_RENDERER) << std::endl;
if(glGetString(GL_VERSION))
std::cout << "\tGL_VERSION: " << glGetString(GL_VERSION) << std::endl;
if(glGetString(GL_SHADING_LANGUAGE_VERSION))
std::cout << "\tGL_SHADING_LANGUAGE_VERSION: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
//Set up the clear colour
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClearDepth(1.0f);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glDepthFunc(GL_LEQUAL);
glDepthRange(0.0, 1.0);
initialize();
// Start a timer to drive the main rendering loop.
QTimer* timer = new QTimer(this);
connect(timer, SIGNAL(timeout()), this, SLOT(update()));
timer->start(0);
mElapsedTimer.start();
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::resizeGL(int w, int h)
{
//Setup the viewport
glViewport(0, 0, w, h);
auto aspectRatio = w / (float)h;
float zNear = 1.0;
float zFar = 1000.0;
mProjectionMatrix.setToIdentity();
mProjectionMatrix.perspective(mCameraFOV, aspectRatio, zNear, zFar);
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::paintGL()
{
// Direction : Spherical coordinates to Cartesian coordinates conversion
QVector3D cameraForward(
cos(mCameraPitch) * sin(mCameraYaw),
sin(mCameraPitch),
cos(mCameraPitch) * cos(mCameraYaw)
);
// Right vector
QVector3D cameraRight(
sin(mCameraYaw - 3.14f / 2.0f),
0,
cos(mCameraYaw - 3.14f / 2.0f)
);
// Up vector
QVector3D cameraUp = QVector3D::crossProduct(cameraRight, cameraForward);
// Get the elapsed time since last frame and convert to seconds.
float deltaTime = mElapsedTimer.restart() / 1000.0f;
// Move forward
if ((mPressedKeys.contains(Qt::Key_Up)) || (mPressedKeys.contains(Qt::Key_W)))
{
mCameraPosition += cameraForward * deltaTime * mCameraMoveSpeed;
}
// Move backward
if ((mPressedKeys.contains(Qt::Key_Down)) || (mPressedKeys.contains(Qt::Key_S)))
{
mCameraPosition -= cameraForward * deltaTime * mCameraMoveSpeed;
}
// Strafe right
if ((mPressedKeys.contains(Qt::Key_Right)) || (mPressedKeys.contains(Qt::Key_D)))
{
mCameraPosition += cameraRight * deltaTime * mCameraMoveSpeed;
}
// Strafe left
if ((mPressedKeys.contains(Qt::Key_Left)) || (mPressedKeys.contains(Qt::Key_A)))
{
mCameraPosition -= cameraRight * deltaTime * mCameraMoveSpeed;
}
mViewMatrix.setToIdentity();
mViewMatrix.lookAt(
mCameraPosition, // Camera is here
mCameraPosition + cameraForward, // and looks here : at the same position, plus "direction"
cameraUp // Head is up (set to 0,-1,0 to look upside-down)
);
//Clear the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
renderOneFrame();
// Check for errors.
GLenum errCode = glGetError();
if(errCode != GL_NO_ERROR)
{
std::cerr << "OpenGL Error: " << errCode << std::endl;
}
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::mousePressEvent(QMouseEvent* event)
{
// Initialise these variables which will be used when the mouse actually moves.
m_CurrentMousePos = event->pos();
m_LastFrameMousePos = m_CurrentMousePos;
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::mouseMoveEvent(QMouseEvent* event)
{
// Update the x and y rotations based on the mouse movement.
m_CurrentMousePos = event->pos();
QPoint diff = m_CurrentMousePos - m_LastFrameMousePos;
mCameraYaw -= diff.x() * mCameraRotateSpeed;
mCameraPitch -= diff.y() * mCameraRotateSpeed;
m_LastFrameMousePos = m_CurrentMousePos;
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::keyPressEvent(QKeyEvent* event)
{
if (event->key() == Qt::Key_Escape)
{
close();
}
mPressedKeys.append(event->key());
}
template <typename QOpenGLFunctionsType>
void OpenGLWidget<QOpenGLFunctionsType>::keyReleaseEvent(QKeyEvent* event)
{
mPressedKeys.removeAll(event->key());
}
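Taken together, the header and this .inl define the whole contract for a subclass. A minimal, illustrative sketch of a user of the template widget, relying only on the hooks declared above (initialize(), renderOneFrame(), setCameraTransform()); the class name and body are hypothetical, not part of the commit:

#include "OpenGLWidget.h"

class MinimalExample : public OpenGLWidget<QOpenGLFunctions_3_1>
{
public:
    MinimalExample(QWidget* parent) : OpenGLWidget(parent) {}

protected:
    void initialize() override
    {
        // One-off GL setup (shaders, buffers) goes here; initializeGL() has
        // already enabled depth testing and started the render timer.
        setCameraTransform(QVector3D(0.0f, 0.0f, 100.0f), 0.0f, 0.0f);
    }

    void renderOneFrame() override
    {
        // Issue draw calls here, typically feeding viewMatrix() and
        // projectionMatrix() into a shader; paintGL() has already cleared the
        // buffers and rebuilt the view matrix from the camera state.
    }
};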

View File

@@ -0,0 +1,6 @@
#include "PolyVoxExample.h"
void PolyVoxExample::setShader(QSharedPointer<QGLShaderProgram> shader)
{
mShader = shader;
}

View File

@ -0,0 +1,197 @@
/*******************************************************************************
Copyright (c) 2005-2009 David Williams
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
*******************************************************************************/
#ifndef __PolyVoxExample_H__
#define __PolyVoxExample_H__
#include "OpenGLWidget.h"
// This structure holds all the data required
// to render one of our meshes through OpenGL.
struct OpenGLMeshData
{
GLuint noOfIndices;
GLenum indexType;
GLuint indexBuffer;
GLuint vertexBuffer;
GLuint vertexArrayObject;
QVector3D translation;
float scale;
};
class PolyVoxExample : public OpenGLWidget<QOpenGLFunctions_3_1>
{
public:
PolyVoxExample(QWidget *parent)
:OpenGLWidget(parent)
{
}
// For our purposes we use a single shader for the whole volume, and
// this example framework is only meant to show a single volume at a time
void setShader(QSharedPointer<QGLShaderProgram> shader);
// Convert a PolyVox mesh to OpenGL index/vertex buffers. Inlined because it's templatised.
template <typename MeshType>
void addMesh(const MeshType& surfaceMesh, const PolyVox::Vector3DInt32& translation = PolyVox::Vector3DInt32(0, 0, 0), float scale = 1.0f)
{
// Convenient access to the vertices and indices
const auto& vecIndices = surfaceMesh.getIndices();
const auto& vecVertices = surfaceMesh.getVertices();
// This struct holds the OpenGL properties (buffer handles, etc) which will be used
// to render our mesh. We copy the data from the PolyVox mesh into this structure.
OpenGLMeshData meshData;
// Create the VAO for the mesh
glGenVertexArrays(1, &(meshData.vertexArrayObject));
glBindVertexArray(meshData.vertexArrayObject);
// The GL_ARRAY_BUFFER will contain the list of vertex positions
glGenBuffers(1, &(meshData.vertexBuffer));
glBindBuffer(GL_ARRAY_BUFFER, meshData.vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vecVertices.size() * sizeof(typename MeshType::VertexType), vecVertices.data(), GL_STATIC_DRAW);
// and GL_ELEMENT_ARRAY_BUFFER will contain the indices
glGenBuffers(1, &(meshData.indexBuffer));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, meshData.indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, vecIndices.size() * sizeof(typename MeshType::IndexType), vecIndices.data(), GL_STATIC_DRAW);
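// Note that the GL_ELEMENT_ARRAY_BUFFER binding is recorded as part of the currently bound VAO,
// so rebinding the VAO at draw time also restores this index buffer.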
// Every surface extractor outputs valid positions for the vertices, so tell OpenGL how these are laid out
glEnableVertexAttribArray(0); // Attrib '0' is the vertex positions
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(typename MeshType::VertexType), (GLvoid*)(offsetof(typename MeshType::VertexType, position))); //take the first 3 floats from every sizeof(decltype(vecVertices)::value_type)
// Some surface extractors also generate normals, so tell OpenGL how these are laid out. If a surface extractor
// does not generate normals then nonsense values are written into the buffer here and should be ignored by the
// shader. This is mostly just to simplify this example code - in a real application you will know whether your
// chosen surface extractor generates normals and can skip uploading them if not.
glEnableVertexAttribArray(1); // Attrib '1' is the vertex normals.
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(typename MeshType::VertexType), (GLvoid*)(offsetof(typename MeshType::VertexType, normal)));
// Finally a surface extractor will probably output additional data. This is highly application-dependent. For this example code
// we're just uploading it as a set of bytes which we can read individually, but real code will want to do something specialised here.
glEnableVertexAttribArray(2); //We're talking about shader attribute '2'
GLint size = (std::min)(sizeof(typename MeshType::VertexType::DataType), size_t(4)); // Can't upload more than 4 components (vec4 is GLSL's biggest type)
glVertexAttribIPointer(2, size, GL_UNSIGNED_BYTE, sizeof(typename MeshType::VertexType), (GLvoid*)(offsetof(typename MeshType::VertexType, data)));
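// glVertexAttribIPointer (as opposed to glVertexAttribPointer) keeps the data as integers on the
// GPU rather than converting it to floats, so the shader can declare this attribute as an integer type.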
// We're done uploading and can now unbind.
glBindVertexArray(0);
// A few additional properties can be copied across for use during rendering.
meshData.noOfIndices = vecIndices.size();
meshData.translation = QVector3D(translation.getX(), translation.getY(), translation.getZ());
meshData.scale = scale;
// Choose the 16 or 32-bit index type to match the size of the mesh's IndexType.
meshData.indexType = sizeof(typename MeshType::IndexType) == 2 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
// Now add the mesh to the list of meshes to render.
addMeshData(meshData);
}
void addMeshData(OpenGLMeshData meshData)
{
mMeshData.push_back(meshData);
}
protected:
const float PI = 3.14159265358979f;
virtual void initializeExample() {};
void initialize() override
{
mShader = QSharedPointer<QGLShaderProgram>(new QGLShaderProgram);
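// The ":/..." prefix below means the shader sources are pulled from the Qt resource system
// (compiled in via a .qrc file) rather than loaded from disk at runtime.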
// This is a simple fallback vertex shader which does the most basic rendering possible.
// PolyVox examples are able to provide their own shaders to demonstrate certain effects if desired.
if (!mShader->addShaderFromSourceFile(QGLShader::Vertex, ":/example.vert"))
{
std::cerr << mShader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// This is a simple fallback fragment shader which does the most basic rendering possible.
// PolyVox examples are able to provide their own shaders to demonstrate certain effects if desired.
if (!mShader->addShaderFromSourceFile(QGLShader::Fragment, ":/example.frag"))
{
std::cerr << mShader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// Bind the position semantic - this is defined in the example vertex shader.
mShader->bindAttributeLocation("position", 0);
// Bind the other semantics. Note that these don't actually exist in the default example shader! However, other
// example shaders may choose to provide them, and having the binding code here does not seem to cause any problems.
mShader->bindAttributeLocation("normal", 1);
mShader->bindAttributeLocation("material", 2);
if (!mShader->link())
{
std::cerr << mShader->log().toStdString() << std::endl;
exit(EXIT_FAILURE);
}
// Now do any initialization for the specific example.
initializeExample();
}
void renderOneFrame() override
{
// Our example framework only uses a single shader for the scene (for all meshes).
mShader->bind();
// These two matrices are constant for all meshes.
mShader->setUniformValue("viewMatrix", viewMatrix());
mShader->setUniformValue("projectionMatrix", projectionMatrix());
// Iterate over each mesh which the user added to our list, and render it.
for (OpenGLMeshData meshData : mMeshData)
{
//Set up the model matrix based on the provided translation and scale.
QMatrix4x4 modelMatrix;
modelMatrix.translate(meshData.translation);
modelMatrix.scale(meshData.scale);
mShader->setUniformValue("modelMatrix", modelMatrix);
// Bind the vertex array for the current mesh
glBindVertexArray(meshData.vertexArrayObject);
// Draw the mesh
glDrawElements(GL_TRIANGLES, meshData.noOfIndices, meshData.indexType, 0);
// Unbind the vertex array.
glBindVertexArray(0);
}
// We're done with the shader for this frame.
mShader->release();
}
private:
// Index/vertex buffer data
std::vector<OpenGLMeshData> mMeshData;
QSharedPointer<QGLShaderProgram> mShader;
};
#endif //__PolyVoxExample_H__
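// --- Illustrative sketch (not part of this commit) ---
// A minimal sketch of how a derived example might swap in its own shader via setShader() from
// initializeExample(). Because initialize() creates the default shader before calling
// initializeExample(), calling setShader() here replaces it before any rendering happens.
// The resource names ":/myeffect.vert" and ":/myeffect.frag" are hypothetical.
#include "PolyVoxExample.h"
#include <cstdlib>
#include <iostream>

class CustomShaderExample : public PolyVoxExample
{
public:
	CustomShaderExample(QWidget* parent)
		:PolyVoxExample(parent)
	{
	}

protected:
	void initializeExample() override
	{
		QSharedPointer<QGLShaderProgram> shader(new QGLShaderProgram);

		// Load the (hypothetical) custom shaders and wire up the same attribute
		// semantics that the framework's default shader uses.
		if (!shader->addShaderFromSourceFile(QGLShader::Vertex, ":/myeffect.vert") ||
			!shader->addShaderFromSourceFile(QGLShader::Fragment, ":/myeffect.frag"))
		{
			std::cerr << shader->log().toStdString() << std::endl;
			exit(EXIT_FAILURE);
		}
		shader->bindAttributeLocation("position", 0);
		shader->bindAttributeLocation("normal", 1);
		shader->bindAttributeLocation("material", 2);
		if (!shader->link())
		{
			std::cerr << shader->log().toStdString() << std::endl;
			exit(EXIT_FAILURE);
		}

		// Replace the framework's default shader before any meshes are drawn.
		setShader(shader);

		// An example would then build a volume, extract and decode a mesh, and call addMesh() as usual.
	}
};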

View File

@ -3,9 +3,9 @@
in vec4 position; // This will be the position of the vertex in model-space
// The usual matrices are provided
uniform mat4 cameraToClipMatrix;
uniform mat4 worldToCameraMatrix;
uniform mat4 modelToWorldMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
// This will be used by the fragment shader to calculate flat-shaded normals. This is an unconventional approach
// but we use it in this example framework because not all surface extractors generate surface normals.
@ -14,7 +14,7 @@ out vec4 worldPosition;
void main()
{
// Standard sequence of OpenGL transformations.
worldPosition = modelToWorldMatrix * position;
vec4 cameraPosition = worldToCameraMatrix * worldPosition;
gl_Position = cameraToClipMatrix * cameraPosition;
worldPosition = modelMatrix * position;
vec4 cameraPosition = viewMatrix * worldPosition;
gl_Position = projectionMatrix * cameraPosition;
}
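// --- Illustrative sketch (not part of this commit) ---
// A possible companion fragment shader showing how the interpolated worldPosition above can be
// turned into flat-shaded normals: the screen-space derivatives of the world position both lie in
// the plane of the triangle, so their cross product gives a per-face normal. The #version directive
// and output variable name are assumptions; match whatever the real example shaders declare.
#version 130

in vec4 worldPosition; // Passed in from the vertex shader above

out vec4 outputColor;

void main()
{
	// dFdx/dFdy give the rate of change of worldPosition across the triangle. Normalising their
	// cross product yields a flat normal (the cross-product order only affects the sign).
	vec3 normal = normalize(cross(dFdy(worldPosition.xyz), dFdx(worldPosition.xyz)));

	// Trivial colouring just to visualise the normal.
	outputColor = vec4(abs(normal), 1.0);
}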