Test of using glXGetProcAddress().

Robert Osfield, 2021-03-31 13:50:13 +01:00
commit fd80861510 (parent 675be3efe1)


@@ -18,6 +18,7 @@
 #include <osg/Shader>
 #include <osgUtil/Optimizer>
+#include <osg/GLExtensions>
 
 class DrawMeshTasks : public osg::Drawable
 {
 public:
@@ -40,9 +41,17 @@ public:
     virtual void drawImplementation(osg::RenderInfo& renderInfo) const
     {
         const osg::GLExtensions* extensions = renderInfo.getState()->get<osg::GLExtensions>();
-        if (extensions->isMeshShaderSupported && extensions->glDrawMeshTasksNV)
+
+        void* (* my_glXGetProcAddress) (const GLchar *name);
+        osg::setGLExtensionFuncPtr(my_glXGetProcAddress, "glXGetProcAddress", "glXGetProcAddressARB");
+
+        void (GL_APIENTRY * my_glDrawMeshTasksNV) (GLuint first, GLuint count);
+        osg::convertPointer(my_glDrawMeshTasksNV, my_glXGetProcAddress("glDrawMeshTasksNV"));
+
+        if (extensions->isMeshShaderSupported && my_glDrawMeshTasksNV)
         {
-            extensions->glDrawMeshTasksNV(first, count);
+            my_glDrawMeshTasksNV(first, count);
         }
         else
         {
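
The new code resolves the entry point at draw time: it first looks up glXGetProcAddress itself via osg::setGLExtensionFuncPtr, then uses it to fetch glDrawMeshTasksNV, casting the result with osg::convertPointer. A minimal standalone sketch of the same pattern (not part of the commit; lookupDrawMeshTasksNV and DrawMeshTasksNVProc are hypothetical names, and the glXGetProcAddressARB fallback assumes a GLX platform):

    #include <osg/GLExtensions> // osg::setGLExtensionFuncPtr, osg::convertPointer

    // Hypothetical helper sketching the lookup pattern used in the diff above:
    // resolve glXGetProcAddress itself, then use it to fetch the
    // NV_mesh_shader entry point. Returns 0 if either step fails.
    typedef void (GL_APIENTRY * DrawMeshTasksNVProc)(GLuint first, GLuint count);

    DrawMeshTasksNVProc lookupDrawMeshTasksNV()
    {
        void* (* my_glXGetProcAddress)(const GLchar* name) = 0;
        osg::setGLExtensionFuncPtr(my_glXGetProcAddress, "glXGetProcAddress", "glXGetProcAddressARB");
        if (!my_glXGetProcAddress) return 0;

        DrawMeshTasksNVProc proc = 0;
        osg::convertPointer(proc, my_glXGetProcAddress("glDrawMeshTasksNV"));
        return proc;
    }

Since drawImplementation() runs every frame, caching the resolved pointer (for example per context) would avoid repeating the lookup on each draw.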
@@ -55,7 +64,6 @@ int main( int argc, char** argv )
 {
     osg::ArgumentParser arguments( &argc, argv );
 
-
     static const char* meshSource = \
         "#version 450 \n"
         "#extension GL_NV_mesh_shader : enable\n"
@@ -92,111 +100,14 @@ int main( int argc, char** argv )
     osg::ref_ptr<osg::Node> drawMesh = new DrawMeshTasks(0, 1);
     drawMesh->getOrCreateStateSet()->setAttribute( program.get() );
 
+    osgViewer::Viewer viewer(arguments);
-    const int width( 800 ), height( 450 );
-    const std::string version( "4.6" );
-
-    osg::ref_ptr< osg::GraphicsContext::Traits > traits = new osg::GraphicsContext::Traits();
-    traits->x = 20; traits->y = 30;
-    traits->width = width; traits->height = height;
-    traits->windowDecoration = true;
-    traits->doubleBuffer = true;
-    traits->glContextVersion = version;
-    traits->readDISPLAY();
-    traits->setUndefinedScreenDetailsToDefaultScreen();
-
-    osg::ref_ptr< osg::GraphicsContext > gc = osg::GraphicsContext::createGraphicsContext( traits.get() );
-    if( !gc.valid() )
-    {
-        osg::notify( osg::FATAL ) << "Unable to create OpenGL v" << version << " context." << std::endl;
-        return( 1 );
-    }
-
-    osgViewer::Viewer viewer;
-
-    // Create a Camera that uses the above OpenGL context.
-    osg::Camera* cam = viewer.getCamera();
-    cam->setGraphicsContext( gc.get() );
-
-    // Must set perspective projection for fovy and aspect.
-    cam->setProjectionMatrix( osg::Matrix::perspective( 30., (double)width/(double)height, 1., 100. ) );
-
-    // Unlike OpenGL, OSG viewport does *not* default to window dimensions.
-    cam->setViewport( new osg::Viewport( 0, 0, width, height ) );
     viewer.setSceneData( drawMesh );
+    viewer.setLight( 0 );
+    viewer.setLightingMode( osg::View::NO_LIGHT );
 
     // for non GL3/GL4 and non GLES2 platforms we need to enable the osg_ uniforms that the shaders will use,
     // you don't need these two lines on GL3/GL4 and GLES2 specific builds as these will be enabled by default.
-    gc->getState()->setUseModelViewAndProjectionUniforms(true);
-    gc->getState()->setUseVertexAttributeAliasing(true);
+    //gc->getState()->setUseModelViewAndProjectionUniforms(true);
+    //gc->getState()->setUseVertexAttributeAliasing(true);
     return( viewer.run() );
 }
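
With the hand-built GraphicsContext removed, there is no gc on which to call setUseModelViewAndProjectionUniforms(), which is presumably why those two lines are commented out rather than ported. If the flags were still wanted, one possible approach (not in the commit, a sketch using the standard osgViewer API) is to set them on the contexts the viewer itself creates, after realize():

    viewer.realize();

    // Apply the state flags to every graphics window the viewer created.
    osgViewer::Viewer::Windows windows;
    viewer.getWindows(windows);
    for(osgViewer::Viewer::Windows::iterator itr = windows.begin(); itr != windows.end(); ++itr)
    {
        (*itr)->getState()->setUseModelViewAndProjectionUniforms(true);
        (*itr)->getState()->setUseVertexAttributeAliasing(true);
    }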
-/*
-Building OSG for OpenGL 3.x
-
-OSG currently supports OpenGL 3.x on Windows. This comment block describes the
-necessary configuration steps.
-
-Get the draft gl3.h header file from OpenGL.org and put it in a folder called
-GL3 somewhere on your hard drive. OSG includes this header as <GL3/gl3.h>. Get
-gl3.h from here:
-    http://www.opengl.org/registry/
-
-Open the cmake-gui and load OSG's top-level CMakeLists.txt. You'll need to make
-several changes.
-
- * Add the path to <GL3/gl3.h> to the CMake compiler flags, CMAKE_CXX_FLAGS and
-   CMAKE_CXX_FLAGS_DEBUG (for release and debug builds; others if you use other
-   build configurations). The text to add should look something like this:
-       /I C:\GLHeader
-   The folder GLHeader should contain a subfolder GL3, which in turn contains
-   gl3.h.
-
- * Enable the following CMake variable:
-       OSG_GL3_AVAILABLE
-
- * Disable the following CMake variables:
-       OSG_GL1_AVAILABLE
-       OSG_GL2_AVAILABLE
-       OSG_GLES1_AVAILABLE
-       OSG_GLES2_AVAILABLE
-       OSG_GL_DISPLAYLISTS_AVAILABLE
-       OSG_GL_FIXED_FUNCTION_AVAILABLE
-       OSG_GL_MATRICES_AVAILABLE
-       OSG_GL_VERTEX_ARRAY_FUNCS_AVAILABLE
-       OSG_GL_VERTEX_FUNCS_AVAILABLE
-
-Create your project files in cmake-gui as usual, and build OSG as usual.
-
-If you have an external project that will depend on OSG built for OpenGL 3.x,
-you'll need to ensure your external project also uses the compiler include
-directives to find <GL3/gl3.h>.
-
-To verify your application is using a pure OpenGL 3.x context, set
-OSG_NOTIFY_LEVEL=INFO in the environment and check the console output. Context
-creation displays output such as the following:
-    GL3: Attempting to create OpenGL3 context.
-    GL3: version: 3.1
-    GL3: context flags: 0
-    GL3: profile: 0
-    GL3: context created successfully.
-
-When your app begins rendering, it displays information about the actual context
-it is using:
-    glVersion=3.1, isGlslSupported=YES, glslLanguageVersion=1.4
-
---
-
-Under Linux the following is sufficient to configure the OSG build for a GL3 core profile build:
-
-    cmake . -DOSG_GL3_AVAILABLE=ON \
-        -DOSG_GL1_AVAILABLE=OFF -DOSG_GL2_AVAILABLE=OFF -DOSG_GLES1_AVAILABLE=OFF -DOSG_GLES2_AVAILABLE=OFF \
-        -DOSG_GL_DISPLAYLISTS_AVAILABLE=OFF -DOSG_GL_FIXED_FUNCTION_AVAILABLE=OFF -DOSG_GL_MATRICES_AVAILABLE=OFF \
-        -DOSG_GL_VERTEX_ARRAY_FUNCS_AVAILABLE=OFF -DOSG_GL_VERTEX_FUNCS_AVAILABLE=OFF
-*/
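
The removed notes verify the context by raising OSG_NOTIFY_LEVEL to INFO and reading the console output. An alternative sketch (not part of the commit; PrintGLVersion is a hypothetical name) queries the realized context directly with a realize operation:

    #include <osg/GraphicsThread> // osg::GraphicsOperation
    #include <osg/GL>
    #include <iostream>

    // Hypothetical realize operation that reports the actual GL version of
    // each context the viewer creates.
    struct PrintGLVersion : public osg::GraphicsOperation
    {
        PrintGLVersion() : osg::GraphicsOperation("PrintGLVersion", false) {}

        virtual void operator () (osg::GraphicsContext*)
        {
            const GLubyte* version = glGetString(GL_VERSION);
            const GLubyte* renderer = glGetString(GL_RENDERER);
            std::cout << "GL_VERSION = " << (version ? (const char*)version : "unknown")
                      << ", GL_RENDERER = " << (renderer ? (const char*)renderer : "unknown") << std::endl;
        }
    };

    // usage, before viewer.run():
    //     viewer.setRealizeOperation(new PrintGLVersion);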