Ok, here's a little example (code is attached). Sorry if it's a bit contrived,
I couldn't think of anything simple but illustrative. Basically it renders a
cylinder with the following stages:
1) Render the cylinder to a texture from slaveCamera
2) Process the texture from stage 1 with firstProcessor, inverting the colors
3) Render the cylinder to a texture from viewer->camera, this time using the
result of stage 2 as a texture
4) Process the texture from stage 3 with secondProcessor, which makes the image
look wavy
The problem is that instead of this ordering, we get 1-3-2-4, which means that
the pre-render result is always from the previous frame. If you compile and run
the program, you'll notice that this causes a green fringe at the edges of the
cylinder when you make it move (because the background color of the texture is
green). Here's an image of this whole thing:
http://a.imagehost.org/view/0333/osgppukuva
To fix this problem, we can try to add firstProcessor as a child of slaveCamera
instead of root, as explained in my previous posts. However, this somehow
breaks stage 3, which refuses to render to a texture anymore.
To see the original problem, run the program and move the object with the
mouse. This should give the ugly green borders. Comment out the first line
(#define) of the code to see how it breaks when we attempt to alter the
rendering order.
Anyway, this is my problem of course and I'm not demanding that anyone fix it
for me, but I think that having the ability to do this would be very beneficial
for osgPPU in general. As I mentioned before, osgPPU would be perfectly suited
for things such as processing shadow maps or any number of other advanced
algorithms, aside from this one little detail.
Thanks,
Miika
------------------
Read this topic online here:
http://forum.openscenegraph.org/viewtopic.php?p=16027#16027
// If ATTACH_PROCESSOR_TO_ROOT is defined, do the typical attachment of the
// post-processor to the scene root. Comment out the #define below to attempt
// to fix the render order (which goes wrong).
#define ATTACH_PROCESSOR_TO_ROOT
#include <iostream>
#include <osg/Texture>
#include <osg/Geode>
#include <osg/ShapeDrawable>
#include <osg/Camera>
#include <osg/Texture2D>
#include <osg/Program>
#include <osg/Shader>
#include <osg/Node>
#include <osg/Group>
#include <osgViewer/Viewer>
#include <osgGA/TrackballManipulator>
#include <osgPPU/Processor.h>
#include <osgPPU/UnitInOut.h>
#include <osgPPU/UnitOut.h>
#include <osgPPU/UnitCameraAttachmentBypass.h>
#include <osgPPU/ShaderAttribute.h>
int main()
{
// Set up the viewer
osgViewer::Viewer viewer;
unsigned int screenWidth;
unsigned int screenHeight;
osg::GraphicsContext::getWindowingSystemInterface()->getScreenResolution(osg::GraphicsContext::ScreenIdentifier(0),
screenWidth, screenHeight);
unsigned int windowWidth = 640;
unsigned int windowHeight = 480;
viewer.setUpViewInWindow((screenWidth-windowWidth)/2,
(screenHeight-windowHeight)/2, windowWidth, windowHeight);
viewer.setThreadingModel(osgViewer::Viewer::SingleThreaded);
// Add root node with two groups: rttScene will be rendered by
slaveCamera and processed, mainScene will be rendered by osgViewer's camera
osg::Group* root = new osg::Group();
osg::Group* rttScene = new osg::Group();
osg::Group* mainScene = new osg::Group();
// Add something to see, same to both scenes
osg::Geode* geode = new osg::Geode();
geode->addDrawable(new osg::ShapeDrawable(new
osg::Cylinder(osg::Vec3(4.4f,0.0f,0.0f),1.0f,1.4f)));
rttScene->addChild(geode);
mainScene->addChild(geode);
root->addChild(mainScene);
// Create the texture to render to...
osg::Texture2D* slaveCameraTexture = new osg::Texture2D;
{
slaveCameraTexture->setTextureSize(640, 480);
slaveCameraTexture->setInternalFormat(GL_RGBA);
slaveCameraTexture->setFilter(osg::Texture2D::MIN_FILTER,osg::Texture2D::LINEAR);
slaveCameraTexture->setFilter(osg::Texture2D::MAG_FILTER,osg::Texture2D::LINEAR);
}
// and the RTT camera
osg::Camera* slaveCamera = new osg::Camera();
{
slaveCamera->setClearMask(GL_COLOR_BUFFER_BIT |
GL_DEPTH_BUFFER_BIT);
slaveCamera->setClearColor(osg::Vec4(1,0,1,1));
slaveCamera->setViewport(new osg::Viewport(0,0,640,480));
slaveCamera->setReferenceFrame(osg::Transform::RELATIVE_RF);
slaveCamera->setRenderOrder(osg::Camera::PRE_RENDER);
slaveCamera->attach(osg::Camera::COLOR_BUFFER0,
slaveCameraTexture);
slaveCamera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
root->addChild(slaveCamera);
slaveCamera->addChild(rttScene);
}
// Set up a simple osgPPU processor which inverts the input image's
colors
osg::Texture* rttTexture;
osgPPU::Processor *firstProcessor = new osgPPU::Processor;
{
firstProcessor->setCamera(slaveCamera);
osgPPU::UnitCameraAttachmentBypass* unitCam = new
osgPPU::UnitCameraAttachmentBypass();
unitCam->setBufferComponent(osg::Camera::COLOR_BUFFER0);
firstProcessor->addChild(unitCam);
osgPPU::UnitInOut* ppuInvert = new osgPPU::UnitInOut;
ppuInvert->setName("ppuInvert");
{
osg::Shader* invShader = new
osg::Shader(osg::Shader::FRAGMENT,
"uniform sampler2D inputTexture;\n"\
"void main() {\n"\
" gl_FragColor.rgb = 1.0 -
texture2D(inputTexture, gl_TexCoord[0].xy).xyz;\n"\
" gl_FragColor.a = 1.0;\n"\
"}\n"
);
osgPPU::ShaderAttribute* InvertShader = new
osgPPU::ShaderAttribute;
InvertShader->addShader(invShader);
ppuInvert->getOrCreateStateSet()->setAttributeAndModes(InvertShader);
unitCam->addChild(ppuInvert);
}
rttTexture = ppuInvert->getOrCreateOutputTexture();
}
// This is the critical part - if we attach to the root, we get a wrong
render stage.
// If we attach it to the slave camera, the ordering will be correct
but the main camera won't render to an FBO anymore
#ifdef ATTACH_PROCESSOR_TO_ROOT
root->addChild(firstProcessor);
#else
slaveCamera->addChild(firstProcessor);
#endif
/*
// Third alternative, if needed - similar results with the slaveCamera
attachment
#else
osg::Group* procGroup = new osg::Group();
procGroup->getOrCreateStateSet()->setRenderBinDetails(-1, "RenderBin");
procGroup->addChild(firstProcessor);
root->addChild(procGroup);
#endif
*/
// Set a shader to the main scene. Simply shows the input texture (RTT
from slaveCamera) on top of the geometry using NDC coordinates.
osg::Program* mainshaderProgramObject = new osg::Program;
osg::Shader* mainshaderFragmentObject = new
osg::Shader(osg::Shader::FRAGMENT,
"uniform sampler2D rttTexture;\n"\
"void main() {\n"\
" gl_FragColor = texture2D(rttTexture, gl_FragCoord.xy /
vec2(640, 480));\n"\
"}\n"
);
mainshaderProgramObject->addShader(mainshaderFragmentObject);
osg::Uniform* rttTextureUniform = new osg::Uniform("rttTexture",1);
mainScene->getOrCreateStateSet()->addUniform(rttTextureUniform);
mainScene->getOrCreateStateSet()->setTextureAttributeAndModes(1,rttTexture,osg::StateAttribute::ON
| osg::StateAttribute::OVERRIDE);
mainScene->getOrCreateStateSet()->setTextureMode(1,GL_TEXTURE_2D,osg::StateAttribute::ON);
mainScene->getOrCreateStateSet()->setAttributeAndModes(mainshaderProgramObject,
osg::StateAttribute::ON | osg::StateAttribute::OVERRIDE);
// Create the RTT texture for main camera, with a silly PPU that makes
the image wawy...
osg::Texture2D* mainCameraTexture = new osg::Texture2D;
{
mainCameraTexture->setTextureSize(640, 480);
mainCameraTexture->setInternalFormat(GL_RGBA);
mainCameraTexture->setFilter(osg::Texture2D::MIN_FILTER,osg::Texture2D::LINEAR);
mainCameraTexture->setFilter(osg::Texture2D::MAG_FILTER,osg::Texture2D::LINEAR);
}
osgPPU::Processor *secondProcessor = new osgPPU::Processor;
{
osg::Camera* mainCamera = viewer.getCamera();
mainCamera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
mainCamera->attach(osg::Camera::COLOR_BUFFER0,
mainCameraTexture);
secondProcessor->setCamera(mainCamera);
osgPPU::UnitCameraAttachmentBypass* unitMainCam = new
osgPPU::UnitCameraAttachmentBypass();
unitMainCam->setBufferComponent(osg::Camera::COLOR_BUFFER0);
secondProcessor->addChild(unitMainCam);
osgPPU::UnitInOut* ppuWavy = new osgPPU::UnitInOut;
ppuWavy->setName("ppuWavy");
{
osg::Shader* wavShader = new
osg::Shader(osg::Shader::FRAGMENT,
"uniform sampler2D inputTexture;\n"\
"void main() {\n"\
" gl_FragColor =
texture2D(inputTexture, gl_TexCoord[0].xy + 0.01*vec2(0,
sin(gl_TexCoord[0].x*100.0)));\n"\
"}\n"
);
osgPPU::ShaderAttribute* WavyShader = new
osgPPU::ShaderAttribute;
WavyShader->addShader(wavShader);
ppuWavy->getOrCreateStateSet()->setAttributeAndModes(WavyShader);
unitMainCam->addChild(ppuWavy);
}
osgPPU::UnitOut *unitOut = new osgPPU::UnitOut;
unitOut->setName("Output");
ppuWavy->addChild(unitOut);
}
root->addChild(secondProcessor);
// Run the viewer
viewer.setSceneData(root);
viewer.setCameraManipulator(new osgGA::TrackballManipulator());
viewer.realize();
while( !viewer.done() ) { viewer.frame(); }
return 0;
}
_______________________________________________
osg-users mailing list
[email protected]
http://lists.openscenegraph.org/listinfo.cgi/osg-users-openscenegraph.org