Hi,

I'm looking for a solution to this problem as well.  I built a pipeline (a very 
simple one) like so:


Code:

osg::Group* ogrpDoNotBlurTheScene(osg::Camera* camera, osg::Node* glowedScene,
    unsigned tex_width, unsigned tex_height,
    unsigned windowWidth, unsigned windowHeight,
    osg::Camera::RenderTargetImplementation renderImplementation,
    osg::ref_ptr<osgPPU::Processor>& p_ppuProcessor,
    osg::ref_ptr<osgPPU::UnitOut>& noBlurOutUnit)
{
    // this group will house the osgPPU pipeline units
    osg::Group* group = new osg::Group;
    group->setName("NoBlurringEffect");

    // create the texture which will hold the usual view or scene
    osg::Texture* textureView = createRenderTextureWrks(windowWidth, windowHeight);

    // setup the camera, which will render the usual scene into the above
    //   allocated texture
    camera->setViewport(new osg::Viewport(0,0,windowWidth,windowHeight));
    camera->attach(osg::Camera::COLOR_BUFFER0, textureView);
    camera->setRenderTargetImplementation(renderImplementation);

    // setup osgPPU pipeline processor, which will use the main camera.
    //   This code allows the processor to have access to the main camera's
    //   texture output, but it does NOT actually get that texture and put
    //   it into the pipeline.  It only makes the camera's texture output
    //   available for retrieval.
    osg::ref_ptr<osgPPU::Processor> processor = new osgPPU::Processor();
    processor->setName("ProcessorForNormalScene");
    processor->setCamera(camera);
    p_ppuProcessor = processor;

    // setup unit which will bring the output of the camera into the pipeline.
    //   UnitCameraAttachmentBypass is the unit that actually gets the output
    //   texture of a camera and brings it into the pipeline so that that
    //   texture may be further manipulated.  It must be a direct child of the
    //   processor unit to accomplish this feat.
    osgPPU::UnitCameraAttachmentBypass* ucaByPass = new osgPPU::UnitCameraAttachmentBypass();
    ucaByPass->setBufferComponent(osg::Camera::COLOR_BUFFER0);
    ucaByPass->setName("mainCamOutputTexUCAB");
    processor->addChild(ucaByPass);

    // This unit of type UnitOut is required so that any data passed to it is
    //   output to the frame buffer object.  It must be the LAST unit of the
    //   pipeline.
    // A key point is the GLSL (OpenGL Shading Language) line:
    //   "  gl_FragColor=texture2D(textureNameInShader,gl_TexCoord[0].st);\n"
    //   This line takes the incoming texture and writes it to the output
    //   without modifying it.  It constructs a simple pass-through.
    // Notice that the output viewport is the SAME SIZE AS the camera's
    //   viewport.
    osgPPU::UnitOut* unitOut2 = new osgPPU::UnitOut();
    osgPPU::ShaderAttribute* shaderAttribute = new osgPPU::ShaderAttribute();
    {
        osg::Shader* shader = new osg::Shader(osg::Shader::FRAGMENT);
        const char* shaderSource =
            "uniform sampler2D textureNameInShader;\n"
            "void main()\n"
            "{\n"
            "  gl_FragColor=texture2D(textureNameInShader,gl_TexCoord[0].st);\n"
            "}";
        shader->setShaderSource(shaderSource);
        shaderAttribute->addShader(shader);
        shaderAttribute->setName("nomShaderAttribute");
        shaderAttribute->add("textureNameInShader", osg::Uniform::SAMPLER_2D);
        shaderAttribute->set("textureNameInShader", 0);

        unitOut2->setName("finalOutputUnit");
        unitOut2->setViewport(new osg::Viewport(0, 0, windowWidth, windowHeight));
        unitOut2->getOrCreateStateSet()->setAttributeAndModes(shaderAttribute);
    }
    noBlurOutUnit = unitOut2;

    // this line sends the output of ucaByPass into unitOut2, the final
    //   destination frame buffer object
    ucaByPass->addChild(unitOut2);


    // add the processor to the group; the osgPPU pipeline is returned
    //   below as that group.
    group->addChild(processor);

    return group;
}
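
For reference, here is roughly how I wire the returned group into my scene
graph (simplified from my real setup; the literal sizes and the "root" /
"glowedScene" names are placeholders):


Code:

osg::ref_ptr<osgPPU::Processor> ppuProcessor;
osg::ref_ptr<osgPPU::UnitOut>   noBlurOutUnit;

// viewer, root and glowedScene already exist at this point
osg::Group* ppuGroup = ogrpDoNotBlurTheScene(
    viewer.getCamera(), glowedScene.get(),
    1024, 1024,                          // tex_width / tex_height
    800, 600,                            // windowWidth / windowHeight
    osg::Camera::FRAME_BUFFER_OBJECT,    // renderImplementation
    ppuProcessor, noBlurOutUnit);

root->addChild(ppuGroup);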





and in another section of code, after the pipeline is built and the viewer is 
running, I call a routine to handle the resize event of my window like so:


Code:

class CMyWindowSizeHandler : public osgViewer::WindowSizeHandler
{
public:
    osgViewer::Viewer *viewer; // not needed
    osgPPU::UnitOut *unitOut2NoBlur;
    osg::ref_ptr<osg::Switch> m_oswBlur;
    osg::ref_ptr<osg::Switch> m_oswNoBlur;
    osg::ref_ptr<osgPPU::Processor> processor;

    CMyWindowSizeHandler(osgViewer::Viewer *v, osgPPU::UnitOut *p_unitOut2NoBlur,
        osg::ref_ptr<osgPPU::Processor> &p_ppuProcessor) :
        viewer(v), unitOut2NoBlur(p_unitOut2NoBlur), processor(p_ppuProcessor)
    {
        std::cout << "ctored win sz hdl\n";
    } // ctor

    bool handle(const osgGA::GUIEventAdapter& ea, osgGA::GUIActionAdapter& aa)
    {
        extern osg::Texture* createRenderTextureWrks(int tex_width, int tex_height);

        switch(ea.getEventType())
        {
            case(osgGA::GUIEventAdapter::RESIZE):
            {
              std::cout<< "window resized"<< std::endl;
              osgViewer::ViewerBase::Windows wnds;
              viewer->getWindows(wnds);
              int x,y,wd,ht;
              wnds[0]->getWindowRectangle(x,y,wd,ht);
              std::cout<< "screen ht:="<< ht << "  screen wd:="<< wd << 
std::endl;
              std::cout<< "ht:="<< ea.getWindowHeight() << "  wd:=" << 
ea.getWindowWidth() << std::endl;

              if ((wd != ea.getWindowWidth()) || (ht != ea.getWindowHeight()))
              {
                osg::Texture *textureView =
                    createRenderTextureWrks(ea.getWindowWidth(), ea.getWindowHeight());
                viewer->getCamera()->setViewport(
                    new osg::Viewport(0, 0, ea.getWindowWidth(), ea.getWindowHeight()));
                viewer->getCamera()->detach(osg::Camera::COLOR_BUFFER0);
                viewer->getCamera()->attach(osg::Camera::COLOR_BUFFER0, textureView);
                unitOut2NoBlur->setViewport(
                    new osg::Viewport(0, 0, ea.getWindowWidth(), ea.getWindowHeight()));
                processor->setCamera(viewer->getCamera());
              }
              break;
            }// case RESIZE
            default:
                break;
        }// sw
        return false;
    }// handle
}; // class CMyWindowSizeHandler
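
I install the handler after building the pipeline, roughly like this:


Code:

viewer.addEventHandler(new CMyWindowSizeHandler(&viewer,
                                                noBlurOutUnit.get(),
                                                ppuProcessor));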




What I have found so far:
1. my texture area is NOT resized on output
2. attaching a "new" texture to my main viewer's camera does not appear to
have any effect
3. re-pointing my processor at the main viewer's camera, which I just
altered, has no effect
4. once you set the camera for a processor unit, you cannot access that
camera to get its pointer and thereby gain access to the camera's attached
texture
5. once you attach a texture to a camera, you can no longer access that
camera's texture, so setTextureSize() can NOT be called on a texture that
has already been attached to a camera (see the sketch after this list for
the workaround I am trying)
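
Regarding points 4 and 5, the workaround I am experimenting with is to keep
my own osg::ref_ptr to the render texture from the moment it is created, so
that I never have to ask the camera or the processor for it back.  A minimal
sketch (the member name m_textureView is mine, and whether dirtying the
texture object is enough for every render-target implementation is an
assumption I have not verified):


Code:

// kept from creation time, instead of being retrieved from the camera later
osg::ref_ptr<osg::Texture2D> m_textureView;

void resizeRenderTexture(osg::Camera* camera, int width, int height)
{
    // resize the texture we still hold a reference to ...
    m_textureView->setTextureSize(width, height);
    m_textureView->dirtyTextureObject();   // force re-allocation next frame

    // ... and keep the camera's viewport in step with it
    camera->setViewport(new osg::Viewport(0, 0, width, height));
}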

I am afraid of deleting my entire pipeline and rebuilding it, as I have found
that removing a child node within a pipeline, or deleting a unit in a
pipeline, crashes my application.  See the code I posted in this link:
[url]http://forum.openscenegraph.org/viewtopic.php?p=26645#26645[/url]
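
That is also why CMyWindowSizeHandler above carries the m_oswBlur and
m_oswNoBlur switches: my current plan is to pre-build both pipelines once
and toggle between them with osg::Switch rather than ever removing units at
runtime.  A sketch of what I mean (selectBlurPipeline is a helper I have not
actually written yet, and whether toggling avoids the crash is unconfirmed):


Code:

// toggle between two pre-built osgPPU pipelines instead of deleting
// and rebuilding units while the viewer is running
void CMyWindowSizeHandler::selectBlurPipeline(bool blur)
{
    if (blur)
    {
        m_oswBlur->setAllChildrenOn();
        m_oswNoBlur->setAllChildrenOff();
    }
    else
    {
        m_oswBlur->setAllChildrenOff();
        m_oswNoBlur->setAllChildrenOn();
    }
}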

So, I have found no solution to this problem yet either. Do either of you have 
code that you can share if you have solved this problem?  I would appreciate 
seeing your solution.

Thanks!

Cheers,
Allen
