I realise this topic has been posted a few times; I have read those posts and tried to follow the suggestions in them. However, I cannot get RTT to work on the iPad. I am using an iPad Air and Ogre 1.9.
I have the capture from the device camera working, and the data saves correctly as an Ogre::Image. I am using the RTT code I normally use for video playback on OS X, which is pretty much the same idea.
I believe the issue is that the data is non-pow2 (1920 x 1080). To test this I tried two images, one 1024x1024 and one 1024x768: the 1024x1024 image renders on the texture fine, but the 1024x768 one just renders black. Our build of Ogre does render non-pow2 textures, including non-pow2 textures created after the initial load of Ogre.
I have tried both Copy and FBO as the RTT mode, and FSAA at 0, 2 and 4.
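For reference, non-pow2 support can be queried from the render system capabilities, so I can at least confirm what the driver reports on the iPad (assuming Root and the render system are already initialised):
Code:
const Ogre::RenderSystemCapabilities* caps =
    Ogre::Root::getSingleton().getRenderSystem()->getCapabilities();
// RSC_NON_POWER_OF_2_TEXTURES: NPOT textures supported at all.
// "Limited" NPOT support (typical on GL ES) generally means no mipmaps and clamp-only addressing.
bool npot        = caps->hasCapability(Ogre::RSC_NON_POWER_OF_2_TEXTURES);
bool npotLimited = caps->getNonPOW2TexturesLimited();
Ogre::LogManager::getSingleton().logMessage(
    "Non-pow2 textures: " + Ogre::StringConverter::toString(npot) +
    ", limited: " + Ogre::StringConverter::toString(npotLimited));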
Capture
Code:
// Get the pixel buffer from the sample buffer the camera delivered
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

// Lock it so the base address stays valid while we read it
CVPixelBufferLockBaseAddress(pixelBuffer, 0);

int size = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
size *= CVPixelBufferGetHeight(pixelBuffer);
void* data = CVPixelBufferGetBaseAddress(pixelBuffer);

// Hand the frame over while the buffer is still locked - the base address is
// only guaranteed valid until CVPixelBufferUnlockBaseAddress is called
renderManager->setCurrentVideoFrame((unsigned char*)data, size);

// Now it is safe to unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
data = NULL;
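For context, renderManager just shuttles the latest frame from the capture queue to the render thread. Its implementation is not shown here, so take this as a rough sketch of the contract behind setCurrentVideoFrame / copyCurrentVideoFrame (copy in under a lock, malloc'd copy out that the listener frees), not the exact class:
Code:
#include <cstdlib>
#include <cstring>
#include <mutex>
#include <vector>

class RenderManager
{
public:
    // Capture thread: stash a copy of the latest camera frame.
    void setCurrentVideoFrame(const unsigned char* data, int size)
    {
        std::lock_guard<std::mutex> guard(mMutex);
        mFrame.assign(data, data + size);
        mDirty = true;
    }

    // Render thread: return a malloc'd copy of the latest frame (caller frees),
    // or NULL if nothing new has arrived since the last call.
    unsigned char* copyCurrentVideoFrame(int* size)
    {
        std::lock_guard<std::mutex> guard(mMutex);
        if (!mDirty || mFrame.empty())
            return NULL;
        unsigned char* copy = (unsigned char*)std::malloc(mFrame.size());
        std::memcpy(copy, &mFrame[0], mFrame.size());
        *size = (int)mFrame.size();
        mDirty = false;
        return copy;
    }

private:
    std::vector<unsigned char> mFrame;
    std::mutex mMutex;
    bool mDirty = false;
};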
Frame listener
Code:
/*
 * Constructor
 */
RTTFrameListener::RTTFrameListener(){
    // Drop any previous texture with this name, then create a new one for our video
    Ogre::TextureManager::getSingleton().remove("RTTTexture");
    _texturePtr = Ogre::TextureManager::getSingleton().createManual("RTTTexture",
        Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
        Ogre::TEX_TYPE_2D,
        1920, 1080,
        1,                          // depth: 1 for a 2D texture
        0,                          // no mipmaps for a per-frame dynamic texture
        Ogre::PF_X8B8G8R8,
        Ogre::TU_DYNAMIC_WRITE_ONLY);
}
bool RTTFrameListener::frameRenderingQueued(const Ogre::FrameEvent& evt){
    Ogre::FrameListener::frameRenderingQueued(evt);

    int size = 0;
    unsigned char* frameData = renderManager->copyCurrentVideoFrame(&size);
    if(frameData != NULL){
        // Wrap the raw frame and blit it into the texture's pixel buffer
        Ogre::PixelBox pb(1920, 1080, 1, Ogre::PF_BYTE_RGBA, frameData);
        Ogre::HardwarePixelBufferSharedPtr buffer = _texturePtr->getBuffer();
        buffer->blitFromMemory(pb);

        free(frameData);
        frameData = NULL;
    }

    // Returning false would stop rendering, so always return true here
    return true;
}
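In case the per-frame format conversion in blitFromMemory is part of the problem, one variation is to lock the pixel buffer with HBL_DISCARD and copy row by row instead, assuming the incoming bytes already match the texture's pixel format (same frameData and _texturePtr as above):
Code:
Ogre::HardwarePixelBufferSharedPtr buffer = _texturePtr->getBuffer();
// Lock the whole 1920x1080 surface for writing, discarding the old contents
const Ogre::PixelBox& dest = buffer->lock(Ogre::Image::Box(0, 0, 1920, 1080),
                                          Ogre::HardwareBuffer::HBL_DISCARD);
Ogre::uint8* dst = static_cast<Ogre::uint8*>(dest.data);
const unsigned char* src = frameData;
const size_t srcRowBytes = 1920 * 4;   // tightly packed 32-bit rows from the camera
const size_t dstRowBytes = dest.rowPitch * Ogre::PixelUtil::getNumElemBytes(dest.format);
for (size_t y = 0; y < 1080; ++y)
{
    memcpy(dst, src, srcRowBytes);     // copy one row, respecting the destination pitch
    dst += dstRowBytes;
    src += srcRowBytes;
}
buffer->unlock();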
Material setup
Code:
_rttFrameListener = new RTTFrameListener();

// Attach it to a material
Ogre::MaterialPtr material = Ogre::MaterialManager::getSingleton().getByName(rttMaterial);
Ogre::TextureUnitState* ts = material->getTechnique(0)->getPass(0)->getTextureUnitState(0);

// Attach the texture to the material's single texture unit and set up its properties
ts->setTextureName("RTTTexture", Ogre::TEX_TYPE_2D);
ts->setTextureFiltering(Ogre::FO_LINEAR, Ogre::FO_LINEAR, Ogre::FO_NONE);
ts->setTextureAddressingMode(Ogre::TextureUnitState::TAM_CLAMP);
ts = NULL;

OgreFramework::getSingleton().m_pRoot->addFrameListener(_rttFrameListener);
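For completeness, a minimal way to get this material on screen for testing is a full-screen background quad; something along these lines (sceneManager is a placeholder for however you reach your Ogre::SceneManager):
Code:
#include <OgreRectangle2D.h>

Ogre::Rectangle2D* videoQuad = new Ogre::Rectangle2D(true);
videoQuad->setCorners(-1.0f, 1.0f, 1.0f, -1.0f);               // full screen in clip space
videoQuad->setMaterial("RTTMaterial");
videoQuad->setRenderQueueGroup(Ogre::RENDER_QUEUE_BACKGROUND);
videoQuad->setBoundingBox(Ogre::AxisAlignedBox::BOX_INFINITE); // never frustum-culled
sceneManager->getRootSceneNode()
            ->createChildSceneNode("VideoBackground")
            ->attachObject(videoQuad);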
Material script
Code:
material RTTMaterial
{
    receive_shadows off

    technique
    {
        pass
        {
            depth_check off
            depth_write off
            cull_software none
            cull_hardware none
            lighting off
            ambient 0 0 0
            diffuse 1 1 1

            texture_unit RTTMaterial
            {
            }
        }
    }
}
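For what it's worth, I can also log the size Ogre actually allocates for the texture right after createManual, in case the render system quietly rounds 1920x1080 up to a pow2 size:
Code:
Ogre::LogManager::getSingleton().logMessage(
    "RTTTexture allocated at " +
    Ogre::StringConverter::toString(_texturePtr->getWidth()) + "x" +
    Ogre::StringConverter::toString(_texturePtr->getHeight()) +
    " (requested 1920x1080)");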
Kind Regards
Carl