From Colin Dunlop, add live video stream support

Robert Osfield 2007-11-06 12:07:23 +00:00
parent 2e2ddba49e
commit 043ed14ba9
5 changed files with 1670 additions and 3 deletions
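The new ReaderWriterQT handling registers a ".live" pseudo extension: reading "devices.live" lists the available video/audio components and their input IDs, and reading "deviceID:deviceInputID.live" (for example "0:0.live") opens that input as a live osg::ImageStream backed by QuicktimeLiveImageStream. A minimal application-side sketch of the intended use follows (not part of this commit; the device and input IDs are examples only):

#include <osg/ref_ptr>
#include <osg/ImageStream>
#include <osg/Texture2D>
#include <osgDB/ReadFile>

int main()
{
    // List the available capture devices and inputs (the IDs are printed to the console).
    osgDB::readImageFile("devices.live");

    // Open video device 0, input 0 as a continuously updated image.
    osg::ref_ptr<osg::Image> image = osgDB::readImageFile("0:0.live");
    if (!image.valid()) return 1;

    if (osg::ImageStream* stream = dynamic_cast<osg::ImageStream*>(image.get()))
        stream->play();

    // Bind the live image to a texture; the plugin keeps updating it in its own thread.
    osg::ref_ptr<osg::Texture2D> texture = new osg::Texture2D;
    texture->setImage(image.get());
    return 0;
}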

View File

@ -8,16 +8,20 @@ INCLUDE_DIRECTORIES(${QUICKTIME_INCLUDE_DIR})
SET(TARGET_SRC
MovieData.cpp
QTUtils.cpp
QTLiveUtils.cpp
QTtexture.cpp
QuicktimeImageStream.cpp
QuicktimeLiveImageStream.cpp
ReaderWriterQT.cpp
)
SET(TARGET_H
MovieData.h
QTUtils.h
QTLiveUtils.h
QTtexture.h
QuicktimeImageStream.h
QuicktimeLiveImageStream.h
)
SET(TARGET_ADDED_LIBRARIES)

View File

@ -0,0 +1,847 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2007 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include <cstdio>
#include <cstdlib>
#include <string>
#include <sstream>
#include "osg/Image"
#include "osg/Notify"
#include "osg/Geode"
#include "osg/GL"
#include "osgDB/FileNameUtils"
#include "osgDB/Registry"
#include "osgDB/FileUtils"
#ifdef __APPLE__
#include <Quicktime/Quicktime.h>
#include <Carbon/Carbon.h>
#define QT_HANDLE_IMAGES_ALSO
#else
#include <QTML.h>
#include <Movies.h>
#include <Quickdraw.h>
#include <QDOffscreen.h>
#include <QuicktimeComponents.h>
#include <FixMath.h>
#include <CGBitmapContext.h>
#include <CGImage.h>
#include <CGColorSpace.h>
#include <ImageCompression.h>
#include <TextUtils.h>
#endif
#include "QTLiveUtils.h"
// Utils
char* pstr_printable(StringPtr src_pstr)
{
char* dst_cstr = new char[256];
p2cstrcpy(dst_cstr, src_pstr);
return dst_cstr;
}
void initialize_quicktime_qtml()
{
osg::notify(osg::NOTICE) << "QT QTML: Starting up... " << std::endl;
OSErr err;
#ifndef __APPLE__
err = InitializeQTML(0);
if (err!=0)
osg::notify(osg::FATAL) << "Error while initializing quicktime QTML: " << err << std::endl;
else
osg::notify(osg::NOTICE) << "QT QTML: initialized successfully" << std::endl;
#endif
}
void terminite_quicktime_qtml()
{
osg::notify(osg::NOTICE) << "QT QTML: Closing down... " << std::endl;
#ifndef __APPLE__
TerminateQTML();
#endif
osg::notify(osg::NOTICE) << "QT QTML: Closed successfully" << std::endl;
}
void enter_quicktime_movies()
{
osg::notify(osg::NOTICE) << "QT Movies: Starting up... " << std::endl;
OSErr err;
err = EnterMovies();
if (err!=0)
osg::notify(osg::FATAL) << "Error while initializing Movies: " << err << std::endl;
else
osg::notify(osg::NOTICE) << "QT Movies: initialized successfully" << std::endl;
}
void leave_quicktime_movies()
{
osg::notify(osg::NOTICE) << "QT Movies: Closing down... " << std::endl;
#ifndef __APPLE__
ExitMovies();
#endif
osg::notify(osg::NOTICE) << "QT Movies: closed successfully" << std::endl;
}
#if TARGET_OS_MAC
void enter_quicktime_movies_mt()
{
osg::notify(osg::NOTICE) << "QT Movies MT: Starting up... " << std::endl;
OSErr err;
err = EnterMoviesOnThread(0);
if (err!=0)
osg::notify(osg::FATAL) << "Error while initializing Movies MT: " << err << std::endl;
else
osg::notify(osg::NOTICE) << "QT Movies MT: initialized successfully" << std::endl;
}
void leave_quicktime_movies_mt()
{
osg::notify(osg::NOTICE) << "QT Movies MT: Closing down... " << std::endl;
#ifndef __APPLE__
ExitMoviesOnThread();
#endif
osg::notify(osg::NOTICE) << "QT Movies MT: closed successfully" << std::endl;
}
#endif
QTScopedQTMLInitialiser::QTScopedQTMLInitialiser()
{
initialize_quicktime_qtml();
}
QTScopedQTMLInitialiser::~QTScopedQTMLInitialiser()
{
terminite_quicktime_qtml();
}
QTScopedMovieInitialiser::QTScopedMovieInitialiser()
{
enter_quicktime_movies();
}
QTScopedMovieInitialiser::~QTScopedMovieInitialiser()
{
leave_quicktime_movies();
}
#if TARGET_OS_MAC
QTScopedMovieInitialiser_MT::QTScopedMovieInitialiser_MT()
{
enter_quicktime_movies_mt();
}
QTScopedMovieInitialiser_MT::~QTScopedMovieInitialiser_MT()
{
leave_quicktime_movies_mt();
}
#endif
// DigitizerInfo input/output Capability checker
bool supports_capability( long input_flags, long option_flags )
{
long result_l = (input_flags & option_flags);
return result_l == option_flags;
}
// Capability
void print_video_component_capability(VideoDigitizerComponent aComponent)
{
// Returns capability and status information about a specified video digitizer component.
VideoDigitizerError vid_err;
DigitizerInfo vid_info;
// Capability flags
osg::notify(osg::NOTICE) << std::endl;
vid_err = VDGetDigitizerInfo(aComponent, &vid_info);
if (vid_err) osg::notify(osg::NOTICE) << "VDGetDigitizerInfo(aComponent, &vid_info) - ERROR" << std::endl;
else
{
osg::notify(osg::NOTICE) << "DigitizerInfo:" << std::endl;
short vdigType = vid_info.vdigType;
if (vdigType == vdTypeBasic) osg::notify(osg::NOTICE) << "Digitizer Type : Basic (no clipping)" << std::endl;
if (vdigType == vdTypeAlpha) osg::notify(osg::NOTICE) << "Digitizer Type : Alpha clipping" << std::endl;
if (vdigType == vdTypeMask) osg::notify(osg::NOTICE) << "Digitizer Type : Mask Plane clipping" << std::endl;
if (vdigType == vdTypeKey) osg::notify(osg::NOTICE) << "Digitizer Type : Key Color(s) clipping" << std::endl;
short vdigSlot = vid_info.slot;
osg::notify(osg::NOTICE) << "Hardware Slot : " << vdigSlot << std::endl;
osg::notify(osg::NOTICE) << "Input Capability:" << std::endl << std::boolalpha;
long inputCapabilityFlags = vid_info.inputCapabilityFlags;
osg::notify(osg::NOTICE) << " NTSC : " << supports_capability(inputCapabilityFlags, digiInDoesNTSC) << std::endl;
osg::notify(osg::NOTICE) << " PAL : " << supports_capability(inputCapabilityFlags, digiInDoesPAL) << std::endl;
osg::notify(osg::NOTICE) << " Composite : " << supports_capability(inputCapabilityFlags, digiInDoesComposite) << std::endl;
osg::notify(osg::NOTICE) << " Component : " << supports_capability(inputCapabilityFlags, digiInDoesComponent) << std::endl;
osg::notify(osg::NOTICE) << " SVideo : " << supports_capability(inputCapabilityFlags, digiInDoesSVideo) << std::endl;
osg::notify(osg::NOTICE) << "Input Current:" << std::endl;
long inputCurrentFlags = vid_info.inputCurrentFlags;
osg::notify(osg::NOTICE) << " NTSC : " << supports_capability(inputCurrentFlags, digiInDoesNTSC) << std::endl;
osg::notify(osg::NOTICE) << " PAL : " << supports_capability(inputCurrentFlags, digiInDoesPAL) << std::endl;
osg::notify(osg::NOTICE) << " Composite : " << supports_capability(inputCurrentFlags, digiInDoesComposite) << std::endl;
osg::notify(osg::NOTICE) << " Component : " << supports_capability(inputCurrentFlags, digiInDoesComponent) << std::endl;
osg::notify(osg::NOTICE) << " SVideo : " << supports_capability(inputCurrentFlags, digiInDoesSVideo) << std::endl;
// Heights
short minDestHeight = vid_info.minDestHeight;
short minDestWidth = vid_info.minDestWidth;
short maxDestWidth = vid_info.maxDestWidth;
short maxDestHeight = vid_info.maxDestHeight;
osg::notify(osg::NOTICE) << "Min destination width,height : " << minDestWidth << " " << minDestHeight << std::endl;
osg::notify(osg::NOTICE) << "Max destination width,height : " << maxDestWidth << " " << maxDestHeight << std::endl;
// Current Status
long inputFlags, outputFlags;
vid_err = VDGetCurrentFlags(aComponent, &inputFlags, &outputFlags);
osg::notify(osg::NOTICE) << " NTSC : " << supports_capability(inputFlags, digiInDoesNTSC) << std::endl;
osg::notify(osg::NOTICE) << " PAL : " << supports_capability(inputFlags, digiInDoesPAL) << std::endl;
osg::notify(osg::NOTICE) << " Composite : " << supports_capability(inputFlags, digiInDoesComposite) << std::endl;
osg::notify(osg::NOTICE) << " Component : " << supports_capability(inputFlags, digiInDoesComponent) << std::endl;
osg::notify(osg::NOTICE) << " SVideo : " << supports_capability(inputFlags, digiInDoesSVideo) << std::endl;
osg::notify(osg::NOTICE) << " GenLock : " << supports_capability(inputFlags, digiInDoesGenLock) << std::endl;
osg::notify(osg::NOTICE) << " SECAM : " << supports_capability(inputFlags, digiInDoesSECAM) << std::endl;
osg::notify(osg::NOTICE) << " VTR_Broadcast : " << supports_capability(inputFlags, digiInVTR_Broadcast) << std::endl;
osg::notify(osg::NOTICE) << " Color : " << supports_capability(inputFlags, digiInDoesColor) << std::endl;
osg::notify(osg::NOTICE) << " BW : " << supports_capability(inputFlags, digiInDoesBW) << std::endl;
osg::notify(osg::NOTICE) << " *SignalLock* : " << supports_capability(inputFlags, digiInSignalLock) << std::endl;
// Preferred Width Height
long pref_width, pref_height;
vid_err = VDGetPreferredImageDimensions(aComponent, &pref_width, &pref_height);
if (vid_err) osg::notify(osg::NOTICE) << "VDGetPreferredImageDimensions(aComponent, &pref_width, &pref_height) - ERROR" << std::endl;
else osg::notify(osg::NOTICE) << "Preferred width,height : " << pref_width << " " << pref_height << std::endl;
// Inputs
short inputs;
vid_err = VDGetNumberOfInputs(aComponent, &inputs);
if (vid_err) osg::notify(osg::NOTICE) << "VDGetNumberOfInputs(aComponent, &inputs) - ERROR" << std::endl;
else osg::notify(osg::NOTICE) << "Number of inputs : " << inputs << std::endl;
for (short i=0; i <= inputs; ++i)
{
Str255 name;
vid_err = VDGetInputName(aComponent,(long)i, name);
if (vid_err) osg::notify(osg::NOTICE) << "VDGetInputName(aComponent,(long)i, name) - ERROR" << std::endl;
else osg::notify(osg::NOTICE) << "Name of input " << i << " : " << pstr_printable(name) << std::endl;
short input_format;
vid_err = VDGetInputFormat(aComponent,(long)i, &input_format);
if (vid_err) osg::notify(osg::NOTICE) << "VDGetInputFormat(aComponent,(long)i, &input_format) - ERROR" << std::endl;
else
{
if (input_format == compositeIn) osg::notify(osg::NOTICE) << "Format of input : compositeIn" << std::endl;
if (input_format == sVideoIn) osg::notify(osg::NOTICE) << "Format of input : sVideoIn" << std::endl;
if (input_format == rgbComponentIn) osg::notify(osg::NOTICE) << "Format of input : rgbComponentIn" << std::endl;
if (input_format == rgbComponentSyncIn) osg::notify(osg::NOTICE) << "Format of input : rgbComponentSyncIn" << std::endl;
if (input_format == yuvComponentIn) osg::notify(osg::NOTICE) << "Format of input : yuvComponentIn" << std::endl;
if (input_format == yuvComponentSyncIn) osg::notify(osg::NOTICE) << "Format of input : yuvComponentSyncIn" << std::endl;
if (input_format == sdiIn) osg::notify(osg::NOTICE) << "Format of input : sdiIn" << std::endl;
}
}
// CURRENT Input
short active_input;
vid_err = VDGetInput(aComponent, &active_input);
if (vid_err) osg::notify(osg::NOTICE) << "VDGetInput(aComponent, &active_input) - ERROR" << std::endl;
else osg::notify(osg::NOTICE) << "Currently active input : " << active_input << std::endl;
}
}
void probe_video_digitizer_components()
{
// Extra scopes for DEBUG and breakpoint/stack checking plus QT init/destroy
{
// Begin QuickTime
QTScopedQTMLInitialiser qt_init;
QTScopedMovieInitialiser qt_movie_init;
// #define videoDigitizerComponentType = 'vdig'
ComponentDescription video_component_description;
video_component_description.componentType = 'vdig'; /* A unique 4-byte code identifying the command set */
video_component_description.componentSubType = 0; /* Particular flavor of this instance */
video_component_description.componentManufacturer = 0; /* Vendor identification */
video_component_description.componentFlags = 0; /* 8 each for Component,Type,SubType,Manuf/revision */
video_component_description.componentFlagsMask = 0; /* Mask for specifying which flags to consider in search, zero during registration */
long num_video_components = CountComponents (&video_component_description);
osg::notify(osg::NOTICE) << " available Video DigitizerComponents : " << num_video_components << std::endl;
if (num_video_components)
{
Component aComponent = 0;
do
{
ComponentDescription full_video_component_description = video_component_description;
aComponent = FindNextComponent(aComponent, &full_video_component_description);
if (aComponent)
{
osg::notify(osg::NOTICE) << "Component" << std::endl;
OSErr err;
Handle compName = NewHandle(256);
Handle compInfo = NewHandle(256);
err = GetComponentInfo( aComponent, &full_video_component_description, compName,compInfo,0);
osg::notify(osg::NOTICE) << " Name: " << pstr_printable((StringPtr)*compName) << std::endl;
osg::notify(osg::NOTICE) << " Desc: " << pstr_printable((StringPtr)*compInfo) << std::endl;
//Capabilities
VideoDigitizerComponent component_instance = OpenComponent(aComponent);
print_video_component_capability(component_instance);
CloseComponent(component_instance);
}
}
while (0 != aComponent);
}
// End QuickTime
}
}
static Boolean MyModalFilter(DialogPtr theDialog, const EventRecord *theEvent, short *itemHit, long refCon)
{
return false;
}
OSG_SGDeviceList print_sequence_grabber_device_list(SGDeviceList deviceList)
{
ComponentResult result = noErr;
short count = (*deviceList)->count;
short selectedIndex = (*deviceList)->selectedIndex;
osg::notify(osg::NOTICE) << "DeviceList : " << count << " devices in total" << std::endl;
osg::notify(osg::NOTICE) << "DeviceList : " << selectedIndex << " is current device" << std::endl;
// Create List
OSG_SGDeviceList device_list;
OSG_SGDevicePair device_pair;
for (short i=0; i<count; ++i)
{
// Devices
osg::notify(osg::NOTICE) << std::endl;
SGDeviceName deviceNameRec = (*deviceList)->entry[i];
Str63 deviceNameStr;
memcpy(deviceNameStr, deviceNameRec.name, sizeof(Str63));
osg::notify(osg::NOTICE) << " " << "Device ID : " << i << " : DeviceNameStr : " << pstr_printable(deviceNameStr) << std::endl;
SGDeviceInputList deviceInputList = deviceNameRec.inputs;
if (deviceInputList)
{
// Inputs
short inputCount = (*deviceInputList)->count;
short inputSelectedIndex = (*deviceInputList)->selectedIndex;
osg::notify(osg::NOTICE) << " " << "InputList : " << inputCount << " inputs in total" << std::endl;
osg::notify(osg::NOTICE) << " " << "InputList : " << inputSelectedIndex << " is current input" << std::endl;
for (short inp=0; inp<inputCount; ++inp)
{
SGDeviceInputName inputNameRec = (*deviceInputList)->entry[inp];
Str63 inputNameStr;
memcpy(inputNameStr, inputNameRec.name, sizeof(Str63));
osg::notify(osg::NOTICE) << " " << "InputNameStr : " << inp << " " << pstr_printable(inputNameStr) << std::endl;
// Build up device list
std::ostringstream os;
os << i << ":" << inp << ".live";
device_pair.first = os.str();
device_pair.second = std::string(pstr_printable(deviceNameStr)) + std::string(" ") + std::string(pstr_printable(inputNameStr));
// Append
device_list.push_back(device_pair);
}
}
else
{
osg::notify(osg::NOTICE) << " InputList is empty!" << std::endl;
}
}
return device_list;
}
std::vector<OSG_SGDeviceList> probe_sequence_grabber_components()
{
// Create List
std::vector<OSG_SGDeviceList> devices_list;
OSG_SGDeviceList device_list;
// Extra scopes for DEBUG and breakpoint/stack checking plus QT init/destroy
{
// Begin QuickTime
QTScopedQTMLInitialiser qt_init;
QTScopedMovieInitialiser qt_movie_init;
// #define videoDigitizerComponentType = 'vdig'
ComponentDescription sg_component_description;
sg_component_description.componentType = SeqGrabComponentType; /* A unique 4-byte code identifying the command set */
sg_component_description.componentSubType = 0L; /* Particular flavor of this instance */
sg_component_description.componentManufacturer = 'appl'; /* Vendor identification */
sg_component_description.componentFlags = 0L; /* 8 each for Component,Type,SubType,Manuf/revision */
sg_component_description.componentFlagsMask = 0L; /* Mask for specifying which flags to consider in search, zero during registration */
long num_sg_components = CountComponents (&sg_component_description);
osg::notify(osg::NOTICE) << " available SequenceGrabber Components : " << num_sg_components << std::endl;
if (num_sg_components)
{
Component aComponent = 0;
do
{
ComponentDescription full_sg_component_description = sg_component_description;
aComponent = FindNextComponent(aComponent, &full_sg_component_description);
if (aComponent)
{
osg::notify(osg::NOTICE) << "Component" << std::endl;
OSErr err;
Handle compName = NewHandle(256);
Handle compInfo = NewHandle(256);
err = GetComponentInfo( aComponent, &full_sg_component_description, compName,compInfo,0);
osg::notify(osg::NOTICE) << " Name: " << pstr_printable((StringPtr)*compName) << std::endl;
osg::notify(osg::NOTICE) << " Desc: " << pstr_printable((StringPtr)*compInfo) << std::endl;
SeqGrabComponent gSeqGrabber;
SGChannel gVideoChannel;
SGChannel gSoundChannel;
Rect gActiveVideoRect;
gSeqGrabber = OpenComponent (aComponent);
// If we got a sequence grabber, set it up
if (gSeqGrabber != 0L)
{
ComponentResult result = noErr;
// Initialize the sequence grabber
result = SGInitialize (gSeqGrabber);
if (result == noErr)
{
// Check capability and setting of Sequence Grabber
Rect destinationBounds;
OSStatus err;
GDHandle origDevice;
CGrafPtr origPort;
GWorldPtr gw;
PixMapHandle pixmap = NULL;
int* destinationData = new int [1024*1024]; // 1024*1024*4 bytes (32bit RGBA)
destinationBounds.left = 0;
destinationBounds.top = 0;
destinationBounds.right = 2048;
destinationBounds.bottom = 2048;
err = QTNewGWorldFromPtr(&gw, k32ARGBPixelFormat, &destinationBounds,
NULL, NULL, 0, (Ptr)destinationData, 4*1024);
if (err !=0 )
osg::notify(osg::FATAL) << "Could not create gWorld" << std::endl;
else
{
// Create GWorld
GetGWorld (&origPort, &origDevice);
SetGWorld (gw, NULL); // set current graphics port to offscreen
pixmap = GetGWorldPixMap (gw);
if (pixmap)
{
if (!LockPixels (pixmap)) // lock offscreen pixel map
{
osg::notify(osg::FATAL) << "Could not lock PixMap" << std::endl;
}
}
// Set GWorld
result = SGSetGWorld(gSeqGrabber, (CGrafPtr)gw, 0);
// Set GWorld back
// SetGWorld(origPort, origDevice);
if (result != noErr)
{
osg::notify(osg::FATAL) << "Could not set GWorld on SG" << std::endl;
}
else
{
// Get a video channel
result = SGNewChannel (gSeqGrabber, VideoMediaType, &gVideoChannel);
if ((gVideoChannel != nil) && (result == noErr))
{
// Init
// result = SGInitChannel(gVideoChannel, gSeqGrabber);
// if (result != noErr)
// {
// osg::notify(osg::NOTICE) << "SGInitChannel - failed!" << std::endl;
// }
// Usage
result = SGSetChannelUsage (gVideoChannel, seqGrabPreview);
// Panel
// Crashes every time
// result = SGSettingsDialog(gSeqGrabber, gVideoChannel, 0, 0, seqGrabSettingsPreviewOnly, &MyModalFilter, 0);
// Bounds
result = SGGetSrcVideoBounds (gVideoChannel, &gActiveVideoRect);
osg::notify(osg::NOTICE) << "SrcVideoBounds: " << gActiveVideoRect.right << " " << gActiveVideoRect.bottom << std::endl;
Str255 deviceName;
Str255 inputName;
short inputNumber;
result = SGGetChannelDeviceAndInputNames( gVideoChannel, deviceName, inputName, &inputNumber);
if (result != noErr)
{
osg::notify(osg::NOTICE) << "Could not get DeviceAndInput names from Video SG" << std::endl;
}
osg::notify(osg::NOTICE) << "ChannelDeviceAndInputNamesNumber: " << pstr_printable(deviceName) << " : " << pstr_printable(inputName) << " : " << inputNumber << std::endl;
SGDeviceList deviceList;
result = SGGetChannelDeviceList( gVideoChannel, sgDeviceListIncludeInputs, &deviceList);
if (result != noErr)
{
osg::notify(osg::NOTICE) << "Could not get DeviceList from Video SG" << std::endl;
}
else
{
osg::notify(osg::NOTICE) << "DeviceList from Video SG ok" << std::endl;
device_list = print_sequence_grabber_device_list(deviceList);
devices_list.push_back(device_list);
}
}
// Get a sound channel
result = SGNewChannel (gSeqGrabber, SoundMediaType, &gSoundChannel);
if ((gSoundChannel != nil) && (result == noErr))
{
// Usage
result = SGSetChannelUsage (gSoundChannel, seqGrabPreview);
Str255 deviceName;
Str255 inputName;
short inputNumber;
result = SGGetChannelDeviceAndInputNames( gVideoChannel, deviceName, inputName, &inputNumber);
if (result != noErr)
{
osg::notify(osg::NOTICE) << "Could not get DeviceAndInput names from Sound SG" << std::endl;
}
osg::notify(osg::NOTICE) << "ChannelDeviceAndInputNamesNumber: " << pstr_printable(deviceName) << " : " << pstr_printable(inputName) << " : " << inputNumber << std::endl;
SGDeviceList deviceList;
result = SGGetChannelDeviceList( gSoundChannel, sgDeviceListIncludeInputs, &deviceList);
if (result != noErr)
{
osg::notify(osg::NOTICE) << "Could not get DeviceList from Sound SG" << std::endl;
}
else
{
osg::notify(osg::NOTICE) << "DeviceList from Sound SG ok" << std::endl;
device_list = print_sequence_grabber_device_list(deviceList);
devices_list.push_back(device_list);
}
}
}
SetGWorld(origPort, origDevice);
DisposeGWorld(gw);
}
}
}
SGDisposeChannel(gSeqGrabber, gVideoChannel);
CloseComponent(gSeqGrabber);
}
}
while (0 != aComponent);
}
// End QuickTime
}
return devices_list;
}
void get_video_device_bounds_idstr(short deviceID, short deviceInputID, short& out_width, short& out_height, Str63& out_deviceIDStr)
{
// Extra scopes for DEBUG and breakpoint/stack checking plus QT init/destroy
{
// Begin QuickTime
QTScopedQTMLInitialiser qt_init;
QTScopedMovieInitialiser qt_movie_init;
ComponentDescription sg_component_description;
sg_component_description.componentType = SeqGrabComponentType; /* A unique 4-byte code identifying the command set */
sg_component_description.componentSubType = 0L; /* Particular flavor of this instance */
sg_component_description.componentManufacturer = 0L; /* Vendor identification */
sg_component_description.componentFlags = 0L; /* 8 each for Component,Type,SubType,Manuf/revision */
sg_component_description.componentFlagsMask = 0L; /* Mask for specifying which flags to consider in search, zero during registration */
long num_sg_components = CountComponents (&sg_component_description);
if (num_sg_components)
{
Component aComponent = 0;
do
{
ComponentDescription full_sg_component_description = sg_component_description;
aComponent = FindNextComponent(aComponent, &full_sg_component_description);
if (aComponent)
{
SeqGrabComponent gSeqGrabber;
SGChannel gVideoChannel;
Rect gActiveVideoRect;
gSeqGrabber = OpenComponent (aComponent);
// If we got a sequence grabber, set it up
if (gSeqGrabber != 0L)
{
ComponentResult result = noErr;
// Initialize the sequence grabber
result = SGInitialize (gSeqGrabber);
if (result == noErr)
{
// Check capability and setting of Sequence Grabber
Rect destinationBounds;
OSStatus err;
GDHandle origDevice;
CGrafPtr origPort;
GWorldPtr gw;
PixMapHandle pixmap = NULL;
int* destinationData = new int [1024*1024]; // 1024*1024*4 bytes (32bit RGBA)
destinationBounds.left = 0;
destinationBounds.top = 0;
destinationBounds.right = 256;
destinationBounds.bottom = 256;
err = QTNewGWorldFromPtr(&gw, k32ARGBPixelFormat, &destinationBounds,
NULL, NULL, 0, (Ptr)destinationData, 4*256);
if (err !=0 )
osg::notify(osg::NOTICE) << "Could not create gWorld" << std::endl;
else
{
// Create GWorld
GetGWorld (&origPort, &origDevice);
SetGWorld (gw, NULL); // set current graphics port to offscreen
pixmap = GetGWorldPixMap (gw);
if (pixmap)
{
if (!LockPixels (pixmap)) // lock offscreen pixel map
osg::notify(osg::FATAL) << "Could not lock PixMap" << std::endl;
}
// Set GWorld
result = SGSetGWorld(gSeqGrabber, (CGrafPtr)gw, 0);
// Set GWorld back
// SetGWorld(origPort, origDevice);
if (result != noErr)
{
osg::notify(osg::FATAL) << "Could not set GWorld on SG" << std::endl;
}
else
{
// Get a video channel
result = SGNewChannel (gSeqGrabber, VideoMediaType, &gVideoChannel);
if ((gVideoChannel != nil) && (result == noErr))
{
result = SGSetChannelUsage (gVideoChannel, seqGrabPreview);
Str255 deviceName;
Str255 inputName;
short inputNumber;
result = SGGetChannelDeviceAndInputNames( gVideoChannel, deviceName, inputName, &inputNumber);
SGDeviceList deviceList;
result = SGGetChannelDeviceList( gVideoChannel, sgDeviceListIncludeInputs, &deviceList);
short count = (*deviceList)->count;
if (deviceID >= count)
osg::notify(osg::FATAL) << "DeviceID : " << deviceID << " too large - we only have " << count << " devices" << std::endl;
SGDeviceName deviceNameRec = (*deviceList)->entry[deviceID];
SGDeviceInputList deviceInputList = deviceNameRec.inputs;
if (deviceInputList == 0)
osg::notify(osg::FATAL) << "DeviceInputList is empty!" << std::endl;
else
{
short inputCount = (*deviceInputList)->count;
if (deviceInputID >= inputCount)
osg::notify(osg::FATAL) << "DeviceInputID : " << deviceInputID << " too large - we only have " << inputCount << " inputs for device" << std::endl;
}
// Ok
Str63 deviceNameStr;
memcpy(deviceNameStr, deviceNameRec.name, sizeof(Str63));
// Set
result = SGSetChannelDevice ( gVideoChannel, deviceNameStr);
result = SGSetChannelDeviceInput( gVideoChannel, deviceInputID);
VideoDigitizerComponent vdig = SGGetVideoDigitizerComponent(gVideoChannel);
VideoDigitizerError vid_err;
vid_err = VDSetInputStandard (vdig, palIn);
result = SGVideoDigitizerChanged( gVideoChannel);
result = SGGetSrcVideoBounds ( gVideoChannel, &gActiveVideoRect);
osg::notify(osg::NOTICE) << "SrcVideoBounds: " << gActiveVideoRect.right << " " << gActiveVideoRect.bottom << std::endl;
// Out
out_width = gActiveVideoRect.right;
out_height = gActiveVideoRect.bottom;
memcpy(out_deviceIDStr, deviceNameRec.name, sizeof(Str63));
}
}
SetGWorld(origPort, origDevice);
DisposeGWorld(gw);
}
}
}
SGDisposeChannel(gSeqGrabber, gVideoChannel);
CloseComponent(gSeqGrabber);
}
}
while (0 != aComponent);
}
// End QuickTime
}
}
void get_sound_device_idstr(short soundDeviceID, short soundDeviceInputID, Str63& out_soundDeviceIDStr)
{
// Extra scopes for DEBUG and breakpoint/stack checking plus QT init/destroy
{
// Begin QuickTime
QTScopedQTMLInitialiser qt_init;
QTScopedMovieInitialiser qt_movie_init;
// #define videoDigitizerComponentType = 'vdig'
ComponentDescription sg_component_description;
sg_component_description.componentType = SeqGrabComponentType; /* A unique 4-byte code identifying the command set */
sg_component_description.componentSubType = 0L; /* Particular flavor of this instance */
sg_component_description.componentManufacturer = 0L; /* Vendor identification */
sg_component_description.componentFlags = 0L; /* 8 each for Component,Type,SubType,Manuf/revision */
sg_component_description.componentFlagsMask = 0L; /* Mask for specifying which flags to consider in search, zero during registration */
long num_sg_components = CountComponents (&sg_component_description);
if (num_sg_components)
{
Component aComponent = 0;
do
{
ComponentDescription full_sg_component_description = sg_component_description;
aComponent = FindNextComponent(aComponent, &full_sg_component_description);
if (aComponent)
{
SeqGrabComponent gSeqGrabber;
SGChannel gSoundChannel;
gSeqGrabber = OpenComponent (aComponent);
// If we got a sequence grabber, set it up
if (gSeqGrabber != 0L)
{
ComponentResult result = noErr;
// Initialize the sequence grabber
result = SGInitialize (gSeqGrabber);
if (result == noErr)
{
// Check capability and setting of Sequence Grabber
// Get a sound channel
result = SGNewChannel (gSeqGrabber, SoundMediaType, &gSoundChannel);
if ((gSoundChannel != nil) && (result == noErr))
{
result = SGSetChannelUsage (gSoundChannel, seqGrabPreview);
Str255 deviceName;
Str255 inputName;
short inputNumber;
result = SGGetChannelDeviceAndInputNames( gSoundChannel, deviceName, inputName, &inputNumber);
SGDeviceList deviceList;
result = SGGetChannelDeviceList( gSoundChannel, sgDeviceListIncludeInputs, &deviceList);
short count = (*deviceList)->count;
if (soundDeviceID >= count)
osg::notify(osg::FATAL) << "DeviceID : " << soundDeviceID << " too large - we only have " << count << " devices" << std::endl;
SGDeviceName deviceNameRec = (*deviceList)->entry[soundDeviceID];
SGDeviceInputList deviceInputList = deviceNameRec.inputs;
short inputCount = (*deviceInputList)->count;
if (soundDeviceInputID >= inputCount)
osg::notify(osg::FATAL) << "DeviceInputID : " << soundDeviceInputID << " too large - we only have " << inputCount << " inputs for device" << std::endl;
// Ok
Str63 deviceNameStr;
memcpy(deviceNameStr, deviceNameRec.name, sizeof(Str63));
// Set
result = SGSetChannelDevice ( gSoundChannel, deviceNameStr);
result = SGSetChannelDeviceInput( gSoundChannel, soundDeviceInputID);
// Out
memcpy(out_soundDeviceIDStr, deviceNameRec.name, sizeof(Str63));
SGDisposeChannel(gSeqGrabber, gSoundChannel);
}
}
CloseComponent(gSeqGrabber);
}
}
}
while (0 != aComponent);
}
// End QuickTime
}
}
// Getting Information About Video Digitizer Components
// You can use the VDGetDigitizerInfo function in your application to retrieve
// information about the capabilities of a video digitizer component. You can use
// the VDGetCurrentFlags function to obtain current status information from a video digitizer component.
// Setting Source Characteristics
// You can use the VDGetMaxSrcRect function in your application to get the size and location of the maximum
// source rectangle. Similarly, the VDGetActiveSrcRect function allows you to get this information about
// the active source rectangle, and the VDGetVBlankRect function enables you to obtain information about the vertical blanking rectangle.
// You can use the VDSetDigitizerRect function to set the size and location of the digitizer rectangle.
// The VDGetDigitizerRect function lets you retrieve the size and location of this rectangle.
// Input Source
// Some of these functions provide information about the available video inputs. Applications can use
// the VDGetNumberOfInputs function to determine the number of video inputs supported by the digitizer component.
// The VDGetInputFormat function allows applications to find out the video format (composite, s-video, or component) employed by a specified input.
// You can use the VDSetInput function in your application to specify the input to be used by the digitizer component.
// The VDGetInput function returns the currently selected input.
// The VDSetInputStandard function allows you to specify the video signaling standard to be used by the video digitizer component.
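// A sketch (not part of this commit) of how the calls described above combine to
// enumerate the inputs of an already opened digitizer component and select one.
// VDSetInput is assumed to take the zero-based input index reported by
// VDGetNumberOfInputs/VDGetInput; the PAL standard mirrors the usage further up.
static void select_first_svideo_input(VideoDigitizerComponent vdig)
{
VideoDigitizerError vid_err;
short inputs = 0;
vid_err = VDGetNumberOfInputs(vdig, &inputs); // returns the highest input index
if (vid_err) return;
for (short i = 0; i <= inputs; ++i)
{
short input_format;
vid_err = VDGetInputFormat(vdig, (long)i, &input_format);
if (!vid_err && input_format == sVideoIn)
{
vid_err = VDSetInput(vdig, i); // make input i the current input
if (!vid_err) VDSetInputStandard(vdig, palIn); // e.g. PAL
break;
}
}
}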
/*
QTVideoOutputRestoreState
QTVideoOutputSaveState
Selecting an Input Source
VDGetInput
VDGetInputFormat
VDGetNumberOfInputs
VDSetInput
VDSetInputStandard
Setting Source Characteristics
VDGetActiveSrcRect
VDGetDigitizerRect
VDGetMaxSrcRect
VDGetVBlankRect
VDSetDigitizerRect
Setting Video Destinations
VDGetMaxAuxBuffer
VDGetPlayThruDestination
VDPreflightDestination
VDPreflightGlobalRect
VDSetPlayThruDestination
VDSetPlayThruGlobalRect
Video Clipping
VDClearClipRgn
VDGetClipState
VDSetClipRgn
VDSetClipState
*/
/*
QTVideoOutputCopyIndAudioOutputDeviceUID
QTVideoOutputGetIndImageDecompressor
VDGetInputGammaRecord
VDGetInputName
VDGetPreferredImageDimensions
VDIIDCGetCSRData
VDIIDCGetDefaultFeatures
VDIIDCGetFeatures
VDIIDCGetFeaturesForSpecifier
VDIIDCSetCSRData
VDIIDCSetFeatures
VDSetDestinationPort
VDSetInputGammaRecord
VDSetPreferredImageDimensions
VDUseSafeBuffers
*/
//void test ()
//{
//if ((i == count-1) && (inp == inputCount-1))
//{
// osg::notify(osg::NOTICE) << " * TEST SGSetChannelDevice(..) : " << pstr_printable(deviceNameRec.name) << std::endl;
// result = SGSetChannelDevice (gVideoChannel, deviceNameStr);
// if (result == noErr)
// {
// result = SGSetChannelDeviceInput( gVideoChannel, 0 );
// result = SGGetSrcVideoBounds (gVideoChannel, &gActiveVideoRect);
// osg::notify(osg::NOTICE) << "SrcVideoBounds: " << gActiveVideoRect.right << " " << gActiveVideoRect.bottom << std::endl;
// Str255 deviceName2;
// Str255 inputName2;
// short inputNumber2;
// result = SGGetChannelDeviceAndInputNames( gVideoChannel, deviceName2, inputName2, &inputNumber2);
// osg::notify(osg::NOTICE) << "ChannelDeviceAndInputNamesNumber: " << pstr_printable(deviceName2) << " : " << pstr_printable(inputName2) << " : " << inputNumber2 << std::endl;
// result = SGGetChannelDeviceList( gVideoChannel, sgDeviceListIncludeInputs, &deviceList);
// if (result != noErr)
// {
// osg::notify(osg::NOTICE) << "Could not get DeviceList from Video SG" << std::endl;
// }
// else
// {
// osg::notify(osg::NOTICE) << "DeviceList from Video SG ok" << std::endl;
// short count = (*deviceList)->count;
// short selectedIndex = (*deviceList)->selectedIndex;
// osg::notify(osg::NOTICE) << "DeviceList : " << count << " devices in total" << std::endl;
// osg::notify(osg::NOTICE) << "DeviceList : " << selectedIndex << " is current device" << std::endl;
// }
// }
// else
// {
// osg::notify(osg::NOTICE) << "SGSetChannelDevice - failed!" << std::endl;
// }
// osg::notify(osg::NOTICE) << " * TEST SGSetChannelDevice(..) end" << std::endl;
//}

View File

@ -0,0 +1,109 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2007 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#ifdef __APPLE__
#include <Quicktime/Quicktime.h>
#include <Carbon/Carbon.h>
#define QT_HANDLE_IMAGES_ALSO
#else
#include <QTML.h>
#include <Movies.h>
#include <Quickdraw.h>
#include <QDOffscreen.h>
#include <QuicktimeComponents.h>
#include <FixMath.h>
#include <CGBitmapContext.h>
#include <CGImage.h>
#include <CGColorSpace.h>
#include <ImageCompression.h>
#include <TextUtils.h>
#endif
// Std (needed for the typedefs below)
#include <string>
#include <utility>
#include <vector>
//QT
//#include "Components.h"
//#include "QuickTimeComponents.h"
// QTML
void initialize_quicktime_qtml();
void terminite_quicktime_qtml();
class QTScopedQTMLInitialiser
{
public:
QTScopedQTMLInitialiser();
~QTScopedQTMLInitialiser();
private:
QTScopedQTMLInitialiser(const QTScopedQTMLInitialiser&);
const QTScopedQTMLInitialiser& operator=(const QTScopedQTMLInitialiser&);
};
// QT Movies
void enter_quicktime_movies();
void leave_quicktime_movies();
class QTScopedMovieInitialiser
{
public:
QTScopedMovieInitialiser();
~QTScopedMovieInitialiser();
private:
QTScopedMovieInitialiser(const QTScopedMovieInitialiser&);
const QTScopedMovieInitialiser& operator=(const QTScopedMovieInitialiser&);
};
#if TARGET_OS_MAC
// QT Movies_MT (QT multi-thread support API)
/*
* EnterMovies initializes a single, non-reentrant QuickTime environment for your application.
* If your application uses QuickTime on multiple threads simultaneously, call EnterMoviesOnThread from each thread that uses QuickTime to create a local QuickTime environment for that thread (requires QuickTime 6 or later).
* For more information about threaded programming and QuickTime, see Technical Note TN2125, Thread-safe programming in QuickTime.
* http://developer.apple.com/technotes/tn/tn2125.html
*/
void enter_quicktime_movies_mt();
void leave_quicktime_movies_mt();
class QTScopedMovieInitialiser_MT
{
public:
QTScopedMovieInitialiser_MT();
~QTScopedMovieInitialiser_MT();
private:
QTScopedMovieInitialiser_MT(const QTScopedMovieInitialiser_MT&);
const QTScopedMovieInitialiser_MT& operator=(const QTScopedMovieInitialiser_MT&);
};
#endif
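// A usage sketch (not part of this commit): per TN2125 each worker thread that
// makes QuickTime calls creates its own scoped initialiser, e.g. in a hypothetical
// capture thread:
//
//   void MyCaptureThread::run()
//   {
//   #if TARGET_OS_MAC
//       QTScopedMovieInitialiser_MT movie_init_mt; // EnterMoviesOnThread for this thread
//   #endif
//       // ... QuickTime calls made from this thread ...
//   }   // leaving scope calls ExitMoviesOnThread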
// Utils
char* pstr_printable(StringPtr src);
typedef std::pair<std::string,std::string> OSG_SGDevicePair;
typedef std::vector<OSG_SGDevicePair> OSG_SGDeviceList;
// Capability Video
void print_video_component_capability(VideoDigitizerComponent aComponent);
void probe_video_digitizer_components();
// Capability Sequence Grabber
OSG_SGDeviceList print_sequence_grabber_device_list(SGDeviceList deviceList);
std::vector<OSG_SGDeviceList> probe_sequence_grabber_components();
//
void get_video_device_bounds_idstr(short videoDeviceID, short videoDeviceInputID, short& out_width, short& out_height, Str63& out_videoDeviceIDStr);
void get_sound_device_idstr(short soundDeviceID, short soundDeviceInputID, Str63& out_soundDeviceIDStr);

View File

@ -0,0 +1,596 @@
/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2007 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include <cstdlib>
#include <osg/Notify>
#include <osg/Timer>
#include <osg/ref_ptr>
#include <osg/Referenced>
#include <osg/Notify>
#include <osgDB/Registry>
#include <osg/GL>
#include <osg/Endian>
#include <osg/Timer>
#include <osgDB/FileNameUtils>
#include <OpenThreads/ScopedLock>
#include <OpenThreads/Thread>
#include "QuicktimeLiveImageStream.h"
#include "QTLiveUtils.h"
// Constructor: setup and start thread
QuicktimeLiveImageStream::QuicktimeLiveImageStream(std::string fileName) : ImageStream()
{
setOrigin(osg::Image::TOP_LEFT);
_status = ImageStream::PAUSED;
//probe_video_digitizer_components();
//probe_sequence_grabber_components();
// Initialise QT
// initialize_quicktime_qtml();
// enter_quicktime_movies();
//
load(fileName);
}
// Destructor: stop and terminate thread
QuicktimeLiveImageStream::~QuicktimeLiveImageStream()
{
// Terminate QT
// leave_quicktime_movies();
// terminite_quicktime_qtml();
}
/// Start or continue stream.
void QuicktimeLiveImageStream::play()
{
osg::notify(osg::DEBUG_INFO)<<"Sending play "<<this<<std::endl;
/* if (g_s_use_sg)
{
ComponentResult result = noErr;
result = SGStartPreview(m_gSeqGrabber);
if (result != noErr)
osg::notify(osg::FATAL) << "SGStartPreview : error" << std::endl;
}*/
}
/// Pause stream at current position.
void QuicktimeLiveImageStream::pause()
{
osg::notify(osg::DEBUG_INFO)<<"Sending pause "<<this<<std::endl;
}
/// stop playing
void QuicktimeLiveImageStream::quit(bool waitForThreadToExit)
{
osg::notify(osg::DEBUG_INFO)<<"Sending quit "<<this<<std::endl;
}
//
// PRIVATE
//
// Use the Sequence Grabber or the raw Video Digitizer
// If using SG then use it in Preview or Record option
// Three options - VD Play Through, SG Preview or SG Record
static bool g_s_use_sg = true ; // 1a
static bool g_s_use_sg_record = false; // 1b
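// Resulting modes, as wired up in load() and the create* methods below:
// g_s_use_sg == false -> raw Video Digitizer play-through (createAndRunWithVideoDigitizer)
// g_s_use_sg == true and g_s_use_sg_record == false -> Sequence Grabber preview (SGStartPreview)
// g_s_use_sg == true and g_s_use_sg_record == true -> Sequence Grabber record via data proc (SGStartRecord)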
// load
void QuicktimeLiveImageStream::load(std::string fileName)
{
osg::notify(osg::DEBUG_INFO)<<"QuicktimeLive Loading..."<<this<<std::endl;
// CreateAndRunWithSequenceGrabber
if (g_s_use_sg)
createAndRunWithSequenceGrabber(fileName);
else
createAndRunWithVideoDigitizer(fileName);
}
// Create the Image
void QuicktimeLiveImageStream::createImage()
{
// Old
// char* pointer = (char*)malloc(4 * m_videoRectWidth*m_videoRectHeight + 32);
// void* buffer = (void*)(((unsigned long)(pointer + 31) >> 5) << 5);
// New
int* buffer = new int [m_videoRectWidth*m_videoRectHeight]; // m_videoRectWidth*m_videoRectHeight*4 bytes (32-bit BGRA)
//
GLenum internalFormat = (osg::getCpuByteOrder()==osg::BigEndian)?
GL_UNSIGNED_INT_8_8_8_8_REV :
GL_UNSIGNED_INT_8_8_8_8;
setImage(m_videoRectWidth,m_videoRectHeight,1,
(GLint) GL_RGBA8, (GLenum)GL_BGRA_EXT, internalFormat,
(unsigned char*)buffer,osg::Image::NO_DELETE,4);
}
// Create the offscreen GWorld (using Image as target memory)
void QuicktimeLiveImageStream::createGWorld()
{
Rect destinationBounds;
OSStatus err;
GDHandle origDevice;
CGrafPtr origPort;
destinationBounds.left = 0;
destinationBounds.top = 0;
destinationBounds.right = m_videoRectWidth;
destinationBounds.bottom = m_videoRectHeight;
err = QTNewGWorldFromPtr(&m_gw, k32ARGBPixelFormat, &destinationBounds,
NULL, NULL, 0, (Ptr)data(), 4*m_videoRectWidth);
if (err !=0 )
{
osg::notify(osg::DEBUG_INFO) << "Could not create gWorld" << std::endl;
}
else
{
// Query
GetGWorld (&origPort, &origDevice);
SetGWorld (m_gw, NULL); // set current graphics port to offscreen
m_pixmap = GetGWorldPixMap(m_gw);
if (m_pixmap)
{
if (!LockPixels (m_pixmap)) // lock offscreen pixel map
{
osg::notify(osg::FATAL) << "Could not lock PixMap" << std::endl;
}
}
// Set back
SetGWorld(origPort, origDevice);
}
}
// 1.
// CreateAndRunWithSequenceGrabber
void QuicktimeLiveImageStream::createAndRunWithSequenceGrabber(std::string fileName)
{
std::string::size_type idx = fileName.find(':');
if (idx == std::string::npos)
{
osg::notify(osg::FATAL) << "Error while parsing deviceID:deviceInputID.live path : " << fileName << std::endl;
}
// Cleaner C++ would be to parse this with std::istringstream
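// e.g. (a sketch only, not what this commit does; requires <sstream>):
// std::istringstream parser(fileName);
// short deviceID = 0, deviceInputID = 0;
// char separator = 0;
// if ((parser >> deviceID >> separator >> deviceInputID) && separator == ':')
// {
//     m_videoDeviceID = deviceID;
//     m_videoDeviceInputID = deviceInputID;
// }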
std::string deviceIDStr = fileName.substr(0,idx);
std::string deviceInputIDStr = fileName.substr(idx+1);
m_videoDeviceID = static_cast<short>(atoi(deviceIDStr.c_str()));
m_videoDeviceInputID = static_cast<short>(atoi(deviceInputIDStr.c_str()));
// Get Video Digitizer Rectangle bounds from a Sequence Grabber proxy (using IDs)
get_video_device_bounds_idstr(m_videoDeviceID, m_videoDeviceInputID, m_videoRectWidth, m_videoRectHeight, m_videoDeviceIDStr);
// Sound
m_soundDeviceID = 2; m_soundDeviceInputID = 0;
//get_sound_device_idstr(m_soundDeviceID, m_soundDeviceInputID, m_soundDeviceIDStr);
// Create the Image
createImage();
// Create the offscreen GWorld (using Image as target memory)
createGWorld();
// Create the Sequence Grabber (using GWorld as target memory)
createSequenceGrabber();
// Create the Sequence Grabber Video Channel
createSequenceGrabberVideoChannel();
if (g_s_use_sg_record)
{
// Create the Sequence Grabber DataProc setup for Record
createSequenceGrabberDataProc();
}
// Create the Sequence Grabber Audio Channel
createSequenceGrabberAudioChannel();
// Start the engine Jack!
// Callbacks
createSequenceGrabberVideoBottlenecks();
ComponentResult result = noErr;
result = SGPrepare( m_gSeqGrabber, TRUE, FALSE);
if (result != noErr)
osg::notify(osg::FATAL) << "SGPrepare : error" << std::endl;
if (g_s_use_sg_record)
{
result = SGStartRecord(m_gSeqGrabber);
if (result != noErr)
osg::notify(osg::FATAL) << "SGStartRecord : error" << std::endl;
}
else
{
result = SGStartPreview(m_gSeqGrabber);
if (result != noErr)
osg::notify(osg::FATAL) << "SGStartPreview : error" << std::endl;
}
_status = ImageStream::PLAYING;
// Ticker
start();
}
// 1.
// Create the Sequence Grabber (using GWorld as target memory)
void QuicktimeLiveImageStream::createSequenceGrabber()
{
ComponentDescription sg_component_description;
sg_component_description.componentType = SeqGrabComponentType; /* A unique 4-byte code identifying the command set */
sg_component_description.componentSubType = 0L; /* Particular flavor of this instance */
sg_component_description.componentManufacturer = 'appl'; /* Vendor identification */
sg_component_description.componentFlags = 0L; /* 8 each for Component,Type,SubType,Manuf/revision */
sg_component_description.componentFlagsMask = 0L; /* Mask for specifying which flags to consider in search, zero during registration */
long num_sg_components = CountComponents (&sg_component_description);
if (num_sg_components)
{
Component aComponent = 0;
ComponentDescription full_sg_component_description = sg_component_description;
aComponent = FindNextComponent(aComponent, &full_sg_component_description);
if (aComponent)
{
m_gSeqGrabber = OpenComponent(aComponent);
// If we got a sequence grabber, set it up
if (m_gSeqGrabber != 0L)
{
// Check capability and setting of Sequence Grabber
GDHandle origDevice;
CGrafPtr origPort;
// Create GWorld
GetGWorld (&origPort, &origDevice);
SetGWorld (m_gw, NULL); // set current graphics port to offscreen
// Initialize the sequence grabber
ComponentResult result = noErr;
result = SGInitialize (m_gSeqGrabber);
if (result == noErr)
{
// Set GWorld
result = SGSetGWorld(m_gSeqGrabber, (CGrafPtr)m_gw, 0);
if (result != noErr)
{
osg::notify(osg::FATAL) << "Could not set GWorld on SG" << std::endl;
}
}
// Set GWorld back
SetGWorld(origPort, origDevice);
}
}
}
}
// Create the Sequence Grabber Video Channel
void QuicktimeLiveImageStream::createSequenceGrabberVideoChannel()
{
// Check capability and setting of Sequence Grabber
GDHandle origDevice;
CGrafPtr origPort;
// Create GWorld
GetGWorld (&origPort, &origDevice);
SetGWorld (m_gw, NULL); // set current graphics port to offscreen
// Setup
// Get a video channel
ComponentResult result = SGNewChannel (m_gSeqGrabber, VideoMediaType, &m_gVideoChannel);
if ((m_gVideoChannel != nil) && (result == noErr))
{
result = SGInitChannel(m_gVideoChannel, m_gSeqGrabber);
Rect gActiveVideoRect;
// Usage
if (g_s_use_sg_record)
result = SGSetChannelUsage (m_gVideoChannel, seqGrabRecord | seqGrabLowLatencyCapture);
else
{
result = SGSetChannelUsage (m_gVideoChannel, seqGrabPreview);
}
// result = SGSetUseScreenBuffer(m_gVideoChannel, FALSE);
// Set
osg::notify(osg::DEBUG_INFO) << "Setting up vdig from input prefs" << std::endl;
result = SGSetChannelDevice ( m_gVideoChannel, m_videoDeviceIDStr);
result = SGSetChannelDeviceInput( m_gVideoChannel, m_videoDeviceInputID);
// result = SGSetChannelPlayFlags ( m_gVideoChannel, channelPlayFast | channelPlayHighQuality | channelPlayAllData);
result = SGSetChannelPlayFlags ( m_gVideoChannel, channelPlayFast );
VideoDigitizerComponent vdig = SGGetVideoDigitizerComponent(m_gVideoChannel);
VideoDigitizerError vid_err;
vid_err = VDSetInputStandard (vdig, palIn);
osg::notify(osg::DEBUG_INFO) << "Setup vdig from input prefs:" << std::endl;
print_video_component_capability(vdig);
result = SGVideoDigitizerChanged( m_gVideoChannel);
result = SGGetSrcVideoBounds ( m_gVideoChannel, &gActiveVideoRect);
result = SGSetChannelBounds ( m_gVideoChannel, &gActiveVideoRect);
result = SGChangedSource (m_gSeqGrabber, m_gVideoChannel);
Fixed frame_rate;
result = SGGetFrameRate (m_gVideoChannel, &frame_rate);
int zx = 0;
result = SGSetFrameRate (m_gVideoChannel, 100);
//
// Sound
/*
long sound_id;
Str255 sound_driver_name;
char* sound_driver_name_cstr;
vid_err = VDGetSoundInputSource(vdig, (long)m_videoDeviceInputID, &sound_id);
vid_err = VDGetSoundInputDriver(vdig, sound_driver_name);
sound_driver_name_cstr = pstr_printable(sound_driver_name);
osg::notify(osg::DEBUG_INFO) << "vdig sound driver name :" << sound_driver_name_cstr << std::endl;
osg::notify(osg::DEBUG_INFO) << "vdig sound driver id :" << sound_id << std::endl;
*/
}
else
{
osg::notify(osg::FATAL) << "Could not create SGNewChannel for Video Channel" << std::endl;
}
// Set GWorld back
SetGWorld(origPort, origDevice);
}
static OSErr MySGDataProc (SGChannel c,Ptr p,long len,long *offset,long chRefCon,TimeValue time,short writeType,long refCon )
{
QuicktimeLiveImageStream* p_is = (QuicktimeLiveImageStream*)refCon;
return p_is->dataProcCallback(c,p,len,offset,chRefCon,time,writeType,refCon);
}
OSErr QuicktimeLiveImageStream::dataProcCallback( SGChannel c,Ptr p,long len,long *offset,long chRefCon,TimeValue time,short writeType,long refCon )
{
OSErr err = noErr;
//
osg::notify(osg::INFO) << " Video " << refCon << std::endl;
dirty();
//
return err;
}
// Create the Sequence Grabber DataProc setup for Record
void QuicktimeLiveImageStream::createSequenceGrabberDataProc()
{
OSErr err;
err = SGSetDataRef(m_gSeqGrabber, 0, 0, seqGrabToMemory | seqGrabDontMakeMovie);
if (err != noErr)
osg::notify(osg::FATAL) << "SGSetDataRef : error" << std::endl;
// specify a sequence grabber data function
err = SGSetDataProc(m_gSeqGrabber, NewSGDataUPP(MySGDataProc), (long)this);
if (err != noErr)
osg::notify(osg::FATAL) << "SGSetDataProc : error" << std::endl;
}
// Create the Sequence Grabber Audio Channel
void QuicktimeLiveImageStream::createSequenceGrabberAudioChannel()
{
// Check capability and setting of Sequence Grabber
GDHandle origDevice;
CGrafPtr origPort;
// Create GWorld
GetGWorld (&origPort, &origDevice);
SetGWorld (m_gw, NULL); // set current graphics port to offscreen
// Setup
// Get a video channel
ComponentResult result = SGNewChannel (m_gSeqGrabber, SoundMediaType, &m_gSoundChannel);
if ((m_gSoundChannel != nil) && (result == noErr))
{
result = SGInitChannel(m_gSoundChannel, m_gSeqGrabber);
// result = SGSetChannelUsage (m_gSoundChannel, seqGrabPreview );
// Usage
if (g_s_use_sg_record)
result = SGSetChannelUsage (m_gSoundChannel, seqGrabRecord | seqGrabLowLatencyCapture);
else
{
result = SGSetChannelUsage (m_gSoundChannel, seqGrabPreview | seqGrabRecord | seqGrabLowLatencyCapture);
}
// Get
Str255 deviceName;
Str255 inputName;
short inputNumber;
result = SGGetChannelDeviceAndInputNames( m_gSoundChannel, deviceName, inputName, &inputNumber);
// Set
// osg::notify(osg::DEBUG_INFO) << "Setting up audio component from input prefs" << std::endl;
result = SGSetChannelDevice ( m_gSoundChannel, m_soundDeviceIDStr);
result = SGSetChannelDeviceInput( m_gSoundChannel, m_soundDeviceInputID);
// Set the volume low to prevent feedback when we start the preview,
// in case the mic is anywhere near the speaker.
short volume = 0;
result = SGGetChannelVolume (m_gSoundChannel, &volume );
// result = SGSetChannelVolume (m_gSoundChannel, 255);
// Inform
result = SGChangedSource ( m_gSeqGrabber, m_gSoundChannel);
}
else
{
osg::notify(osg::FATAL) << "Could not create SGNewChannel for Sound Channel" << std::endl;
}
// Set GWorld back
SetGWorld(origPort, origDevice);
}
// GrabFrameCompleteProc (QT callback)
static ComponentResult GrabFrameCompleteProc(SGChannel sgChan, short nBufferNum, Boolean *pbDone, long lRefCon)
{
QuicktimeLiveImageStream* p_is = (QuicktimeLiveImageStream*)lRefCon;
return p_is->grabFrameCompleteProc(sgChan, nBufferNum, pbDone, lRefCon);
}
// GrabFrameCompleteProc (QuicktimeLiveImageStream)
ComponentResult QuicktimeLiveImageStream::grabFrameCompleteProc(SGChannel sgChan, short nBufferNum, Boolean *pbDone, long lRefCon)
{
ComponentResult err = noErr;
// call the default grab-complete function
err = SGGrabFrameComplete(sgChan, // channel reference
nBufferNum, // buffer identifier, provided for you
pbDone); // pointer to a boolean, has the frame been completely captured? provided for you
static unsigned int fps_counter = 0;
static osg::Timer_t start, finish;
if (fps_counter == 0)
start = osg::Timer::instance()->tick();
// if the frame is done, make sure the Image is replaced
if (*pbDone && (sgChan == m_gVideoChannel))
{
dirty();
++fps_counter;
if (fps_counter == 100)
{
finish = osg::Timer::instance()->tick();
double dur = osg::Timer::instance()->delta_s(start, finish);
double fps = 100.0 / dur;
osg::notify(osg::NOTICE) << "Executed 100 frames in " << dur << " seconds : ~" << fps << " fps" << std::endl;
fps_counter = 0;
}
}
return err;
}
// Create callbacks
void QuicktimeLiveImageStream::createSequenceGrabberVideoBottlenecks()
{
OSErr err = noErr;
// set the value of a reference constant that is passed to the callback functions
err = SGSetChannelRefCon(m_gVideoChannel, (long)this);
if (err == noErr)
{
VideoBottles vb;
// get the current bottlenecks
vb.procCount = 9;
err = SGGetVideoBottlenecks(m_gVideoChannel, &vb);
if (err == noErr)
{
// add our GrabFrameComplete function
vb.grabCompleteProc = NewSGGrabCompleteBottleUPP(GrabFrameCompleteProc);
err = SGSetVideoBottlenecks(m_gVideoChannel, &vb);
}
}
}
// 2.
// CreateAndRunWithVideoDigitizer
void QuicktimeLiveImageStream::createAndRunWithVideoDigitizer(std::string fileName)
{
std::string::size_type idx = fileName.find(':');
if (idx == std::string::npos)
{
osg::notify(osg::FATAL) << "Error while parsing deviceID:deviceInputID.live path : " << fileName << std::endl;
}
// Cleaner C++ would be to parse this with std::istringstream (see the sketch in createAndRunWithSequenceGrabber)
std::string deviceIDStr = fileName.substr(0,idx);
std::string deviceInputIDStr = fileName.substr(idx+1);
m_videoDeviceID = static_cast<short>(atoi(deviceIDStr.c_str()));
m_videoDeviceInputID = static_cast<short>(atoi(deviceInputIDStr.c_str()));
// Get Video Digitizer Rectangle bounds from a Sequence Grabber proxy (using IDs)
get_video_device_bounds_idstr(m_videoDeviceID, m_videoDeviceInputID, m_videoRectWidth, m_videoRectHeight, m_videoDeviceIDStr);
// Create the Image
createImage();
// Create the offscreen GWorld (using Image as target memory)
createGWorld();
// Create the Sequence Grabber (using GWorld as target memory)
createVideoDigitizer();
// Go
_status = ImageStream::PLAYING;
VideoDigitizerError error = VDSetPlayThruOnOff(m_vdig, vdPlayThruOn);
if (error != noErr)
osg::notify(osg::FATAL) << "VDSetPlayThruOnOff : error" << std::endl;
// Ticker
start();
}
// 2.
// Create the Video Digitizer (using GWorld Pixmap as target memory)
void QuicktimeLiveImageStream::createVideoDigitizer()
{
// #define videoDigitizerComponentType = 'vdig'
ComponentDescription video_component_description;
video_component_description.componentType = 'vdig'; /* A unique 4-byte code identifying the command set */
video_component_description.componentSubType = 0; /* Particular flavor of this instance */
video_component_description.componentManufacturer = 0; /* Vendor identification */
video_component_description.componentFlags = 0; /* 8 each for Component,Type,SubType,Manuf/revision */
video_component_description.componentFlagsMask = 0; /* Mask for specifying which flags to consider in search, zero during registration */
long num_video_components = CountComponents (&video_component_description);
osg::notify(osg::DEBUG_INFO) << " available Video DigitizerComponents : " << num_video_components << std::endl;
if (num_video_components)
{
Component aComponent = 0;
short aDeviceID = 0;
do
{
ComponentDescription full_video_component_description = video_component_description;
aComponent = FindNextComponent(aComponent, &full_video_component_description);
if (aComponent && (aDeviceID == m_videoDeviceID))
{
osg::notify(osg::DEBUG_INFO) << "Component" << std::endl;
OSErr err;
Handle compName = NewHandle(256);
Handle compInfo = NewHandle(256);
err = GetComponentInfo( aComponent, &full_video_component_description, compName,compInfo,0);
osg::notify(osg::DEBUG_INFO) << " Name: " << pstr_printable((StringPtr)*compName) << std::endl;
osg::notify(osg::DEBUG_INFO) << " Desc: " << pstr_printable((StringPtr)*compInfo) << std::endl;
//Capabilities
VideoDigitizerComponent component_instance = OpenComponent(aComponent);
m_vdig = component_instance;
//Setup
// Onscreen
// Check capability and setting of Sequence Grabber
GDHandle origDevice;
CGrafPtr origPort;
GetGWorld (&origPort, &origDevice);
VideoDigitizerError error;
Rect destinationBounds;
destinationBounds.left = 0;
destinationBounds.top = 0;
destinationBounds.right = m_videoRectWidth;
destinationBounds.bottom = m_videoRectHeight;
error = VDSetPlayThruDestination(m_vdig, m_pixmap, &destinationBounds, 0, 0);
//error = VDSetPlayThruGlobalRect(m_vdig, (GrafPtr)origPort, &destinationBounds);
if (error != noErr)
osg::notify(osg::FATAL) << "VDSetPlayThruDestination : error" << std::endl;
print_video_component_capability(component_instance);
break;
}
++aDeviceID;
}
while (0 != aComponent);
}
}
// Thread run method
void QuicktimeLiveImageStream::run()
{
ComponentResult result = noErr;
bool done = false;
//memset( data(), 255, 720*250*4);
while (!done)
{
// Do some funky rotational memset
// void * memset ( void * ptr, int value, size_t num );
//memset
// dirty();
if (g_s_use_sg)
{
result = SGIdle(m_gSeqGrabber);
if (result != noErr)
osg::notify(osg::FATAL) << "SGIdle : error" << std::endl;
}
//OpenThreads::Thread::microSleep(250000); // ~4 polls per second (1,000,000 us = 1 poll per second)
//OpenThreads::Thread::microSleep(50000); // ~20 polls per second
//OpenThreads::Thread::microSleep(25000); // ~40 polls per second
// Ridiculous
OpenThreads::Thread::microSleep(10000); // ~100 polls per second
}
}

View File

@ -15,12 +15,21 @@
#include <stdlib.h>
#include <string.h>
#ifndef __APPLE__
#include "Components.h"
#include "QuickTimeComponents.h"
#else
#include <Quicktime/Quicktime.h>
#endif
#ifndef SEEK_SET
# define SEEK_SET 0
#endif
#include "QTUtils.h"
#include "QTLiveUtils.h"
#include "QTtexture.h"
#include "QuicktimeImageStream.h"
#include "QuicktimeLiveImageStream.h"
using namespace osg;
@ -34,8 +43,12 @@ class QuicktimeExitObserver : public osg::Observer
{
public:
QuicktimeExitObserver () : _instanceCount(0){}
virtual ~QuicktimeExitObserver(){};
QuicktimeExitObserver () : _instanceCount(0)
{
}
virtual ~QuicktimeExitObserver()
{
};
void addMedia(Image* ptr)
{
@ -59,6 +72,14 @@ private:
class ReaderWriterQT : public osgDB::ReaderWriter
{
public:
ReaderWriterQT()
{
}
~ReaderWriterQT()
{
}
virtual const char* className() const { return "Default Quicktime Image Reader/Writer"; }
virtual bool acceptsMovieExtension(const std::string& extension) const
@ -74,6 +95,11 @@ public:
osgDB::equalCaseInsensitive(extension,"swf");
}
virtual bool acceptsLiveExtension(const std::string& extension) const
{
return osgDB::equalCaseInsensitive(extension,"live");
}
virtual bool acceptsExtension(const std::string& extension) const
{
// this should be the only image importer required on the Mac
@ -95,7 +121,8 @@ public:
osgDB::equalCaseInsensitive(extension,"psd") ||
#endif
acceptsMovieExtension(extension);
acceptsMovieExtension(extension) ||
acceptsLiveExtension(extension);
}
virtual ReadResult readImage(const std::string& file, const osgDB::ReaderWriter::Options* options) const
@ -108,6 +135,89 @@ public:
if (!acceptsExtension(ext)) return ReadResult::FILE_NOT_HANDLED;
// if the file name is a ".live" encoded video device string then load it as an ImageStream
if (acceptsLiveExtension(ext))
{
long num_video_components;
{
// Begin QuickTime
QTScopedQTMLInitialiser qt_init;
QTScopedMovieInitialiser qt_movie_init;
//
ComponentDescription video_component_description;
video_component_description.componentType = 'vdig'; /* A unique 4-byte code identifying the command set */
video_component_description.componentSubType = 0L; /* Particular flavor of this instance */
video_component_description.componentManufacturer = 0L; /* Vendor identification */
video_component_description.componentFlags = 0L; /* 8 each for Component,Type,SubType,Manuf/revision */
video_component_description.componentFlagsMask = 0L; /* Mask for specifying which flags to consider in search, zero during registration */
num_video_components = CountComponents (&video_component_description);
}
if (osgDB::getNameLessExtension(file) == "devices")
{
osg::notify(osg::ALWAYS) << " available Video DigitizerComponents : " << num_video_components << std::endl;
if (num_video_components)
{
// Probe Video Dig
probe_video_digitizer_components();
// Probe SG
std::vector<OSG_SGDeviceList> devices_list = probe_sequence_grabber_components();
if (devices_list.size())
{
// Video
OSG_SGDeviceList& video_device_list = devices_list[0];
// Print
osg::notify(osg::ALWAYS) << std::endl;
osg::notify(osg::ALWAYS) << "Video Component/Input IDs follow: " << std::endl;
osg::notify(osg::ALWAYS) << std::endl;
for (int device_input = 0; device_input < video_device_list.size(); ++device_input)
{
OSG_SGDevicePair device_pair = video_device_list[device_input];
osg::notify(osg::ALWAYS) << device_pair.first.c_str() << " " << device_pair.second.c_str() << std::endl;
}
}
if (devices_list.size() > 1)
{
// Audio
OSG_SGDeviceList& audio_device_list = devices_list[1];
// Print
osg::notify(osg::ALWAYS) << std::endl;
osg::notify(osg::ALWAYS) << "Audio Component/Input IDs follow: " << std::endl;
osg::notify(osg::ALWAYS) << std::endl;
for (int device_input = 0; device_input < audio_device_list.size(); ++device_input)
{
OSG_SGDevicePair device_pair = audio_device_list[device_input];
osg::notify(osg::ALWAYS) << device_pair.first.c_str() << " " << device_pair.second.c_str() << std::endl;
}
}
}
return ReadResult::FILE_NOT_HANDLED;
}
else
{
osg::notify(osg::DEBUG_INFO) << " available Video DigitizerComponents : " << num_video_components << std::endl;
if (num_video_components)
{
// Note from Riccardo Corsi
// Quicktime initialization is done here, when a media is found
// and before any image or movie is loaded.
// After the first call the function does nothing.
// The cleaning up is left to the QuicktimeExitObserver (see below)
initQuicktime();
//
QuicktimeLiveImageStream* p_qt_image_stream = new QuicktimeLiveImageStream(osgDB::getNameLessExtension(file));
// add the media to the observer for proper clean up on exit
_qtExitObserver.addMedia(p_qt_image_stream);
return p_qt_image_stream;
}
else
{
osg::notify(osg::DEBUG_INFO) << "No available Video DigitizerComponents : " << std::endl;
return ReadResult::FILE_NOT_HANDLED;
}
}
}
// Not an encoded "live" pseudo file - so check that a real file exists
std::string fileName = osgDB::findDataFile( file, options);
if (fileName.empty()) return ReadResult::FILE_NOT_FOUND;
@ -118,6 +228,7 @@ public:
// The cleaning up is left to the QuicktimeExitObserver (see below)
initQuicktime();
// if the file is a movie file then load as an ImageStream.
if (acceptsMovieExtension(ext))
{