Channel: Media Foundation Development for Windows Desktop forum

Problem with MFT+Media Session


I took the Media Session example from http://msdn.microsoft.com/en-us/library/windows/desktop/ff728866(v=vs.85).aspx. To edit each frame I am using an MFT. But if I just create the MFT without linking it into the Media Session topology, the program begins playback and immediately ends it. Look at AddBranchToPartialTopology and CreateMFTransform below.
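For orientation, this is roughly how a transform node would normally be wired into such a branch (source node → transform node → output node). A sketch only: it assumes a variant of AddMFTNode that returns the node it creates, which the version further down does not do.

	// Sketch: wiring the MFT between the source and the renderer. Assumes
	// AddMFTNode is modified to return the created node (pMFTNode).
	IMFTopologyNode *pMFTNode = NULL;
	hr = AddMFTNode(pTopology, pDecoder, &pMFTNode);     // hypothetical signature
	if (SUCCEEDED(hr))
	{
		hr = pSourceNode->ConnectOutput(0, pMFTNode, 0); // source -> transform
	}
	if (SUCCEEDED(hr))
	{
		hr = pMFTNode->ConnectOutput(0, pOutputNode, 0); // transform -> renderer
	}
	SafeRelease(&pMFTNode);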

#include "Player.h"
#include "MediaType.h"
#include <assert.h>

void ErrorMessage(HRESULT hr)
{
	LPVOID lpMsgBuf;
	FormatMessage( 
		FORMAT_MESSAGE_ALLOCATE_BUFFER | 
		FORMAT_MESSAGE_FROM_SYSTEM | 
		FORMAT_MESSAGE_IGNORE_INSERTS,
		NULL,
		hr,
		0, // Default language
		(LPTSTR) &lpMsgBuf,
		0,
		NULL 
	);
	// Process any inserts in lpMsgBuf.
	// ...
	// Display the string.
	MessageBox( NULL, (LPCTSTR)lpMsgBuf, L"Error", MB_OK | MB_ICONINFORMATION );
	// Free the buffer.
	LocalFree( lpMsgBuf );
}


#pragma comment(lib, "shlwapi")

template <class Q>
HRESULT GetEventObject(IMFMediaEvent *pEvent, Q **ppObject)
{
	*ppObject = NULL;   // zero output

	PROPVARIANT var;
	HRESULT hr = pEvent->GetValue(&var);
	if (SUCCEEDED(hr))
	{
		if (var.vt == VT_UNKNOWN)
		{
			hr = var.punkVal->QueryInterface(ppObject);
		}
		else
		{
			hr = MF_E_INVALIDTYPE;
		}
		PropVariantClear(&var);
	}
	return hr;
}

HRESULT CreateMediaSource(PCWSTR pszURL, IMFMediaSource **ppSource);

HRESULT CreatePlaybackTopology(IMFMediaSource *pSource, 
							   IMFPresentationDescriptor *pPD, HWND hVideoWnd,IMFTopology **ppTopology);

//  Static class method to create the CPlayer object.

HRESULT CPlayer::CreateInstance(
	HWND hVideo,                  // Video window.
	HWND hEvent,                  // Window to receive notifications.
	CPlayer **ppPlayer)           // Receives a pointer to the CPlayer object.
{
	if (ppPlayer == NULL)
	{
		return E_POINTER;
	}

	CPlayer *pPlayer = new (std::nothrow) CPlayer(hVideo, hEvent);
	if (pPlayer == NULL)
	{
		return E_OUTOFMEMORY;
	}

	HRESULT hr = pPlayer->Initialize();
	if (SUCCEEDED(hr))
	{
		*ppPlayer = pPlayer;
	}
	else
	{
		pPlayer->Release();
	}
	return hr;
}

HRESULT CPlayer::Initialize()
{
	// Start up Media Foundation platform.
	HRESULT hr = MFStartup(MF_VERSION);
	if (SUCCEEDED(hr))
	{
		m_hCloseEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
		if (m_hCloseEvent == NULL)
		{
			hr = HRESULT_FROM_WIN32(GetLastError());
		}
	}
	return hr;
}

CPlayer::CPlayer(HWND hVideo, HWND hEvent) : 
	m_pSession(NULL),
	m_pSource(NULL),
	m_pVideoDisplay(NULL),
	m_hwndVideo(hVideo),
	m_hwndEvent(hEvent),
	m_state(Closed),
	m_hCloseEvent(NULL),
	m_nRefCount(1)
{
}

CPlayer::~CPlayer()
{
	assert(m_pSession == NULL);  
	// If FALSE, the app did not call Shutdown().

	// When CPlayer calls IMediaEventGenerator::BeginGetEvent on the
	// media session, it causes the media session to hold a reference 
	// count on the CPlayer. 

	// This creates a circular reference count between CPlayer and the 
	// media session. Calling Shutdown breaks the circular reference 
	// count.

	// If CreateInstance fails, the application will not call 
	// Shutdown. To handle that case, call Shutdown in the destructor. 

	Shutdown();
}

// IUnknown methods

HRESULT CPlayer::QueryInterface(REFIID riid, void** ppv)
{
	static const QITAB qit[] = 
	{
		QITABENT(CPlayer, IMFAsyncCallback),
		{ 0 }
	};
	return QISearch(this, qit, riid, ppv);
}

ULONG CPlayer::AddRef()
{
	return InterlockedIncrement(&m_nRefCount);
}

ULONG CPlayer::Release()
{
	ULONG uCount = InterlockedDecrement(&m_nRefCount);
	if (uCount == 0)
	{
		delete this;
	}
	return uCount;
}

//  Open a URL for playback.
HRESULT CPlayer::OpenURL(const WCHAR *sURL)
{
	// 1. Create a new media session.
	// 2. Create the media source.
	// 3. Create the topology.
	// 4. Queue the topology [asynchronous]
	// 5. Start playback [asynchronous - does not happen in this method.]

	IMFTopology *pTopology = NULL;
	IMFPresentationDescriptor* pSourcePD = NULL;

	// Create the media session.
	HRESULT hr = CreateSession();
	if (FAILED(hr))
	{
		goto done;
	}

	// Create the media source.
	hr = CreateMediaSource(sURL, &m_pSource);
	if (FAILED(hr))
	{
		goto done;
	}

	// Create the presentation descriptor for the media source.
	hr = m_pSource->CreatePresentationDescriptor(&pSourcePD);
	if (FAILED(hr))
	{
		goto done;
	}

	// Create a partial topology.
	hr = CreatePlaybackTopology(m_pSource, pSourcePD, m_hwndVideo, &pTopology);
	if (FAILED(hr))
	{
		goto done;
	}

	// Set the topology on the media session.
	hr = m_pSession->SetTopology(0, pTopology);
	if (FAILED(hr))
	{
		goto done;
	}

	m_state = OpenPending;

	// If SetTopology succeeds, the media session will queue an 
	// MESessionTopologySet event.

done:
	if (FAILED(hr))
	{
		m_state = Closed;
	}

	SafeRelease(&pSourcePD);
	SafeRelease(&pTopology);
	return hr;
}

//  Pause playback.
HRESULT CPlayer::Pause()    
{
	if (m_state != Started)
	{
		return MF_E_INVALIDREQUEST;
	}
	if (m_pSession == NULL || m_pSource == NULL)
	{
		return E_UNEXPECTED;
	}

	HRESULT hr = m_pSession->Pause();
	if (SUCCEEDED(hr))
	{
		m_state = Paused;
	}

	return hr;
}

// Stop playback.
HRESULT CPlayer::Stop()
{
	if (m_state != Started && m_state != Paused)
	{
		return MF_E_INVALIDREQUEST;
	}
	if (m_pSession == NULL)
	{
		return E_UNEXPECTED;
	}

	HRESULT hr = m_pSession->Stop();
	if (SUCCEEDED(hr))
	{
		m_state = Stopped;
	}
	return hr;
}

//  Repaint the video window. Call this method on WM_PAINT.

HRESULT CPlayer::Repaint()
{
	if (m_pVideoDisplay)
	{
		return m_pVideoDisplay->RepaintVideo();
	}
	else
	{
		return S_OK;
	}
}

//  Resize the video rectangle.
//
//  Call this method if the size of the video window changes.

HRESULT CPlayer::ResizeVideo(WORD width, WORD height)
{
	if (m_pVideoDisplay)
	{
		// Set the destination rectangle.
		// Leave the default source rectangle (0,0,1,1).

		RECT rcDest = { 0, 0, width, height };

		return m_pVideoDisplay->SetVideoPosition(NULL, &rcDest);
	}
	else
	{
		return S_OK;
	}
}

//  Callback for the asynchronous BeginGetEvent method.

HRESULT CPlayer::Invoke(IMFAsyncResult *pResult)
{
	MediaEventType meType = MEUnknown;  // Event type

	IMFMediaEvent *pEvent = NULL;

	// Get the event from the event queue.
	HRESULT hr = m_pSession->EndGetEvent(pResult, &pEvent);
	if (FAILED(hr))
	{
		goto done;
	}

	// Get the event type. 
	hr = pEvent->GetType(&meType);
	if (FAILED(hr))
	{
		goto done;
	}

	// Get the event status. If the operation that triggered the event 
	// did not succeed, the status is a failure code.
	HRESULT hrStatus = S_OK;  
	hr = pEvent->GetStatus(&hrStatus);

	// Check if the async operation succeeded.
	if (SUCCEEDED(hr) && FAILED(hrStatus)) 
	{
		hr = hrStatus;
	}
	if (FAILED(hr))
	{
		goto done;
	}

	switch(meType)
	{
	case MESessionTopologyStatus:
		hr = OnTopologyStatus(pEvent);
		break;

	case MEEndOfPresentation:
		hr = OnPresentationEnded(pEvent);
		break;

	case MENewPresentation:
		hr = OnNewPresentation(pEvent);
		break;

	default:
		hr = OnSessionEvent(pEvent, meType);
		break;
	}

	if (meType == MESessionClosed)
	{
		// The session was closed. 
		// The application is waiting on the m_hCloseEvent event handle. 
		SetEvent(m_hCloseEvent);
	}
	else
	{
		// For all other events, get the next event in the queue.
		hr = m_pSession->BeginGetEvent(this, NULL);
		if (FAILED(hr))
		{
			goto done;
		}
	}

	// Check the application state. 

	// If a call to IMFMediaSession::Close is pending, it means the 
	// application is waiting on the m_hCloseEvent event and
	// the application's message loop is blocked. 

	// Otherwise, post a private window message to the application. 

	if (m_state != Closing)
	{
		// Leave a reference count on the event.
		pEvent->AddRef();

		PostMessage(m_hwndEvent, WM_APP_PLAYER_EVENT, 
			(WPARAM)pEvent, (LPARAM)meType);
	}

done:
	SafeRelease(&pEvent);
	return S_OK;
}

HRESULT CPlayer::HandleEvent(UINT_PTR pEventPtr)
{
	HRESULT hrStatus = S_OK;            
	MediaEventType meType = MEUnknown;  

	IMFMediaEvent *pEvent = (IMFMediaEvent*)pEventPtr;

	if (pEvent == NULL)
	{
		return E_POINTER;
	}

	// Get the event type.
	HRESULT hr = pEvent->GetType(&meType);
	if (FAILED(hr))
	{
		goto done;
	}

	// Get the event status. If the operation that triggered the event 
	// did not succeed, the status is a failure code.
	hr = pEvent->GetStatus(&hrStatus);

	// Check if the async operation succeeded.
	if (SUCCEEDED(hr) && FAILED(hrStatus)) 
	{
		hr = hrStatus;
	}
	if (FAILED(hr))
	{
		goto done;
	}

	switch(meType)
	{
	case MESessionTopologyStatus:
		hr = OnTopologyStatus(pEvent);
		break;

	case MEEndOfPresentation:
		hr = OnPresentationEnded(pEvent);
		break;

	case MENewPresentation:
		hr = OnNewPresentation(pEvent);
		break;

	default:
		hr = OnSessionEvent(pEvent, meType);
		break;
	}

done:
	SafeRelease(&pEvent);
	return hr;
}

//  Release all resources held by this object.
HRESULT CPlayer::Shutdown()
{
	// Close the session
	HRESULT hr = CloseSession();

	// Shutdown the Media Foundation platform
	MFShutdown();

	if (m_hCloseEvent)
	{
		CloseHandle(m_hCloseEvent);
		m_hCloseEvent = NULL;
	}

	return hr;
}

/// Protected methods

HRESULT CPlayer::OnTopologyStatus(IMFMediaEvent *pEvent)
{
	UINT32 status; 

	HRESULT hr = pEvent->GetUINT32(MF_EVENT_TOPOLOGY_STATUS, &status);
	if (SUCCEEDED(hr) && (status == MF_TOPOSTATUS_READY))
	{
		SafeRelease(&m_pVideoDisplay);

		// Get the IMFVideoDisplayControl interface from EVR. This call is
		// expected to fail if the media file does not have a video stream.

		(void)MFGetService(m_pSession, MR_VIDEO_RENDER_SERVICE, 
			IID_PPV_ARGS(&m_pVideoDisplay));

		hr = StartPlayback();
	}
	return hr;
}


//  Handler for MEEndOfPresentation event.
HRESULT CPlayer::OnPresentationEnded(IMFMediaEvent *pEvent)
{
	// The session puts itself into the stopped state automatically.
	m_state = Stopped;
	return S_OK;
}

//  Handler for MENewPresentation event.
//
//  This event is sent if the media source has a new presentation, which 
//  requires a new topology. 

HRESULT CPlayer::OnNewPresentation(IMFMediaEvent *pEvent)
{
	IMFPresentationDescriptor *pPD = NULL;
	IMFTopology *pTopology = NULL;

	// Get the presentation descriptor from the event.
	HRESULT hr = GetEventObject(pEvent, &pPD);
	if (FAILED(hr))
	{
		goto done;
	}

	// Create a partial topology.
	hr = CreatePlaybackTopology(m_pSource, pPD,  m_hwndVideo,&pTopology);
	if (FAILED(hr))
	{
		goto done;
	}

	// Set the topology on the media session.
	hr = m_pSession->SetTopology(0, pTopology);
	if (FAILED(hr))
	{
		goto done;
	}

	m_state = OpenPending;

done:
	SafeRelease(&pTopology);
	SafeRelease(&pPD);
	return S_OK;
}

//  Create a new instance of the media session.
HRESULT CPlayer::CreateSession()
{
	// Close the old session, if any.
	HRESULT hr = CloseSession();
	if (FAILED(hr))
	{
		goto done;
	}

	assert(m_state == Closed);

	// Create the media session.
	hr = MFCreateMediaSession(NULL, &m_pSession);
	if (FAILED(hr))
	{
		goto done;
	}

	// Start pulling events from the media session
	hr = m_pSession->BeginGetEvent((IMFAsyncCallback*)this, NULL);
	if (FAILED(hr))
	{
		goto done;
	}

	m_state = Ready;

done:
	return hr;
}

//  Close the media session. 
HRESULT CPlayer::CloseSession()
{
	//  The IMFMediaSession::Close method is asynchronous, but the 
	//  CPlayer::CloseSession method waits on the MESessionClosed event.
	//  
	//  MESessionClosed is guaranteed to be the last event that the 
	//  media session fires.

	HRESULT hr = S_OK;

	SafeRelease(&m_pVideoDisplay);

	// First close the media session.
	if (m_pSession)
	{
		DWORD dwWaitResult = 0;

		m_state = Closing;

		hr = m_pSession->Close();
		// Wait for the close operation to complete
		if (SUCCEEDED(hr))
		{
			dwWaitResult = WaitForSingleObject(m_hCloseEvent, 5000);
			if (dwWaitResult == WAIT_TIMEOUT)
			{
				assert(FALSE);
			}
			// Now there will be no more events from this session.
		}
	}

	// Complete shutdown operations.
	if (SUCCEEDED(hr))
	{
		// Shut down the media source. (Synchronous operation, no events.)
		if (m_pSource)
		{
			(void)m_pSource->Shutdown();
		}
		// Shut down the media session. (Synchronous operation, no events.)
		if (m_pSession)
		{
			(void)m_pSession->Shutdown();
		}
	}

	SafeRelease(&m_pSource);
	SafeRelease(&m_pSession);
	m_state = Closed;
	return hr;
}

//  Start playback from the current position. 
HRESULT CPlayer::StartPlayback()
{
	assert(m_pSession != NULL);

	PROPVARIANT varStart;
	PropVariantInit(&varStart);

	HRESULT hr = m_pSession->Start(&GUID_NULL, &varStart);
	if (SUCCEEDED(hr))
	{
		// Note: Start is an asynchronous operation. However, we
		// can treat our state as being already started. If Start
		// fails later, we'll get an MESessionStarted event with
		// an error code, and we will update our state then.
		m_state = Started;
	}
	PropVariantClear(&varStart);
	return hr;
}

//  Start playback from paused or stopped.
HRESULT CPlayer::Play()
{
	if (m_state != Paused && m_state != Stopped)
	{
		return MF_E_INVALIDREQUEST;
	}
	if (m_pSession == NULL || m_pSource == NULL)
	{
		return E_UNEXPECTED;
	}
	return StartPlayback();
}


//  Create a media source from a URL.
HRESULT CreateMediaSource(PCWSTR sURL, IMFMediaSource **ppSource)
{
	MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;

	IMFSourceResolver* pSourceResolver = NULL;
	IUnknown* pSource = NULL;

	// Create the source resolver.
	HRESULT hr = MFCreateSourceResolver(&pSourceResolver);
	if (FAILED(hr))
	{
		goto done;
	}

	// Use the source resolver to create the media source.

	// Note: For simplicity this sample uses the synchronous method to create 
	// the media source. However, creating a media source can take a noticeable
	// amount of time, especially for a network source. For a more responsive 
	// UI, use the asynchronous BeginCreateObjectFromURL method.

	hr = pSourceResolver->CreateObjectFromURL(
		sURL,                       // URL of the source.
		MF_RESOLUTION_MEDIASOURCE,  // Create a source object.
		NULL,                       // Optional property store.
		&ObjectType,                // Receives the created object type.
		&pSource                    // Receives a pointer to the media source.
		);
	if (FAILED(hr))
	{
		goto done;
	}

	// Get the IMFMediaSource interface from the media source.
	hr = pSource->QueryInterface(IID_PPV_ARGS(ppSource));

done:
	SafeRelease(&pSourceResolver);
	SafeRelease(&pSource);
	return hr;
}

//  Create an activation object for a renderer, based on the stream media type.

HRESULT CreateMediaSinkActivate(
	IMFStreamDescriptor *pSourceSD,     // Pointer to the stream descriptor.
	HWND hVideoWindow,                  // Handle to the video clipping window.
	IMFActivate **ppActivate
	)
{
	IMFMediaTypeHandler *pHandler = NULL;
	IMFActivate *pActivate = NULL;

	// Get the media type handler for the stream.
	HRESULT hr = pSourceSD->GetMediaTypeHandler(&pHandler);
	if (FAILED(hr))
	{
		goto done;
	}

	// Get the major media type.
	GUID guidMajorType;
	hr = pHandler->GetMajorType(&guidMajorType);
	if (FAILED(hr))
	{
		goto done;
	}

	// Create an IMFActivate object for the renderer, based on the media type.
	if (MFMediaType_Audio == guidMajorType)
	{
		// Create the audio renderer.
		hr = MFCreateAudioRendererActivate(&pActivate);
	}
	else if (MFMediaType_Video == guidMajorType)
	{
		// Create the video renderer.
		hr = MFCreateVideoRendererActivate(hVideoWindow, &pActivate);
	}
	else
	{
		// Unknown stream type. 
		hr = E_FAIL;
		// Optionally, you could deselect this stream instead of failing.
	}
	if (FAILED(hr))
	{
		goto done;
	}

	// Return IMFActivate pointer to caller.
	*ppActivate = pActivate;
	(*ppActivate)->AddRef();

done:
	SafeRelease(&pHandler);
	SafeRelease(&pActivate);
	return hr;
}

HRESULT CreateMFTransform(
	IMFStreamDescriptor *pSD,
	IMFTransform **ppDecoder
	)	
{
	HRESULT hr = S_OK;
	UINT32 count = 0;

	IMFActivate **ppActivate = NULL;

	MFT_REGISTER_TYPE_INFO inInfo = { 0 };

	IMFMediaTypeHandler *pTH = NULL;
	IMFMediaType *pMT = NULL;
	GUID subtype;
	hr = pSD->GetMediaTypeHandler(&pTH);
	if (SUCCEEDED(hr))
	{
		hr = pTH->GetCurrentMediaType(&pMT);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMT->GetGUID(MF_MT_SUBTYPE, &subtype);
	}
	if (FAILED(hr))
	{
		SafeRelease(&pTH);
		SafeRelease(&pMT);
		return hr;
	}
	LogMediaType(pMT);
	SafeRelease(&pTH);

	inInfo.guidMajorType = MFMediaType_Video;
	inInfo.guidSubtype = subtype;


	//UINT32 unFlags =	MFT_ENUM_FLAG_SYNCMFT  | 
	//					MFT_ENUM_FLAG_LOCALMFT | 
	//					MFT_ENUM_FLAG_SORTANDFILTER;
	UINT32 unFlags =	MFT_ENUM_FLAG_SYNCMFT | 
						MFT_ENUM_FLAG_LOCALMFT | 
						MFT_ENUM_FLAG_SORTANDFILTER |
						MFT_ENUM_FLAG_HARDWARE;
	hr = MFTEnumEx(
		MFT_CATEGORY_VIDEO_EFFECT,
		//MFT_CATEGORY_VIDEO_DECODER,
		unFlags,
		&inInfo,
		NULL,           // Output type (any).
		&ppActivate,
		&count
		);

	if (SUCCEEDED(hr) && count == 0)
	{
		hr = MF_E_TOPO_CODEC_NOT_FOUND;
	}

	IMFTransform *pMFTransform = NULL;
	if (SUCCEEDED(hr))
	{
		hr = ppActivate[0]->ActivateObject(IID_PPV_ARGS(&pMFTransform));
	}
	// Set the input type, then request YUY2 output. Each call is guarded so
	// that a NULL transform is never dereferenced if activation failed.
	if (SUCCEEDED(hr))
	{
		hr = pMFTransform->SetInputType(0, pMT, NULL);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMT->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMT->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMFTransform->SetOutputType(0, pMT, NULL);
	}

	//DWORD inMin, inMax, outMin, outMax,
	//	inCount, outCount;
	//	
	//hr = pMFTransform->GetStreamLimits(&inMin, &inMax, &outMin, &outMax);
	//hr = pMFTransform->GetStreamCount(&inCount, &outCount);
	//DWORD *inIDs = new DWORD[inCount], *outIDs = new DWORD[outCount];
	//hr = pMFTransform->GetStreamIDs(inCount, inIDs, outCount, outIDs);
	//
	//IMFMediaType *MFMT = NULL; 
	//int k = 0;
	//hr = S_OK;
	//while (SUCCEEDED(hr))
	//{
	//	hr = pMFTransform->GetOutputAvailableType(0, k, &MFMT);
	//	if(SUCCEEDED(hr))
	//		LogMediaType(MFMT);
	//	SafeRelease(&MFMT);
	//	k++;
	//}
	//k = 0;
	//hr = S_OK;
	//while (SUCCEEDED(hr))
	//{
	//	hr = pMFTransform->GetInputAvailableType(0, k, &MFMT);
	//	if(SUCCEEDED(hr))
	//		LogMediaType(MFMT);
	//	SafeRelease(&MFMT);
	//	k++;
	//}
	//
	//_MFT_INPUT_STREAM_INFO inMFInfo, outMFInfo;
	//hr = pMFTransform->GetInputStreamInfo(0, &inMFInfo);
	//hr = pMFTransform->GetInputStreamInfo(0, &outMFInfo);
	//
	//hr = pMFTransform->GetInputCurrentType(0, &pMT);
	//if(SUCCEEDED(hr))
	//	LogMediaType(pMT);
	//SafeRelease(&pMT);
	//hr = pMFTransform->GetOutputCurrentType(0, &pMT);
	//if(SUCCEEDED(hr))
	//	LogMediaType(pMT);
	//SafeRelease(&pMT);

	for (UINT32 i = 0; i < count; i++)
	{
		ppActivate[i]->Release();
	}
	CoTaskMemFree(ppActivate);
	SafeRelease(&pMT);

	if (SUCCEEDED(hr))
	{
		*ppDecoder = pMFTransform;
	}
	else
	{
		SafeRelease(&pMFTransform);
	}
	return hr;
}

// Add a source node to a topology.
HRESULT AddSourceNode(
	IMFTopology *pTopology,           // Topology.
	IMFMediaSource *pSource,          // Media source.
	IMFPresentationDescriptor *pPD,   // Presentation descriptor.
	IMFStreamDescriptor *pSD,         // Stream descriptor.
	IMFTopologyNode **ppNode)         // Receives the node pointer.
{
	IMFTopologyNode *pNode = NULL;

	// Create the node.
	HRESULT hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, &pNode);
	if (FAILED(hr))
	{
		goto done;
	}

	// Set the attributes.
	hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource);
	if (FAILED(hr))
	{
		goto done;
	}

	hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD);
	if (FAILED(hr))
	{
		goto done;
	}

	hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD);
	if (FAILED(hr))
	{
		goto done;
	}

	// Add the node to the topology.
	hr = pTopology->AddNode(pNode);
	if (FAILED(hr))
	{
		goto done;
	}

	// Return the pointer to the caller.
	*ppNode = pNode;
	(*ppNode)->AddRef();

done:
	SafeRelease(&pNode);
	return hr;
}

// Add an output node to a topology.
HRESULT AddOutputNode(
	IMFTopology *pTopology,     // Topology.
	IMFActivate *pActivate,     // Media sink activation object.
	DWORD dwId,                 // Identifier of the stream sink.
	IMFTopologyNode **ppNode)   // Receives the node pointer.
{
	IMFTopologyNode *pNode = NULL;

	// Create the node.
	HRESULT hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &pNode);
	if (FAILED(hr))
	{
		goto done;
	}

	// Set the object pointer.
	hr = pNode->SetObject(pActivate);
	if (FAILED(hr))
	{
		goto done;
	}

	// Set the stream sink ID attribute.
	hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId);
	if (FAILED(hr))
	{
		goto done;
	}

	hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE);
	if (FAILED(hr))
	{
		goto done;
	}

	// Add the node to the topology.
	hr = pTopology->AddNode(pNode);
	if (FAILED(hr))
	{
		goto done;
	}

	// Return the pointer to the caller.
	*ppNode = pNode;
	(*ppNode)->AddRef();

done:
	SafeRelease(&pNode);
	return hr;
}

HRESULT AddMFTNode(
	IMFTopology *pTopology,
	IMFTransform *pTransform
	)
{
	HRESULT hr = S_OK;

	IMFTopologyNode *pNode = NULL;

	// Create the node.
	hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &pNode);

	// Set the object pointer.
	if (SUCCEEDED(hr))
	{
		hr = pNode->SetObject(pTransform);
	}

	// Add the node to the topology.
	if (SUCCEEDED(hr))
	{
		hr = pTopology->AddNode(pNode);
	}

	// Release the local reference; the topology holds its own.
	SafeRelease(&pNode);
	return hr;
}

//  Add a topology branch for one stream.
//
//  For each stream, this function does the following:
//
//    1. Creates a source node associated with the stream. 
//    2. Creates an output node for the renderer. 
//    3. Connects the two nodes.
//
//  The media session will add any decoders that are needed.

HRESULT AddBranchToPartialTopology(
	IMFTopology *pTopology,         // Topology.
	IMFMediaSource *pSource,        // Media source.
	IMFPresentationDescriptor *pPD, // Presentation descriptor.
	DWORD iStream,                  // Stream index.
	HWND hVideoWnd)                 // Window for video playback.
{
	IMFStreamDescriptor *pSD = NULL;
	IMFActivate         *pSinkActivate = NULL;
	IMFTransform		*pDecoder = NULL;
	IMFTopologyNode     *pSourceNode = NULL;
	IMFTopologyNode     *pOutputNode = NULL;

	BOOL fSelected = FALSE;

	HRESULT hr = pPD->GetStreamDescriptorByIndex(iStream, &fSelected, &pSD);
	if (FAILED(hr))
	{
		goto done;
	}

	if (fSelected)
	{
		// Create the media sink activation object.
		hr = CreateMediaSinkActivate(pSD, hVideoWnd, &pSinkActivate);
		if (FAILED(hr))
		{
			goto done;
		}

		// Add a source node for this stream.
		hr = AddSourceNode(pTopology, pSource, pPD, pSD, &pSourceNode);
		if (FAILED(hr))
		{
			goto done;
		}

		hr = CreateMFTransform(pSD, &pDecoder);
		if (FAILED(hr))
		{
			goto done;
		}
		// NOTE: This call is commented out, so the transform created above is
		// never added to the topology or connected into the branch.
		//hr = AddMFTNode(pTopology, pDecoder);
		if (FAILED(hr))
		{
			goto done;
		}

		// Create the output node for the renderer.
		hr = AddOutputNode(pTopology, pSinkActivate, 0, &pOutputNode);
		if (FAILED(hr))
		{
			goto done;
		}

		// Connect the source node to the output node.
		hr = pSourceNode->ConnectOutput(0, pOutputNode, 0);
	}
	// else: If not selected, don't add the branch. 

done:
	SafeRelease(&pSD);
	SafeRelease(&pSinkActivate);
	SafeRelease(&pDecoder);
	SafeRelease(&pSourceNode);
	SafeRelease(&pOutputNode);
	return hr;
}

//  Create a playback topology from a media source.
HRESULT CreatePlaybackTopology(
	IMFMediaSource *pSource,          // Media source.
	IMFPresentationDescriptor *pPD,   // Presentation descriptor.
	HWND hVideoWnd,                   // Video window.
	IMFTopology **ppTopology)         // Receives a pointer to the topology.
{
	IMFTopology *pTopology = NULL;
	DWORD cSourceStreams = 0;

	// Create a new topology.
	HRESULT hr = MFCreateTopology(&pTopology);
	if (FAILED(hr))
	{
		goto done;
	}

	// Get the number of streams in the media source.
	hr = pPD->GetStreamDescriptorCount(&cSourceStreams);
	if (FAILED(hr))
	{
		goto done;
	}

	// For each stream, create the topology nodes and add them to the topology.
	for (DWORD i = 0; i < cSourceStreams; i++)
	{
		hr = AddBranchToPartialTopology(pTopology, pSource, pPD, i, hVideoWnd);
		if (FAILED(hr))
		{
			goto done;
		}
	}

	// Return the IMFTopology pointer to the caller.
	*ppTopology = pTopology;
	(*ppTopology)->AddRef();

done:
	SafeRelease(&pTopology);
	return hr;
}

Where is the registry key for the default audio device in Windows 7 x64?


My original post, which I had submitted to The Scripting Guys because I am trying to finish a script, was declared as needing to be posted to another forum.

The second time I posted this question, I posted it in the Windows 7 Media forums... and was redirected here by a link provided in the only response. Hopefully I will finally find my answer here, in what seems to be a Vista forum... Please, I do not wish to be redirected again.

I need this for an AutoHotkey (AHK) script I am working on that will toggle between my two enabled audio playback and recording devices.

Going through the sound device GUI is tedious, as I swap devices multiple times a day.

Please remember, I need the Windows 7 x64 registry location for these keys. The answer posted in the thread "Where is the registry key for the default audio device in Windows 7" did not help me, as it only answers the question for 32-bit Windows 7.

I thought that it would be these locations below... but after swapping devices and refreshing the registry, the values never changed.

HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\Microsoft\Windows\CurrentVersion\MMDevices\DefaultDeviceHeuristics\Default\Role_0\Factor_1\Capture

HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\Microsoft\Windows\CurrentVersion\MMDevices\DefaultDeviceHeuristics\Default\Role_0\Factor_1\Render

If I were using 32-bit Windows, my AutoHotkey script would look something like this... Unfortunately, I am on 64-bit.

; Toggle Sound Devices
^+PgUp::
    RegRead, Device, HKEY_CURRENT_USER, Software\Microsoft\Multimedia\Sound Mapper, Playback
    if(Device = "Realtek HD Audio output")
    {
        RegWrite, REG_SZ, HKEY_CURRENT_USER, Software\Microsoft\Multimedia\Sound Mapper, Playback, Sound Blaster World of Warcraft Wireless Headset
        Device := "SoundBlaster"
    }
    else
    {
        RegWrite, REG_SZ, HKEY_CURRENT_USER, Software\Microsoft\Multimedia\Sound Mapper, Playback, Realtek HD Audio output
        Device := "Realtek"
    }
    ToolTip, % "Sound Device: " Device
    SetTimer, ResetToolTip, 1000
return


; Clear the ToolTip
ReSetToolTip:
    ToolTip
    SetTimer, ReSetToolTip, Off
return

Added later on in the day:

I just ran a program that reports changes to the registry by taking two snapshots... I took a snapshot with Realtek as my default device, and then a snapshot with my SoundBlaster wireless headset as the default device...

Nothing changed in the registry. This is quite frustrating.

Is there nowhere in the Windows 7 x64 registry where the default audio playback and recording devices are stored?

Is there a specific config file that is targeted by mmsys.cpl (the Sound control panel)?
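For what it's worth, the current default endpoint can be read through the documented MMDevice COM API instead of the registry. Below is a minimal C++ sketch (read-only; as far as I know there is no documented API or supported registry value for *setting* the default device, which would explain why no stable value appears to change):

#include <initguid.h>
#include <windows.h>
#include <mmdeviceapi.h>
#include <propsys.h>
#include <functiondiscoverykeys_devpkey.h>
#include <stdio.h>

int main()
{
	CoInitialize(NULL);

	IMMDeviceEnumerator *pEnum = NULL;
	HRESULT hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL,
		CLSCTX_ALL, __uuidof(IMMDeviceEnumerator), (void**)&pEnum);

	IMMDevice *pDevice = NULL;
	if (SUCCEEDED(hr))
	{
		// eRender = playback; use eCapture for recording devices.
		hr = pEnum->GetDefaultAudioEndpoint(eRender, eConsole, &pDevice);
	}

	IPropertyStore *pProps = NULL;
	if (SUCCEEDED(hr))
	{
		hr = pDevice->OpenPropertyStore(STGM_READ, &pProps);
	}

	PROPVARIANT varName;
	PropVariantInit(&varName);
	if (SUCCEEDED(hr))
	{
		hr = pProps->GetValue(PKEY_Device_FriendlyName, &varName);
	}
	if (SUCCEEDED(hr))
	{
		wprintf(L"Default playback device: %s\n", varName.pwszVal);
	}

	PropVariantClear(&varName);
	if (pProps) pProps->Release();
	if (pDevice) pDevice->Release();
	if (pEnum) pEnum->Release();
	CoUninitialize();
	return 0;
}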


Configure MediaSession Sample Buffering


I have a rather unusual topology where, by design, some of the sinks do not request samples when the session starts. A problem arises when data is teed. What I have found is that the media session buffers samples in anticipation of the sink starting to issue requests. Instead, those samples should be dropped, since the sink may not request a sample for quite some time and the buffered data is no longer relevant.

Is there a way to configure the session to buffer only the last, most recent sample? If not, what are my options? I'd rather not have to write my own MediaSession object, unless that is the only solution.

Furthermore, it is not an option to drop the samples in the sink. There are some very computationally expensive transforms in the topology and samples should only be processed if necessary.
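One possible direction, sketched only: insert a lightweight MFT directly after the tee on the lazy branch, ahead of the expensive transforms, implementing a "latest sample wins" policy so stale samples are released before any costly processing. A hypothetical fragment (CLatestSampleMFT, m_cs, and m_pSample are assumed names, not a real API):

	// Keep only the newest input sample; release anything still pending.
	HRESULT CLatestSampleMFT::ProcessInput(
		DWORD dwInputStreamID, IMFSample *pSample, DWORD dwFlags)
	{
		if (pSample == NULL)
		{
			return E_POINTER;
		}
		EnterCriticalSection(&m_cs);
		SafeRelease(&m_pSample);   // drop whatever was waiting
		m_pSample = pSample;
		m_pSample->AddRef();
		LeaveCriticalSection(&m_cs);
		return S_OK;
	}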

How to launch WMPDMC.Exe for streaming on a DLNA remote device (play to)


Hello,

In a Windows desktop app (not Modern UI):

how can I send the parameters of the device and the path of files to WMPDMC.exe in order to play a video/audio file to a remote device?

In Modern UI there is a class, 'Windows.Media.PlayTo' (Visual Studio 2012), but I need to implement streaming in a desktop (possibly WPF) app.

I tried to identify what parameters are sent when the 'Play To' context menu item is chosen from Explorer, but it looks like it's passing an int value...

I believe I should provide to it, in some way, the GUID of the DLNA device and the path of the files to simply start streaming.

Thanks in advance for any hint.

Marco


Marco Orlandi EXO System Italia SRL

Driver MFT missing 720p in video mode


The built-in Microsoft Windows 8 Camera app supports 0.9 MP (720p) in both image capture and video mode.

[Screenshot: normal behavior, without the Driver MFT]

But after using the Driver MFT sample (http://code.msdn.microsoft.com/windowshardware/Driver-MFT-Sample-34ecfecb), the 720p video mode is missing, although 0.9 MP in image capture mode still exists.

[Screenshots after using the Driver MFT: image mode (left), video mode (right)]

Has anyone else discovered this problem?

Could anyone please tell me how to make the 720p video mode available while using a driver MFT?

My camera supports the 720p YUY2 and MJPG formats.

Any suggestions and help are greatly appreciated.

Thanks in advance.



MF h264 decoder delay


Hi,

I am seeing a delay of approximately 2 seconds before I get the first decoded frame. I have read on the internet that this is an implementation problem, with no way to get rid of it.

I have also found some interesting properties (CODECAPI_AVDecNumWorkerThreads, MFPKEY_CONSTRAINDECLATENCY, or MFPKEY_MAXDECLATENCYMS), but they are not implemented in the H.264 decoder.

I would like to get some information about this problem, as I would like to use the decoder in real scenarios.
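As a sketch of the usual attribute route (pDecoderTransform is assumed to be the decoder MFT): on Windows 8 and later the decoder honors CODECAPI_AVLowLatencyMode, but on Windows 7 there appears to be no equivalent knob, which matches the behavior described above.

	// Ask the decoder for low-latency output via ICodecAPI (Windows 8+).
	ICodecAPI *pCodecApi = NULL;
	HRESULT hr = pDecoderTransform->QueryInterface(IID_PPV_ARGS(&pCodecApi));
	if (SUCCEEDED(hr))
	{
		VARIANT var;
		VariantInit(&var);
		var.vt = VT_UI4;
		var.ulVal = TRUE;
		hr = pCodecApi->SetValue(&CODECAPI_AVLowLatencyMode, &var);
		pCodecApi->Release();
	}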

Best Regards

VRAM capture avstream driver(for camera) with media foundation framework


Hi,

I am trying to expose the VRAM property set, KSPROPSETID_VramCapture, in my AVStream minidriver for a camera, and I am using Media Foundation as the framework.

I am modifying the sample AVStream driver provided by Microsoft, avshws (http://code.msdn.microsoft.com/windowshardware/AVSHwS-AVStream-Simulated-7953991d).

I have exposed all the properties within this set.

According to the documentation given by Microsoft at http://msdn.microsoft.com/en-us/library/windows/hardware/ff568780%28v=vs.85%29.aspx, a KSPROPERTY_PREFERRED_CAPTURE_SURFACE get-property request should be received. What I have observed instead is that a KSPROPERTY_CURRENT_CAPTURE_SURFACE set-property request arrives with KS_CAPTURE_ALLOC_SYSTEM as its value.

Even after I return STATUS_UNSUCCESSFUL for the set request, I am unable to get a KSPROPERTY_PREFERRED_CAPTURE_SURFACE get-property request.

Can anyone tell me what could be the possible reason for this anomaly?


VC1 encoder with Windows 7 SDK (Media Foundation)


Hi,

I'd like to develop a VC-1 encoder with the Windows 7 SDK.

So I created an instance of IMFTransform, and the input subtype of the instance was set to MFVideoFormat_I420 (a YUV file is used as the input source).

I also created an instance of IMFMediaSource. I wish to connect the YUV file (the input source) to that instance, but I cannot connect them. I think an IMFMediaSource can be created from a compressed input file only. (Is that right?)

How can a YUV file be used as the input of an IMFTransform?

Please help me.
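If it helps, the source resolver only creates media sources for formats it has a handler for, so for raw YUV the usual approach is to skip the media source entirely: read frames yourself and wrap each one in an IMFSample before calling ProcessInput on the encoder MFT. A sketch, where hYuvFile, llTimestamp, llDuration, and pEncoder are assumed to already exist:

	// Wrap one raw I420 frame in an IMFSample (640x480 is a placeholder;
	// I420 is 12 bits per pixel).
	const DWORD cbFrame = 640 * 480 * 3 / 2;

	IMFMediaBuffer *pBuffer = NULL;
	HRESULT hr = MFCreateMemoryBuffer(cbFrame, &pBuffer);

	BYTE *pData = NULL;
	if (SUCCEEDED(hr))
	{
		hr = pBuffer->Lock(&pData, NULL, NULL);
	}
	if (SUCCEEDED(hr))
	{
		DWORD cbRead = 0;
		ReadFile(hYuvFile, pData, cbFrame, &cbRead, NULL); // assumed open handle
		pBuffer->Unlock();
		hr = pBuffer->SetCurrentLength(cbFrame);
	}

	IMFSample *pSample = NULL;
	if (SUCCEEDED(hr))
	{
		hr = MFCreateSample(&pSample);
	}
	if (SUCCEEDED(hr))
	{
		hr = pSample->AddBuffer(pBuffer);
	}
	if (SUCCEEDED(hr))
	{
		hr = pSample->SetSampleTime(llTimestamp);     // 100-ns units, assumed
	}
	if (SUCCEEDED(hr))
	{
		hr = pSample->SetSampleDuration(llDuration);  // assumed
	}
	if (SUCCEEDED(hr))
	{
		hr = pEncoder->ProcessInput(0, pSample, 0);   // the encoder MFT
	}
	SafeRelease(&pBuffer);
	SafeRelease(&pSample);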


.nsc parser in Media Foundation


Hi,

What is the .nsc file parser component in Media Foundation on Windows 7?

Best Regards,

Sharad

How do I use MFCreateVideoRenderer() independently?


I'm starting from scratch on this and am totally and completely lost.   Any help is greatly appreciated.

I have an existing program I've written that calls many media foundation things directly.  It works. Now I want to render video to a window, which wasn't an original part of my program.  I believe MFCreateVideoRenderer() is the correct choice. 

So far, I've coded only ONE LINE of source, and it fails when I run it!  It looks similar to this:

HRESULT hr = MFCreateVideoRenderer(MFVideoFormat_NV12, (void**)&pInterface);

I keep getting an E_NOINTERFACE result. I've tried various definitions for pInterface, but none work. My original belief was that I could use "IMFSinkWriter *pInterface". That didn't work. I tried IUnknown and IMFVideoPresenter. I'm not even sure this is related to why I'm getting the error.

I can find no examples whatsoever online of how to independently use the EVR. My intent is to clone my current sinking to a file, and massage that code to sink to a video display window instead. I'm trying to create the video renderer in parallel with my working call to MFCreateSinkWriterFromURL(...&m_pWriter) for IMFSinkWriter *m_pWriter. Elsewhere in my code, I call m_pWriter->AddStream, m_pWriter->SetInputMediaType, and m_pWriter->BeginWriting. I thought I could call the same with an IMFSinkWriter coming from the EVR. But I can't connect the dots...

[EDIT] I believe I'm seeking how to connect the EVR to an IMFSinkWriter. If I get that working, I should have my own examples of how to do everything else.
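For what it's worth, the first parameter of MFCreateVideoRenderer is an interface IID, not a media subtype GUID, so passing MFVideoFormat_NV12 there would by itself explain the E_NOINTERFACE. A minimal sketch of the documented call:

	// The riid names the interface to return on the new EVR media sink.
	IMFMediaSink *pSink = NULL;
	HRESULT hr = MFCreateVideoRenderer(IID_IMFMediaSink, (void**)&pSink);

As for connecting the dots to the sink writer: MFCreateSinkWriterFromMediaSink can wrap an existing media sink (such as the EVR created above) in an IMFSinkWriter, after which the familiar SetInputMediaType/BeginWriting/WriteSample calls apply.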




The enhanced video renderer (EVR) media sink can be used as a stand-alone component - IMFStreamSink ProcessSample E_NOINTERFACE error


Hello

I follow the instructions in (Using the EVR Media Sink) http://msdn.microsoft.com/en-us/library/aa965265(v=vs.85).aspx

and (Media Sinks) http://msdn.microsoft.com/en-us/library/ms701626(v=vs.85).aspx

'You should read the remainder of this topic if you are writing a custom media sink, or if you want to use a media sink directly without the Media Session.'

Starting from the sample project 'MFCaptureToFile', I developed a Render.cpp class to display the video in the right side of the dialog, using the media sink directly.

Instead of sending the video samples to the capture file, I'm sending them to a queue and then to IMFStreamSink::ProcessSample as the media sink requests them through IMFAsyncCallback::Invoke with the MediaEventType MEStreamSinkRequestSample.

Everything works as described in the documentation, except that when I call IMFStreamSink::ProcessSample, I get an HRESULT of E_NOINTERFACE.


1. I used MFTrace, and everything is created successfully. It also shows the E_NOINTERFACE when calling IMFStreamSink::ProcessSample.

2. The problem: there is no clue whatsoever about which interface causes the E_NOINTERFACE error. ==> I guess something more needs to be initialized, besides what is described in the documentation.
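One guess, sketched below: when the EVR is used stand-alone, its stream sink generally expects Direct3D-backed samples obtained from its own allocator, not samples wrapping system-memory buffers, and that mismatch could surface as E_NOINTERFACE inside ProcessSample. pStreamSink and pVideoMediaType are assumed:

	// Obtain the stream sink's video sample allocator and feed it samples
	// allocated there instead of system-memory samples.
	IMFVideoSampleAllocator *pAllocator = NULL;
	HRESULT hr = MFGetService(pStreamSink, MR_VIDEO_ACCELERATION_SERVICE,
		IID_PPV_ARGS(&pAllocator));
	if (SUCCEEDED(hr))
	{
		hr = pAllocator->InitializeSampleAllocator(4, pVideoMediaType);
	}
	IMFSample *pSample = NULL;
	if (SUCCEEDED(hr))
	{
		hr = pAllocator->AllocateSample(&pSample);
	}
	if (SUCCEEDED(hr))
	{
		// Copy the frame into pSample's buffer, set its time stamp, then:
		hr = pStreamSink->ProcessSample(pSample);
	}
	SafeRelease(&pSample);
	SafeRelease(&pAllocator);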

 

Best regards

Registering a live media source


Hello all,

I am attempting to create a media source based on the Mpeg1Source sample.

My source is a live source. For the time being, I will be outputting frames that are generated internally.

I am following the guides from msdn:

Writing a Custom Media Source

http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134(v=vs.85).aspx

Case Study: MPEG-1 Media Source

http://msdn.microsoft.com/en-us/library/windows/desktop/ee318417(v=vs.85).aspx

Looking through the Mpeg1Source sample code, I see how the source is registered in the registry as a byte-stream handler.

My question is, how do I register my live source component? is there documentation for this?

I am familiar with DirectShow filter registration, but Media Foundation is a new game to me!
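One note that may save effort (a sketch; CMyLiveSource::CreateInstance is a hypothetical factory): registration only exists so the source resolver can find a source for a URL or byte stream. A source used solely inside your own application needs no registry entry at all; instantiate it directly and build the topology from it:

	IMFMediaSource *pSource = NULL;
	HRESULT hr = CMyLiveSource::CreateInstance(&pSource);

	IMFPresentationDescriptor *pPD = NULL;
	if (SUCCEEDED(hr))
	{
		hr = pSource->CreatePresentationDescriptor(&pPD);
	}
	IMFTopology *pTopology = NULL;
	if (SUCCEEDED(hr))
	{
		// Same helper as in the CPlayer sample earlier on this page.
		hr = CreatePlaybackTopology(pSource, pPD, hVideoWnd, &pTopology);
	}

To make the source available to other applications through the resolver, the documented route is registering a scheme handler for a custom URL scheme (for example mylive://) under HKLM\SOFTWARE\Microsoft\Windows Media Foundation\SchemeHandlers.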

Thank you in advance.

jimmy




Media Foundation Worker Thread Exception Handling?



How can I implement an exception handling on Media Foundation worker threads?

In DirectShow, in push source filters, I use:
 LONG WINAPI ExceptionFilterFunc(LPEXCEPTION_POINTERS ppp) // as the handler

 SetUnhandledExceptionFilter(&ExceptionFilterFunc);  // at the start of the thread

This way I can receive any exception in that handler, which works fine.

But with Media Foundation worker threads (and when using third-party push source filters in DirectShow), normally I don't start the thread anywhere myself. So, for example, in the CPlayer sample (using the Media Session), a worker thread is never started explicitly, but of course it is there when I use the callback.

So how do I access those threads to add an exception handler?

Yet with any third-party DirectShow push source filters, where my own sink filter is added to the graph, I simply set up exception handling with SetUnhandledExceptionFilter the first time my sink filter processes the Receive method, as shown below. That way the worker thread runs without a handler for only a few moments, and then it has the same handler as if it had been set up at the start of that thread. (That does not guard against exceptions that are raised while a third-party push source filter is already processing the first frame.)
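A sketch of that same idea applied to the Media Foundation callback. Note that SetUnhandledExceptionFilter installs a process-wide filter, so one call covers every worker thread:

	// Install the filter the first time the media session's worker thread
	// enters our callback; the filter is per-process, not per-thread.
	HRESULT CPlayer::Invoke(IMFAsyncResult *pResult)
	{
		static LONG s_installed = 0;
		if (InterlockedExchange(&s_installed, 1) == 0)
		{
			SetUnhandledExceptionFilter(&ExceptionFilterFunc);
		}
		// ... normal event handling as in the CPlayer sample ...
		return S_OK;
	}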

(mftrace.exe is a good thing, but it is not the solution for the final release version of the app, where the exception should simply be caught BEFORE the whole app crashes.)

MF runs much more safely than DirectShow, but there is never a guarantee of 100% safe processing, so if such a worker thread crashes I simply want the app to exit that thread returning an exit code. The app can then continue with other actions, or it can restart the session with another attempt or with other parameters.




direct 3d and media foundation


Hi guys,

Is there any interface in Media Foundation that can encode a D3D surface or texture to a video?
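As far as I know there is no single "encode this texture" interface, but on Windows 8 and later a D3D11 texture can be wrapped in a media buffer with MFCreateDXGISurfaceBuffer and handed to an IMFSinkWriter for encoding (on Windows 7, MFCreateDXSurfaceBuffer plays the same role for D3D9 surfaces). A sketch, where pTexture, pWriter, dwVideoStream, rtTimestamp, and rtDuration are assumed:

	// Wrap the texture and submit it to the sink writer as one video sample.
	IMFMediaBuffer *pBuffer = NULL;
	HRESULT hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D),
		pTexture, 0, FALSE, &pBuffer);

	IMFSample *pSample = NULL;
	if (SUCCEEDED(hr))
	{
		hr = MFCreateSample(&pSample);
	}
	if (SUCCEEDED(hr))
	{
		hr = pSample->AddBuffer(pBuffer);
	}
	if (SUCCEEDED(hr))
	{
		hr = pSample->SetSampleTime(rtTimestamp);     // 100-ns units
	}
	if (SUCCEEDED(hr))
	{
		hr = pSample->SetSampleDuration(rtDuration);
	}
	if (SUCCEEDED(hr))
	{
		hr = pWriter->WriteSample(dwVideoStream, pSample);
	}
	SafeRelease(&pBuffer);
	SafeRelease(&pSample);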

Thanks.


Using evrpresenter in mf_protectedplayback sample


Hi All,

I built the Windows 7.1 SDK evrpresenter renderer and registered it. I then built MFPlayer, and I'm able to render video using the custom evrpresenter.

I next attempted to add the evrpresenter as the renderer in the mf_protectedplayback sample by putting the following line of code in Player.cpp around line 1050 right after the call to MFCreateVideoRendererActivate:

CHECK_HR(hr = pRendererActivate->SetGUID(MF_ACTIVATE_CUSTOM_VIDEO_PRESENTER_CLSID, m_clsidPresenter)); 

However, whenever I try to play a file I get the error HRESULT = 0x80070241, which is HRESULT_FROM_WIN32(ERROR_INVALID_IMAGE_HASH): "Windows cannot verify the digital signature for this file."

I'd like to add a custom renderer for protected playback. Is this not the right strategy to use?

Crash in MFTEnumEx on some Windows 7 systems


I work on Firefox, and I wrote our code that uses Media Foundation for H.264/AAC/MP4 and MP3 playback in HTML5 <video>.

Firefox calls MFTEnumEx so that it can enumerate and disable all the codecs except the ones that we want to be available in the <video> element. This is so that those codecs we don't want to expose won't be able to be instantiated when we call MFCreateSourceReaderFromByteStream(). This is to limit video codec proliferation on the web, and to minimize our attack surface as much as possible.

Unfortunately for some reason MFTEnumEx is crashing on some Windows 7 systems. Here's our tracking bug for this:
http://bugzilla.mozilla.org/show_bug.cgi?id=858667

I can't reproduce this crash myself, but we have numerous crash reports from our users who have experienced this bug: http://bit.ly/13r9dkM

I also note that a similar crash occurs in the Chrome browser:
http://code.google.com/p/chromium/issues/detail?id=178490

My code that calls MFTEnumEx is here:

http://mxr.mozilla.org/mozilla-central/source/content/media/wmf/WMFUtils.cpp#233

My questions:

  1. Does the Windows Media Foundation team know about this crash in MFTEnumEx already?
  2. Is this bug in MFTEnumEx likely to be patched at any time in the future?
  3. Is there another way I can disable codecs that we don't want to expose, so that I can work around this problem? (I'm already setting the MIME type on the IMFByteStream before calling
    MFCreateSourceReaderFromByteStream).

IMFSinkWriter quit working


My code used to output a large .mp4 file (e.g., 4 MB) that I could successfully play in WMP12. Since the last time I tested this feature, however, the file being produced is only 677 bytes, and WMP12 complains "... might not support the file type ... or codec ...".

I'm pasting a reduced collection of statements from my code that are being executed. I've traced with breakpoints, and every step is happening. The return result is S_OK every time, including the WriteSample() call. Yet it seems samples are NOT being written to the file.

I've included below the output from my own TraceAttributes() function as well. I believe I was indeed using a combination of pType2 and pType, as the code shows. I did try changing the AddStream call to use pType rather than pType2, but doing so caused SetInputMediaType to return 0xC00D5212 (... no codec ...).

This is at the limit of my understanding of Media Foundation. What might be the reason it doesn't work? And remember, it used to work, although I can't say for sure what code elements might have changed a little since then.

IMFSinkWriter           *m_pWriterProcessedImage;
			........................................
			hr = MFCreateSinkWriterFromURL(
				FILENAME,
				NULL,
				NULL,
				&m_pWriterProcessedImage
				);

			.......................................

			hr = m_pWriterProcessedImage->AddStream(pType2, psink_stream);
			hr = m_pWriterProcessedImage->SetInputMediaType(*psink_stream, pType, NULL);
			hr = m_pWriterProcessedImage->BeginWriting();
			TraceAttributes("for AddStream()", pType2);
				**BEGIN OUTPUT FROM TraceAttributes**
					STREAM ATTRIBUTES: for AddStream()
						Type				= {73646976-0000-0010-8000-00AA00389B71}
						Subtype				= {34363248-0000-0010-8000-00AA00389B71}
						bitrate				= 2400000
						size				= 640 x 480
						frame rate			= 30.000000 (30000/1000)
						pixel aspect		= (1/1)
						interlace			= 2 (progressive)
						{AD76A80B-2D5C-4E0B-B375-64E520137036}: <UNRECOGNIZED>
						{3C036DE7-3AD0-4C9E-9216-EE6D6AC21CB3}: <UNRECOGNIZED>
						{261E9D83-9529-4B8F-A111-8B9C950A81A9}: <UNRECOGNIZED>
						{9AA7E155-B64A-4C1D-A500-455D600B6560}: <UNRECOGNIZED>
				**END OUTPUT FROM TraceAttributes**
			TraceAttributes("for SetInputMediaType()", pType);
				**BEGIN OUTPUT FROM TraceAttributes**
						STREAM ATTRIBUTES: for SetInputMediaType()
							Type				= {73646976-0000-0010-8000-00AA00389B71}
							Subtype				= {3231564E-0000-0010-8000-00AA00389B71}
							bitrate				= 15373949
							size				= 640 x 480
							frame rate			= 30.000000 (30000/1000)
							pixel aspect		= (1/1)
							interlace			= 2 (progressive)
							stride				= 640
							independent samples	= 1
							fixed size samples	= 1
							sample size			= 460800
							user data			= 460800
				**END OUTPUT FROM TraceAttributes**
			......................................
			hr = m_pWriterProcessedImage->WriteSample(0, pSample); **THIS GETS CALLED MULTIPLE TIMES**

			........................................
			
			hr = m_pWriterProcessedImage->Finalize();

How do you REALLY use the EVR Media Sink?


I want to display a sequence of IMFSamples in a window on my computer screen. I believe I should use the EVR.

The "Using the EVR Media Sink" page (http://msdn.microsoft.com/en-us/library/aa965265(v=vs.85).aspx) says I can use the EVR stand-alone.  It says to use MFCreateVideoRenderer() to create a media sink.  

However, the documentation for MFCreateVideoRenderer() gives insufficient advice about the first parameter, the REFIID riidRenderer. No matter what I put in there, I get E_NOINTERFACE.

What should I provide for the first parameter to MFCreateVideoRenderer()?

Or else, how can I display my sequence of IMFSamples to my computer screen?  I am *not* using a Media Session. 

DLNA in Windows 7 64 bit - newly added files not available on devices.


Dear readers,

My apologies if this is the wrong forum. I have a Windows 7 64-bit machine. Using resources on the internet, I was able to configure the media files (music, photos, and videos) in different folders to be playable on my new DLNA-compatible Sony BD-570 player and Samsung 40" LCD TV.

Two weeks ago, I created a folder under that shared subdirectory and added a few more MP3 files. The folder and those media files are not visible downstairs on my TV or my Blu-ray player.

Is there an extra step I need to take before the newly created subfolder becomes available downstairs?

Thanks for your help.

Regards,

Ravi.


Sr GIS App Developer Dallas Fort Worth area, TX

