Quantcast
Channel: Media Foundation Development for Windows Desktop forum
Viewing all 1079 articles
Browse latest View live

Windows Media foundation: IMFSinkWriter WriteSample API returning error sampleduration is not set

$
0
0
I have a requirement of receiving RTP stream(H264) and exporting them to a MP4 file. We are using media foundation to export frames received in RTP stream to mp4 file. So we extract H264 frames(from RTP packets) and provide it to sinkwriter through WriteSample API(sample time, sample duration etc are set). This seems to work and I get playable mp4.
But when I don't set the sample duration (using the SetSampleDuration API), WriteSample throws the error MF_E_NO_SAMPLE_DURATION. The error is not thrown for the first few frames but only after a certain time (the frame after 1.48 seconds).
Questions:
1. Why SetSampleDuration is needed?I assumed that we don't need sample duration if we are providing sample time for every sample. Sink writer can calculate difference between the current frame and last frame as sample duration 
2. Why the error is not thrown for first few frames by writeSample API .WriteSample throws error  only after certain time(frame after 1.48 seconds). Is it specific to certain frames.
3. How do we ideally set the sample duration when the duration between frames is not uniform. In my case the average fps is 15 but time between 2 frames is not uniform. (Timestamp of frames in ms: 0, 83,133, 200, 283,333,400,...)
3.1 To set sample duration of a frame, Wait for next frame and subtract current frame timestamp from next frame timestamp. Should application hold back till next frame is available
3.2 Setting sample duration based on average fps is fine(even though time difference between frames is not uniform). 
(Note: I tried 3.2 and it works. I can't visually see any issue. This might be because time difference between frames is not uniform but not varying much. But I am not sure if this is ok. Should I go for approach 3.1)

Is UWP suitable for windows media applications?

$
0
0

Hi all,

Is UWP something I can use for playing video/audio files in an application? or should I stick with windows media foundation? the app will mostly deal with media files.

Thanks 

Converting audio using MFT was successful on windows 7 but failed on windows 10

$
0
0
When we implement the conversion of  video with Audio sample rate from 16Khz to 44.1 Khz, there was a problem as follows:

- On windows 7, the program worked successful.
- On Windows 10, the program encountered a bug: the output audio runs faster than the input audio.

Program details as below:
---------------------------------------
#include "stdafx.h"
#include <windows.h>
#include <windowsx.h>

#include <comdef.h>
#include <stdio.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <Mferror.h>
#include <mfplay.h>
#pragma comment(lib, "ole32")
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mfreadwrite")
#pragma comment(lib, "mfuuid")

int main()
{
HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
hr = MFStartup(MF_VERSION);

IMFMediaType *pMediaType;
IMFMediaType *pMediaTypeOut;
IMFSourceReader *pSourceReader;
IMFAttributes *pAttributes;
IMFSinkWriter *pSinkWriter;
IMFMediaType *pCurrentMediaType;
LONGLONG nDruration = 412800000;

// Load souce file
hr = MFCreateSourceReaderFromURL(
L"input.mp4",
NULL,
&pSourceReader
);
pSourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, TRUE);

// Create a partial media type that specifies uncompressed audio
IMFMediaType *pPartialType;
MFCreateMediaType(&pPartialType);
hr = pPartialType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
hr = pPartialType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
hr = pSourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM
, nullptr
, pPartialType);
hr = pSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, &pPartialType);
hr = pSourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, TRUE);

// set media type for output file
hr = MFCreateMediaType(&pMediaTypeOut);

// set major type for output file
hr = pMediaTypeOut->SetGUID(
MF_MT_MAJOR_TYPE,
MFMediaType_Audio
);

// Set subtype for output file
hr = pMediaTypeOut->SetGUID(
MF_MT_SUBTYPE,
MFAudioFormat_AAC
);

hr = pMediaTypeOut->SetUINT32(
MF_MT_AUDIO_SAMPLES_PER_SECOND,
44100
);

// set audio number channal for output file
hr = pMediaTypeOut->SetUINT32(
MF_MT_AUDIO_NUM_CHANNELS,
2
);

// set audio bit depth for output file
hr = pMediaTypeOut->SetUINT32(
MF_MT_AUDIO_BITS_PER_SAMPLE,
16
);

hr = pMediaTypeOut->SetUINT32(
MF_MT_AUDIO_AVG_BYTES_PER_SECOND,
12000.5
);

hr = pMediaTypeOut->SetUINT32(
MF_MT_AUDIO_BLOCK_ALIGNMENT,
1
);
pMediaTypeOut->SetUINT32(MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, 0x29);

DWORD nWriterStreamIndex = -1;
hr = MFCreateSinkWriterFromURL(
L"Output.mp4",
NULL,
NULL,
&pSinkWriter
);
hr = pSinkWriter->AddStream(pMediaTypeOut, &nWriterStreamIndex);
hr = pSinkWriter->BeginWriting();

_com_error err(hr);
LPCTSTR errMsg = err.ErrorMessage();

LONGLONG SampleDuration;
//Sets the input format for a stream on the sink writer.
hr = pSinkWriter->SetInputMediaType(nWriterStreamIndex, pPartialType, NULL);

for (;;)
{
DWORD nStreamIndex, nStreamFlags;
LONGLONG nTime;
IMFSample *pSample;

hr = pSourceReader->ReadSample(
MF_SOURCE_READER_FIRST_AUDIO_STREAM,
0,
&nStreamIndex,
&nStreamFlags,
&nTime,
&pSample);
printf("%d\n", nStreamFlags);
printf("%d\n", nTime);

//Update media type, when current media tye changed.
if (nStreamFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
pSourceReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, MF_SOURCE_READER_CURRENT_TYPE_INDEX, &pCurrentMediaType);
pSourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM
, nullptr
, pCurrentMediaType);
pSourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, TRUE);
continue;
}
pSample->GetSampleDuration(&SampleDuration);

if (nTime >= nDruration)
{
break;
}
// Calculate new timestamp of sample when this sample is written on output file
if (nTime + SampleDuration >= nDruration)
{
SampleDuration = nDruration - nTime;
pSample->SetSampleDuration(SampleDuration);
}
pSample->SetSampleTime(nTime);

if (FAILED(hr)) {
printf("ReadSample Error...\n");
return 0;
}

//write sample
if (pSample)
{
OutputDebugString(L"Write sample...\n");
hr = pSinkWriter->WriteSample(
nWriterStreamIndex,
pSample
);
if (FAILED(hr)) {
printf("WriteSample Error...\n");
return 0;
}
}

if (nStreamFlags & MF_SOURCE_READERF_ENDOFSTREAM)
{
break;
}
}
hr = pSinkWriter->Finalize();
return 0;
}
-------------------------------

When debugging, we detected the following points:
- After running through the IMFSourceReader::ReadSample function: on Windows 7, the parameter "nStreamFlags" has resulted in 0. On Windows 10, the "nStreamFlags" has resulted in "MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED".
- Bug only appear in the input audio format is AAC, with input  audio format is WMA, PCM ... it is not happen.
- When input audio format is AAC, this bug only appear in cases with input audio sample rate is less than 32Khz and converting to 44.1kHz sample rate output.

Q: Can you explain the problem, is there something wrong with the IMFSourceReader::ReadSample funtion?

Using and implementing IMFSeekInfo::GetNearestKeyFrames

$
0
0

I'm looking for guidance both on how IMFSeekInfo should be used and how it should be implemented by third parties. In particular, I'm interested in what should be returned in the last output parameter of IMFSeekInfo::GetNearestKeyFrames.

The documentation for GetNearestKeyFrames says this for the last two parameters:

pvarPreviousKeyFrame: Receives the position of the nearest key frame that appears earlier than pvarStartPosition. The units for this parameter are specified by pguidTimeFormat.

pvarNextKeyFrame: Receives the position of the nearest key frame that appears earlier than pvarStartPosition. The units for this parameter are specified by pguidTimeFormat.

There are two things I'm not sure of. First, I believe there might be a documentation bug and that pvarNextKeyFrame ought to return a later key frame rather than an earlier one. That would be consistent with the name of the parameter and what Microsoft's IMFSeekInfo implementation for H264 encoded MP4 video streams does.

More importantly, again for the pvarNextKeyFrame output parameter, there are some cases where the implementation of IMFSeekInfo for H264 encoded MP4 streams sometimes returns a frame that is earlier than the requested one. I can't find any documentation as to why but my guess is that it is intended to provide a downstream deinterlacer with an additional frame prior to the frame that is being seeked to.

Specifically, this is what I have observed. Suppose an H264 video stream in an MP4 file has 300 frames total, has a frame rate of 1 frame per second and the first frame is at time 0s. It has key frames at time 0s, time 100s and time 200s. For brevity, I will use seconds as the units of time.

Say one requests the nearest next key frame corresponding to either time 50s, 99s or time 100s. There is a key frame at time 100s and, as expected, 100s will be returned as the nearest next frame time. The surprising thing is what happens when one requests the nearest next key frame for time 101s. One might expect the key frame at 200s to be returned but actually 100s is returned, even though 100s < 101s. When one requests the next key frame corresponding to time 102s, only then is the key frame at 200s returned.

My theory is this ensures a downstream deinterlacer would have at least one extra frame prior to the requested frame to work with. If IMFSeekInfo did not make this adjustment, and the video stream was interlaced, the client would likely have to make the adjustment itself. Is this guess right?

Assuming my guess is right, from the point of view of a client of the IMFSeekInfo interface that might be dealing with interlaced video, may it assume that all implementations of IMFSeekInfo will make this kind of allowance for interlaced video or, if there is a possibility of interlaced video, should they make their own allowance for it? If not, may they assume that specific implementations of IMFSeekInfo, such as Microsoft's implementation for H264/MP4 video, will always make this allowance?

Should third parties copy this behaviour when implementing IMFSeekInfo? How about if they know that a video stream is not interlaced?

    

Poor performance of H264 decoding

$
0
0

I'm decoding a H264 video stream using Media Foundation. It works, but performance is really poor on both Windows (desktop) and HoloLens 2 (UWP). Decoding a single 1920x1080 frame takes 30 ms on a fairly powerful PC, 100 ms on the HoloLens .

I'm wondering if it's using software rendering instead of hardware rendering. Any idea on how to make sure that I'm doing hardware decoding, or any other hints on how to improve performance?

The code is here:

https://pastebin.com/r8RwwmFx

Topoedit can not resolve topology for playing .wtv file?

$
0
0

Hi all,

I'm learning about MF and I was using the topoedit tool (windows 10) to render different files (like mp3, avi, wtv, etc). I see that it worked on all except wtv files. It looks like the decoder was missing so I installed an mpeg-1/2 from the Microsoft store. Topoedit was able to resolve the topology for the wtv files but the video still does not work (error something..). I'm aware that all this is beyond my programming skills so at this point I'm wondering about the component and how they work. Is the wtv file not being rendered because I don't have the correct (video) decoder or something deeper like the file is encrypted and I need something called protected media path? or something else?

Thanks

Media foundation: Writing G711 PCMU to mp4

$
0
0
We have a requirement (Windows UWP app) to store audio and video data received from cameras(RTP packets) in mp4.Video format is h264 and audio is g711 pcmu

We are using media foundation(c# using MF .Net) sinkWriter to write video data to mp4 which works fine. I would like to know how to write audio samples to mp4. I tried the following way:


/// <summary>
/// Creates an audio IMFMediaType and configures it with the given subtype,
/// 48000 samples per second, 1 channel and 8 bits per sample.  On any
/// failure the method logs via Debug.Fail and returns early, possibly
/// leaving <paramref name="mediaType"/> only partially configured.
/// </summary>
/// <param name="mediaType">Receives the created media type.</param>
/// <param name="audioSubType">Audio subtype GUID (e.g. AAC or PCMU).</param>
private void SetupAudioMediaType(out IMFMediaType mediaType, in Guid audioSubType)
{
    HResult hr = MFExtern.MFCreateMediaType(out mediaType);
    if (!hr.Succeeded())
    {
        Debug.Fail("MFCreateMediaType for audio failed " + hr.ToString());
        return;
    }

    hr = mediaType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Audio);
    if (!hr.Succeeded())
    {
        Debug.Fail("Set MF_MT_MAJOR_TYPE media-out failed " + hr.ToString());
        return;
    }

    hr = mediaType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, audioSubType);
    if (!hr.Succeeded())
    {
        Debug.Fail("Set MF_MT_SUBTYPE media-out failed " + hr.ToString());
        return;
    }

    hr = mediaType.SetUINT32(MFAttributesClsid.MF_MT_AUDIO_SAMPLES_PER_SECOND, 48000);
    if (!hr.Succeeded())
    {
        Debug.Fail("Set MF_MT_AUDIO_SAMPLES_PER_SECOND media-out failed " + hr.ToString());
        // Fixed: this "return" was missing its semicolon in the original.
        return;
    }

    hr = mediaType.SetUINT32(MFAttributesClsid.MF_MT_AUDIO_NUM_CHANNELS, 1);
    if (!hr.Succeeded())
    {
        Debug.Fail("Set MF_MT_AUDIO_NUM_CHANNELS media-out failed " + hr.ToString());
        return;
    }

    hr = mediaType.SetUINT32(MFAttributesClsid.MF_MT_AUDIO_BITS_PER_SAMPLE, 8);
    if (!hr.Succeeded())
    {
        Debug.Fail("Set MF_MT_AUDIO_BITS_PER_SAMPLE media-out failed " + hr.ToString());
        return;
    }

    // Fixed: the original ended with "return errorCode;" although the
    // method is declared void (errorCode was never declared either).
}



/// <summary>
/// Adds the AAC output stream to the sink writer and sets PCMU as its input
/// type.  Returns the recorded error code.
/// </summary>
/// <returns>The last recorded MultiplexerErrorCode.</returns>
private MultiplexerErrorCode SetupAudio()
{
    HResult hr = HResult.S_OK;
    // Fixed: the original used "errorCode" without ever declaring it and
    // ended with a bare "return ;" from a value-returning method.
    // NOTE(review): SetupAudioMediaType is void, so errorCode can only ever
    // be Success here — wire up real error propagation if required.
    MultiplexerErrorCode errorCode = MultiplexerErrorCode.Success;

    IMFMediaType mediaTypeOut = null;
    IMFMediaType mediaTypeIn = null;

    SetupAudioMediaType(out mediaTypeOut, MFMediaType.AAC);  // or mp3 (MP4 in windows support mp3 or aac)
    if (errorCode != MultiplexerErrorCode.Success)
    {
        Debug.Fail("setupAudioMediaType output failed:", errorCode.ToString());
    }
    else
    {
        hr = sinkWriter.AddStream(mediaTypeOut, out audioStreamIndex);
        if (!hr.Succeeded())
        {
            Debug.Fail("AddStream  audio  failed " + hr.ToString());
        }
        else
        {
            Guid PcmuAudioSubType = (new FourCC(7, 0, 0, 0)).ToMediaSubtype();   //PCMU
            SetupAudioMediaType(out mediaTypeIn, PcmuAudioSubType);
            hr = sinkWriter.SetInputMediaType(audioStreamIndex, mediaTypeIn, null);
            if (!hr.Succeeded())
            {
                Debug.Fail("SetInputMediaType audio  failed " + hr.ToString());
            }
        }
    }

    return errorCode;
}

SetInputMediaType returns error MF_E_INVALIDMEDIATYPE. From my analysis following are the reasons for the error
1) I think the PCMU input type is not supported. It should be PCM. Is this understanding correct? If so, does this mean I have to decode PCMU to PCM? If so, is there any Windows C# API which does this? And once this decode is done, what are the bits per sample in the output PCM. Is it 16?
2)Even if I provide pcm as input type, SetInputMediaType returns MF_E_INVALIDMEDIATYPE error. Is it because the aac encoder supports only sampling rate of 44.1 & 48Khz. (mp3 supports 32 Khz). If my understanding is correct, how do I overcome the issue. Should I up-sample. If so how?
3)Is there any simpler way to write pcmu(8000 samples per second, 8 bits per sample) to mp4 along with the video frames

MFCreateSinkWriterFromURL creates mp4 file with wrong duration

$
0
0
Hello,

I'm using MFCreateSinkWriterFromURL to create an mp4 file from an existing audio/video stream. By existing, I mean the capture is started before the sink writer and the stream is already sent to a remote host.

What I try to achieve is to record the stream sent to the remote host into an mp4 file. So I use the MFCreateSinkWriterFromURL to create a SinkWriter, I wait for the first I-Frame by checking the attribute MFSampleExtension_CleanPoint, and then I start the recording with IMfSinkWriter::BeginWriting.

The problem is that the duration of the mp4 file depends on the duration of the stream that comes from the webcam.

So for example, if I start the webcam capture, wait for 3 minutes. Then start to record the stream into an mp4 file and wait for 30 seconds. The length of video (as displayed in VLC and ffplay) is 3 minutes and 30 seconds (and it should be 30 seconds).

I tried to reset the SampleTime on the IMFSample and make it starts at 0. I also tried to set the MFSampleExtension_Discontinuity attribute on the first video IMFSample. I have logged into a text file the duration and timestamp of all samples that should be written into the mp4 file, and it seems right.

Any idea on what could be wrong? Thanks for your help.



DXVA 2.0 Video Decoding as D3D Texture

$
0
0

Hi guys,

I'm starting out with Windows Media Foundation video decoding using the DXVA 2.0 API. Basically I would like to decode a video frame and then read that frame from the corresponding D3D surface as a texture.

I've been reading the documentation for a couple of weeks and I think I have most of the decoding process down.

What I'm a little confused about is how to put a topology together if there is a DXVA decoder node involved without the need to render anything.

- Do I just connect the source to the decoder in the topology with no sink?

- Is the goal of the IMFAsyncCallback object just to call IDirectXVideoDecoder::EndFrame and send a notification to another object in the process?

- How do I access the resulting D3D texture? 

I would definitely appreciate tips and advice on how to implement a basic setup.

Cheers!

WMP Mute when UiMode is None

$
0
0

I have a problem when trying to mute sound from my Playlist when in UiMode "none".

The code

CWMPPlaylistCollection playlistCollection = m_player.GetPlaylistCollection();
CWMPPlaylist playList = playlistCollection.newPlaylist(_T("MyList"));

playList.appendItem(m_player.newMedia(_T("V:\\Videos\\1.mp4")));
playList.appendItem(m_player.newMedia(_T("V:\\Videos\\2.mp4")));
playList.appendItem(m_player.newMedia(_T("V:\\Videos\\3.mp4")));

m_player.SetUiMode(_T("none"));
m_player.SetWindowlessVideo(TRUE);
m_player.SetStretchToFit(TRUE);
m_player.SetEnableContextMenu(FALSE);
BOOL bIsMute = m_player.GetSettings().GetIsAvailable(_T("Mute"));
if (bIsMute)
	m_player.GetSettings().SetMute(TRUE);

m_player.GetControls().play();

works well for the first video, but the sound is turned on when the second video automatically starts?

I have noticed, that if I select UiMode "full" or "mini" it works, then I can also see the mute button?

Any ideas what is wrong?

-cpede

Handling different keyboard layouts with WM_CHAR message?

$
0
0

We have an app that is using WM_CHAR messages for keyboard input.

I have the "Lithuanian Standard" keyboard layout loaded onto my Windows 10. If I have that activated and type a w on my regular English keyboard into a windows text box, I get a ž character. But if I press the same w key in my app, I get the þ character instead.

Is there a Windows API call I can make to have Windows turn the wchar_t þ I get from WM_CHAR into a ž based on the current keyboard layout? (and handle all the other situations as well)

Our program is 100% Unicode enabled and our window is created with CreateWindowW and etc.

Also we need to support as far back as Windows 7 only.

Subtitles? How to add text to mp4

$
0
0

Hello,

Is it possible to add subtitles to a video, using media foundation? I know it is possible to include subtitles into a mp4 file, but I can't find anything about it regarding media foundation.

The final goal is to provide information about specific frames. So when generating an mp4 using a sinkwriter, is it possible to add text to each sample (and visually display it when watching the mp4)?

Any help would be welcome.

Can I call C++/WinRT inside Device MFT?

$
0
0

I'm writing Device MFT and using Windows Vision Skill to process image. I had the problem when integrating Windows Vision Skill into Device MFT. Since Windows Vision Skill is C++/WinRT, so I think the problem is about how to integrate C++/WinRT code with Device MFT.

Here is what I did:

Create C++ DLL wrapper for Windows Skill (C++/WinRT) and then import to Device MFT.  I got the error "ClassFactory cannot supply requested class" when creating C++/WinRT object.

I also tried to use Activation Context API to activate the context, this time the camera stop working.

Here is the code I use to start create object via wrapper:

    // Create the wrapper, then activate an activation context built from a
    // side-by-side manifest so the registration-free C++/WinRT classes in
    // the wrapper DLL can be resolved while Create() runs.
    // NOTE(review): pwrap is never deleted and hCtx is never released with
    // ReleaseActCtx — both leak on every invocation of this snippet.
    Wrapper* pwrap = new Wrapper();

    ACTCTX actCtx;
    memset((void*)&actCtx, 0, sizeof(ACTCTX));
    actCtx.cbSize = sizeof(ACTCTX);
    // Path to the manifest listing the dependent WinRT assemblies.
    actCtx.lpSource = PathToManifest(); 

    HANDLE hCtx = ::CreateActCtx(&actCtx);
    if (hCtx == INVALID_HANDLE_VALUE)
        DMFTRACE(DMFT_GENERAL, TRACE_LEVEL_INFORMATION, "CreateActCtx returned: INVALID_HANDLE_VALUE");
    else
    {
        ULONG_PTR cookie;
        // Push the context onto this thread so class lookups inside
        // Create() consult the manifest; popped again afterwards.
        if (::ActivateActCtx(hCtx, &cookie))
        {

            S_Error err_res = pwrap->Create();

            ::DeactivateActCtx(0, cookie);
        }
    }

Here is the c++/winrt creating code:

// Instantiates the ObjectDetector skill and its binding via C++/WinRT.
// Returns true on success; on failure stores a narrowed copy of the WinRT
// error message in m_error and returns false.
bool Wrapper::Create()
{
    bool err = false;

    try {

        // Create the ObjectDetector skill descriptor
        auto skillDescriptor = ObjectDetectorDescriptor().as<ISkillDescriptor>();

        // Create instance of the skill
        ObjectDetectorSkill Skill = skillDescriptor.CreateSkillAsync().get().as<ObjectDetectorSkill>();

        // Create instance of the skill binding
        ObjectDetectorBinding Binding = Skill.CreateSkillBindingAsync().get().as<ObjectDetectorBinding>();

        err = true;
    }
    catch (hresult_error const& ex)
    {
        // Narrow the wide error message.  The original allocated only
        // wbuf.length() bytes (no room for the terminator) yet told
        // wcstombs_s the buffer held 255 bytes — a guaranteed off-by-one
        // and a potential heap overflow.  Size the buffer from the actual
        // message length instead.
        std::wstring wbuf = ex.message().c_str();
        const size_t cap = wbuf.length() + 1;          // +1 for '\0'
        char* buf = new char[cap];
        size_t len = 0;
        errno_t error = wcstombs_s(&len, buf, cap, wbuf.c_str(), cap - 1);
        UNREFERENCED_PARAMETER(len);
        UNREFERENCED_PARAMETER(error);

        // NOTE(review): ownership of buf transfers to m_error here; if
        // m_error copies the string (e.g. std::string), buf leaks — confirm
        // the member type and delete[] after the assignment if so.
        m_error = buf;
    }

    return err;
}

Here is the manifest file and all the dependency DLL are put into same directory with MFT dll:

<?xml version="1.0" encoding="UTF-8" standalone="yes"?><assembly manifestVersion="1.0" xmlns="urn:schemas-microsoft-com:asm.v1"><assemblyIdentity type="win32" name="Wrapper" version="1.0.0.0" ></assemblyIdentity><dependency><dependentAssembly><assemblyIdentity type="win32" name="Microsoft.AI.Skills.SkillInterfacePreview" version="1.0.0.0"></assemblyIdentity></dependentAssembly></dependency><dependency><dependentAssembly><assemblyIdentity type="win32" name="Microsoft.AI.Skills.Vision.ObjectDetectorPreview" version="1.0.0.0"></assemblyIdentity></dependentAssembly></dependency></assembly>

I think there are 2 problems here:

1. The Windows Skill (C++/WinRT) DLL cannot be loaded, so I cannot create the object.

2. I'm not sure that calling Async function inside MFT would be OK or not, since the camera stopped working when using Activation Context API (I assume that the Activation Context API help loading all dependency DLL).

Does anyone know how to integrate C++/WinRT with MFT? Please help me.

Does mediafoundation or dshow support avchd?

$
0
0
Does mediafoundation or dshow support avchd?

IMFSequencerSource:AppendTopology returns 0xC00D36D5 (MF_E_NOT_FOUND) for H264 video file.

$
0
0

Hi,

I am using the source sequencer to play out a playlist of video content, and am getting the above error when calling AppendTopology.  I am manually creating the Microsoft H264 decoder transform node and connecting the source->decoder->EVR nodes manually (but get the same error if I let the resolver take care of it).  If I use topoedit to render the file it is unable to resolve the graph for the video portion, but I can manually build the graph and it plays fine.  As far as I can tell this is a normal mp4 file, and the source video node reports all the correct formats.  Also note my code works fine for other file types, so this is specific to mp4 files and the H264 decoder.

Can anyone tell me what the error code actually means in this case ?

Thanks,

Andy.


MediaPlayerElement does not work with DRM in XAML islands

$
0
0

Hi,

I faced with an issue that it is not possible to play DRM content using MediaPlayerElement in WPF Core 3.1 (XAML Islands).

I tried sample app Adaptive streaming and on UWP it work, but on WPF it simply fails.

When I am setting Media Source I am receiving error: "WinRT originate error - 0xC00D715B : 'The topology could not be successfully verified.'."

What does it mean actually and how to fix it?

Dshowbridge enable between Media foundation and Directshow

$
0
0

If I enable Dshowbridge in camera device or system, what portion in MF & Directshow is enable?

could you share the more detail for Dshowbridge?

H264 decoder problem : artifacts on decoded image

$
0
0

Hello,

I usually succeed at decoding an AVI/H264 with Microsoft Media Foundation, but I have a problem with a movie, that exhibits artificats when decoded by me. VLC or Windows Media player do not have such a problem.

See the attached image.

I don't know what could be wrong in my use of the H264 decoder.

-I tried to decode directly out from the IMFSourceReader or through a MFT transform

-I tried under W7 and W10, without difference

-I tried options like MF_LOW_LATENCY under W10, but it does not change anything (as expected).

-The movie only have I and P frames, no B-frames.

-The only noticeable thing is that there are very few keyframes (8 out of 1500 images, because it is slow motion). Could it be a bug of MSMF because of that large GOP size ?

Here is also a link to a minimal VS2019 project, with the movie (7MB), that generates one image file per decoded frame, so that it is easy to see the artifacts appear.

https://chachatelier.fr/tmp/TestMFT.zip

Do you have any idea of what could be wrong ?

Random failures trying to decode .HEIC images.

$
0
0

Hello,
we are adding the support of .HEIC images to our software. But we’ve faced strange random failures: sometimes CopyPixels returns HRESULT 0xc00d36bb (“An unexpected error has occurred in the operation requested”).
What we do: there is an executable which decodes an image and the last function of the code is CopyPixels. The executable returns its result. At some machines, running the executable in a loop goes quite smoothly, it does not fail for hours and days. But at some machines, the code fails after 10-20-30 attempts. Sometimes even the first execution fails.
At the same time, the same code does not fail if BMP or JPEG data is passed to it. It fails only with HEIC image data. And it does not matter if 49x49 or 64x64 HEIC image hardcoded data is passed to it or a real image from iPhone X (4032 x 3024) is loaded.
We also tried to call CopyPixels looped: it looked like if the first call of CopyPixels succeeded, all other CopyPixels calls did not fail in the same process. But as soon as it failed, several next calls to it failed as well but then, it stopped failing. If to try to load a HEIF file in Paint at the moment we got the failure, the loading in Paint fails too. If the first loading in Paint succeeds, it does always, even if our code fails sometimes.
All above concerns HEVC video extension of version 1.0.30442.0 and HEIF of v. 10.0.18362.1031
If to uninstall HEVC 1.0.30442.0 and to install HEVC 1.0.30443.0 from MS Store, the same code behavior gets much more stable.
I wonder if there is a bug in HEVC 1.0.30442.0 or we use WIC incorrectly? Our using it, looks to be correct as other format images (BMP, JPEG) are decoded without a problem (by the same code!) and we have never had any failures with them.
I can provide the VS2017 solution with the source code, as well as the batch we use for running the exe but I failed to attach it ("Body text cannot contain images or links until we are able to verify your account")
We would be much obliged if somebody advised how to avoid facing the failures. Thanks in advance.

Undocumented(?) limitation constraints for H264 encoding

$
0
0

Hello,

I am trying to understand why an MSMF H264 encoding session is refused for some not-so large image sizes.

For instance, 1280x1024 is accepted, but not 1024x1280. So my assumption that the bitrate could be a problem is wrong in that case.

Perhaps the problem is in MSMF, H264 or even MPEG4/AVI containers ?

I wonder if there is any documentation I should take into account about limitations regarding a mix of

-current profile

-estimated bitrate

-frame width and height

-framerate

-kind of container

Here is a minimal sample code to exhibit the problem. Did I miss some parameters ? My goal is to encode videos ~4096x4096, even with very low framerate, with maximum quality, preferably on Windows 7 (that unfortunately does not support High profile)

https://chachatelier.fr/temp/TestMFTH264.zip

Typically my output :

IsWindows7OrGreater : 1
IsWindows7SP1OrGreater : 1
IsWindows8OrGreater : 0
IsWindows8Point1OrGreater : 0
IsWindows10OrGreater : 0
initalizing MSMF...
============================================
(1024x1024)@1 fps, eAVEncH264VProfile_Base
estimated bitRate = 71 kbps
        OK
============================================
(1024x1024)@5 fps, eAVEncH264VProfile_Base
estimated bitRate = 358 kbps
        OK
============================================
(1280x1024)@1 fps, eAVEncH264VProfile_Base
estimated bitRate = 89 kbps
        OK
============================================
(1280x1024)@5 fps, eAVEncH264VProfile_Base
estimated bitRate = 448 kbps
        OK
============================================
(1024x1280)@1 fps, eAVEncH264VProfile_Base
estimated bitRate = 89 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1024x1280)@5 fps, eAVEncH264VProfile_Base
estimated bitRate = 448 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1280x1280)@1 fps, eAVEncH264VProfile_Base
estimated bitRate = 112 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1280x1280)@5 fps, eAVEncH264VProfile_Base
estimated bitRate = 560 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1024x1024)@1 fps, eAVEncH264VProfile_Main
estimated bitRate = 143 kbps
        OK
============================================
(1024x1024)@5 fps, eAVEncH264VProfile_Main
estimated bitRate = 716 kbps
        OK
============================================
(1280x1024)@1 fps, eAVEncH264VProfile_Main
estimated bitRate = 179 kbps
        OK
============================================
(1280x1024)@5 fps, eAVEncH264VProfile_Main
estimated bitRate = 896 kbps
        OK
============================================
(1024x1280)@1 fps, eAVEncH264VProfile_Main
estimated bitRate = 179 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1024x1280)@5 fps, eAVEncH264VProfile_Main
estimated bitRate = 896 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1280x1280)@1 fps, eAVEncH264VProfile_Main
estimated bitRate = 224 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1280x1280)@5 fps, eAVEncH264VProfile_Main
estimated bitRate = 1120 kbps
        errorMessage unknown for code <0xc00d36b4>
        UNSUPPORTED
============================================
(1024x1024)@1 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1024x1024)@5 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1280x1024)@1 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1280x1024)@5 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1024x1280)@1 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1024x1280)@5 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1280x1280)@1 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
============================================
(1280x1280)@5 fps, eAVEncH264VProfile_High
        skip unsupported profile under this version of Windows
Hit <Enter> to continue...






Viewing all 1079 articles
Browse latest View live


<script src="https://jsc.adskeeper.com/r/s/rssing.com.1596347.js" async> </script>