/* The copyright in this software is being made available under the BSD
 * License, included below. This software may be subject to other third party
 * and contributor rights, including patent rights, and no such rights are
 * granted under this license.
 *
 * Copyright (c) 2010-2023, ITU/ISO/IEC
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *  * Neither the name of the ITU/ISO/IEC nor the names of its contributors may
 *    be used to endorse or promote products derived from this software without
 *    specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

/** \file     DecApp.cpp
    \brief    Decoder application class
*/

#include <list>
#include <numeric>
#include <vector>
#include <stdio.h>
#include <fcntl.h>

#include "DecApp.h"
#include "DecoderLib/AnnexBread.h"
#include "DecoderLib/NALread.h"
#if RExt__DECODER_DEBUG_STATISTICS
#include "CommonLib/CodingStatistics.h"
#endif
#include "CommonLib/dtrace_codingstruct.h"

//! \ingroup DecoderApp
//! \{

// ====================================================================================================================
// Constructor / destructor / initialization / destroy
// ====================================================================================================================

DecApp::DecApp()
: m_iPOCLastDisplay(-MAX_INT)
{
  for (int i = 0; i < MAX_NUM_LAYER_IDS; i++)
  {
    m_newCLVS[i] = true;
  }
#if JVET_AI0181
  m_doiSEIPelY = nullptr;
  m_doiSEIPelU = nullptr;
  m_doiSEIPelV = nullptr;
#endif
}

// ====================================================================================================================
// Public member functions
// ====================================================================================================================

/**
 - create internal classes
 - initialize internal classes
 - until the end of the bitstream, call the decoding function of the decoder library
 - delete allocated buffers
 - destroy internal classes
 - return the number of pictures with detected decoded-picture-hash mismatches
 */
uint32_t DecApp::decode()
{
  int      poc;
  PicList *pcListPic = nullptr;

#if JVET_AI0181
  m_doiSEILastProcessedOverlay = -1;
#endif
  
#if GREEN_METADATA_SEI_ENABLED
  FeatureCounterStruct featureCounter;
  FeatureCounterStruct featureCounterOld;
  std::ifstream        bitstreamSize(m_bitstreamFileName.c_str(), std::ifstream::in | std::ifstream::binary);
  std::streampos fsize = 0;
  fsize = bitstreamSize.tellg();
  bitstreamSize.seekg( 0, std::ios::end );
  featureCounter.bytes = (int) bitstreamSize.tellg() - (int) fsize;
  bitstreamSize.close();
#endif

  std::ifstream bitstreamFile(m_bitstreamFileName.c_str(), std::ifstream::in | std::ifstream::binary);
  if (!bitstreamFile)
  {
    EXIT( "Failed to open bitstream file " << m_bitstreamFileName.c_str() << " for reading" ) ;
  }

  InputByteStream bytestream(bitstreamFile);

  if (!m_outputDecodedSEIMessagesFilename.empty() && m_outputDecodedSEIMessagesFilename!="-")
  {
    m_seiMessageFileStream.open(m_outputDecodedSEIMessagesFilename.c_str(), std::ios::out);
    if (!m_seiMessageFileStream.is_open() || !m_seiMessageFileStream.good())
    {
      EXIT( "Unable to open file "<< m_outputDecodedSEIMessagesFilename.c_str() << " for writing decoded SEI messages");
    }
  }

  if (!m_oplFilename.empty() && m_oplFilename!="-")
  {
    m_oplFileStream.open(m_oplFilename.c_str(), std::ios::out);
    if (!m_oplFileStream.is_open() || !m_oplFileStream.good())
    {
      EXIT( "Unable to open file "<< m_oplFilename.c_str() << " to write an opl-file for conformance testing (see JVET-P2008 for details)");
    }
  }

  // create & initialize internal classes
  xCreateDecLib();

  m_iPOCLastDisplay += m_iSkipFrame;      // set the last displayed POC correctly for skip forward.

  // clear contents of colour-remap-information-SEI output file
  if (!m_colourRemapSEIFileName.empty())
  {
    std::ofstream ofile(m_colourRemapSEIFileName.c_str());
    if (!ofile.good() || !ofile.is_open())
    {
      EXIT( "Unable to open file " << m_colourRemapSEIFileName.c_str() << " for writing colour-remap-information-SEI video");
    }
  }

  // clear contents of annotated-Regions-SEI output file
  if (!m_annotatedRegionsSEIFileName.empty())
  {
    std::ofstream ofile(m_annotatedRegionsSEIFileName.c_str());
    if (!ofile.good() || !ofile.is_open())
    {
      fprintf(stderr, "\nUnable to open file '%s' for writing annotated-Regions-SEI\n", m_annotatedRegionsSEIFileName.c_str());
      exit(EXIT_FAILURE);
    }
  }

#if JVET_AF0088_OMI_SEI
  if (!m_objectMaskInfoSEIFileName.empty())
  {
    std::ofstream ofile(m_objectMaskInfoSEIFileName.c_str());
    if (!ofile.good() || !ofile.is_open())
    {
      fprintf(stderr, "\nUnable to open file '%s' for writing Object-Mask-Information-SEI\n",
              m_objectMaskInfoSEIFileName.c_str());
      exit(EXIT_FAILURE);
    }
  }
#endif

#if JVET_AH0161_REGION_PACKING_INFORMATION_SEI
  if (!m_packedRegionsInfoSEIFileName.empty())
  {
    std::ofstream ofile(m_packedRegionsInfoSEIFileName.c_str());
    if (!ofile.good() || !ofile.is_open())
    {
      fprintf(stderr, "\nUnable to open file '%s' for writing packed regions info SEI\n", m_packedRegionsInfoSEIFileName.c_str());
      exit(EXIT_FAILURE);
    }
  }
#endif

  // main decoder loop
  bool loopFiltered[MAX_VPS_LAYERS] = { false };

  bool bPicSkipped = false;

  bool openedPostFile = false;
  setShutterFilterFlag(!m_shutterIntervalPostFileName.empty());   // do not apply shutter-interval SEI processing if no filename is specified
  m_cDecLib.setShutterFilterFlag(getShutterFilterFlag());
#if JVET_AF0167_MULTI_PLANE_IMAGE_INFO_SEI
  setMultiPlaneImageInfoFlag(!m_multiplaneImageInfoFileName.empty());
  m_cDecLib.setMultiPlaneImageInfoFlag(getMultiPlaneImageInfoFlag());
  bool openedMPIITextureYUV = false;
  bool openedMPIIOpacityYUV = false;
#endif

  bool isEosPresentInPu = false;
  bool isEosPresentInLastPu = false;

  bool outputPicturePresentInBitstream = false;
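  // lazily record whether any picture in the DPB has been flagged for output;
  // used for the conformance check after the main decoding loop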
  auto setOutputPicturePresentInStream = [&]()
  {
    if( !outputPicturePresentInBitstream )
    {
      PicList::iterator iterPic = pcListPic->begin();
      while (!outputPicturePresentInBitstream && iterPic != pcListPic->end())
      {
        Picture *pcPic = *(iterPic++);
        if (pcPic->neededForOutput)
        {
          outputPicturePresentInBitstream = true;
        }
      }
    }
  };

  m_cDecLib.setHTidExternalSetFlag(m_mTidExternalSet);
  m_cDecLib.setTOlsIdxExternalFlag(m_tOlsIdxTidExternalSet);

#if GREEN_METADATA_SEI_ENABLED
  m_cDecLib.setFeatureAnalysisFramewise( m_GMFAFramewise);
  m_cDecLib.setGMFAFile(m_GMFAFile);
#endif
  
  bool gdrRecoveryPeriod[MAX_NUM_LAYER_IDS] = { false };
  bool prevPicSkipped = true;
  int lastNaluLayerId = -1;
  bool decodedSliceInAU = false;
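  // main loop: read one NAL unit per iteration, detect picture and access-unit boundaries,
  // decode the NAL unit, and handle output, flushing and per-PU / per-AU bookkeeping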

  while (!!bitstreamFile)
  {
    InputNALUnit nalu;
    nalu.m_nalUnitType = NAL_UNIT_INVALID;

    // determine if next NAL unit will be the first one from a new picture
    bool bNewPicture = m_cDecLib.isNewPicture(&bitstreamFile, &bytestream);
    bool bNewAccessUnit = bNewPicture && decodedSliceInAU && m_cDecLib.isNewAccessUnit( bNewPicture, &bitstreamFile, &bytestream );
    if(!bNewPicture)
    {
      AnnexBStats stats = AnnexBStats();

      // find next NAL unit in stream
      byteStreamNALUnit(bytestream, nalu.getBitstream().getFifo(), stats);
      if (nalu.getBitstream().getFifo().empty())
      {
        /* this can happen if the following occur:
         *  - empty input file
         *  - two back-to-back start_code_prefixes
         *  - start_code_prefix immediately followed by EOF
         */
        msg( ERROR, "Warning: Attempt to decode an empty NAL unit\n");
      }
      else
      {
        // read NAL unit header
        read(nalu);

        // flush output for first slice of an IDR picture
        if(m_cDecLib.getFirstSliceInPicture() &&
            (nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_W_RADL ||
             nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_N_LP))
        {
          if (!m_cDecLib.getMixedNaluTypesInPicFlag())
          {
            m_newCLVS[nalu.m_nuhLayerId] = true;   // An IDR picture starts a new CLVS
            xFlushOutput(pcListPic, nalu.m_nuhLayerId);
          }
          else
          {
            m_newCLVS[nalu.m_nuhLayerId] = false;
          }
        }
        else if (m_cDecLib.getFirstSliceInPicture() && nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA && isEosPresentInLastPu)
        {
          // A CRA that is immediately preceded by an EOS is a CLVSS
          m_newCLVS[nalu.m_nuhLayerId] = true;
          xFlushOutput(pcListPic, nalu.m_nuhLayerId);
        }
        else if (m_cDecLib.getFirstSliceInPicture() && nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA && !isEosPresentInLastPu)
        {
          // A CRA that is not immediately preceded by an EOS is not a CLVSS
          m_newCLVS[nalu.m_nuhLayerId] = false;
        }
        else if(m_cDecLib.getFirstSliceInPicture() && !isEosPresentInLastPu)
        {
          m_newCLVS[nalu.m_nuhLayerId] = false;
        }

        // parse NAL unit syntax if within target decoding layer
        if ((m_maxTemporalLayer == TL_INFINITY || nalu.m_temporalId <= m_maxTemporalLayer)
            && xIsNaluWithinTargetDecLayerIdSet(&nalu))
        {
          if (m_targetDecLayerIdSet.size())
          {
            CHECK(std::find(m_targetDecLayerIdSet.begin(), m_targetDecLayerIdSet.end(), nalu.m_nuhLayerId) == m_targetDecLayerIdSet.end(), "bitstream shall not contain any other layers than included in the OLS with OlsIdx");
          }
          if (bPicSkipped)
          {
            if (nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_TRAIL || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_STSA
                || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_RASL || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_RADL
                || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_W_RADL
                || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_N_LP
                || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_GDR)
            {
              if (decodedSliceInAU && m_cDecLib.isSliceNaluFirstInAU(true, nalu))
              {
                m_cDecLib.resetAccessUnitNals();
                m_cDecLib.resetAccessUnitApsNals();
                m_cDecLib.resetAccessUnitPicInfo();
              }
              bPicSkipped = false;
            }
          }

          int skipFrameCounter = m_iSkipFrame;
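          // m_cDecLib.decode() decrements m_iSkipFrame when it skips this picture;
          // comparing against skipFrameCounter below detects whether a skip happened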
          m_cDecLib.decode(nalu, m_iSkipFrame, m_iPOCLastDisplay, m_targetOlsIdx);

          if ( prevPicSkipped && nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_GDR )
          {
            gdrRecoveryPeriod[nalu.m_nuhLayerId] = true;
          }

          if ( skipFrameCounter == 1 && ( nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_GDR  || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ))
          {
            skipFrameCounter--;
          }

          if (m_iSkipFrame < skipFrameCounter
              && (nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_TRAIL || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_STSA
                  || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_RASL || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_RADL
                  || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_W_RADL
                  || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_N_LP
                  || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA || nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_GDR))
          {
            if (decodedSliceInAU && m_cDecLib.isSliceNaluFirstInAU(true, nalu))
            {
              m_cDecLib.checkSeiInPictureUnit();
              m_cDecLib.resetPictureSeiNalus();
              m_cDecLib.checkAPSInPictureUnit();
              m_cDecLib.resetPictureUnitNals();
              m_cDecLib.resetAccessUnitSeiTids();
              m_cDecLib.checkSEIInAccessUnit();
              m_cDecLib.resetAccessUnitSeiPayLoadTypes();
              m_cDecLib.resetAccessUnitNals();
              m_cDecLib.resetAccessUnitApsNals();
              m_cDecLib.resetAccessUnitPicInfo();
            }
            bPicSkipped = true;
            m_iSkipFrame++;   // restore the skipFrame count; the real decrement occurs at the beginning of the next frame
          }

          if (nalu.m_nalUnitType == NAL_UNIT_OPI)
          {
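            // the OPI NAL unit may supply the highest temporal sub-layer to decode
            // when it has not already been set externally on the command line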
            if (!m_cDecLib.getHTidExternalSetFlag() && m_cDecLib.getOPI()->getHtidInfoPresentFlag())
            {
              m_maxTemporalLayer = m_cDecLib.getOPI()->getOpiHtidPlus1() - 1;
            }
            m_cDecLib.setHTidOpiSetFlag(m_cDecLib.getOPI()->getHtidInfoPresentFlag());
          }
          if (nalu.m_nalUnitType == NAL_UNIT_VPS)
          {
            m_cDecLib.deriveTargetOutputLayerSet( m_cDecLib.getVPS()->m_targetOlsIdx );
            m_targetDecLayerIdSet = m_cDecLib.getVPS()->m_targetLayerIdSet;
            m_targetOutputLayerIdSet = m_cDecLib.getVPS()->m_targetOutputLayerIdSet;
          }
          if (nalu.isSlice())
          {
            decodedSliceInAU = true;
          }
        }
        else
        {
          bPicSkipped = true;
          if (nalu.isSlice())
          {
            m_cDecLib.setFirstSliceInPicture(false);
          }
        }
      }

      if( nalu.isSlice() && nalu.m_nalUnitType != NAL_UNIT_CODED_SLICE_RASL)
      {
        prevPicSkipped = bPicSkipped;
      }

      // once an EOS NAL unit appears in the current PU, mark the variable isEosPresentInPu as true
      if (nalu.m_nalUnitType == NAL_UNIT_EOS)
      {
        isEosPresentInPu = true;
        m_newCLVS[nalu.m_nuhLayerId] = true;   // the presence of an EOS means that the next picture begins a new CLVS
        m_cDecLib.setEosPresentInPu(true);
      }
      // within the current PU, only EOS and EOB are allowed to be sent after an EOS nal unit
      if(isEosPresentInPu)
      {
        CHECK(nalu.m_nalUnitType != NAL_UNIT_EOS && nalu.m_nalUnitType != NAL_UNIT_EOB, "When an EOS NAL unit is present in a PU, it shall be the last NAL unit among all NAL units within the PU other than other EOS NAL units or an EOB NAL unit");
      }
      lastNaluLayerId = nalu.m_nuhLayerId;
    }
    else
    {
      nalu.m_nuhLayerId = lastNaluLayerId;
    }

    if (bNewPicture || !bitstreamFile || nalu.m_nalUnitType == NAL_UNIT_EOS)
    {
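      // finish the previously decoded picture: apply the in-loop filters and run the
      // output (bumping) process before starting the next picture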
      if (!m_cDecLib.getFirstSliceInSequence(nalu.m_nuhLayerId) && !bPicSkipped)
      {
        if (!loopFiltered[nalu.m_nuhLayerId] || bitstreamFile)
        {
          m_cDecLib.executeLoopFilters();
          m_cDecLib.finishPicture(poc, pcListPic, INFO, m_newCLVS[nalu.m_nuhLayerId]);
        }
        loopFiltered[nalu.m_nuhLayerId] = (nalu.m_nalUnitType == NAL_UNIT_EOS);
        if (nalu.m_nalUnitType == NAL_UNIT_EOS)
        {
          m_cDecLib.setFirstSliceInSequence(true, nalu.m_nuhLayerId);
        }

        m_cDecLib.updateAssociatedIRAP();
        m_cDecLib.updatePrevGDRInSameLayer();
        m_cDecLib.updatePrevIRAPAndGDRSubpic();

        if (gdrRecoveryPeriod[nalu.m_nuhLayerId])
        {
          if (m_cDecLib.getGDRRecoveryPocReached())
          {
            gdrRecoveryPeriod[nalu.m_nuhLayerId] = false;
          }
        }
      }
      else
      {
        m_cDecLib.setFirstSliceInPicture(true);
      }
    }

    if( pcListPic )
    {
      if ( gdrRecoveryPeriod[nalu.m_nuhLayerId] ) // Suppress YUV and OPL output during GDR recovery
      {
        PicList::iterator iterPic = pcListPic->begin();
        while (iterPic != pcListPic->end())
        {
          Picture *pcPic = *(iterPic++);
          if (pcPic->layerId == nalu.m_nuhLayerId)
          {
            pcPic->neededForOutput = false;
          }
        }
      }

      BitDepths layerOutputBitDepth;
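      // per-layer output bit depth: use the command-line override when given,
      // otherwise fall back to the bit depth coded in the bitstream for this layer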

      PicList::iterator iterPicLayer = pcListPic->begin();
      for (; iterPicLayer != pcListPic->end(); ++iterPicLayer)
      {
        if ((*iterPicLayer)->layerId == nalu.m_nuhLayerId)
        {
          break;
        }
      }
      if (iterPicLayer != pcListPic->end())
      {
        BitDepths &bitDepths = (*iterPicLayer)->m_bitDepths;

        for (auto channelType: { ChannelType::LUMA, ChannelType::CHROMA })
        {
          if (m_outputBitDepth[channelType] == 0)
          {
            layerOutputBitDepth[channelType] = bitDepths[channelType];
          }
          else
          {
            layerOutputBitDepth[channelType] = m_outputBitDepth[channelType];
          }
        }
        if (m_packedYUVMode
            && (layerOutputBitDepth[ChannelType::LUMA] != 10 && layerOutputBitDepth[ChannelType::LUMA] != 12))
        {
          EXIT("Invalid output bit-depth for packed YUV output, aborting\n");
        }

        if (!m_reconFileName.empty() && !m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].isOpen())
        {
          const auto  vps           = m_cDecLib.getVPS();
          std::string reconFileName = m_reconFileName;

          if (m_reconFileName.compare("/dev/null") && vps != nullptr && vps->getMaxLayers() > 1
              && xIsNaluWithinTargetOutputLayerIdSet(&nalu))
          {
            const size_t      pos         = reconFileName.find_last_of('.');
            const std::string layerString = std::string(".layer") + std::to_string(nalu.m_nuhLayerId);

            reconFileName.insert(pos, layerString);
          }

          if (vps == nullptr || vps->getMaxLayers() == 1 || xIsNaluWithinTargetOutputLayerIdSet(&nalu))
          {
            if (isY4mFileExt(reconFileName))
            {
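              // Y4M output needs a frame rate in its header: derive it from HRD timing info when
              // present (time_scale / (num_units_in_tick * elementDurationInTc), reduced by the gcd),
              // otherwise fall back to the default frame rate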
              const auto sps        = pcListPic->front()->cs->sps;
              Fraction   frameRate  = DEFAULT_FRAME_RATE;

              const bool useSpsData = sps->getGeneralHrdParametersPresentFlag();
              if (useSpsData || (vps != nullptr && vps->getVPSGeneralHrdParamsPresentFlag()))
              {
                const GeneralHrdParams* hrd =
                  useSpsData ? sps->getGeneralHrdParameters() : vps->getGeneralHrdParameters();

                const int tLayer = m_maxTemporalLayer == TL_INFINITY
                                     ? (useSpsData ? sps->getMaxTLayers() - 1 : vps->getMaxSubLayers() - 1)
                                     : m_maxTemporalLayer;

                const OlsHrdParams& olsHrdParam =
                  (useSpsData ? sps->getOlsHrdParameters() : vps->getOlsHrdParameters(vps->m_targetOlsIdx))[tLayer];

                int elementDurationInTc = 1;
                if (olsHrdParam.getFixedPicRateWithinCvsFlag())
                {
                  elementDurationInTc = olsHrdParam.getElementDurationInTc();
                }
                else
                {
                  msg(WARNING,
                      "\nWarning: No fixed picture rate info is found in the bitstream, best guess is used.\n");
                }
                frameRate.num = hrd->getTimeScale();
                frameRate.den = hrd->getNumUnitsInTick() * elementDurationInTc;
                const int gcd = std::gcd(frameRate.num, frameRate.den);
                frameRate.num /= gcd;
                frameRate.den /= gcd;
              }
              else
              {
                msg(WARNING, "\nWarning: No frame rate info found in the bitstream, default 50 fps is used.\n");
              }
              const auto pps = pcListPic->front()->cs->pps;
              const auto sx = SPS::getWinUnitX(sps->getChromaFormatIdc());
              const auto sy = SPS::getWinUnitY(sps->getChromaFormatIdc());
              int picWidth = 0, picHeight = 0;
              if (m_upscaledOutput == 2)
              {
                auto confWindow = sps->getConformanceWindow();
                picWidth = sps->getMaxPicWidthInLumaSamples() -(confWindow.getWindowLeftOffset() + confWindow.getWindowRightOffset()) * sx;
                picHeight = sps->getMaxPicHeightInLumaSamples() - (confWindow.getWindowTopOffset() + confWindow.getWindowBottomOffset()) * sy;
              }
              else
              {
                auto confWindow = pps->getConformanceWindow();
                picWidth = pps->getPicWidthInLumaSamples() - (confWindow.getWindowLeftOffset() + confWindow.getWindowRightOffset()) * sx;
                picHeight = pps->getPicHeightInLumaSamples() - (confWindow.getWindowTopOffset() + confWindow.getWindowBottomOffset()) * sy;
              }
              m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].setOutputY4mInfo(
                picWidth, picHeight, frameRate, layerOutputBitDepth[ChannelType::LUMA], sps->getChromaFormatIdc(),
                sps->getVuiParameters()->getChromaSampleLocType());
            }
            m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].open(reconFileName, true, layerOutputBitDepth,
                                                           layerOutputBitDepth, bitDepths);   // write mode
          }
        }
        // update file bitdepth shift if recon bitdepth changed between sequences
        for (auto channelType: { ChannelType::LUMA, ChannelType::CHROMA })
        {
          int reconBitdepth = (*iterPicLayer)->m_bitDepths[channelType];
          int fileBitdepth  = m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].getFileBitdepth(channelType);
          int bitdepthShift = m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].getBitdepthShift(channelType);
          if (fileBitdepth + bitdepthShift != reconBitdepth)
          {
            m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].setBitdepthShift(channelType, reconBitdepth - fileBitdepth);
          }
        }

        if (!m_SEIFGSFileName.empty() && !m_videoIOYuvSEIFGSFile[nalu.m_nuhLayerId].isOpen())
        {
          std::string SEIFGSFileName = m_SEIFGSFileName;
          if (m_SEIFGSFileName.compare("/dev/null") && m_cDecLib.getVPS() != nullptr && m_cDecLib.getVPS()->getMaxLayers() > 1 && xIsNaluWithinTargetOutputLayerIdSet(&nalu))
          {
            size_t      pos         = SEIFGSFileName.find_last_of('.');
            std::string layerString = std::string(".layer") + std::to_string(nalu.m_nuhLayerId);
            if (pos != std::string::npos)
            {
              SEIFGSFileName.insert(pos, layerString);
            }
            else
            {
              SEIFGSFileName.append(layerString);
            }
          }
          if ((m_cDecLib.getVPS() != nullptr && (m_cDecLib.getVPS()->getMaxLayers() == 1 || xIsNaluWithinTargetOutputLayerIdSet(&nalu))) || m_cDecLib.getVPS() == nullptr)
          {
            m_videoIOYuvSEIFGSFile[nalu.m_nuhLayerId].open(SEIFGSFileName, true, layerOutputBitDepth,
                                                           layerOutputBitDepth, bitDepths);   // write mode
          }
        }
        // update file bitdepth shift if recon bitdepth changed between sequences
        if (!m_SEIFGSFileName.empty())
        {
          for (const auto channelType: { ChannelType::LUMA, ChannelType::CHROMA })
          {
            int reconBitdepth = (*iterPicLayer)->m_bitDepths[channelType];
            int fileBitdepth  = m_videoIOYuvSEIFGSFile[nalu.m_nuhLayerId].getFileBitdepth(channelType);
            int bitdepthShift = m_videoIOYuvSEIFGSFile[nalu.m_nuhLayerId].getBitdepthShift(channelType);
            if (fileBitdepth + bitdepthShift != reconBitdepth)
            {
              m_videoIOYuvSEIFGSFile[nalu.m_nuhLayerId].setBitdepthShift(channelType, reconBitdepth - fileBitdepth);
            }
          }
        }

        if (!m_SEICTIFileName.empty() && !m_cVideoIOYuvSEICTIFile[nalu.m_nuhLayerId].isOpen())
        {
          std::string SEICTIFileName = m_SEICTIFileName;
          if (m_SEICTIFileName.compare("/dev/null") && m_cDecLib.getVPS() != nullptr && m_cDecLib.getVPS()->getMaxLayers() > 1 && xIsNaluWithinTargetOutputLayerIdSet(&nalu))
          {
            size_t pos = SEICTIFileName.find_last_of('.');
            if (pos != std::string::npos)
            {
              SEICTIFileName.insert(pos, std::to_string(nalu.m_nuhLayerId));
            }
            else
            {
              SEICTIFileName.append(std::to_string(nalu.m_nuhLayerId));
            }
          }
          if ((m_cDecLib.getVPS() != nullptr && (m_cDecLib.getVPS()->getMaxLayers() == 1 || xIsNaluWithinTargetOutputLayerIdSet(&nalu))) || m_cDecLib.getVPS() == nullptr)
          {
            m_cVideoIOYuvSEICTIFile[nalu.m_nuhLayerId].open(SEICTIFileName, true, layerOutputBitDepth,
                                                            layerOutputBitDepth, bitDepths);   // write mode
          }
        }
      }
      if (!m_annotatedRegionsSEIFileName.empty())
      {
        xOutputAnnotatedRegions(pcListPic);
      }

      PicList::iterator iterPic = pcListPic->begin();
      Picture* pcPic = *(iterPic);
      SEIMessages       shutterIntervalInfo = getSeisByType(pcPic->SEIs, SEI::PayloadType::SHUTTER_INTERVAL_INFO);

      if (!m_shutterIntervalPostFileName.empty())
      {
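        // shutter-interval post-filtering: cache the SII SEI received with each IDR in
        // m_activeSiiInfo and, for non-IDR pictures, look up the entry covering the current POC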
        bool                    hasValidSII = true;
        SEIShutterIntervalInfo *curSIIInfo  = nullptr;
        if ((pcPic->getPictureType() == NAL_UNIT_CODED_SLICE_IDR_W_RADL ||
          pcPic->getPictureType() == NAL_UNIT_CODED_SLICE_IDR_N_LP) && m_newCLVS[nalu.m_nuhLayerId])
        {
          IdrSiiInfo curSII;
          curSII.m_picPoc = pcPic->getPOC();

          curSII.m_isValidSii                             = false;
          curSII.m_siiInfo.m_siiEnabled                   = false;
          curSII.m_siiInfo.m_siiNumUnitsInShutterInterval = 0;
          curSII.m_siiInfo.m_siiTimeScale = 0;
          curSII.m_siiInfo.m_siiMaxSubLayersMinus1 = 0;
          curSII.m_siiInfo.m_siiFixedSIwithinCLVS = 0;

          if (shutterIntervalInfo.size() > 0)
          {
            SEIShutterIntervalInfo *seiShutterIntervalInfo = (SEIShutterIntervalInfo*) *(shutterIntervalInfo.begin());
            curSII.m_isValidSii                            = true;

            curSII.m_siiInfo.m_siiEnabled = seiShutterIntervalInfo->m_siiEnabled;
            curSII.m_siiInfo.m_siiNumUnitsInShutterInterval = seiShutterIntervalInfo->m_siiNumUnitsInShutterInterval;
            curSII.m_siiInfo.m_siiTimeScale = seiShutterIntervalInfo->m_siiTimeScale;
            curSII.m_siiInfo.m_siiMaxSubLayersMinus1 = seiShutterIntervalInfo->m_siiMaxSubLayersMinus1;
            curSII.m_siiInfo.m_siiFixedSIwithinCLVS = seiShutterIntervalInfo->m_siiFixedSIwithinCLVS;
            curSII.m_siiInfo.m_siiSubLayerNumUnitsInSI.clear();
            for (int i = 0; i < seiShutterIntervalInfo->m_siiSubLayerNumUnitsInSI.size(); i++)
            {
              curSII.m_siiInfo.m_siiSubLayerNumUnitsInSI.push_back(seiShutterIntervalInfo->m_siiSubLayerNumUnitsInSI[i]);
            }

            uint32_t tmpInfo = (uint32_t)(m_activeSiiInfo.size() + 1);
            m_activeSiiInfo.insert(std::pair<uint32_t, IdrSiiInfo>(tmpInfo, curSII));
            curSIIInfo = seiShutterIntervalInfo;
          }
          else
          {
            curSII.m_isValidSii = false;
            hasValidSII         = false;
            uint32_t tmpInfo = (uint32_t)(m_activeSiiInfo.size() + 1);
            m_activeSiiInfo.insert(std::pair<uint32_t, IdrSiiInfo>(tmpInfo, curSII));
          }
        }
        else
        {
          if (m_activeSiiInfo.size() == 1)
          {
            curSIIInfo = &(m_activeSiiInfo.begin()->second.m_siiInfo);
          }
          else
          {
            bool isLast = true;
            for (int i = 1; i < m_activeSiiInfo.size() + 1; i++)
            {
              if (pcPic->getPOC() <= m_activeSiiInfo.at(i).m_picPoc)
              {
                if (m_activeSiiInfo[i - 1].m_isValidSii)
                {
                  curSIIInfo = &(m_activeSiiInfo.at(i - 1).m_siiInfo);
                }
                else
                {
                  hasValidSII = false;
                }
                isLast = false;
                break;
              }
            }
            if (isLast)
            {
              uint32_t tmpInfo = (uint32_t)(m_activeSiiInfo.size());
              curSIIInfo = &(m_activeSiiInfo.at(tmpInfo).m_siiInfo);
            }
          }
        }

        if (hasValidSII)
        {
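          // blending is only applied when the shutter interval is not fixed within the CLVS,
          // the lower sub-layers share one interval that is an integer multiple of the highest
          // sub-layer's interval (the blending ratio), and the DPB can hold more than one picture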
          if (!curSIIInfo->m_siiFixedSIwithinCLVS)
          {
            uint32_t siiMaxSubLayersMinus1 = curSIIInfo->m_siiMaxSubLayersMinus1;
            uint32_t numUnitsLFR = curSIIInfo->m_siiSubLayerNumUnitsInSI[0];
            uint32_t numUnitsHFR = curSIIInfo->m_siiSubLayerNumUnitsInSI[siiMaxSubLayersMinus1];

            int blending_ratio = (numUnitsLFR / numUnitsHFR);
            bool checkEqualValuesOfSFR = true;
            bool checkSubLayerSI       = false;
            int i;

            //supports only the case of SFR = HFR / 2
            if (curSIIInfo->m_siiSubLayerNumUnitsInSI[siiMaxSubLayersMinus1] <
                        curSIIInfo->m_siiSubLayerNumUnitsInSI[siiMaxSubLayersMinus1 - 1])
            {
              checkSubLayerSI = true;
            }
            else
            {
              fprintf(stderr, "Warning: Shutter Interval SEI message processing is disabled due to SFR != (HFR / 2) \n");
            }
            //check shutter interval for all sublayer remains same for SFR pictures
            for (i = 1; i < siiMaxSubLayersMinus1; i++)
            {
              if (curSIIInfo->m_siiSubLayerNumUnitsInSI[0] != curSIIInfo->m_siiSubLayerNumUnitsInSI[i])
              {
                checkEqualValuesOfSFR = false;
              }
            }
            if (!checkEqualValuesOfSFR)
            {
              fprintf(stderr, "Warning: Shutter Interval SEI message processing is disabled when shutter interval is not same for SFR sublayers \n");
            }
            if (checkSubLayerSI && checkEqualValuesOfSFR)
            {
              setShutterFilterFlag(numUnitsLFR == blending_ratio * numUnitsHFR);
              setBlendingRatio(blending_ratio);
            }
            else
            {
              setShutterFilterFlag(false);
            }

            const SPS* activeSPS = pcListPic->front()->cs->sps;

            if (numUnitsLFR == blending_ratio * numUnitsHFR && activeSPS->getMaxTLayers() == 1 && activeSPS->getMaxDecPicBuffering(0) == 1)
            {
              fprintf(stderr, "Warning: Shutter Interval SEI message processing is disabled for single TempLayer and single frame in DPB\n");
              setShutterFilterFlag(false);
            }
          }
          else
          {
            fprintf(stderr, "Warning: Shutter Interval SEI message processing is disabled for fixed shutter interval case\n");
            setShutterFilterFlag(false);
          }
        }
        else
        {
          fprintf(stderr, "Warning: Shutter Interval information should be specified in SII-SEI message\n");
          setShutterFilterFlag(false);
        }
      }


      if (iterPicLayer != pcListPic->end())
      {
        if ((!m_shutterIntervalPostFileName.empty()) && (!openedPostFile) && getShutterFilterFlag())
        {
          BitDepths &bitDepths = (*iterPicLayer)->m_bitDepths;
          std::ofstream ofile(m_shutterIntervalPostFileName.c_str());
          if (!ofile.good() || !ofile.is_open())
          {
            fprintf(stderr, "\nUnable to open file '%s' for writing shutter-interval-SEI video\n", m_shutterIntervalPostFileName.c_str());
            exit(EXIT_FAILURE);
          }
          m_cTVideoIOYuvSIIPostFile.open(m_shutterIntervalPostFileName, true, layerOutputBitDepth, layerOutputBitDepth,
                                         bitDepths);   // write mode
          openedPostFile = true;
        }
      }

#if JVET_AF0167_MULTI_PLANE_IMAGE_INFO_SEI
      SEIMessages multiplaneImageInfo = getSeisByType(pcPic->SEIs, SEI::PayloadType::MULTIPLANE_IMAGE_INFO);
      if (multiplaneImageInfo.size() > 0 && !m_multiplaneImageInfoFileName.empty())
      {
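        // multiplane image info SEI: remember the layout parameters and, on first use, open the
        // texture / opacity YUV outputs and write the layer depth values to a side text file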
        SEIMultiplaneImageInfo* seiMultiplaneImageInfo  = (SEIMultiplaneImageInfo*) *(multiplaneImageInfo.begin());
        m_mpiiInfo.m_mpiiSEIEnabled                     = true;
        m_mpiiInfo.m_mpiiNumLayersMinus1                = seiMultiplaneImageInfo->m_mpiiNumLayersMinus1;
        m_mpiiInfo.m_mpiiLayerDepthEqualDistanceFlag    = seiMultiplaneImageInfo->m_mpiiLayerDepthEqualDistanceFlag;
        m_mpiiInfo.m_mpiiLayerDepthValues               = seiMultiplaneImageInfo->m_mpiiLayerDepthValues;
        m_mpiiInfo.m_mpiiTextureOpacityInterleaveFlag   = seiMultiplaneImageInfo->m_mpiiTextureOpacityInterleaveFlag;
        m_mpiiInfo.m_mpiiTextureOpacityArrangementFlag  = seiMultiplaneImageInfo->m_mpiiTextureOpacityArrangementFlag;
        m_mpiiInfo.m_mpiiPictureNumLayersInHeightMinus1 = seiMultiplaneImageInfo->m_mpiiPictureNumLayersInHeightMinus1;
        if (iterPicLayer != pcListPic->end())
        {
          if (!openedMPIITextureYUV && !openedMPIIOpacityYUV)
          {
            m_textureOpacityToggle         = false;
            BitDepths&  bitDepths          = (*iterPicLayer)->m_bitDepths;
            std::string textureYuvFileName = m_multiplaneImageInfoFileName + "_texture.yuv";
            std::string opacityYuvFileName = m_multiplaneImageInfoFileName + "_opacity_400.yuv";
            std::ofstream ofileTexture(textureYuvFileName.c_str());
            if (!ofileTexture.good() || !ofileTexture.is_open())
            {
              fprintf(stderr, "\nUnable to open file '%s' for writing MPII texture layers \n", textureYuvFileName.c_str());
              exit(EXIT_FAILURE);
            }
            m_VideoIOTextureYuvReconFile.open(textureYuvFileName, true, layerOutputBitDepth, layerOutputBitDepth, bitDepths);
            openedMPIITextureYUV = true;
            std::ofstream ofileOpacity(opacityYuvFileName.c_str());
            if (!ofileOpacity.good() || !ofileOpacity.is_open())
            {
              fprintf(stderr, "\nUnable to open file '%s' for writing MPII opacity layers \n", opacityYuvFileName.c_str());
              exit(EXIT_FAILURE);
            }
            m_VideoIOOpacityYuvReconFile.open(opacityYuvFileName, true, layerOutputBitDepth, layerOutputBitDepth, bitDepths);
            openedMPIIOpacityYUV = true;
            std::string   depthInfoFileName = m_multiplaneImageInfoFileName + "_depth_info.txt";
            std::ofstream outLayerDepthvalues(depthInfoFileName);
            if (!outLayerDepthvalues.good() || !outLayerDepthvalues.is_open())
            {
              fprintf(stderr, "\nUnable to open file '%s' for writing MPII depth info \n", depthInfoFileName.c_str());
              exit(EXIT_FAILURE);
            }
            for (auto i: m_mpiiInfo.m_mpiiLayerDepthValues)
            {
              outLayerDepthvalues << i << " ";
            }
            outLayerDepthvalues.close();
          }
        }
      }
#endif
      // write reconstruction to file
      if( bNewPicture )
      {
        setOutputPicturePresentInStream();
        xWriteOutput( pcListPic, nalu.m_temporalId );
      }
      if (nalu.m_nalUnitType == NAL_UNIT_EOS)
      {
        if (!m_annotatedRegionsSEIFileName.empty() && bNewPicture)
        {
          xOutputAnnotatedRegions(pcListPic);
        }
        setOutputPicturePresentInStream();
        xWriteOutput( pcListPic, nalu.m_temporalId );
        m_cDecLib.setFirstSliceInPicture (false);
      }
      // write reconstruction to file -- for additional bumping as defined in C.5.2.3
      if (!bNewPicture && ((nalu.m_nalUnitType >= NAL_UNIT_CODED_SLICE_TRAIL && nalu.m_nalUnitType <= NAL_UNIT_RESERVED_IRAP_VCL_11)
        || (nalu.m_nalUnitType >= NAL_UNIT_CODED_SLICE_IDR_W_RADL && nalu.m_nalUnitType <= NAL_UNIT_CODED_SLICE_GDR)))
      {
        setOutputPicturePresentInStream();
        xWriteOutput( pcListPic, nalu.m_temporalId );
      }
    }
    if( bNewPicture )
    {
      m_cDecLib.checkSeiInPictureUnit();
      m_cDecLib.resetPictureSeiNalus();
      // reset the EOS present status for the next PU check
      isEosPresentInLastPu = isEosPresentInPu;
      isEosPresentInPu = false;
    }
    if (bNewPicture || !bitstreamFile || nalu.m_nalUnitType == NAL_UNIT_EOS)
    {
      m_cDecLib.checkAPSInPictureUnit();
      m_cDecLib.resetPictureUnitNals();
    }
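    // end of an access unit (or end of stream): run the AU-level conformance checks and
    // reset the per-AU state before the next access unit starts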
    if (bNewAccessUnit || !bitstreamFile)
    {
      m_cDecLib.CheckNoOutputPriorPicFlagsInAccessUnit();
      m_cDecLib.resetAccessUnitNoOutputPriorPicFlags();
      m_cDecLib.checkLayerIdIncludedInCvss();
      m_cDecLib.checkSEIInAccessUnit();
      m_cDecLib.resetAccessUnitNestedSliSeiInfo();
      m_cDecLib.resetIsFirstAuInCvs();
      m_cDecLib.resetAccessUnitEos();
      m_cDecLib.resetAudIrapOrGdrAuFlag();
    }
    if(bNewAccessUnit)
    {
      decodedSliceInAU = false;
      m_cDecLib.checkTidLayerIdInAccessUnit();
      m_cDecLib.resetAccessUnitSeiTids();
      m_cDecLib.resetAccessUnitSeiPayLoadTypes();
      m_cDecLib.checkSeiContentInAccessUnit();
      m_cDecLib.resetAccessUnitSeiNalus();
      m_cDecLib.resetAccessUnitNals();
      m_cDecLib.resetAccessUnitApsNals();
      m_cDecLib.resetAccessUnitPicInfo();
    }
#if GREEN_METADATA_SEI_ENABLED
    if (m_GMFA && m_GMFAFramewise && bNewPicture)
    {
      FeatureCounterStruct featureCounterUpdated = m_cDecLib.getFeatureCounter();
      writeGMFAOutput(featureCounterUpdated, featureCounterOld, m_GMFAFile,false);
      featureCounterOld = m_cDecLib.getFeatureCounter();
    }
#endif
  }
  if (!m_annotatedRegionsSEIFileName.empty())
  {
    xOutputAnnotatedRegions(pcListPic);
  }
  // check one more time: if the bitstream contains only a single picture, the first check inside the loop may have missed it
  setOutputPicturePresentInStream();
  CHECK(!outputPicturePresentInBitstream, "It is required that there shall be at least one picture with PictureOutputFlag equal to 1 in the bitstream")
  
#if GREEN_METADATA_SEI_ENABLED
  if (m_GMFA && m_GMFAFramewise) //Last frame
  {
    FeatureCounterStruct featureCounterUpdated = m_cDecLib.getFeatureCounter();
    writeGMFAOutput(featureCounterUpdated, featureCounterOld, m_GMFAFile, false);
    featureCounterOld = m_cDecLib.getFeatureCounter();
  }
  
  if (m_GMFA)
  {
    // Summary
    FeatureCounterStruct featureCounterFinal = m_cDecLib.getFeatureCounter();
    FeatureCounterStruct dummy;
    writeGMFAOutput(featureCounterFinal, dummy, m_GMFAFile, true);
  }
#endif

  m_cDecLib.applyNnPostFilter();
  
  xFlushOutput( pcListPic );

  if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
  {
    m_cTVideoIOYuvSIIPostFile.close();
  }

#if JVET_AF0167_MULTI_PLANE_IMAGE_INFO_SEI
  if (!m_multiplaneImageInfoFileName.empty())
  {
    m_VideoIOTextureYuvReconFile.close();
    m_VideoIOOpacityYuvReconFile.close();
  }
#endif
  // get the number of checksum errors
  uint32_t nRet = m_cDecLib.getNumberOfChecksumErrorsDetected();

  // delete buffers
  m_cDecLib.deletePicBuffer();
  // destroy internal classes
  xDestroyDecLib();

#if RExt__DECODER_DEBUG_STATISTICS
  CodingStatistics::DestroyInstance();
#endif

  destroyROM();

  return nRet;
}



void DecApp::writeLineToOutputLog(Picture * pcPic)
{
  if (m_oplFileStream.is_open() && m_oplFileStream.good())
  {
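    // each opl line records layerId, POC, cropped width/height and the MD5 of the
    // reconstruction cropped to the conformance window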
    const SPS *   sps             = pcPic->cs->sps;
    ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();
    const Window &conf            = pcPic->getConformanceWindow();
    const int     leftOffset      = conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc);
    const int     rightOffset     = conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc);
    const int     topOffset       = conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc);
    const int     bottomOffset    = conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc);
    PictureHash   recon_digest;
    auto numChar = calcMD5WithCropping(((const Picture *) pcPic)->getRecoBuf(), recon_digest, sps->getBitDepths(),
                                       leftOffset, rightOffset, topOffset, bottomOffset);

    const int croppedWidth  = pcPic->Y().width - leftOffset - rightOffset;
    const int croppedHeight = pcPic->Y().height - topOffset - bottomOffset;

    m_oplFileStream << std::setw(3) << pcPic->layerId << ",";
    m_oplFileStream << std::setw(8) << pcPic->getPOC() << "," << std::setw(5) << croppedWidth << "," << std::setw(5)
                    << croppedHeight << "," << hashToString(recon_digest, numChar) << "\n";
  }
}

// ====================================================================================================================
// Protected member functions
// ====================================================================================================================

void DecApp::xCreateDecLib()
{
  initROM();

  // create decoder class
  m_cDecLib.create();

  // initialize decoder class
  m_cDecLib.init(
#if JVET_J0090_MEMORY_BANDWITH_MEASURE
    m_cacheCfgFile
#endif
  );
  m_cDecLib.setDecodedPictureHashSEIEnabled(m_decodedPictureHashSEIEnabled);


  if (!m_outputDecodedSEIMessagesFilename.empty())
  {
    std::ostream &os=m_seiMessageFileStream.is_open() ? m_seiMessageFileStream : std::cout;
    m_cDecLib.setDecodedSEIMessageOutputStream(&os);
  }
#if JVET_S0257_DUMP_360SEI_MESSAGE
  if (!m_outputDecoded360SEIMessagesFilename.empty())
  {
    m_cDecLib.setDecoded360SEIMessageFileName(m_outputDecoded360SEIMessagesFilename);
  }
#endif
  m_cDecLib.m_targetSubPicIdx = this->m_targetSubPicIdx;
  m_cDecLib.initScalingList();
#if GDR_LEAK_TEST
  m_cDecLib.m_gdrPocRandomAccess = this->m_gdrPocRandomAccess;
#endif // GDR_LEAK_TEST
}

void DecApp::xDestroyDecLib()
{
  if( !m_reconFileName.empty() )
  {
    for( auto & recFile : m_cVideoIOYuvReconFile )
    {
      recFile.second.close();
    }
  }
  if (!m_SEIFGSFileName.empty())
  {
    for (auto &recFile: m_videoIOYuvSEIFGSFile)
    {
      recFile.second.close();
    }
  }
  if (!m_SEICTIFileName.empty())
  {
    for (auto& recFile : m_cVideoIOYuvSEICTIFile)
    {
      recFile.second.close();
    }
  }
#if JVET_AI0181
  if (!m_SEIDOIFileName.empty())
  {
    for (auto& recFile: m_videoIOYuvSEIDOIFile)
    {
      recFile.second.close();
    }
  }
#endif

  // destroy decoder class
  m_cDecLib.destroy();
}


/** \param pcListPic list of pictures to be written to file
    \param tId       temporal sub-layer ID
 */
void DecApp::xWriteOutput( PicList* pcListPic, uint32_t tId )
{
  if (pcListPic->empty())
  {
    return;
  }
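  // bumping-style output: count pictures still waiting for output and the current DPB fullness,
  // then write pictures in output order once either exceeds the limits signalled for the
  // highest decoded temporal sub-layer (max_num_reorder_pics / max_dec_pic_buffering)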

  PicList::iterator iterPic   = pcListPic->begin();
  int numPicsNotYetDisplayed = 0;
  int dpbFullness = 0;
  uint32_t maxNumReorderPicsHighestTid;
  uint32_t maxDecPicBufferingHighestTid;
  const VPS* referredVPS = pcListPic->front()->cs->vps;

  if( referredVPS == nullptr || referredVPS->m_numLayersInOls[referredVPS->m_targetOlsIdx] == 1 )
  {
    const SPS* activeSPS = (pcListPic->front()->cs->sps);
    const int  temporalId = (m_maxTemporalLayer == TL_INFINITY || m_maxTemporalLayer >= activeSPS->getMaxTLayers())
                              ? activeSPS->getMaxTLayers() - 1
                              : m_maxTemporalLayer;
    maxNumReorderPicsHighestTid = activeSPS->getMaxNumReorderPics( temporalId );
    maxDecPicBufferingHighestTid = activeSPS->getMaxDecPicBuffering( temporalId );
  }
  else
  {
    const int temporalId = (m_maxTemporalLayer == TL_INFINITY || m_maxTemporalLayer >= referredVPS->getMaxSubLayers())
                             ? referredVPS->getMaxSubLayers() - 1
                             : m_maxTemporalLayer;
    maxNumReorderPicsHighestTid = referredVPS->getMaxNumReorderPics( temporalId );
    maxDecPicBufferingHighestTid = referredVPS->getMaxDecPicBuffering( temporalId );
  }

  while (iterPic != pcListPic->end())
  {
    Picture* pcPic = *(iterPic);
    if(pcPic->neededForOutput && pcPic->getPOC() >= m_iPOCLastDisplay)
    {
      numPicsNotYetDisplayed++;
      dpbFullness++;
    }
    else if(pcPic->referenced)
    {
      dpbFullness++;
    }
    iterPic++;
  }

  iterPic = pcListPic->begin();

  if (numPicsNotYetDisplayed>=2)
  {
    iterPic++;
  }

  Picture* pcPic = *(iterPic);
  if( numPicsNotYetDisplayed>=2 && pcPic->fieldPic ) //Field Decoding
  {
    PicList::iterator endPic   = pcListPic->end();
    endPic--;
    iterPic   = pcListPic->begin();
    while (iterPic != endPic)
    {
      Picture* pcPicTop = *(iterPic);
      iterPic++;
      PicList::iterator iterPic2 = iterPic;
      while (iterPic2 != pcListPic->end())
      {
        if ((*iterPic2)->layerId == pcPicTop->layerId && (*iterPic2)->fieldPic && (*iterPic2)->topField != pcPicTop->topField)
        {
          break;
        }
        iterPic2++;
      }
      if (iterPic2 == pcListPic->end())
      {
        continue;
      }
      
      Picture* pcPicBottom = *(iterPic2);

      if ( pcPicTop->neededForOutput && pcPicBottom->neededForOutput &&
          (numPicsNotYetDisplayed >  maxNumReorderPicsHighestTid || dpbFullness > maxDecPicBufferingHighestTid) &&
          pcPicBottom->getPOC() >= m_iPOCLastDisplay )
      {
        // write to file
        numPicsNotYetDisplayed = numPicsNotYetDisplayed-2;
        if ( !m_reconFileName.empty() )
        {
          const Window &conf = pcPicTop->getConformanceWindow();
          const bool isTff = pcPicTop->topField;

          bool display = true;

          if (display)
          {
            m_cVideoIOYuvReconFile[pcPicTop->layerId].write(
              pcPicTop->getRecoBuf(), pcPicBottom->getRecoBuf(), m_outputColourSpaceConvert,
              false,   // TODO: m_packedYUVMode,
              conf.getWindowLeftOffset() * SPS::getWinUnitX(pcPicTop->cs->sps->getChromaFormatIdc()),
              conf.getWindowRightOffset() * SPS::getWinUnitX(pcPicTop->cs->sps->getChromaFormatIdc()),
              conf.getWindowTopOffset() * SPS::getWinUnitY(pcPicTop->cs->sps->getChromaFormatIdc()),
              conf.getWindowBottomOffset() * SPS::getWinUnitY(pcPicTop->cs->sps->getChromaFormatIdc()),
              ChromaFormat::UNDEFINED, isTff);
          }
        }
        writeLineToOutputLog(pcPicTop);
        writeLineToOutputLog(pcPicBottom);

        // update POC of display order
        m_iPOCLastDisplay = pcPicBottom->getPOC();
        // erase non-referenced picture in the reference picture list after display
        if ( ! pcPicTop->referenced && pcPicTop->reconstructed )
        {
          pcPicTop->reconstructed = false;
        }
        if ( ! pcPicBottom->referenced && pcPicBottom->reconstructed )
        {
          pcPicBottom->reconstructed = false;
        }
        pcPicTop->neededForOutput = false;
        pcPicBottom->neededForOutput = false;
      }
    }
  }
  else if( !pcPic->fieldPic ) //Frame Decoding
  {
    iterPic = pcListPic->begin();

    while (iterPic != pcListPic->end())
    {
      pcPic = *(iterPic);

      if(pcPic->neededForOutput && pcPic->getPOC() >= m_iPOCLastDisplay &&
        (numPicsNotYetDisplayed >  maxNumReorderPicsHighestTid || dpbFullness > maxDecPicBufferingHighestTid))
      {
        // write to file
        numPicsNotYetDisplayed--;
        if (!pcPic->referenced)
        {
          dpbFullness--;
        }


        if (!m_reconFileName.empty())
        {
          const Window &conf = pcPic->getConformanceWindow();
          ChromaFormat  chromaFormatIdc = pcPic->m_chromaFormatIdc;
#if JVET_AH0161_REGION_PACKING_INFORMATION_SEI
          if (pcPic->m_priProcess.m_enabled && pcPic->m_priProcess.m_targetPicWidth > 0 && pcPic->m_priProcess.m_targetPicHeight > 0)
          {
            PelStorage outPic;
            const Area a = Area( Position(0, 0), Size(pcPic->m_priProcess.m_targetPicWidth, pcPic->m_priProcess.m_targetPicHeight) );
            outPic.create( chromaFormatIdc, a, 0 );
            pcPic->m_priProcess.reconstruct(pcPic->getRecoBuf(), outPic, *pcPic->cs->sps, *pcPic->cs->pps);
            m_cVideoIOYuvReconFile[pcPic->layerId].write(
              outPic.get(COMPONENT_Y).width, outPic.get(COMPONENT_Y).height, outPic, m_outputColourSpaceConvert,
              m_packedYUVMode, 0, 0, 0, 0, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range);
          }
          else if( m_upscaledOutput )
#else
          if( m_upscaledOutput )
#endif
          {
            const SPS* sps = pcPic->cs->sps;
            m_cVideoIOYuvReconFile[pcPic->layerId].writeUpscaledPicture(
              *sps, *pcPic->cs->pps, pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode, m_upscaledOutput,
              ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range, m_upscaleFilterForDisplay);
          }
          else
          {
            m_cVideoIOYuvReconFile[pcPic->layerId].write(
              pcPic->getRecoBuf().get(COMPONENT_Y).width, pcPic->getRecoBuf().get(COMPONENT_Y).height,
              pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
              conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
              conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
              conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
              conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc), ChromaFormat::UNDEFINED,
              m_clipOutputVideoToRec709Range);
          }
        }
        // Perform FGS on decoded frame and write to output FGS file
        if (!m_SEIFGSFileName.empty())
        {
          const Window& conf            = pcPic->getConformanceWindow();
          const SPS* sps                = pcPic->cs->sps;
          ChromaFormat  chromaFormatIdc    = sps->getChromaFormatIdc();
          if (m_upscaledOutput)
          {
            m_videoIOYuvSEIFGSFile[pcPic->layerId].writeUpscaledPicture(
              *sps, *pcPic->cs->pps, pcPic->getDisplayBufFG(), m_outputColourSpaceConvert, m_packedYUVMode,
              m_upscaledOutput, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range, m_upscaleFilterForDisplay);
          }
          else
          {
            m_videoIOYuvSEIFGSFile[pcPic->layerId].write(
              pcPic->getRecoBuf().get(COMPONENT_Y).width, pcPic->getRecoBuf().get(COMPONENT_Y).height,
              pcPic->getDisplayBufFG(), m_outputColourSpaceConvert, m_packedYUVMode,
              conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
              conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
              conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
              conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc), ChromaFormat::UNDEFINED,
              m_clipOutputVideoToRec709Range);
          }
        }

#if JVET_AI0181
        if (!m_SEIDOIFileName.empty())
        {
          xGenerateTargetPicture(pcPic);
        }
#endif

        if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
        {
          int blendingRatio = getBlendingRatio();
          pcPic->xOutputPostFilteredPic(pcPic, pcListPic, blendingRatio);

          const Window &conf = pcPic->getConformanceWindow();
          const SPS* sps = pcPic->cs->sps;
          ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();

          m_cTVideoIOYuvSIIPostFile.write(pcPic->getPostRecBuf().get(COMPONENT_Y).width,
                                          pcPic->getPostRecBuf().get(COMPONENT_Y).height, pcPic->getPostRecBuf(),
                                          m_outputColourSpaceConvert, m_packedYUVMode,
                                          conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                          conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                          conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                          conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                          ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range);
        }

#if JVET_AF0167_MULTI_PLANE_IMAGE_INFO_SEI
        if (m_mpiiInfo.m_mpiiSEIEnabled && !m_multiplaneImageInfoFileName.empty())
        {
          if (m_mpiiInfo.m_mpiiTextureOpacityInterleaveFlag)
          {
            m_textureOpacityToggle = !m_textureOpacityToggle;
            if (m_textureOpacityToggle)
            {
              xUnpackTextureLayer(pcPic);
            }
            else
            {
              xUnpackOpacityLayer(pcPic);
            }
          }
          else
          {
            xUnpackTextureLayer(pcPic);
            xUnpackOpacityLayer(pcPic);
          }
        }
#endif
        // Perform CTI on decoded frame and write to output CTI file
        if (!m_SEICTIFileName.empty())
        {
          const Window& conf = pcPic->getConformanceWindow();
          const SPS* sps = pcPic->cs->sps;
          ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();
          if (m_upscaledOutput)
          {
            m_cVideoIOYuvSEICTIFile[pcPic->layerId].writeUpscaledPicture(
              *sps, *pcPic->cs->pps, pcPic->getDisplayBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
              m_upscaledOutput, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range, m_upscaleFilterForDisplay);
          }
          else
          {
            m_cVideoIOYuvSEICTIFile[pcPic->layerId].write(
              pcPic->getRecoBuf().get(COMPONENT_Y).width, pcPic->getRecoBuf().get(COMPONENT_Y).height,
              pcPic->getDisplayBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
              conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
              conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
              conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
              conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc), ChromaFormat::UNDEFINED,
              m_clipOutputVideoToRec709Range);
          }
        }
        writeLineToOutputLog(pcPic);
#if JVET_AF0088_OMI_SEI
        if (!m_objectMaskInfoSEIFileName.empty())
        {
          xOutputObjectMaskInfos(pcPic);
        }
#endif
#if JVET_AH0161_REGION_PACKING_INFORMATION_SEI
        if (!m_packedRegionsInfoSEIFileName.empty())
        {
          xOutputPackedRegionsInfo(pcPic);
        }
#endif
        // update POC of display order
        m_iPOCLastDisplay = pcPic->getPOC();

        // erase non-referenced picture in the reference picture list after display
        if (!pcPic->referenced && pcPic->reconstructed)
        {
          pcPic->reconstructed = false;
        }
        pcPic->neededForOutput = false;
      }

      iterPic++;
    }
  }
}

/** \param pcListPic list of pictures to be written to file
 */
void DecApp::xFlushOutput( PicList* pcListPic, const int layerId )
{
  if(!pcListPic || pcListPic->empty())
  {
    return;
  }
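  // output and release every remaining picture of the given layer (all layers when
  // layerId == NOT_VALID); called when a new CLVS starts and at the end of the bitstream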
  PicList::iterator iterPic = pcListPic->begin();
  Picture* pcPic = *(iterPic);

  if (pcPic->fieldPic ) //Field Decoding
  {
    PicList::iterator endPic = pcListPic->end();
    while (iterPic != endPic)
    {
      Picture *pcPicTop = *iterPic;
      iterPic++;

      if (pcPicTop == nullptr || (pcPicTop->layerId != layerId && layerId != NOT_VALID))
      {
        continue;
      }

      PicList::iterator iterPic2 = iterPic;
      while (iterPic2 != endPic)
      {
        if ((*iterPic2) != nullptr && (*iterPic2)->layerId == pcPicTop->layerId && (*iterPic2)->fieldPic && (*iterPic2)->topField != pcPicTop->topField)
        {
          break;
        }
        iterPic2++;
      }
      Picture *pcPicBottom = iterPic2 == endPic ? nullptr : *iterPic2;

      if (pcPicBottom != nullptr && pcPicTop->neededForOutput && pcPicBottom->neededForOutput)
      {
          // write to file
          if ( !m_reconFileName.empty() )
          {
            const Window &conf = pcPicTop->getConformanceWindow();
            const bool    isTff   = pcPicTop->topField;

            m_cVideoIOYuvReconFile[pcPicTop->layerId].write(
              pcPicTop->getRecoBuf(), pcPicBottom->getRecoBuf(), m_outputColourSpaceConvert,
              false,   // TODO: m_packedYUVMode,
              conf.getWindowLeftOffset() * SPS::getWinUnitX(pcPicTop->cs->sps->getChromaFormatIdc()),
              conf.getWindowRightOffset() * SPS::getWinUnitX(pcPicTop->cs->sps->getChromaFormatIdc()),
              conf.getWindowTopOffset() * SPS::getWinUnitY(pcPicTop->cs->sps->getChromaFormatIdc()),
              conf.getWindowBottomOffset() * SPS::getWinUnitY(pcPicTop->cs->sps->getChromaFormatIdc()),
              ChromaFormat::UNDEFINED, isTff);
          }
          writeLineToOutputLog(pcPicTop);
          writeLineToOutputLog(pcPicBottom);
        // update POC of display order
        m_iPOCLastDisplay = pcPicBottom->getPOC();

        // erase non-referenced picture in the reference picture list after display
        if( ! pcPicTop->referenced && pcPicTop->reconstructed )
        {
          pcPicTop->reconstructed = false;
        }
        if( ! pcPicBottom->referenced && pcPicBottom->reconstructed )
        {
          pcPicBottom->reconstructed = false;
        }
        pcPicTop->neededForOutput = false;
        pcPicBottom->neededForOutput = false;

        pcPicTop->destroy();
        delete pcPicTop;
        pcPicBottom->destroy();
        delete pcPicBottom;
        iterPic--;
        *iterPic = nullptr;
        iterPic++;
        *iterPic2 = nullptr;
      }
      else
      {
        pcPicTop->destroy();
        delete pcPicTop;
        iterPic--;
        *iterPic = nullptr;
        iterPic++;
      }
    }
  }
  else //Frame decoding
  {
    while (iterPic != pcListPic->end())
    {
      pcPic = *(iterPic);

      if( pcPic->layerId != layerId && layerId != NOT_VALID )
      {
        iterPic++;
        continue;
      }

      if (pcPic->neededForOutput)
      {
          // write to file
          if (!m_reconFileName.empty())
          {
            const Window &conf = pcPic->getConformanceWindow();
            ChromaFormat  chromaFormatIdc = pcPic->m_chromaFormatIdc;
#if JVET_AH0161_REGION_PACKING_INFORMATION_SEI
            if (pcPic->m_priProcess.m_enabled && pcPic->m_priProcess.m_targetPicWidth > 0 && pcPic->m_priProcess.m_targetPicHeight > 0)
            {
              PelStorage outPic;
              const Area a = Area( Position(0, 0), Size(pcPic->m_priProcess.m_targetPicWidth, pcPic->m_priProcess.m_targetPicHeight) );
              outPic.create( chromaFormatIdc, a, 0 );
              pcPic->m_priProcess.reconstruct(pcPic->getRecoBuf(), outPic, *pcPic->cs->sps, *pcPic->cs->pps);
              m_cVideoIOYuvReconFile[pcPic->layerId].write(
                outPic.get(COMPONENT_Y).width, outPic.get(COMPONENT_Y).height, outPic, m_outputColourSpaceConvert,
                m_packedYUVMode, 0, 0, 0, 0, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range);
            }
            else if( m_upscaledOutput )
#else
            if( m_upscaledOutput )
#endif
            {
              const SPS* sps = pcPic->cs->sps;
              m_cVideoIOYuvReconFile[pcPic->layerId].writeUpscaledPicture(
                *sps, *pcPic->cs->pps, pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
                m_upscaledOutput, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range, m_upscaleFilterForDisplay);
            }
            else
            {
              m_cVideoIOYuvReconFile[pcPic->layerId].write(
                pcPic->getRecoBuf().get(COMPONENT_Y).width, pcPic->getRecoBuf().get(COMPONENT_Y).height,
                pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
                conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc), ChromaFormat::UNDEFINED,
                m_clipOutputVideoToRec709Range);
            }
          }
          // Perform FGS on decoded frame and write to output FGS file
          if (!m_SEIFGSFileName.empty())
          {
            const Window& conf            = pcPic->getConformanceWindow();
            const SPS*    sps             = pcPic->cs->sps;
            ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();
            if (m_upscaledOutput)
            {
              m_videoIOYuvSEIFGSFile[pcPic->layerId].writeUpscaledPicture(
                *sps, *pcPic->cs->pps, pcPic->getDisplayBufFG(), m_outputColourSpaceConvert, m_packedYUVMode,
                m_upscaledOutput, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range, m_upscaleFilterForDisplay);
            }
            else
            {
              m_videoIOYuvSEIFGSFile[pcPic->layerId].write(
                pcPic->getRecoBuf().get(COMPONENT_Y).width, pcPic->getRecoBuf().get(COMPONENT_Y).height,
                pcPic->getDisplayBufFG(), m_outputColourSpaceConvert, m_packedYUVMode,
                conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc), ChromaFormat::UNDEFINED,
                m_clipOutputVideoToRec709Range);
            }
          }

#if JVET_AI0181
          if (!m_SEIDOIFileName.empty())
          {
            xGenerateTargetPicture(pcPic);
          }
#endif

          if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
          {
            int blendingRatio = getBlendingRatio();
            pcPic->xOutputPostFilteredPic(pcPic, pcListPic, blendingRatio);

            const Window &conf = pcPic->getConformanceWindow();
            const SPS* sps = pcPic->cs->sps;
            ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();

            m_cTVideoIOYuvSIIPostFile.write(pcPic->getPostRecBuf().get(COMPONENT_Y).width,
                                            pcPic->getPostRecBuf().get(COMPONENT_Y).height, pcPic->getPostRecBuf(),
                                            m_outputColourSpaceConvert, m_packedYUVMode,
                                            conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                            conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                            conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                            conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                            ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range);
          }

#if JVET_AF0167_MULTI_PLANE_IMAGE_INFO_SEI
          if (m_mpiiInfo.m_mpiiSEIEnabled && !m_multiplaneImageInfoFileName.empty())
          {
            if (m_mpiiInfo.m_mpiiTextureOpacityInterleaveFlag)
            {
              m_textureOpacityToggle = !m_textureOpacityToggle;
              if (m_textureOpacityToggle)
              {
                xUnpackTextureLayer(pcPic);
              }
              else
              {
                xUnpackOpacityLayer(pcPic);
              }
            }
            else
            {
              xUnpackTextureLayer(pcPic);
              xUnpackOpacityLayer(pcPic);
            }
          }
#endif
          // Perform CTI on decoded frame and write to output CTI file
          if (!m_SEICTIFileName.empty())
          {
            const Window& conf = pcPic->getConformanceWindow();
            const SPS* sps = pcPic->cs->sps;
            ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();
            if (m_upscaledOutput)
            {
              m_cVideoIOYuvSEICTIFile[pcPic->layerId].writeUpscaledPicture(
                *sps, *pcPic->cs->pps, pcPic->getDisplayBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
                m_upscaledOutput, ChromaFormat::UNDEFINED, m_clipOutputVideoToRec709Range, m_upscaleFilterForDisplay);
            }
            else
            {
              m_cVideoIOYuvSEICTIFile[pcPic->layerId].write(
                pcPic->getRecoBuf().get(COMPONENT_Y).width, pcPic->getRecoBuf().get(COMPONENT_Y).height,
                pcPic->getDisplayBuf(), m_outputColourSpaceConvert, m_packedYUVMode,
                conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc), ChromaFormat::UNDEFINED,
                m_clipOutputVideoToRec709Range);
            }
          }
          writeLineToOutputLog(pcPic);
#if JVET_AF0088_OMI_SEI
          if (!m_objectMaskInfoSEIFileName.empty())
          {
            xOutputObjectMaskInfos(pcPic);
          }
#endif
#if JVET_AH0161_REGION_PACKING_INFORMATION_SEI
          if (!m_packedRegionsInfoSEIFileName.empty())
          {
            xOutputPackedRegionsInfo(pcPic);
          }
#endif
        // update POC of display order
        m_iPOCLastDisplay = pcPic->getPOC();

        // erase non-referenced picture in the reference picture list after display
        if (!pcPic->referenced && pcPic->reconstructed)
        {
          pcPic->reconstructed = false;
        }
        pcPic->neededForOutput = false;
      }
      if (pcPic != nullptr && (m_shutterIntervalPostFileName.empty() || !getShutterFilterFlag()))
      {
        pcPic->destroy();
        delete pcPic;
        pcPic    = nullptr;
        *iterPic = nullptr;
      }
      iterPic++;
    }
  }

  if( layerId != NOT_VALID )
  {
    pcListPic->remove_if([](Picture* p) { return p == nullptr; });
  }
  else
  {
    pcListPic->clear();
  }
  m_iPOCLastDisplay = -MAX_INT;
}

/** \param pcListPic list of pictures to be written to file
 */
void DecApp::xOutputAnnotatedRegions(PicList* pcListPic)
{
  if(!pcListPic || pcListPic->empty())
  {
    return;
  }
  PicList::iterator iterPic   = pcListPic->begin();
  while (iterPic != pcListPic->end())
  {
    Picture* pcPic = *(iterPic);
    if (pcPic->neededForOutput)
    {
      // Check if any annotated region SEI has arrived
      SEIMessages annotatedRegionSEIs = getSeisByType(pcPic->SEIs, SEI::PayloadType::ANNOTATED_REGIONS);
      for(auto it=annotatedRegionSEIs.begin(); it!=annotatedRegionSEIs.end(); it++)
      {
        const SEIAnnotatedRegions &seiAnnotatedRegions = *(SEIAnnotatedRegions*)(*it);

        if (seiAnnotatedRegions.m_hdr.m_cancelFlag)
        {
          m_arObjects.clear();
          m_arLabels.clear();
        }
        else
        {
          if (m_arHeader.m_receivedSettingsOnce)
          {
            // validate those settings that must stay constant are constant.
            assert(m_arHeader.m_occludedObjectFlag              == seiAnnotatedRegions.m_hdr.m_occludedObjectFlag);
            assert(m_arHeader.m_partialObjectFlagPresentFlag    == seiAnnotatedRegions.m_hdr.m_partialObjectFlagPresentFlag);
            assert(m_arHeader.m_objectConfidenceInfoPresentFlag == seiAnnotatedRegions.m_hdr.m_objectConfidenceInfoPresentFlag);
            assert((!m_arHeader.m_objectConfidenceInfoPresentFlag) || m_arHeader.m_objectConfidenceLength == seiAnnotatedRegions.m_hdr.m_objectConfidenceLength);
          }
          else
          {
            m_arHeader.m_receivedSettingsOnce=true;
            m_arHeader=seiAnnotatedRegions.m_hdr; // copy the settings.
          }
          // Process label updates
          if (seiAnnotatedRegions.m_hdr.m_objectLabelPresentFlag)
          {
            for(auto srcIt=seiAnnotatedRegions.m_annotatedLabels.begin(); srcIt!=seiAnnotatedRegions.m_annotatedLabels.end(); srcIt++)
            {
              const uint32_t labIdx = srcIt->first;
              if (srcIt->second.labelValid)
              {
                m_arLabels[labIdx] = srcIt->second.label;
              }
              else
              {
                m_arLabels.erase(labIdx);
              }
            }
          }

          // Process object updates
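          // Cancelled objects are removed from the persistent map, new objects are inserted, and existing entries only take over the fields flagged as valid in this SEI message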
          for(auto srcIt=seiAnnotatedRegions.m_annotatedRegions.begin(); srcIt!=seiAnnotatedRegions.m_annotatedRegions.end(); srcIt++)
          {
            uint32_t objIdx = srcIt->first;
            const SEIAnnotatedRegions::AnnotatedRegionObject &src =srcIt->second;

            if (src.objectCancelFlag)
            {
              m_arObjects.erase(objIdx);
            }
            else
            {
              auto destIt = m_arObjects.find(objIdx);

              if (destIt == m_arObjects.end())
              {
                //New object arrived, needs to be appended to the map of tracked objects
                m_arObjects[objIdx] = src;
              }
              else //Existing object, modifications to be done
              {
                SEIAnnotatedRegions::AnnotatedRegionObject &dst=destIt->second;

                if (seiAnnotatedRegions.m_hdr.m_objectLabelPresentFlag && src.objectLabelValid)
                {
                  dst.objectLabelValid=true;
                  dst.objLabelIdx = src.objLabelIdx;
                }
                if (src.boundingBoxValid)
                {
                  dst.boundingBoxTop    = src.boundingBoxTop   ;
                  dst.boundingBoxLeft   = src.boundingBoxLeft  ;
                  dst.boundingBoxWidth  = src.boundingBoxWidth ;
                  dst.boundingBoxHeight = src.boundingBoxHeight;
                  if (seiAnnotatedRegions.m_hdr.m_partialObjectFlagPresentFlag)
                  {
                    dst.partialObjectFlag = src.partialObjectFlag;
                  }
                  if (seiAnnotatedRegions.m_hdr.m_objectConfidenceInfoPresentFlag)
                  {
                    dst.objectConfidence = src.objectConfidence;
                  }
                }
              }
            }
          }
        }
      }

      if (!m_arObjects.empty())
      {
        FILE *fpPersist = fopen(m_annotatedRegionsSEIFileName.c_str(), "ab");
        if (fpPersist == nullptr)
        {
          std::cout << "Not able to open file for writing persist SEI messages" << std::endl;
        }
        else
        {
          fprintf(fpPersist, "\n");
          fprintf(fpPersist, "Number of objects = %d\n", (int)m_arObjects.size());
          for (auto it = m_arObjects.begin(); it != m_arObjects.end(); ++it)
          {
            fprintf(fpPersist, "Object Idx = %d\n",    it->first);
            fprintf(fpPersist, "Object Top = %d\n",    it->second.boundingBoxTop);
            fprintf(fpPersist, "Object Left = %d\n",   it->second.boundingBoxLeft);
            fprintf(fpPersist, "Object Width = %d\n",  it->second.boundingBoxWidth);
            fprintf(fpPersist, "Object Height = %d\n", it->second.boundingBoxHeight);
            if (it->second.objectLabelValid)
            {
              auto labelIt=m_arLabels.find(it->second.objLabelIdx);
              fprintf(fpPersist, "Object Label = %s\n", labelIt!=m_arLabels.end() ? (labelIt->second.c_str()) : "<UNKNOWN>");
            }
            if (m_arHeader.m_partialObjectFlagPresentFlag)
            {
              fprintf(fpPersist, "Object Partial = %d\n", it->second.partialObjectFlag?1:0);
            }
            if (m_arHeader.m_objectConfidenceInfoPresentFlag)
            {
              fprintf(fpPersist, "Object Conf = %d\n", it->second.objectConfidence);
            }
          }
          fclose(fpPersist);
        }
      }
    }
    iterPic++;
  }
}

#if JVET_AF0088_OMI_SEI
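/** \param pcPic picture whose Object Mask Information SEI messages are appended, in text form, to the OMI SEI output file
 */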
void DecApp::xOutputObjectMaskInfos(Picture* pcPic)
{
  SEIMessages objectMaskInfoSEIs = getSeisByType(pcPic->SEIs, SEI::PayloadType::OBJECT_MASK_INFO);
  for (auto it = objectMaskInfoSEIs.begin(); it != objectMaskInfoSEIs.end(); it++)
  {
    const SEIObjectMaskInfos& seiObjectMaskInfo = *(SEIObjectMaskInfos*) (*it);

    if (!seiObjectMaskInfo.m_hdr.m_cancelFlag)
    {
      if (m_omiHeader.m_receivedSettingsOnce)
      {
        CHECK(m_omiHeader.m_maskConfidenceInfoPresentFlag != seiObjectMaskInfo.m_hdr.m_maskConfidenceInfoPresentFlag, "Confidence info present flag should be consistent within the CLVS.");
        if (m_omiHeader.m_maskConfidenceInfoPresentFlag)
        {
          CHECK(m_omiHeader.m_maskConfidenceLengthMinus1 != seiObjectMaskInfo.m_hdr.m_maskConfidenceLengthMinus1, "Confidence length should be consistent within the CLVS.");
        }
        CHECK(m_omiHeader.m_maskDepthInfoPresentFlag != seiObjectMaskInfo.m_hdr.m_maskDepthInfoPresentFlag, "Depth info present flag should be consistent within the CLVS.");
        if (m_omiHeader.m_maskDepthInfoPresentFlag)
        {
          CHECK(m_omiHeader.m_maskDepthLengthMinus1 != seiObjectMaskInfo.m_hdr.m_maskDepthLengthMinus1, "Depth length should be consistent within the CLVS.");
        }
      }
      else
      {
        m_omiHeader.m_receivedSettingsOnce = true;
        m_omiHeader                        = seiObjectMaskInfo.m_hdr;   // copy the settings.
      }
    }

    FILE* fpPersist = fopen(m_objectMaskInfoSEIFileName.c_str(), "ab");
    if (fpPersist == nullptr)
    {
      std::cout << "Not able to open file for writing persist SEI messages" << std::endl;
    }
    else
    {
      fprintf(fpPersist, "POC %d\n", (int) pcPic->getPOC());
      // header
      fprintf(fpPersist, "OMI Cancel Flag = %d\n", seiObjectMaskInfo.m_hdr.m_cancelFlag);
      fprintf(fpPersist, "OMI AuxId = %d\n", seiObjectMaskInfo.m_hdr.m_auxIdMinus128 + 128);
      fprintf(fpPersist, "OMI MaskId Length = %d\n", seiObjectMaskInfo.m_hdr.m_maskIdLengthMinus8 + 8);
      fprintf(fpPersist, "OMI MaskConf Present = %d\n", seiObjectMaskInfo.m_hdr.m_maskConfidenceInfoPresentFlag);
      if (seiObjectMaskInfo.m_hdr.m_maskConfidenceInfoPresentFlag) 
      {
        fprintf(fpPersist, "OMI MaskConf Length = %d\n", seiObjectMaskInfo.m_hdr.m_maskConfidenceLengthMinus1 + 1);
      }
      fprintf(fpPersist, "OMI MaskDepth Present = %d\n", seiObjectMaskInfo.m_hdr.m_maskDepthInfoPresentFlag);
      if (seiObjectMaskInfo.m_hdr.m_maskDepthInfoPresentFlag)
      {
        fprintf(fpPersist, "OMI MaskDepth Length = %d\n", seiObjectMaskInfo.m_hdr.m_maskDepthLengthMinus1 + 1);
      }
      fprintf(fpPersist, "OMI MaskLabel Present = %d\n", seiObjectMaskInfo.m_hdr.m_maskLabelInfoPresentFlag);
      if (seiObjectMaskInfo.m_hdr.m_maskLabelInfoPresentFlag)
      {
        fprintf(fpPersist, "OMI MaskLabelLang Present = %d\n", seiObjectMaskInfo.m_hdr.m_maskLabelLanguagePresentFlag);
        if (seiObjectMaskInfo.m_hdr.m_maskLabelLanguagePresentFlag) 
        {
          fprintf(fpPersist, "OMI MaskLabelLang = %s\n", seiObjectMaskInfo.m_hdr.m_maskLabelLanguage.c_str());
        }
      }
      fprintf(fpPersist, "\n");

      // infos
      uint32_t maskIdx = 0;
      uint32_t auxId   = seiObjectMaskInfo.m_hdr.m_auxIdMinus128 + 128;
      fprintf(fpPersist, "OMI NumPrimaryPicLayer = %d\n", seiObjectMaskInfo.m_hdr.m_numPrimaryPicLayerMinus1 + 1);
      for (uint32_t i = 0; i <= seiObjectMaskInfo.m_hdr.m_numPrimaryPicLayerMinus1; i++) 
      {
        fprintf(fpPersist, "OMI PrimaryPicLayerId[%d] = %d\n", i, seiObjectMaskInfo.m_hdr.m_primaryPicLayerIdList[i]);
        for (uint32_t j = 0; j < SEIObjectMaskInfos::numAuxLayer[auxId][seiObjectMaskInfo.m_hdr.m_primaryPicLayerIdList[i]]; j++)
        {
          fprintf(fpPersist, "OMI MaskPicUpdateFlag[%d][%d] = %d\n", i, j, seiObjectMaskInfo.m_maskPicUpdateFlag[i][j]);
          if (seiObjectMaskInfo.m_maskPicUpdateFlag[i][j]) 
          {
            fprintf(fpPersist, "OMI NumMaskInPicUpdate[%d][%d] = %d\n", i, j, seiObjectMaskInfo.m_numMaskPicUpdate[i][j]);
            fprintf(fpPersist, "\n");
            
            for (uint32_t k = 0; k < seiObjectMaskInfo.m_numMaskPicUpdate[i][j]; k++) 
            {
              fprintf(fpPersist, "Mask Idx = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskId);
              fprintf(fpPersist, "Mask BoundingBoxPresentFlag = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskBoundingBoxPresentFlag);
              if (seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskBoundingBoxPresentFlag)
              {
                fprintf(fpPersist, "Mask Top = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskTop);
                fprintf(fpPersist, "Mask Left = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskLeft);
                fprintf(fpPersist, "Mask Width = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskWidth);
                fprintf(fpPersist, "Mask Height = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskHeight);
              }
              fprintf(fpPersist, "Mask CancelFlag = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].omiCancelFlag);
              if (!seiObjectMaskInfo.m_objectMaskInfos[maskIdx].omiCancelFlag)
              {
                if (m_omiHeader.m_maskConfidenceInfoPresentFlag)
                {
                  fprintf(fpPersist, "Mask Conf = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskConfidence);
                }
                if (m_omiHeader.m_maskDepthInfoPresentFlag)
                {
                  fprintf(fpPersist, "Mask Depth = %d\n", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskConfidence);
                }
                if (m_omiHeader.m_maskLabelInfoPresentFlag)
                {
                  fprintf(fpPersist, "Mask Label = %s ", seiObjectMaskInfo.m_objectMaskInfos[maskIdx].maskLabel.c_str());
                  if (m_omiHeader.m_maskLabelLanguagePresentFlag)
                  {
                    fprintf(fpPersist, "(Lang: %s)\n", m_omiHeader.m_maskLabelLanguage.c_str());
                  }
                  else
                  {
                    fprintf(fpPersist, "\n");
                  }
                }
              }

              fprintf(fpPersist, "\n");
              maskIdx++;
            }
          }
        }
      }
      fclose(fpPersist);
    }
  }
}
#endif

#if JVET_AH0161_REGION_PACKING_INFORMATION_SEI
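/** \param pcPic picture whose Packed Regions Information SEI (first message only) is appended, in text form, to the PRI SEI output file
 */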
void DecApp::xOutputPackedRegionsInfo(Picture* pcPic)
{
  SEIMessages seis = getSeisByType(pcPic->SEIs, SEI::PayloadType::PACKED_REGIONS_INFO);
  if (!seis.empty())
  {
    const SEIPackedRegionsInfo& sei = *((SEIPackedRegionsInfo*)seis.front());
    FILE* fp = fopen(m_packedRegionsInfoSEIFileName.c_str(), "a");
    if (fp == nullptr)
    {
      std::cout << "Not able to open file for writing packed regions info SEI messages" << std::endl;
    }
    else
    {
      fprintf(fp, "SEIPRICancelFlag : %d\n", sei.m_cancelFlag);
      fprintf(fp, "SEIPRIPersistenceFlag : %d\n", sei.m_persistenceFlag);
      fprintf(fp, "SEIPRINumRegionsMinus1 : %d\n", sei.m_numRegionsMinus1);
      fprintf(fp, "SEIPRIUseMaxDimensionsFlag : %d\n", sei.m_useMaxDimensionsFlag);
      fprintf(fp, "SEIPRILog2UnitSize : %d\n", sei.m_log2UnitSize);
      fprintf(fp, "SEIPRIRegionSizeLenMinus1 : %d\n", sei.m_regionSizeLenMinus1);
      fprintf(fp, "SEIPRIRegionIdPresentFlag : %d\n", sei.m_regionIdPresentFlag);
      fprintf(fp, "SEIPRITargetPicParamsPresentFlag : %d\n", sei.m_targetPicParamsPresentFlag);
      if (sei.m_targetPicParamsPresentFlag)
      {
        fprintf(fp, "SEIPRITargetPicWidthMinus1 : %d\n", sei.m_targetPicWidthMinus1);
        fprintf(fp, "SEIPRITargetPicHeightMinus1 : %d\n", sei.m_targetPicHeightMinus1);
      }
      fprintf(fp, "SEIPRINumResamplingRatiosMinus1 : %d\n", sei.m_numResamplingRatiosMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIResamplingWidthNumMinus1 :", sei.m_resamplingWidthNumMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIResamplingWidthDenomMinus1 :", sei.m_resamplingWidthDenomMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIResamplingHeightNumMinus1 :", sei.m_resamplingHeightNumMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIResamplingHeightDenomMinus1 :", sei.m_resamplingHeightDenomMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIRegionId :", sei.m_regionId);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIRegionTopLeftInUnitsX :", sei.m_regionTopLeftInUnitsX);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIRegionTopLeftInUnitsY :", sei.m_regionTopLeftInUnitsY);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIRegionWidthInUnitsMinus1 :", sei.m_regionWidthInUnitsMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIRegionHeightInUnitsMinus1 :", sei.m_regionHeightInUnitsMinus1);
      xOutputPackedRegionsInfoVector(fp, "SEIPRIResamplingRatioIdx :", sei.m_resamplingRatioIdx);
      if (sei.m_targetPicParamsPresentFlag)
      {
        xOutputPackedRegionsInfoVector(fp, "SEIPRITargetRegionTopLeftX :", sei.m_targetRegionTopLeftX);
        xOutputPackedRegionsInfoVector(fp, "SEIPRITargetRegionTopLeftY :", sei.m_targetRegionTopLeftY);
      }
      fclose(fp);
    }
  }
}

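/** \param fp        output file
    \param paramName label printed before the values
    \param l         vector of syntax element values written on a single line
 */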
void DecApp::xOutputPackedRegionsInfoVector(FILE* fp, const char* paramName, const std::vector<uint32_t>& l)
{
  fprintf(fp, "%s", paramName);
  for (auto it : l)
  {
    fprintf(fp, " %d", it);
  }
  fprintf(fp, "\n");
}

#endif

/** \param nalu Input nalu to check whether its LayerId is within targetDecLayerIdSet
 */
bool DecApp::xIsNaluWithinTargetDecLayerIdSet( const InputNALUnit* nalu ) const
{
  if( !m_targetDecLayerIdSet.size() ) // By default, the set is empty, meaning all LayerIds are allowed
  {
    return true;
  }

  return std::find(m_targetDecLayerIdSet.begin(), m_targetDecLayerIdSet.end(), nalu->m_nuhLayerId)
         != m_targetDecLayerIdSet.end();
}

/** \param nalu Input nalu to check whether its LayerId is within targetOutputLayerIdSet
 */
bool DecApp::xIsNaluWithinTargetOutputLayerIdSet( const InputNALUnit* nalu ) const
{
  if( !m_targetOutputLayerIdSet.size() ) // By default, the set is empty, meaning all LayerIds are allowed
  {
    return true;
  }

  return std::find(m_targetOutputLayerIdSet.begin(), m_targetOutputLayerIdSet.end(), nalu->m_nuhLayerId)
         != m_targetOutputLayerIdSet.end();
}
#if JVET_AF0167_MULTI_PLANE_IMAGE_INFO_SEI
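/** \param pcPic decoded picture containing the packed MPI layers; each texture layer is copied into the MPI texture buffer and written to the texture output file
 */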
void DecApp::xUnpackTextureLayer(Picture* pcPic)
{
  PelUnitBuf currTmp = pcPic->getRecoBuf();
  PelUnitBuf postTmp = pcPic->getMpiTextureBuf();

  PelUnitBuf* currYuv = &currTmp;
  PelUnitBuf* postYuv = &postTmp;

  const Window& conf            = pcPic->getConformanceWindow();
  const SPS*    sps             = pcPic->cs->sps;
  ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();

  int pHeight = currYuv->Y().height;
  int pWidth  = currYuv->Y().width;

  int textureFrameHeight, textureFrameWidth;
  if (m_mpiiInfo.m_mpiiTextureOpacityInterleaveFlag)
  {
    textureFrameHeight = pHeight;
    textureFrameWidth  = pWidth;
  }
  else
  {
    if (m_mpiiInfo.m_mpiiTextureOpacityArrangementFlag)
    {
      /*Arrangement of Texture and Opacity layers is side by side*/
      textureFrameHeight = pHeight;
      textureFrameWidth  = pWidth / 2;
    }
    else
    {
      /*Arrangement of Texture and Opacity layers is top and bottom*/
      textureFrameHeight = pHeight / 2;
      textureFrameWidth  = pWidth;
    }
  }

  int numLayersInheight = m_mpiiInfo.m_mpiiPictureNumLayersInHeightMinus1 + 1;
  int numLayers         = m_mpiiInfo.m_mpiiNumLayersMinus1 + 1;
  int numLayersInWidth  = numLayers / numLayersInheight;
  int layerHeight       = textureFrameHeight / numLayersInheight;
  int layerWidth        = textureFrameWidth / numLayersInWidth;

  int chromaWidth  = layerWidth / SPS::getWinUnitX(chromaFormatIdc);
  int chromaHeight = layerHeight / SPS::getWinUnitY(chromaFormatIdc);

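  // Layer i occupies grid cell (i % numLayersInWidth, i / numLayersInWidth) in the packed texture area; copy it row by row into the unpacked buffer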
  for (int i = 0; i < numLayers; i++)
  {
    int k = i % numLayersInWidth;
    int m = i / numLayersInWidth;
    int u = k * layerWidth;
    int v = m * layerHeight;
    for (int h = 0; h < layerHeight; h++)
    {
      Pel* dst = &postYuv->bufs[COMPONENT_Y].at(0, h);
      Pel* src = &currYuv->bufs[COMPONENT_Y].at(u, v);
      memcpy(dst, src, sizeof(Pel) * layerWidth);
      v++;
    }

    u = k * chromaWidth;
    v = m * chromaHeight;
    for (int h = 0; h < chromaHeight; h++)
    {
      for (int ch = COMPONENT_Cb; ch < MAX_NUM_COMPONENT; ch++)
      {
        Pel* dst = &postYuv->bufs[ch].at(0, h);
        Pel* src = &currYuv->bufs[ch].at(u, v);
        memcpy(dst, src, sizeof(Pel) * chromaWidth);
      }
      v++;
    }

    m_VideoIOTextureYuvReconFile.write(layerWidth, layerHeight, pcPic->getMpiTextureBuf(), m_outputColourSpaceConvert,
                                       m_packedYUVMode, conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                       conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                       conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                       conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                       chromaFormatIdc, m_clipOutputVideoToRec709Range);
  }
}
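/** \param pcPic decoded picture containing the packed MPI layers; each opacity layer is copied into the MPI opacity buffer and written, as luma only, to the opacity output file
 */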
void DecApp::xUnpackOpacityLayer(Picture* pcPic)
{
  PelUnitBuf currTmp = pcPic->getRecoBuf();
  PelUnitBuf postTmp = pcPic->getMpiOpacityBuf();

  PelUnitBuf* currYuv = &currTmp;
  PelUnitBuf* postYuv = &postTmp;

  const Window& conf            = pcPic->getConformanceWindow();
  const SPS*    sps             = pcPic->cs->sps;
  ChromaFormat  chromaFormatIdc = sps->getChromaFormatIdc();

  int pHeight = currYuv->Y().height;
  int pWidth  = currYuv->Y().width;

  int opLayerWidthOffset, opLayerHeightOffset;

  int opacityFrameHeight, opacityFrameWidth;
  if (m_mpiiInfo.m_mpiiTextureOpacityInterleaveFlag)
  {
    opacityFrameHeight  = pHeight;
    opacityFrameWidth   = pWidth;
    opLayerWidthOffset  = 0;
    opLayerHeightOffset = 0;
  }
  else
  {
    if (m_mpiiInfo.m_mpiiTextureOpacityArrangementFlag)
    {
      /*Arrangement of Texture and Opacity layers is side by side*/
      opacityFrameHeight  = pHeight;
      opacityFrameWidth   = pWidth / 2;
      opLayerWidthOffset  = opacityFrameWidth;
      opLayerHeightOffset = 0;
    }
    else
    {
      /*Arrangement of Texture and Opacity layers is top and bottom*/
      opacityFrameHeight  = pHeight / 2;
      opacityFrameWidth   = pWidth;
      opLayerWidthOffset  = 0;
      opLayerHeightOffset = opacityFrameHeight;
    }
  }

  int numLayersInheight = m_mpiiInfo.m_mpiiPictureNumLayersInHeightMinus1 + 1;
  int numLayers         = m_mpiiInfo.m_mpiiNumLayersMinus1 + 1;
  int numLayersInWidth  = numLayers / numLayersInheight;
  int layerHeight       = opacityFrameHeight / numLayersInheight;
  int layerWidth        = opacityFrameWidth / numLayersInWidth;

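  // Opacity layers sit at (opLayerWidthOffset, opLayerHeightOffset) within the packed picture; copy each layer's luma plane row by row into the unpacked buffer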
  for (int i = 0; i < numLayers; i++)
  {
    int k = i % numLayersInWidth;
    int m = i / numLayersInWidth;

    int u = k * layerWidth + opLayerWidthOffset;
    int v = m * layerHeight + opLayerHeightOffset;

    for (int h = 0; h < layerHeight; h++)
    {
      Pel* dst = &postYuv->bufs[COMPONENT_Y].at(0, h);
      Pel* src = &currYuv->bufs[COMPONENT_Y].at(u, v);
      memcpy(dst, src, sizeof(Pel) * layerWidth);
      v++;
    }

    m_VideoIOOpacityYuvReconFile.write(layerWidth, layerHeight, pcPic->getMpiOpacityBuf(), m_outputColourSpaceConvert,
                                       m_packedYUVMode, conf.getWindowLeftOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                       conf.getWindowRightOffset() * SPS::getWinUnitX(chromaFormatIdc),
                                       conf.getWindowTopOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                       conf.getWindowBottomOffset() * SPS::getWinUnitY(chromaFormatIdc),
                                       ChromaFormat::_400, m_clipOutputVideoToRec709Range);
  }
}
#endif
//! \}

#if JVET_AI0181
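/** \param pcPic decoded picture whose Display Overlays Information SEI (and associated Constituent Rectangles SEI) is used to compose the target picture written to the DOI output file
 */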
void DecApp::xGenerateTargetPicture(Picture* pcPic) 
{
  SEIMessages seiListDOI = getSeisByType(pcPic->SEIs, SEI::PayloadType::DISPLAY_OVERLAYS_INFO);
  CHECK(seiListDOI.empty(), "No DOI SEI have been received");
  bool        seiDoiPresent = false;
  SEIMessages seiListCR     = getSeisByType(pcPic->SEIs, SEI::PayloadType::CONSTITUENT_RECTANGLES);
  for (auto sei: seiListDOI)
  {
    auto doisei = (SEIDisplayOverlaysInfo*) sei;
    if (doisei->m_doiSEIId == this->m_SEIDOIId)
    {
      seiDoiPresent       = true;
      int numOverlays     = doisei->m_doiSEINumDisplayOverlaysMinus2 + 2;
      int targetPicWidth  = doisei->m_doiSEITargetPicWidthMinus1 + 1;
      int targetPicHeight = doisei->m_doiSEITargetPicHeightMinus1 + 1;
      // Create internal buffers
      if (m_doiSEIPelY == nullptr)
      {
        m_doiSEIPelY = new Pel[targetPicWidth * targetPicHeight];
        m_doiSEIPelU = new Pel[(targetPicWidth >> 1) * (targetPicHeight >> 1)];
        m_doiSEIPelV = new Pel[(targetPicWidth >> 1) * (targetPicHeight >> 1)];
      }
      for (int overlay_idx = this->m_doiSEILastProcessedOverlay + 1; overlay_idx < numOverlays; overlay_idx++)
      {
        if (doisei->m_doiSEIPicPartitionFlag == 1 && doisei->m_doiSEIPartitionTypeFlag == 1)
        {
          CHECK(seiListCR.empty(), "A constituent rectangles SEI is expected when doi_pic_partition_flag and "
                                    "doi_pic_partition_type_flag are equal to 1");
          CHECK(doisei->m_doiSEIResamplingEnabledFlag, "Resampling is not implemented for DOI SEI.");
          CHECK(overlay_idx != this->m_doiSEILastProcessedOverlay + 1,
                "Coding of an overlay has been skipped; overlays must be processed in ascending layer order");

          if (doisei->m_doiSEINuhLayerId[overlay_idx] == pcPic->layerId)
          {
            // locate the Constituent Rectangles SEI that applies to the current layer
            bool                      noCRforLayerId = true;
            SEIConstituentRectangles* doicr;
            for (auto sei: seiListCR)
            {
              doicr = (SEIConstituentRectangles*) sei;
              if ((int) doicr->m_layerId == pcPic->layerId)
              {
                noCRforLayerId = false;
                break;
              }
            }
            CHECK(noCRforLayerId, "No CR SEI has been found for the current processed layer.")
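            // Constituent rectangle size in luma samples: either derived from a uniform grid split of the picture, or from the explicit per-rectangle size signalled in units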
            int cr_h = (doicr->m_rectSameSizeFlag
                          ? pcPic->cs->pps->getPicHeightInLumaSamples() / (doicr->m_numRowsMinus1 + 1)
                          : (1 << doicr->m_log2UnitSize)
                              * (doicr->m_rectHeightInUnitsMinus1[doisei->m_doiSEIPartitionId[overlay_idx]] + 1));
            int cr_w = (doicr->m_rectSameSizeFlag
                          ? pcPic->getPicWidthInLumaSamples() / (doicr->m_numColsMinus1 + 1)
                          : (1 << doicr->m_log2UnitSize)
                              * (doicr->m_rectWidthInUnitsMinus1[doisei->m_doiSEIPartitionId[overlay_idx]] + 1));

            int x0_t = (overlay_idx > 0 ? doisei->m_doiSEITopLeftX[overlay_idx] : 0);
            int y0_t = (overlay_idx > 0 ? doisei->m_doiSEITopLeftY[overlay_idx] : 0);
            int x0_o = (doicr->m_rectSameSizeFlag
                          ? (doisei->m_doiSEIPartitionId[overlay_idx] % (doicr->m_numColsMinus1 + 1))
                              * pcPic->cs->pps->getPicWidthInLumaSamples() / (doicr->m_numColsMinus1 + 1)
                          : doicr->m_rectTopLeftInUnitsX[overlay_idx] * (1 << doicr->m_log2UnitSize));
            int y0_o = (doicr->m_rectSameSizeFlag
                          ? (doisei->m_doiSEIPartitionId[overlay_idx] / (doicr->m_numColsMinus1 + 1))
                              * pcPic->cs->pps->getPicHeightInLumaSamples() / (doicr->m_numRowsMinus1 + 1)
                          : doicr->m_rectTopLeftInUnitsY[overlay_idx] * (1 << doicr->m_log2UnitSize));
            int x_o  = x0_o;
            int y_o  = y0_o;
            int x0_a = 0;
            int y0_a = 0;
            if (doisei->m_doiSEIAlphaPresentFlag[overlay_idx])
            {
              x0_a = (doicr->m_rectSameSizeFlag
                        ? (doisei->m_doiSEIAlphaPartitionId[overlay_idx] % (doicr->m_numColsMinus1 + 1))
                            * pcPic->cs->pps->getPicWidthInLumaSamples() / (doicr->m_numColsMinus1 + 1)
                        : doicr->m_rectTopLeftInUnitsX[overlay_idx] * (1 << doicr->m_log2UnitSize));
              y0_a = (doicr->m_rectSameSizeFlag
                        ? (doisei->m_doiSEIAlphaPartitionId[overlay_idx] / (doicr->m_numColsMinus1 + 1))
                            * pcPic->cs->pps->getPicHeightInLumaSamples() / (doicr->m_numRowsMinus1 + 1)
                        : doicr->m_rectTopLeftInUnitsY[overlay_idx] * (1 << doicr->m_log2UnitSize));
            }
            int x_a = x0_a;
            int y_a = y0_a;

            // Overlay Y component
            for (int h_idx = y0_t; (h_idx < targetPicHeight) && ((h_idx - y0_t) < cr_h); h_idx++)
            {
              x_a = x0_a;
              x_o = x0_o;
              for (int w_idx = x0_t; (w_idx < targetPicWidth) && ((w_idx - x0_t) < cr_w); w_idx++)
              {
                if (doisei->m_doiSEIAlphaPresentFlag[overlay_idx])
                {
                  CHECK(doisei->m_doiSEIAlphaNuhLayerId[overlay_idx] != doisei->m_doiSEINuhLayerId[overlay_idx],
                        "Current implementation only supports Texture and Alpha packed as two CRs in the same "
                        "layer");
                  float alpha = (float) pcPic->getRecoBuf().get(COMPONENT_Y).at(Position(x_o, y_o))
                                / (1 << pcPic->m_bitDepths[(ChannelType) ChannelType::LUMA]);
                  float b = (float) pcPic->getRecoBuf().get(COMPONENT_Y).at(Position(x_o, y_o));
                  float a = (float) m_doiSEIPelY[h_idx * targetPicWidth + w_idx];
                  m_doiSEIPelY[h_idx * targetPicWidth + w_idx] = (uint16_t) (alpha * a + (1 - alpha) * b);
                  x_a++;
                  x_o++;
                }
                else
                {
                  m_doiSEIPelY[h_idx * targetPicWidth + w_idx] =
                    pcPic->getRecoBuf().get(COMPONENT_Y).at(Position(x_o, y_o));
                  x_o++;
                }
              }
              y_a++;
              y_o++;
            }

            // Overlay U component
            x0_t = (overlay_idx > 0 ? (doisei->m_doiSEITopLeftX[overlay_idx] >> 1) : 0);
            y0_t = (overlay_idx > 0 ? (doisei->m_doiSEITopLeftY[overlay_idx] >> 1) : 0);
            x0_o = (doicr->m_rectSameSizeFlag
                      ? (doisei->m_doiSEIPartitionId[overlay_idx] % (doicr->m_numColsMinus1 + 1))
                          * (pcPic->cs->pps->getPicWidthInLumaSamples() >> 1) / (doicr->m_numColsMinus1 + 1)
                      : doicr->m_rectTopLeftInUnitsX[overlay_idx] * (1 << doicr->m_log2UnitSize));
            y0_o = (doicr->m_rectSameSizeFlag
                      ? (doisei->m_doiSEIPartitionId[overlay_idx] / (doicr->m_numColsMinus1 + 1))
                          * (pcPic->cs->pps->getPicHeightInLumaSamples() >> 1) / (doicr->m_numRowsMinus1 + 1)
                      : doicr->m_rectTopLeftInUnitsY[overlay_idx] * (1 << doicr->m_log2UnitSize));
            x0_a = 0;
            y0_a = 0;
            x_o  = x0_o;
            y_o  = y0_o;
            if (doisei->m_doiSEIAlphaPresentFlag[overlay_idx])
            {
              x0_a = (doicr->m_rectSameSizeFlag
                        ? (doisei->m_doiSEIAlphaPartitionId[overlay_idx] % (doicr->m_numColsMinus1 + 1))
                            * (pcPic->cs->pps->getPicWidthInLumaSamples() >> 1) / (doicr->m_numColsMinus1 + 1)
                        : doicr->m_rectTopLeftInUnitsX[overlay_idx] * (1 << doicr->m_log2UnitSize));
              y0_a = (doicr->m_rectSameSizeFlag
                        ? (doisei->m_doiSEIAlphaPartitionId[overlay_idx] / (doicr->m_numColsMinus1 + 1))
                            * (pcPic->cs->pps->getPicHeightInLumaSamples() >> 1) / (doicr->m_numRowsMinus1 + 1)
                        : doicr->m_rectTopLeftInUnitsY[overlay_idx] * (1 << doicr->m_log2UnitSize));
            }
            x_a = x0_a;
            y_a = y0_a;
            for (int h_idx = y0_t; (h_idx < (targetPicHeight >> 1)) && ((h_idx - y0_t) < (cr_h >> 1)); h_idx++)
            {
              x_a = x0_a;
              x_o = x0_o;
              for (int w_idx = x0_t; (w_idx < (targetPicWidth >> 1)) && ((w_idx - x0_t) < (cr_w >> 1)); w_idx++)
              {
                if (doisei->m_doiSEIAlphaPresentFlag[overlay_idx])
                {
                  CHECK(doisei->m_doiSEIAlphaNuhLayerId[overlay_idx] != doisei->m_doiSEINuhLayerId[overlay_idx],
                        "Current implementation only supports Texture and Alpha packed as two CRs in the same "
                        "layer");
                  float alpha = (float) pcPic->getRecoBuf().get(COMPONENT_Cb).at(Position(x_o, y_o))
                                / (1 << pcPic->m_bitDepths[(ChannelType) ChannelType::CHROMA]);
                  float b = (float) pcPic->getRecoBuf().get(COMPONENT_Cb).at(Position(x_o, y_o));
                  float a = (float) m_doiSEIPelU[h_idx * (targetPicWidth >> 1) + w_idx];
                  m_doiSEIPelU[h_idx * (targetPicWidth >> 1) + w_idx] = (uint16_t) (alpha * a + (1 - alpha) * b);
                  x_a++;
                  x_o++;
                }
                else
                {
                  m_doiSEIPelU[h_idx * (targetPicWidth >> 1) + w_idx] =
                    pcPic->getRecoBuf().get(COMPONENT_Cb).at(Position(x_o, y_o));
                  x_o++;
                }
              }
              y_a++;
              y_o++;
            }

            // Overlay V component
            x0_t = (overlay_idx > 0 ? (doisei->m_doiSEITopLeftX[overlay_idx] >> 1) : 0);
            y0_t = (overlay_idx > 0 ? (doisei->m_doiSEITopLeftY[overlay_idx] >> 1) : 0);
            x0_o = (doicr->m_rectSameSizeFlag
                      ? (doisei->m_doiSEIPartitionId[overlay_idx] % (doicr->m_numColsMinus1 + 1))
                          * (pcPic->cs->pps->getPicWidthInLumaSamples() >> 1) / (doicr->m_numColsMinus1 + 1)
                      : doicr->m_rectTopLeftInUnitsX[overlay_idx] * (1 << doicr->m_log2UnitSize));
            y0_o = (doicr->m_rectSameSizeFlag
                      ? (doisei->m_doiSEIPartitionId[overlay_idx] / (doicr->m_numColsMinus1 + 1))
                          * (pcPic->cs->pps->getPicHeightInLumaSamples() >> 1) / (doicr->m_numRowsMinus1 + 1)
                      : doicr->m_rectTopLeftInUnitsY[overlay_idx] * (1 << doicr->m_log2UnitSize));
            x0_a = 0;
            y0_a = 0;
            x_o  = x0_o;
            y_o  = y0_o;
            if (doisei->m_doiSEIAlphaPresentFlag[overlay_idx])
            {
              x0_a = (doicr->m_rectSameSizeFlag
                        ? (doisei->m_doiSEIAlphaPartitionId[overlay_idx] % (doicr->m_numColsMinus1 + 1))
                            * (pcPic->cs->pps->getPicWidthInLumaSamples() >> 1) / (doicr->m_numColsMinus1 + 1)
                        : doicr->m_rectTopLeftInUnitsX[overlay_idx] * (1 << doicr->m_log2UnitSize));
              y0_a = (doicr->m_rectSameSizeFlag
                        ? (doisei->m_doiSEIAlphaPartitionId[overlay_idx] / (doicr->m_numColsMinus1 + 1))
                            * (pcPic->cs->pps->getPicHeightInLumaSamples() >> 1) / (doicr->m_numRowsMinus1 + 1)
                        : doicr->m_rectTopLeftInUnitsY[overlay_idx] * (1 << doicr->m_log2UnitSize));
            }
            x_a = x0_a;
            y_a = y0_a;

            for (int h_idx = y0_t; (h_idx < (targetPicHeight >> 1)) && ((h_idx - y0_t) < (cr_h >> 1)); h_idx++)
            {
              x_a = x0_a;
              x_o = x0_o;
              for (int w_idx = x0_t; (w_idx < (targetPicWidth >> 1)) && ((w_idx - x0_t) < (cr_w >> 1)); w_idx++)
              {
                if (doisei->m_doiSEIAlphaPresentFlag[overlay_idx])
                {
                  CHECK(doisei->m_doiSEIAlphaNuhLayerId[overlay_idx] != doisei->m_doiSEINuhLayerId[overlay_idx],
                        "Current implementation only supports Texture and Alpha packed as two CRs in the same "
                        "layer");
                  float alpha = (float) pcPic->getRecoBuf().get(COMPONENT_Cr).at(Position(x_o, y_o))
                                / (1 << pcPic->m_bitDepths[(ChannelType) ChannelType::CHROMA]);
                  float b = (float) pcPic->getRecoBuf().get(COMPONENT_Cr).at(Position(x_o, y_o));
                  float a = (float) m_doiSEIPelV[h_idx * (targetPicWidth >> 1) + w_idx];
                  m_doiSEIPelV[h_idx * (targetPicWidth >> 1) + w_idx] = (uint16_t) (alpha * a + (1 - alpha) * b);
                  x_a++;
                  x_o++;
                }
                else
                {
                  m_doiSEIPelV[h_idx * (targetPicWidth >> 1) + w_idx] =
                    pcPic->getRecoBuf().get(COMPONENT_Cr).at(Position(x_o, y_o));
                  x_o++;
                }
              }
              y_a++;
              y_o++;
            }
            this->m_doiSEILastProcessedOverlay = overlay_idx;
          }
        }
        else
        {
          CHECK(true, "Only constituent rectangles are implemented for DOI SEI.");
        }
      }
      if (this->m_doiSEILastProcessedOverlay == numOverlays - 1)
      {
        // Writing output file
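        // The target picture is appended as planar 4:2:0 YUV with 16-bit samples (full-resolution Y, half-resolution U and V)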
        FILE* fptr = fopen(m_SEIDOIFileName.c_str(), "ab+");
        for (int r = 0; r < targetPicHeight; r++)
        {
          for (int c = 0; c < targetPicWidth; c++)
          {
            int16_t val = m_doiSEIPelY[r * targetPicWidth + c];
            fwrite(&val, sizeof(int16_t), 1, fptr);
          }
        }
        for (int r = 0; r < (targetPicHeight >> 1); r++)
        {
          for (int c = 0; c < (targetPicWidth >> 1); c++)
          {
            int16_t val = m_doiSEIPelU[r * (targetPicWidth >> 1) + c];
            fwrite(&val, sizeof(int16_t), 1, fptr);
          }
        }
        for (int r = 0; r < (targetPicHeight >> 1); r++)
        {
          for (int c = 0; c < (targetPicWidth >> 1); c++)
          {
            int16_t val = m_doiSEIPelV[r * (targetPicWidth >> 1) + c];
            fwrite(&val, sizeof(int16_t), 1, fptr);
          }
        }
        fclose(fptr);
        delete[] m_doiSEIPelY;
        delete[] m_doiSEIPelU;
        delete[] m_doiSEIPelV;
        m_doiSEIPelY                       = nullptr;
        m_doiSEIPelU                       = nullptr;
        m_doiSEIPelV                       = nullptr;
        this->m_doiSEILastProcessedOverlay = -1;
      }
    }
    break;
  }
  CHECK(seiDoiPresent == false, "No SEI DOI messages are present for the requested DOI id");
}
#endif