diff --git a/source/App/DecoderApp/DecApp.cpp b/source/App/DecoderApp/DecApp.cpp
index 297ee59d7ee9e68975379673323e97371dc7745c..d7cb64284a803d6cfcdf485c0598dfa5e1e7f9b8 100644
--- a/source/App/DecoderApp/DecApp.cpp
+++ b/source/App/DecoderApp/DecApp.cpp
@@ -111,7 +111,9 @@ uint32_t DecApp::decode()
   }
 
   // main decoder loop
+#if !JVET_N0278_FIXES
   bool openedReconFile = false; // reconstruction file not yet opened. (must be performed after SPS is seen)
+#endif
   bool loopFiltered = false;
 
   while (!!bitstreamFile)
@@ -222,7 +224,11 @@ uint32_t DecApp::decode()
 
     if( pcListPic )
     {
+#if JVET_N0278_FIXES
+      if( !m_reconFileName.empty() && !m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].isOpen() )
+#else
       if ( (!m_reconFileName.empty()) && (!openedReconFile) )
+#endif
       {
         const BitDepths &bitDepths=pcListPic->front()->cs->sps->getBitDepths(); // use bit depths of first reconstructed picture.
         for( uint32_t channelType = 0; channelType < MAX_NUM_CHANNEL_TYPE; channelType++ )
@@ -238,8 +244,14 @@ uint32_t DecApp::decode()
           EXIT ("Invalid output bit-depth for packed YUV output, aborting\n");
         }
 
+#if JVET_N0278_FIXES
+        std::string reconFileName = m_reconFileName;
+        reconFileName.insert( reconFileName.size() - 4, std::to_string( nalu.m_nuhLayerId ) );
+        m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].open( reconFileName, true, m_outputBitDepth, m_outputBitDepth, bitDepths.recon ); // write mode
+#else
         m_cVideoIOYuvReconFile.open( m_reconFileName, true, m_outputBitDepth, m_outputBitDepth, bitDepths.recon ); // write mode
         openedReconFile = true;
+#endif
       }
       // write reconstruction to file
       if( bNewPicture )
@@ -312,10 +324,20 @@ void DecApp::xCreateDecLib()
 
 void DecApp::xDestroyDecLib()
 {
+#if JVET_N0278_FIXES
+  if( !m_reconFileName.empty() )
+  {
+    for( auto & recFile : m_cVideoIOYuvReconFile )
+    {
+      recFile.second.close();
+    }
+  }
+#else
   if ( !m_reconFileName.empty() )
   {
     m_cVideoIOYuvReconFile.close();
   }
+#endif
 
   // destroy decoder class
   m_cDecLib.destroy();
@@ -412,7 +434,11 @@ void DecApp::xWriteOutput( PicList* pcListPic, uint32_t tId )
 
         if (display)
         {
+#if JVET_N0278_FIXES
+          m_cVideoIOYuvReconFile[pcPicTop->layerId].write( pcPicTop->getRecoBuf(), pcPicBottom->getRecoBuf(),
+#else
           m_cVideoIOYuvReconFile.write( pcPicTop->getRecoBuf(), pcPicBottom->getRecoBuf(),
+#endif
                                       m_outputColourSpaceConvert,
                                       false, // TODO: m_packedYUVMode,
                                       conf.getWindowLeftOffset() * SPS::getWinUnitX( pcPicTop->cs->sps->getChromaFormatIdc() ),
@@ -466,10 +492,19 @@ void DecApp::xWriteOutput( PicList* pcListPic, uint32_t tId )
         ChromaFormat chromaFormatIDC = sps->getChromaFormatIdc();
         if( m_upscaledOutput )
         {
+#if JVET_N0278_FIXES
+          m_cVideoIOYuvReconFile[pcPic->layerId].writeUpscaledPicture( *sps, *pcPic->cs->pps, pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode, m_upscaledOutput, NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
+#else
           m_cVideoIOYuvReconFile.writeUpscaledPicture( *sps, *pcPic->cs->pps, pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode, m_upscaledOutput, NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
+#endif
         }
         else
+        {
+#if JVET_N0278_FIXES
+          m_cVideoIOYuvReconFile[pcPic->layerId].write( pcPic->getRecoBuf().get( COMPONENT_Y ).width, pcPic->getRecoBuf().get( COMPONENT_Y ).height, pcPic->getRecoBuf(),
+#else
           m_cVideoIOYuvReconFile.write( pcPic->getRecoBuf().get( COMPONENT_Y ).width, pcPic->getRecoBuf().get( COMPONENT_Y ).height, pcPic->getRecoBuf(),
+#endif
                                       m_outputColourSpaceConvert,
                                       m_packedYUVMode,
                                       conf.getWindowLeftOffset() * SPS::getWinUnitX( chromaFormatIDC ),
@@ -477,6 +512,7 @@ void DecApp::xWriteOutput( PicList* pcListPic, uint32_t tId )
                                       conf.getWindowTopOffset() * SPS::getWinUnitY( chromaFormatIDC ),
                                       conf.getWindowBottomOffset() * SPS::getWinUnitY( chromaFormatIDC ),
                                       NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
+        }
       }
 
 #if HEVC_SEI
@@ -544,7 +580,11 @@ void DecApp::xFlushOutput( PicList* pcListPic )
         const Window &conf = pcPicTop->cs->pps->getConformanceWindow();
         const bool isTff = pcPicTop->topField;
 
+#if JVET_N0278_FIXES
+        m_cVideoIOYuvReconFile[pcPicTop->layerId].write( pcPicTop->getRecoBuf(), pcPicBottom->getRecoBuf(),
+#else
         m_cVideoIOYuvReconFile.write( pcPicTop->getRecoBuf(), pcPicBottom->getRecoBuf(),
+#endif
                                       m_outputColourSpaceConvert,
                                       false, // TODO: m_packedYUVMode,
                                       conf.getWindowLeftOffset() * SPS::getWinUnitX( pcPicTop->cs->sps->getChromaFormatIdc() ),
@@ -609,10 +649,19 @@ void DecApp::xFlushOutput( PicList* pcListPic )
         ChromaFormat chromaFormatIDC = sps->getChromaFormatIdc();
         if( m_upscaledOutput )
         {
+#if JVET_N0278_FIXES
+          m_cVideoIOYuvReconFile[pcPic->layerId].writeUpscaledPicture( *sps, *pcPic->cs->pps, pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode, m_upscaledOutput, NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
+#else
           m_cVideoIOYuvReconFile.writeUpscaledPicture( *sps, *pcPic->cs->pps, pcPic->getRecoBuf(), m_outputColourSpaceConvert, m_packedYUVMode, m_upscaledOutput, NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
+#endif
         }
         else
+        {
+#if JVET_N0278_FIXES
+          m_cVideoIOYuvReconFile[pcPic->layerId].write( pcPic->getRecoBuf().get( COMPONENT_Y ).width, pcPic->getRecoBuf().get( COMPONENT_Y ).height, pcPic->getRecoBuf(),
+#else
           m_cVideoIOYuvReconFile.write( pcPic->getRecoBuf().get( COMPONENT_Y ).width, pcPic->getRecoBuf().get( COMPONENT_Y ).height, pcPic->getRecoBuf(),
+#endif
                                       m_outputColourSpaceConvert,
                                       m_packedYUVMode,
                                       conf.getWindowLeftOffset() * SPS::getWinUnitX( chromaFormatIDC ),
@@ -620,6 +669,7 @@ void DecApp::xFlushOutput( PicList* pcListPic )
                                       conf.getWindowTopOffset() * SPS::getWinUnitY( chromaFormatIDC ),
                                       conf.getWindowBottomOffset() * SPS::getWinUnitY( chromaFormatIDC ),
                                       NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
+        }
       }
 
 #if HEVC_SEI
diff --git a/source/App/DecoderApp/DecApp.h b/source/App/DecoderApp/DecApp.h
index edb03919f4002a41dbd2c4f7c3ebe6f5ddb9443f..a912738ca06448af5f01ef30679c56bc335fbe06 100644
--- a/source/App/DecoderApp/DecApp.h
+++ b/source/App/DecoderApp/DecApp.h
@@ -61,7 +61,11 @@ class DecApp : public DecAppCfg
 private:
   // class interface
   DecLib          m_cDecLib;                     ///< decoder class
+#if JVET_N0278_FIXES
+  std::unordered_map<int, VideoIOYuv> m_cVideoIOYuvReconFile;   ///< reconstruction YUV class
+#else
   VideoIOYuv      m_cVideoIOYuvReconFile;        ///< reconstruction YUV class
+#endif
 
   // for output control
   int             m_iPOCLastDisplay;             ///< last POC in display order
diff --git a/source/App/EncoderApp/EncApp.cpp b/source/App/EncoderApp/EncApp.cpp
index f4a89ad74322e85675703066c75342803fa63100..20f493195fd809f0c4c42d726960869d36844ace 100644
--- a/source/App/EncoderApp/EncApp.cpp
+++ b/source/App/EncoderApp/EncApp.cpp
@@ -678,7 +678,13 @@ void EncApp::xCreateLib( std::list<PelUnitBuf*>& recBufList )
       EXIT ("Invalid chroma output bit-depth or image width for packed YUV output, aborting\n");
     }
 
+#if JVET_N0278_FIXES
+    std::string reconFileName = m_reconFileName;
+    reconFileName.insert( reconFileName.size() - 4, std::to_string( layerId ) );
+    m_cVideoIOYuvReconFile.open( reconFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth ); // write mode
+#else
     m_cVideoIOYuvReconFile.open(m_reconFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth); // write mode
+#endif
   }
 
   // create the encoder
@@ -988,8 +994,7 @@ void EncApp::encode()
 \param iNumEncoded   number of encoded frames
 \param accessUnits   list of access units to be written
 */
-void EncApp::xWriteOutput( int iNumEncoded, std::list<PelUnitBuf*>& recBufList
-  )
+void EncApp::xWriteOutput( int iNumEncoded, std::list<PelUnitBuf*>& recBufList )
 {
   const InputColourSpaceConversion ipCSC = (!m_outputInternalColourSpace) ? m_inputColourSpaceConvert : IPCOLOURSPACE_UNCHANGED;
   std::list<PelUnitBuf*>::iterator iterPicYuvRec = recBufList.end();
diff --git a/source/App/EncoderApp/encmain.cpp b/source/App/EncoderApp/encmain.cpp
index 80e134010da9b84b481ad72fad638c72735d98ba..b1151925e99890b4fa5b579b8d31def20b6ca7f5 100644
--- a/source/App/EncoderApp/encmain.cpp
+++ b/source/App/EncoderApp/encmain.cpp
@@ -119,20 +119,42 @@ int main(int argc, char* argv[])
 #if JVET_N0278_FIXES
   std::vector<EncApp*> pcEncApp(1);
   bool resized = false;
-  int i = 0;
+  int layerIdx = 0;
+
+  char** layerArgv = new char*[argc];
 
   do
   {
-    pcEncApp[i] = new EncApp;
+    pcEncApp[layerIdx] = new EncApp;
     // create application encoder class per layer
-    pcEncApp[i]->create();
+    pcEncApp[layerIdx]->create();
 
     // parse configuration per layer
     try
     {
-      if( !pcEncApp[i]->parseCfg( argc, argv ) )
+      int j = 0;
+      for( int i = 0; i < argc; i++ )
       {
-        pcEncApp[i]->destroy();
+        if( argv[i][0] == '-' && argv[i][1] == 'l' )
+        {
+          if( argv[i][2] == std::to_string( layerIdx ).c_str()[0] )
+          {
+            layerArgv[j] = argv[i + 1];
+            layerArgv[j + 1] = argv[i + 2];
+            j += 2;
+          }
+          i += 2;
+        }
+        else
+        {
+          layerArgv[j] = argv[i];
+          j++;
+        }
+      }
+
+      if( !pcEncApp[layerIdx]->parseCfg( j, layerArgv ) )
+      {
+        pcEncApp[layerIdx]->destroy();
         return 1;
       }
     }
@@ -142,17 +164,19 @@ int main(int argc, char* argv[])
       return 1;
     }
 
-    int layerId = i; //VS: layerIdx i should be converted to layerId after VPS is implemented
-    pcEncApp[i]->createLib( layerId );
+    int layerId = layerIdx; //VS: layerIdx should be converted to layerId after VPS is implemented
+    pcEncApp[layerIdx]->createLib( layerId );
 
     if( !resized )
    {
-      pcEncApp.resize( pcEncApp[i]->getMaxLayers() );
+      pcEncApp.resize( pcEncApp[layerIdx]->getMaxLayers() );
       resized = true;
     }
 
-    i++;
-  } while( i < pcEncApp.size() );
+    layerIdx++;
+  } while( layerIdx < pcEncApp.size() );
+
+  delete[] layerArgv;
 #else
   EncApp* pcEncApp = new EncApp;
   // create application encoder class
diff --git a/source/Lib/EncoderLib/EncGOP.cpp b/source/Lib/EncoderLib/EncGOP.cpp
index f0492686faee55093663dcc972f565d4b675498e..dd46c2c9d9f55f6dd6db2f35ee59dcb2813a6eef 100644
--- a/source/Lib/EncoderLib/EncGOP.cpp
+++ b/source/Lib/EncoderLib/EncGOP.cpp
@@ -80,6 +80,9 @@ int getLSB(int poc, int maxLSB)
   }
 }
 
+#if JVET_N0278_FIXES
+bool EncGOP::m_bSeqFirst = true;
+#endif
 
 EncGOP::EncGOP()
 {
@@ -95,7 +98,9 @@ EncGOP::EncGOP()
   m_pcSliceEncoder = NULL;
   m_pcListPic = NULL;
   m_HLSWriter = NULL;
+#if !JVET_N0278_FIXES
   m_bSeqFirst = true;
+#endif
 
   m_bRefreshPending = 0;
   m_pocCRA = 0;
@@ -418,7 +423,11 @@ void EncGOP::xWriteSEI (NalUnitType naluType, SEIMessages& seiMessages, AccessUn
   {
     return;
   }
+#if JVET_N0278_FIXES
+  OutputNALUnit nalu( naluType, m_pcEncLib->getLayerId(), temporalId );
+#else
   OutputNALUnit nalu(naluType, temporalId);
+#endif
   m_seiWriter.writeSEImessages(nalu.m_Bitstream, seiMessages, sps, *m_HRD, false, temporalId);
   auPos = accessUnit.insert(auPos, new NALUnitEBSP(nalu));
   auPos++;
@@ -435,7 +444,11 @@ void EncGOP::xWriteSEISeparately (NalUnitType naluType, SEIMessages& seiMessages
   {
     SEIMessages tmpMessages;
     tmpMessages.push_back(*sei);
+#if JVET_N0278_FIXES
+    OutputNALUnit nalu( naluType, m_pcEncLib->getLayerId(), temporalId );
+#else
     OutputNALUnit nalu(naluType, temporalId);
+#endif
     m_seiWriter.writeSEImessages(nalu.m_Bitstream, tmpMessages, sps, *m_HRD, false, temporalId);
     auPos = accessUnit.insert(auPos, new NALUnitEBSP(nalu));
     auPos++;
diff --git a/source/Lib/EncoderLib/EncGOP.h b/source/Lib/EncoderLib/EncGOP.h
index d592b236494bc1aa8bf9e3e842f505ce7fad609c..4a9700642c613607bc85f3b962c88a0d07d1d9b8 100644
--- a/source/Lib/EncoderLib/EncGOP.h
+++ b/source/Lib/EncoderLib/EncGOP.h
@@ -154,7 +154,11 @@ private:
   EncReshape*             m_pcReshaper;
   RateCtrl*               m_pcRateCtrl;
 
   // indicate sequence first
+#if JVET_N0278_FIXES
+  static bool             m_bSeqFirst;
+#else
   bool                    m_bSeqFirst;
+#endif
 
   EncHRD*                 m_HRD;
diff --git a/source/Lib/EncoderLib/EncLib.cpp b/source/Lib/EncoderLib/EncLib.cpp
index ad27ab778807ef7624eabcdec69758422fb6338e..b3d8fba4090fe4a3d02c98f3f82fbe75d3cb5f04 100644
--- a/source/Lib/EncoderLib/EncLib.cpp
+++ b/source/Lib/EncoderLib/EncLib.cpp
@@ -233,7 +233,11 @@ void EncLib::init( bool isFieldCoding, AUWriterIf* auWriterIf )
   m_AUWriterIf = auWriterIf;
 
   SPS &sps0=*(m_spsMap.allocatePS(0)); // NOTE: implementations that use more than 1 SPS need to be aware of activation issues.
+#if JVET_N0278_FIXES
+  PPS &pps0 = *( m_ppsMap.allocatePS( m_layerId ) );
+#else
   PPS &pps0=*(m_ppsMap.allocatePS(0));
+#endif
   APS &aps0 = *( m_apsMap.allocatePS( SCALING_LIST_APS ) );
   aps0.setAPSId( 0 );
   aps0.setAPSType( SCALING_LIST_APS );
@@ -643,40 +647,45 @@ void EncLib::encode( bool flush, PelStorage* pcPicYuvOrg, PelStorage* cPicYuvTru
         ppsID = 0;
       }
     }
 
-    xGetNewPicBuffer( rcListPicYuvRecOut,
-                      pcPicCurr, ppsID );
+#if JVET_N0278_FIXES
+    if( m_cVPS.getMaxLayers() > 1 )
     {
-      const PPS *pPPS=(ppsID<0) ? m_ppsMap.getFirstPS() : m_ppsMap.getPS(ppsID);
-      const SPS *pSPS=m_spsMap.getPS(pPPS->getSPSId());
+      ppsID = m_layerId;
+    }
+#endif
 
-      if( m_rprEnabled )
-      {
-        pcPicCurr->M_BUFS( 0, PIC_ORIGINAL_INPUT ).getBuf( COMPONENT_Y ).copyFrom( pcPicYuvOrg->getBuf( COMPONENT_Y ) );
-        pcPicCurr->M_BUFS( 0, PIC_ORIGINAL_INPUT ).getBuf( COMPONENT_Cb ).copyFrom( pcPicYuvOrg->getBuf( COMPONENT_Cb ) );
-        pcPicCurr->M_BUFS( 0, PIC_ORIGINAL_INPUT ).getBuf( COMPONENT_Cr ).copyFrom( pcPicYuvOrg->getBuf( COMPONENT_Cr ) );
+    xGetNewPicBuffer( rcListPicYuvRecOut, pcPicCurr, ppsID );
 
-        pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL_INPUT ).getBuf( COMPONENT_Y ).copyFrom( cPicYuvTrueOrg->getBuf( COMPONENT_Y ) );
-        pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL_INPUT ).getBuf( COMPONENT_Cb ).copyFrom( cPicYuvTrueOrg->getBuf( COMPONENT_Cb ) );
-        pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL_INPUT ).getBuf( COMPONENT_Cr ).copyFrom( cPicYuvTrueOrg->getBuf( COMPONENT_Cr ) );
+    const PPS *pPPS = ( ppsID < 0 ) ? m_ppsMap.getFirstPS() : m_ppsMap.getPS( ppsID );
+    const SPS *pSPS = m_spsMap.getPS( pPPS->getSPSId() );
 
-        const ChromaFormat chromaFormatIDC = pSPS->getChromaFormatIdc();
+    if( m_rprEnabled )
+    {
+      pcPicCurr->M_BUFS( 0, PIC_ORIGINAL_INPUT ).getBuf( COMPONENT_Y ).copyFrom( pcPicYuvOrg->getBuf( COMPONENT_Y ) );
+      pcPicCurr->M_BUFS( 0, PIC_ORIGINAL_INPUT ).getBuf( COMPONENT_Cb ).copyFrom( pcPicYuvOrg->getBuf( COMPONENT_Cb ) );
+      pcPicCurr->M_BUFS( 0, PIC_ORIGINAL_INPUT ).getBuf( COMPONENT_Cr ).copyFrom( pcPicYuvOrg->getBuf( COMPONENT_Cr ) );
 
-        const PPS *refPPS = m_ppsMap.getPS(0);
-        Picture::rescalePicture( *pcPicYuvOrg, refPPS->getConformanceWindow(), pcPicCurr->getOrigBuf(), pPPS->getConformanceWindow(), chromaFormatIDC, pSPS->getBitDepths(), true, true );
-        Picture::rescalePicture( *cPicYuvTrueOrg, refPPS->getConformanceWindow(), pcPicCurr->getTrueOrigBuf(), pPPS->getConformanceWindow(), chromaFormatIDC, pSPS->getBitDepths(), true, true );
-      }
-      else
-      {
-        pcPicCurr->M_BUFS( 0, PIC_ORIGINAL ).swap( *pcPicYuvOrg );
-        pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL ).swap( *cPicYuvTrueOrg );
-      }
+      pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL_INPUT ).getBuf( COMPONENT_Y ).copyFrom( cPicYuvTrueOrg->getBuf( COMPONENT_Y ) );
+      pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL_INPUT ).getBuf( COMPONENT_Cb ).copyFrom( cPicYuvTrueOrg->getBuf( COMPONENT_Cb ) );
+      pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL_INPUT ).getBuf( COMPONENT_Cr ).copyFrom( cPicYuvTrueOrg->getBuf( COMPONENT_Cr ) );
+
+      const ChromaFormat chromaFormatIDC = pSPS->getChromaFormatIdc();
 
-      pcPicCurr->finalInit( *pSPS, *pPPS, m_apss, m_lmcsAPS, m_scalinglistAPS );
-      PPS *ptrPPS = (ppsID<0) ? m_ppsMap.getFirstPS() : m_ppsMap.getPS(ppsID);
-      ptrPPS->setNumBricksInPic((int)pcPicCurr->brickMap->bricks.size());
+      const PPS *refPPS = m_ppsMap.getPS( 0 );
+      Picture::rescalePicture( *pcPicYuvOrg, refPPS->getConformanceWindow(), pcPicCurr->getOrigBuf(), pPPS->getConformanceWindow(), chromaFormatIDC, pSPS->getBitDepths(), true, true );
+      Picture::rescalePicture( *cPicYuvTrueOrg, refPPS->getConformanceWindow(), pcPicCurr->getTrueOrigBuf(), pPPS->getConformanceWindow(), chromaFormatIDC, pSPS->getBitDepths(), true, true );
+    }
+    else
+    {
+      pcPicCurr->M_BUFS( 0, PIC_ORIGINAL ).swap( *pcPicYuvOrg );
+      pcPicCurr->M_BUFS( 0, PIC_TRUE_ORIGINAL ).swap( *cPicYuvTrueOrg );
     }
 
+    pcPicCurr->finalInit( *pSPS, *pPPS, m_apss, m_lmcsAPS, m_scalinglistAPS );
+    PPS *ptrPPS = ( ppsID < 0 ) ? m_ppsMap.getFirstPS() : m_ppsMap.getPS( ppsID );
+    ptrPPS->setNumBricksInPic( (int)pcPicCurr->brickMap->bricks.size() );
+
     pcPicCurr->poc = m_iPOCLast;
 
     // compute image characteristics
@@ -836,23 +845,38 @@ void EncLib::xGetNewPicBuffer ( std::list<PelUnitBuf*>& rcListPicYuvRecOut, Pict
   Slice::sortPicList(m_cListPic);
 
   // use an entry in the buffered list if the maximum number that need buffering has been reached:
-  if (m_cListPic.size() >= (uint32_t)(m_iGOPSize + getMaxDecPicBuffering(MAX_TLAYER-1) + 2) )
+  if( m_cListPic.size() >= (uint32_t)( m_iGOPSize + getMaxDecPicBuffering( MAX_TLAYER - 1 ) + 2 ) )
   {
-    PicList::iterator iterPic  = m_cListPic.begin();
+    PicList::iterator iterPic = m_cListPic.begin();
     int iSize = int( m_cListPic.size() );
-    for ( int i = 0; i < iSize; i++ )
+    for( int i = 0; i < iSize; i++ )
     {
       rpcPic = *iterPic;
-      if( ! rpcPic->referenced )
+#if JVET_N0278_FIXES
+      if( !rpcPic->referenced && rpcPic->layerId == m_layerId )
       {
         break;
       }
+      else
+      {
+        rpcPic = nullptr;
+      }
+#else
+      if( !rpcPic->referenced )
+      {
+        break;
+      }
+#endif
       iterPic++;
     }
 
    // If PPS ID is the same, we will assume that it has not changed since it was last used
    // and return the old object.
+#if JVET_N0278_FIXES
+    if( pps.getPPSId() != rpcPic->cs->pps->getPPSId() && rpcPic )
+#else
    if (pps.getPPSId() != rpcPic->cs->pps->getPPSId())
+#endif
    {
      // the IDs differ - free up an entry in the list, and then create a new one, as with the case where the max buffering state has not been reached.
      rpcPic->destroy();
diff --git a/source/Lib/Utilities/VideoIOYuv.h b/source/Lib/Utilities/VideoIOYuv.h
index de5665eda98bc8bff501a2e7451328d2215e6936..b4b6afa50f994c798c0f29f9edd24bd2c97b7ca8 100644
--- a/source/Lib/Utilities/VideoIOYuv.h
+++ b/source/Lib/Utilities/VideoIOYuv.h
@@ -95,6 +95,9 @@ public:
   bool  isEof ();   ///< check for end-of-file
   bool  isFail();   ///< check for failure
+#if JVET_N0278_FIXES
+  bool  isOpen() { return m_cHandle.is_open(); }
+#endif
 
   bool  writeUpscaledPicture( const SPS& sps, const PPS& pps, const CPelUnitBuf& pic, const InputColourSpaceConversion ipCSC, const bool bPackedYUVOutputMode, int outputChoice = 0, ChromaFormat format = NUM_CHROMA_FORMAT, const bool bClipToRec709 = false ); ///< write one upsaled YUV frame
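
For readers skimming the patch, the decoder-side pattern is: replace the single `VideoIOYuv m_cVideoIOYuvReconFile` with a map keyed by `nalu.m_nuhLayerId`, and derive one output file per layer by inserting the layer ID four characters before the end of the configured name (i.e. just before a `.yuv` extension). Below is a minimal standalone C++ sketch of that idea, not VTM code; the `LayerWriter` type and `deriveLayerFileName` helper are illustrative stand-ins, and the sketch shares the patch's assumption that the file name ends in a four-character extension.

// Standalone illustration (not part of VTM) of the per-layer output-file pattern
// used by the patch: one writer per nuh_layer_id, file name suffixed with the layer ID.
#include <fstream>
#include <initializer_list>
#include <iostream>
#include <string>
#include <unordered_map>

// Hypothetical stand-in for VideoIOYuv: just wraps an output stream.
struct LayerWriter
{
  std::ofstream handle;
  bool isOpen() const { return handle.is_open(); }
  void open( const std::string &name ) { handle.open( name, std::ios::binary ); }
};

// Mirrors reconFileName.insert( reconFileName.size() - 4, std::to_string( layerId ) ):
// "rec.yuv" with layerId 1 becomes "rec1.yuv". Shorter names would need extra checks.
static std::string deriveLayerFileName( std::string reconFileName, int layerId )
{
  reconFileName.insert( reconFileName.size() - 4, std::to_string( layerId ) );
  return reconFileName;
}

int main()
{
  std::unordered_map<int, LayerWriter> writers;   // keyed by layer ID, like m_cVideoIOYuvReconFile

  for( int layerId : { 0, 1 } )
  {
    if( !writers[layerId].isOpen() )              // operator[] default-constructs on first use
    {
      writers[layerId].open( deriveLayerFileName( "rec.yuv", layerId ) );
    }
  }

  std::cout << deriveLayerFileName( "rec.yuv", 1 ) << std::endl;   // prints "rec1.yuv"
  return 0;
}

Keying the map by layer ID lets the decoder lazily open one file per layer the first time a reconstructed picture of that layer is written out, which is what the `isOpen()` accessor added to `VideoIOYuv` supports.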