Commit 6169f170 authored by Karsten Suehring's avatar Karsten Suehring

JCTVC-AM0024: Illustration of the shutter interval info SEI message in HEVC Draft

Merge branch 'tlu/HM-SII_SEI_Process'
parents b870c8d8 6104ea45
#======== Shutter Interval Info SEI message =====================
SEIShutterIntervalEnabled : 1
SEISiiTimeScale : 24000000
SEISiiInputNumUnitsInShutterInterval : 200000 # fixed shutter interval: a single entry; the value is assigned to sii_num_units_in_shutter_interval.
# Example for a frame rate of 120 fps; the encoder derives the actual value from the frame rate.
#======== Shutter Interval Info SEI message =====================
SEIShutterIntervalEnabled : 1
SEISiiTimeScale : 24000000
SEISiiInputNumUnitsInShutterInterval : 400000 400000 400000 400000 200000 # multiple shutter intervals: multiple entries; the values are assigned to sub_layer_num_units_in_shutter_interval[ ].
# Example for a frame rate of 120 fps; the encoder derives the actual values from the picture frame rate.
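# Derivation sketch (assuming a 360-degree shutter, as used by the encoder):
#   num_units_in_shutter_interval = SEISiiTimeScale / frame_rate
#   24000000 / 120 fps = 200000 (highest sub-layer), 24000000 / 60 fps = 400000 (lower sub-layers),
#   which matches the entries above.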
......@@ -3530,6 +3530,11 @@ The value plus 1 specifies the maximum number of temporal sub-layers that may be
\Default{true} &
Specifies if shutter interval info is the same for all temporal sub-layers in the CVS.
\\
\Option{SEIShutterIntervalPreFilename (-sii)} &
%\ShortOption{-sii} &
\Default{\NotSet} &
Specifies the file name of the pre-processed video associated with the shutter interval info SEI message. If empty, no video is output.
\\
\end{OptionTableNoShorthand}
......
......@@ -88,6 +88,9 @@ Bool TAppDecCfg::parseCfg( Int argc, TChar* argv[] )
("SEIColourRemappingInfoFilename", m_colourRemapSEIFileName, string(""), "Colour Remapping YUV output file name. If empty, no remapping is applied (ignore SEI message)\n")
#if FGS_RDD5_ENABLE
("SEIFGSFilename", m_SEIFGSFileName, string(""), "FGS YUV output file name. If empty, no film grain is applied (ignore SEI message)\n")
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
("SEIShutterIntervalPostFilename,-sii", m_shutterIntervalPostFileName, string(""), "Post Filtering with Shutter Interval SEI. If empty, no filtering is applied (ignore SEI message)\n")
#endif
("SEIAnnotatedRegionsInfoFilename", m_annotatedRegionsSEIFileName, string(""), "Annotated regions output file name. If empty, no object information will be saved (ignore SEI message)\n")
#if O0043_BEST_EFFORT_DECODING
......
......@@ -68,6 +68,9 @@ protected:
std::string m_colourRemapSEIFileName; ///< output Colour Remapping file name
#if FGS_RDD5_ENABLE
std::string m_SEIFGSFileName; ///< output FGS (film grain) file name
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
std::string m_shutterIntervalPostFileName; ///< output Post Filtering file name
#endif
std::string m_annotatedRegionsSEIFileName; ///< annotated regions file name
std::vector<Int> m_targetDecLayerIdSet; ///< set of LayerIds to be included in the sub-bitstream extraction process.
......@@ -94,6 +97,9 @@ public:
, m_colourRemapSEIFileName()
#if FGS_RDD5_ENABLE
, m_SEIFGSFileName()
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
, m_shutterIntervalPostFileName()
#endif
, m_annotatedRegionsSEIFileName()
, m_targetDecLayerIdSet()
......
......@@ -72,6 +72,9 @@ Void TAppDecTop::destroy()
#if FGS_RDD5_ENABLE
m_SEIFGSFileName.clear();
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_shutterIntervalPostFileName.clear();
#endif
}
// ====================================================================================================================
......@@ -143,6 +146,11 @@ Void TAppDecTop::decode()
Bool openedSEIFGSFile = false; // reconstruction file (with FGS) not yet opened. (must be performed after SPS is seen)
#endif
Bool loopFiltered = false;
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool openedPostFile = false;
setShutterFilterFlag(!m_shutterIntervalPostFileName.empty()); // do not apply shutter interval SEI processing if no filename is specified
m_cTDecTop.setShutterFilterFlag(getShutterFilterFlag());
#endif
while (!!bitstreamFile)
{
......@@ -253,6 +261,65 @@ Void TAppDecTop::decode()
openedSEIFGSFile = true;
}
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
TComList<TComPic*>::iterator iterPic = pcListPic->begin();
TComPic* pcPic = *(iterPic);
SEIMessages shutterIntervalInfo = getSeisByType(pcPic->getSEIs(), SEI::SHUTTER_INTERVAL_INFO);
if (!m_shutterIntervalPostFileName.empty())
{
if (shutterIntervalInfo.size() > 0)
{
SEIShutterIntervalInfo *seiShutterIntervalInfo = (SEIShutterIntervalInfo*) *(shutterIntervalInfo.begin());
if (!seiShutterIntervalInfo->m_siiFixedSIwithinCLVS)
{
UInt arraySize = seiShutterIntervalInfo->m_siiMaxSubLayersMinus1 + 1;
UInt numUnitsLFR = seiShutterIntervalInfo->m_siiSubLayerNumUnitsInSI[0];
UInt numUnitsHFR = seiShutterIntervalInfo->m_siiSubLayerNumUnitsInSI[arraySize - 1];
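// Enable post-filtering only when the lowest sub-layer shutter interval is exactly twice the highest
// sub-layer interval, i.e., the encoder applied the frame-averaging pre-filter.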
setShutterFilterFlag(numUnitsLFR == 2 * numUnitsHFR);
const TComSPS* activeSPS = &(pcListPic->front()->getPicSym()->getSPS());
if (numUnitsLFR == 2 * numUnitsHFR && activeSPS->getMaxTLayers() == 1 && activeSPS->getMaxDecPicBuffering(0) == 1)
{
fprintf(stderr, "Warning: Shutter Interval SEI message processing is disabled for single TempLayer and single frame in DPB\n");
setShutterFilterFlag(false);
}
}
else
{
fprintf(stderr, "Warning: Shutter Interval SEI message processing is disabled for fixed shutter interval case\n");
setShutterFilterFlag(false);
}
}
else
{
fprintf(stderr, "Warning: Shutter Interval information should be specified in SII-SEI message\n");
setShutterFilterFlag(false);
}
}
if ((!m_shutterIntervalPostFileName.empty()) && (!openedPostFile) && getShutterFilterFlag())
{
const BitDepths &bitDepths = pcListPic->front()->getPicSym()->getSPS().getBitDepths();
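// default unspecified output bit depths to the reconstructed bit depths signalled in the SPS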
for (UInt channelType = 0; channelType < MAX_NUM_CHANNEL_TYPE; channelType++)
{
if (m_outputBitDepth[channelType] == 0)
{
m_outputBitDepth[channelType] = bitDepths.recon[channelType];
}
}
std::ofstream ofile(m_shutterIntervalPostFileName.c_str());
if (!ofile.good() || !ofile.is_open())
{
fprintf(stderr, "\nUnable to open file '%s' for writing shutter-interval-SEI video\n", m_shutterIntervalPostFileName.c_str());
exit(EXIT_FAILURE);
}
m_cTVideoIOYuvSIIPostFile.open(m_shutterIntervalPostFileName, true, m_outputBitDepth, m_outputBitDepth, bitDepths.recon); // write mode
openedPostFile = true;
}
#endif
// write reconstruction to file
if( bNewPicture )
{
......@@ -315,6 +382,12 @@ Void TAppDecTop::xDestroyDecLib()
m_cTVideoIOYuvSEIFGSFile.close();
}
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
{
m_cTVideoIOYuvSIIPostFile.close();
}
#endif
// destroy decoder class
m_cTDecTop.destroy();
......@@ -531,6 +604,24 @@ Void TAppDecTop::xWriteOutput( TComList<TComPic*>* pcListPic, UInt tId )
xOutputColourRemapPic(pcPic);
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
{
pcPic->xOutputPostFilteredPic(pcPic, pcListPic);
const Window &conf = pcPic->getConformanceWindow();
const Window defDisp = m_respectDefDispWindow ? pcPic->getDefDisplayWindow() : Window();
m_cTVideoIOYuvSIIPostFile.write( pcPic->getPicYuvPostRec(),
m_outputColourSpaceConvert,
conf.getWindowLeftOffset() + defDisp.getWindowLeftOffset(),
conf.getWindowRightOffset() + defDisp.getWindowRightOffset(),
conf.getWindowTopOffset() + defDisp.getWindowTopOffset(),
conf.getWindowBottomOffset() + defDisp.getWindowBottomOffset(),
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
}
#endif
// update POC of display order
m_iPOCLastDisplay = pcPic->getPOC();
......@@ -664,7 +755,25 @@ Void TAppDecTop::xFlushOutput( TComList<TComPic*>* pcListPic )
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range);
}
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
{
pcPic->xOutputPostFilteredPic(pcPic, pcListPic);
const Window &conf = pcPic->getConformanceWindow();
const Window defDisp = m_respectDefDispWindow ? pcPic->getDefDisplayWindow() : Window();
m_cTVideoIOYuvSIIPostFile.write( pcPic->getPicYuvPostRec(),
m_outputColourSpaceConvert,
conf.getWindowLeftOffset() + defDisp.getWindowLeftOffset(),
conf.getWindowRightOffset() + defDisp.getWindowRightOffset(),
conf.getWindowTopOffset() + defDisp.getWindowTopOffset(),
conf.getWindowBottomOffset() + defDisp.getWindowBottomOffset(),
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
}
#endif
if (!m_colourRemapSEIFileName.empty())
{
xOutputColourRemapPic(pcPic);
}
......@@ -687,7 +796,11 @@ Void TAppDecTop::xFlushOutput( TComList<TComPic*>* pcListPic )
}
pcPic->setOutputMark(false);
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (pcPic != NULL && (m_shutterIntervalPostFileName.empty() || !getShutterFilterFlag()))
#else
if(pcPic != NULL)
#endif
{
pcPic->destroy();
delete pcPic;
......@@ -695,6 +808,23 @@ Void TAppDecTop::xFlushOutput( TComList<TComPic*>* pcListPic )
}
iterPic++;
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_shutterIntervalPostFileName.empty() && getShutterFilterFlag())
{
iterPic = pcListPic->begin();
while (iterPic != pcListPic->end())
{
pcPic = *(iterPic);
if (pcPic != NULL)
{
pcPic->destroy();
delete pcPic;
pcPic = NULL;
}
iterPic++;
}
}
#endif
}
pcListPic->clear();
m_iPOCLastDisplay = -MAX_INT;
......
......@@ -65,6 +65,11 @@ private:
#if FGS_RDD5_ENABLE
TVideoIOYuv m_cTVideoIOYuvSEIFGSFile; ///< FGS YUV output class
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool m_ShutterFilterEnable; ///< enable Post-processing with Shutter Interval SEI
TVideoIOYuv m_cTVideoIOYuvSIIPostFile; ///< post-filtered YUV class
#endif
// for output control
Int m_iPOCLastDisplay; ///< last POC in display order
std::ofstream m_seiMessageFileStream; ///< Used for outputing SEI messages.
......@@ -84,6 +89,11 @@ public:
Void decode (); ///< main decoding function
UInt getNumberOfChecksumErrorsDetected() const { return m_cTDecTop.getNumberOfChecksumErrorsDetected(); }
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool getShutterFilterFlag() const { return m_ShutterFilterEnable; }
Void setShutterFilterFlag(Bool value) { m_ShutterFilterEnable = value; }
#endif
protected:
Void xCreateDecLib (); ///< create internal classes
Void xDestroyDecLib (); ///< destroy internal classes
......
......@@ -787,6 +787,9 @@ Bool TAppEncCfg::parseCfg( Int argc, TChar* argv[] )
("InputPathPrefix,-ipp", inputPathPrefix, string(""), "pathname to prepend to input filename")
("BitstreamFile,b", m_bitstreamFileName, string(""), "Bitstream output file name")
("ReconFile,o", m_reconFileName, string(""), "Reconstructed YUV output file name")
#if SHUTTER_INTERVAL_SEI_PROCESSING
("SEIShutterIntervalPreFilename,-sii", m_shutterIntervalPreFileName, string(""), "File name of Pre-Filtering video. If empty, not output video\n")
#endif
("SourceWidth,-wdt", m_sourceWidth, 0, "Source picture width")
("SourceHeight,-hgt", m_sourceHeight, 0, "Source picture height")
("InputBitDepth", m_inputBitDepth[CHANNEL_TYPE_LUMA], 8, "Bit-depth of input file")
......@@ -2043,6 +2046,9 @@ Bool TAppEncCfg::parseCfg( Int argc, TChar* argv[] )
}
}
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_ShutterFilterEnable = false;
#endif
#if SHUTTER_INTERVAL_SEI_MESSAGE
if (m_siiSEIEnabled)
{
......@@ -2063,6 +2069,22 @@ Bool TAppEncCfg::parseCfg( Int argc, TChar* argv[] )
m_siiSEINumUnitsInShutterInterval = cfg_siiSEIInputNumUnitsInSI.values[0];
assert(m_siiSEINumUnitsInShutterInterval >= 0 && m_siiSEINumUnitsInShutterInterval <= MAX_UINT);
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (arraySize > 1 && m_siiSEISubLayerNumUnitsInSI[0] == 2 * m_siiSEISubLayerNumUnitsInSI[arraySize - 1])
{
m_ShutterFilterEnable = true;
const Double shutterAngle = 360.0;
Double fpsHFR = (Double)m_iFrameRate, fpsLFR = (Double)m_iFrameRate / 2.0;
UInt numUnitsHFR = (UInt)(((Double)m_siiSEITimeScale / fpsHFR) * (shutterAngle / 360.0));
UInt numUnitsLFR = (UInt)(((Double)m_siiSEITimeScale / fpsLFR) * (shutterAngle / 360.0));
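// e.g. SEISiiTimeScale = 24000000, 120 fps input, 360-degree shutter: numUnitsHFR = 200000, numUnitsLFR = 400000 (cf. the example cfg above)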
for (Int i = 0; i < arraySize - 1; i++) m_siiSEISubLayerNumUnitsInSI[i] = numUnitsLFR;
m_siiSEISubLayerNumUnitsInSI[arraySize - 1] = numUnitsHFR;
}
else
{
printf("Warning: SII-processing is applied for multiple shutter intervals and number of LFR units should be 2 times of number of HFR units\n");
}
#endif
}
#endif
if(m_timeCodeSEIEnabled)
......@@ -2993,6 +3015,14 @@ Void TAppEncCfg::xCheckParameter()
}
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_siiSEIEnabled && m_ShutterFilterEnable && m_maxTempLayer == 1 && m_maxDecPicBuffering[0] == 1)
{
printf("Warning: Shutter Interval SEI message processing is disabled for single TempLayer and single frame in DPB\n");
m_ShutterFilterEnable = false;
}
#endif
if(m_timeCodeSEIEnabled)
{
xConfirmPara(m_timeCodeSEINumTs > MAX_TIMECODE_SEI_SETS, "Number of time sets cannot exceed 3");
......@@ -3148,6 +3178,12 @@ Void TAppEncCfg::xPrintParameter()
printf("Input File : %s\n", m_inputFileName.c_str() );
printf("Bitstream File : %s\n", m_bitstreamFileName.c_str() );
printf("Reconstruction File : %s\n", m_reconFileName.c_str() );
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_shutterIntervalPreFileName.empty())
{
printf("SII Pre-processed File : %s\n", m_shutterIntervalPreFileName.c_str());
}
#endif
printf("Real Format : %dx%d %gHz\n", m_sourceWidth - m_confWinLeft - m_confWinRight, m_sourceHeight - m_confWinTop - m_confWinBottom, (Double)m_iFrameRate/m_temporalSubsampleRatio );
printf("Internal Format : %dx%d %gHz\n", m_sourceWidth, m_sourceHeight, (Double)m_iFrameRate/m_temporalSubsampleRatio );
printf("Sequence PSNR output : %s\n", (m_printMSEBasedSequencePSNR ? "Linear average, MSE-based" : "Linear average only") );
......
......@@ -74,6 +74,10 @@ protected:
std::string m_inputFileName; ///< source file name
std::string m_bitstreamFileName; ///< output bitstream file
std::string m_reconFileName; ///< output reconstruction file
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool m_ShutterFilterEnable; ///< enable Pre-Filtering with Shutter Interval SEI
std::string m_shutterIntervalPreFileName; ///< output Pre-Filtering video
#endif
// Lambda modifiers
Double m_adLambdaModifier[ MAX_TLAYER ]; ///< Lambda modifier array for each temporal layer
......
......@@ -112,6 +112,10 @@ Void TAppEncTop::xInitLibCfg()
m_cTEncTop.setXPSNRWeight ( m_dXPSNRWeight[id], ComponentID(id));
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_cTEncTop.setShutterFilterFlag ( m_ShutterFilterEnable );
#endif
m_cTEncTop.setCabacZeroWordPaddingEnabled ( m_cabacZeroWordPaddingEnabled );
m_cTEncTop.setFrameRate ( m_iFrameRate );
......@@ -566,6 +570,12 @@ Void TAppEncTop::xCreateLib()
{
m_cTVideoIOYuvReconFile.open(m_reconFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth); // write mode
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_shutterIntervalPreFileName.empty())
{
m_cTVideoIOYuvSIIPreFile.open(m_shutterIntervalPreFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth); // write mode
}
#endif
// Neo Decoder
m_cTEncTop.create();
......@@ -576,6 +586,12 @@ Void TAppEncTop::xDestroyLib()
// Video I/O
m_cTVideoIOYuvInputFile.close();
m_cTVideoIOYuvReconFile.close();
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_shutterIntervalPreFileName.empty())
{
m_cTVideoIOYuvSIIPreFile.close();
}
#endif
// Neo Decoder
m_cTEncTop.destroy();
......@@ -700,6 +716,14 @@ Void TAppEncTop::encode()
m_cTEncTop.encode( bEos, flush ? 0 : pcPicYuvOrg, flush ? 0 : &cPicYuvTrueOrg, ipCSC, snrCSC, m_cListPicYuvRec, outputAccessUnits, iNumEncoded );
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_shutterIntervalPreFileName.empty())
{
m_cTVideoIOYuvSIIPreFile.write(pcPicYuvOrg, ipCSC, m_confWinLeft, m_confWinRight, m_confWinTop, m_confWinBottom,
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range);
}
#endif
// write bistream to file if necessary
if ( iNumEncoded > 0 )
{
......
......@@ -61,6 +61,9 @@ private:
TEncTop m_cTEncTop; ///< encoder class
TVideoIOYuv m_cTVideoIOYuvInputFile; ///< input YUV file
TVideoIOYuv m_cTVideoIOYuvReconFile; ///< output reconstruction file
#if SHUTTER_INTERVAL_SEI_PROCESSING
TVideoIOYuv m_cTVideoIOYuvSIIPreFile; ///< output pre-filtered file
#endif
TComList<TComPicYuv*> m_cListPicYuvRec; ///< list of reconstruction YUV files
......
......@@ -72,9 +72,17 @@ TComPic::~TComPic()
}
#if REDUCED_ENCODER_MEMORY
#if SHUTTER_INTERVAL_SEI_PROCESSING
Void TComPic::create( const TComSPS &sps, const TComPPS &pps, const Bool bCreateEncoderSourcePicYuv, const Bool bCreateForImmediateReconstruction, const Bool bCreateForProcessedReconstruction )
#else
Void TComPic::create( const TComSPS &sps, const TComPPS &pps, const Bool bCreateEncoderSourcePicYuv, const Bool bCreateForImmediateReconstruction )
#endif
#else
#if SHUTTER_INTERVAL_SEI_PROCESSING
Void TComPic::create( const TComSPS &sps, const TComPPS &pps, const Bool bIsVirtual, const Bool bCreateForProcessedReconstruction )
#else
Void TComPic::create( const TComSPS &sps, const TComPPS &pps, const Bool bIsVirtual )
#endif
#endif
{
destroy();
......@@ -102,6 +110,12 @@ Void TComPic::create( const TComSPS &sps, const TComPPS &pps, const Bool bIsVirt
{
#endif
m_apcPicYuv[PIC_YUV_REC] = new TComPicYuv; m_apcPicYuv[PIC_YUV_REC]->create( iWidth, iHeight, chromaFormatIDC, uiMaxCuWidth, uiMaxCuHeight, uiMaxDepth, true );
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (bCreateForProcessedReconstruction)
{
m_apcPicYuv[PIC_YUV_POST_REC] = new TComPicYuv; m_apcPicYuv[PIC_YUV_POST_REC]->create(iWidth, iHeight, chromaFormatIDC, uiMaxCuWidth, uiMaxCuHeight, uiMaxDepth, true);
}
#endif
#if REDUCED_ENCODER_MEMORY
}
#endif
......@@ -136,7 +150,11 @@ Void TComPic::prepareForEncoderSourcePicYuv()
}
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
Void TComPic::prepareForReconstruction( const Bool bCreateForProcessedReconstruction )
#else
Void TComPic::prepareForReconstruction()
#endif
{
if (m_apcPicYuv[PIC_YUV_REC] == NULL)
{
......@@ -154,6 +172,27 @@ Void TComPic::prepareForReconstruction()
// mark it should be extended
m_apcPicYuv[PIC_YUV_REC]->setBorderExtension(false);
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_apcPicYuv[PIC_YUV_POST_REC] == NULL && bCreateForProcessedReconstruction)
{
const TComSPS &sps = m_picSym.getSPS();
const ChromaFormat chromaFormatIDC = sps.getChromaFormatIdc();
const Int iWidth = sps.getPicWidthInLumaSamples();
const Int iHeight = sps.getPicHeightInLumaSamples();
const UInt uiMaxCuWidth = sps.getMaxCUWidth();
const UInt uiMaxCuHeight = sps.getMaxCUHeight();
const UInt uiMaxDepth = sps.getMaxTotalCUDepth();
m_apcPicYuv[PIC_YUV_POST_REC] = new TComPicYuv; m_apcPicYuv[PIC_YUV_POST_REC]->create(iWidth, iHeight, chromaFormatIDC, uiMaxCuWidth, uiMaxCuHeight, uiMaxDepth, true);
}
// mark it should be extended
if (bCreateForProcessedReconstruction)
{
m_apcPicYuv[PIC_YUV_POST_REC]->setBorderExtension(false);
}
#endif
m_picSym.prepareForReconstruction();
}
......@@ -180,6 +219,14 @@ Void TComPic::releaseEncoderSourceImageData()
Void TComPic::releaseAllReconstructionData()
{
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_apcPicYuv[PIC_YUV_POST_REC])
{
m_apcPicYuv[PIC_YUV_POST_REC]->destroy();
delete m_apcPicYuv[PIC_YUV_POST_REC];
m_apcPicYuv[PIC_YUV_POST_REC] = NULL;
}
#endif
if (m_apcPicYuv[PIC_YUV_REC ])
{
m_apcPicYuv[PIC_YUV_REC]->destroy();
......@@ -324,4 +371,103 @@ TComPicYuv* TComPic::getPicYuvDisp()
#endif
#if SHUTTER_INTERVAL_SEI_PROCESSING
TComPic* TComPic::findPrevPicPOC(TComPic* pcPic, TComList<TComPic*>* pcListPic)
{
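// Return the picture in pcListPic whose POC is pcPic->getPOC() - 1, or NULL if it is not present.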
TComPic* prevPic = NULL;
TComPic* listPic = NULL;
TComList<TComPic*>::iterator iterListPic = pcListPic->begin();
for (Int i = 0; i < (Int)(pcListPic->size()); i++)
{
listPic = *(iterListPic);
listPic->setCurrSliceIdx(0);
if (listPic->getPOC() == pcPic->getPOC() - 1)
{
prevPic = *(iterListPic);
prevPic->setCurrSliceIdx(0);
}
iterListPic++;
}
return prevPic;
}
Void TComPic::xOutputPostFilteredPic(TComPic* pcPic, TComList<TComPic*>* pcListPic)
{
if (pcPic->getPOC() % 2 == 0)
{
TComPic* prevPic = findPrevPicPOC(pcPic, pcListPic);
if (prevPic)
{
TComPicYuv* currYuv = pcPic->getPicYuvRec();
TComPicYuv* prevYuv = prevPic->getPicYuvRec();
TComPicYuv* postYuv = pcPic->getPicYuvPostRec();
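// Invert the encoder-side averaging for even-POC pictures: post = clip(2 * curr - prev).
// Odd-POC pictures (and even-POC pictures without a previous picture) are simply copied below.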
for (Int chan = 0; chan < currYuv->getNumberValidComponents(); chan++)
{
const ComponentID ch = ComponentID(chan);
const ChannelType cType = (ch == COMPONENT_Y) ? CHANNEL_TYPE_LUMA : CHANNEL_TYPE_CHROMA;
const Int bitDepth = pcPic->getSlice(0)->getSPS()->getBitDepth(cType);
const Int maxOutputValue = (1 << bitDepth) - 1;
Pel* currPxl = currYuv->getAddr(ch);
Pel* prevPxl = prevYuv->getAddr(ch);
Pel* postPxl = postYuv->getAddr(ch);
Int iStride = currYuv->getStride(ch);
Int iHeight = currYuv->getHeight(ch);
Int iWidth = currYuv->getWidth(ch);
for (Int y = 0; y < iHeight; y++)
{
for (Int x = 0; x < iWidth; x++)
{
postPxl[x] = std::min(maxOutputValue, std::max(0, (currPxl[x] << 1) - prevPxl[x]));
}
currPxl += iStride;
prevPxl += iStride;
postPxl += iStride;
}
}
}
else
{
pcPic->getPicYuvRec()->copyToPic(pcPic->getPicYuvPostRec());
}
}
else
{
pcPic->getPicYuvRec()->copyToPic(pcPic->getPicYuvPostRec());
}
}
Void TComPic::xOutputPreFilteredPic(TComPic* pcPic, TComList<TComPic*>* pcListPic)
{
if (pcPic->getPOC() % 2 == 0)
{
TComPic* prevPic = findPrevPicPOC(pcPic, pcListPic);
if (prevPic)
{
TComPicYuv* currYuv = pcPic->getPicYuvOrg();
TComPicYuv* prevYuv = prevPic->getPicYuvOrg();
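// Replace the even-POC original picture in place with the average of the current and previous
// original pictures; odd-POC pictures are left unchanged.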
for (Int chan = 0; chan < currYuv->getNumberValidComponents(); chan++)
{
const ComponentID ch = ComponentID(chan);
const ChannelType cType = (ch == COMPONENT_Y) ? CHANNEL_TYPE_LUMA : CHANNEL_TYPE_CHROMA;
const Int bitDepth = pcPic->getSlice(0)->getSPS()->getBitDepth(cType);
const Int maxOutputValue = (1 << bitDepth) - 1;
Pel* currPxl = currYuv->getAddr(ch);
Pel* prevPxl = prevYuv->getAddr(ch);
Int iStride = currYuv->getStride(ch);
Int iHeight = currYuv->getHeight(ch);
Int iWidth = currYuv->getWidth(ch);
for (Int y = 0; y < iHeight; y++)
{
for (Int x = 0; x < iWidth; x++)
{
currPxl[x] = std::min( maxOutputValue, std::max( 0, (currPxl[x] + prevPxl[x]) >> 1) );
}
currPxl += iStride;
prevPxl += iStride;
}
}
}
}
}
#endif
//! \}
......@@ -59,7 +59,16 @@
class TComPic
{