Commit 532fb75f authored by FANGJUN PU

Pre-/post-processing with Shutter Interval SEI.

parent 0046fe97
#======== Shutter Interval Info SEI message =====================
SEIShutterIntervalEnabled : 1
SEISiiTimeScale : 24000000
SEISiiInputNumUnitsInShutterInterval : 200000 # fixed shutter interval: a single entry; the value is assigned to sii_num_units_in_shutter_interval.
# Example for a frame rate of 120 fps; the encoder derives the actual value from the frame rate.
#======== Shutter Interval Info SEI message =====================
SEIShutterIntervalEnabled : 1
SEISiiTimeScale : 24000000
SEISiiInputNumUnitsInShutterInterval : 400000 400000 400000 400000 200000 # multiple shutter intervals: multiple entries; the values are assigned to sub_layer_num_units_in_shutter_interval[ ].
# Example for a frame rate of 120 fps; the encoder derives the actual values from the picture frame rate.
\ No newline at end of file
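# Worked check of the values above (an illustrative derivation, assuming the 360-degree shutter used in the encoder-side computation):
#   sub-layer at 120 fps: 24000000 / 120 = 200000 units
#   sub-layers at 60 fps: 24000000 / 60  = 400000 units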
......@@ -3434,6 +3434,11 @@ The value plus 1 specifies the maximum number of temporal sub-layers that may be
\Default{true} &
Specifies if shutter interval info is the same for all temporal sub-layers in the CVS.
\\
\Option{SEIPreFilteringFilename (-sii)} &
%\ShortOption{-sii} &
\Default{\NotSet} &
Specifies the file name of the pre-filtered video output. If empty, no pre-filtered video is written.
\\
\end{OptionTableNoShorthand}
......
......@@ -86,6 +86,9 @@ Bool TAppDecCfg::parseCfg( Int argc, TChar* argv[] )
("TarDecLayerIdSetFile,l", cfg_TargetDecLayerIdSetFile, string(""), "targetDecLayerIdSet file name. The file should include white space separated LayerId values to be decoded. Omitting the option or a value of -1 in the file decodes all layers.")
("RespectDefDispWindow,w", m_respectDefDispWindow, 0, "Only output content inside the default display window\n")
("SEIColourRemappingInfoFilename", m_colourRemapSEIFileName, string(""), "Colour Remapping YUV output file name. If empty, no remapping is applied (ignore SEI message)\n")
#if SHUTTER_INTERVAL_SEI_PROCESSING
("SEIPostFilteringFilename,-sii", m_postFilterVideoFileName, string(""), "Post Filtering with Shutter Interval SEI. If empty, no filtering is applied (ignore SEI message)\n")
#endif
("SEIAnnotatedRegionsInfoFilename", m_annotatedRegionsSEIFileName, string(""), "Annotated regions output file name. If empty, no object information will be saved (ignore SEI message)\n")
#if O0043_BEST_EFFORT_DECODING
("ForceDecodeBitDepth", m_forceDecodeBitDepth, 0U, "Force the decoder to operate at a particular bit-depth (best effort decoding)")
......
......@@ -66,6 +66,9 @@ protected:
Int m_decodedPictureHashSEIEnabled; ///< Checksum(3)/CRC(2)/MD5(1)/disable(0) acting on decoded picture hash SEI message
Bool m_decodedNoDisplaySEIEnabled; ///< Enable(true)/disable(false) writing only pictures that get displayed based on the no display SEI message
std::string m_colourRemapSEIFileName; ///< output Colour Remapping file name
#if SHUTTER_INTERVAL_SEI_PROCESSING
std::string m_postFilterVideoFileName; ///< output Post Filtering file name
#endif
std::string m_annotatedRegionsSEIFileName; ///< annotated regions file name
std::vector<Int> m_targetDecLayerIdSet; ///< set of LayerIds to be included in the sub-bitstream extraction process.
Int m_respectDefDispWindow; ///< Only output content inside the default display window
......@@ -89,6 +92,9 @@ public:
, m_decodedPictureHashSEIEnabled(0)
, m_decodedNoDisplaySEIEnabled(false)
, m_colourRemapSEIFileName()
#if SHUTTER_INTERVAL_SEI_PROCESSING
, m_postFilterVideoFileName()
#endif
, m_annotatedRegionsSEIFileName()
, m_targetDecLayerIdSet()
, m_respectDefDispWindow(0)
......
......@@ -69,6 +69,9 @@ Void TAppDecTop::destroy()
{
m_bitstreamFileName.clear();
m_reconFileName.clear();
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_postFilterVideoFileName.clear();
#endif
}
// ====================================================================================================================
......@@ -137,6 +140,10 @@ Void TAppDecTop::decode()
// main decoder loop
Bool openedReconFile = false; // reconstruction file not yet opened. (must be performed after SPS is seen)
Bool loopFiltered = false;
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool openedPostFile = false;
setShutterFilterFlag(false);
#endif
while (!!bitstreamFile)
{
......@@ -230,6 +237,43 @@ Void TAppDecTop::decode()
m_cTVideoIOYuvReconFile.open( m_reconFileName, true, m_outputBitDepth, m_outputBitDepth, bitDepths.recon ); // write mode
openedReconFile = true;
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
TComList<TComPic*>::iterator iterPic = pcListPic->begin();
TComPic* pcPic = *(iterPic);
SEIMessages shutterIntervalInfo = getSeisByType(pcPic->getSEIs(), SEI::SHUTTER_INTERVAL_INFO);
if (shutterIntervalInfo.size() > 0)
{
SEIShutterIntervalInfo *seiShutterIntervalInfo = (SEIShutterIntervalInfo*) *(shutterIntervalInfo.begin());
if (!seiShutterIntervalInfo->m_siiFixedSIwithinCLVS)
{
UInt arraySize = seiShutterIntervalInfo->m_siiMaxSubLayersMinus1 + 1;
UInt numUnitsLFR = seiShutterIntervalInfo->m_siiSubLayerNumUnitsInSI[0];
UInt numUnitsHFR = seiShutterIntervalInfo->m_siiSubLayerNumUnitsInSI[arraySize - 1];
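// enable post-filtering only when the base sub-layer's shutter interval is exactly twice the highest sub-layer's, matching the encoder-side pre-filtering condition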
setShutterFilterFlag(numUnitsLFR == 2 * numUnitsHFR);
}
}
if ((!m_postFilterVideoFileName.empty()) && (!openedPostFile) && getShutterFilterFlag())
{
const BitDepths &bitDepths = pcListPic->front()->getPicSym()->getSPS().getBitDepths(); // use bit depths of first reconstructed picture.
for (UInt channelType = 0; channelType < MAX_NUM_CHANNEL_TYPE; channelType++)
{
if (m_outputBitDepth[channelType] == 0)
{
m_outputBitDepth[channelType] = bitDepths.recon[channelType];
}
}
std::ofstream ofile(m_postFilterVideoFileName.c_str());
if (!ofile.good() || !ofile.is_open())
{
fprintf(stderr, "\nUnable to open file '%s' for writing shutter-interval-SEI video\n", m_postFilterVideoFileName.c_str());
exit(EXIT_FAILURE);
}
m_cTVideoIOYuvPostFile.open(m_postFilterVideoFileName, true, m_outputBitDepth, m_outputBitDepth, bitDepths.recon); // write mode
openedPostFile = true;
}
#endif
// write reconstruction to file
if( bNewPicture )
{
......@@ -286,6 +330,12 @@ Void TAppDecTop::xDestroyDecLib()
{
m_cTVideoIOYuvReconFile.close();
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_postFilterVideoFileName.empty() && getShutterFilterFlag())
{
m_cTVideoIOYuvPostFile.close();
}
#endif
// destroy decoder class
m_cTDecTop.destroy();
......@@ -487,6 +537,24 @@ Void TAppDecTop::xWriteOutput( TComList<TComPic*>* pcListPic, UInt tId )
xOutputColourRemapPic(pcPic);
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_postFilterVideoFileName.empty() && getShutterFilterFlag())
{
pcPic->xOutputPostFilteredPic(pcPic, pcListPic);
const Window &conf = pcPic->getConformanceWindow();
const Window defDisp = m_respectDefDispWindow ? pcPic->getDefDisplayWindow() : Window();
m_cTVideoIOYuvPostFile.write( pcPic->getPicYuvPostRec(),
m_outputColourSpaceConvert,
conf.getWindowLeftOffset() + defDisp.getWindowLeftOffset(),
conf.getWindowRightOffset() + defDisp.getWindowRightOffset(),
conf.getWindowTopOffset() + defDisp.getWindowTopOffset(),
conf.getWindowBottomOffset() + defDisp.getWindowBottomOffset(),
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
}
#endif
// update POC of display order
m_iPOCLastDisplay = pcPic->getPOC();
......@@ -605,6 +673,24 @@ Void TAppDecTop::xFlushOutput( TComList<TComPic*>* pcListPic )
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (!m_postFilterVideoFileName.empty() && getShutterFilterFlag())
{
pcPic->xOutputPostFilteredPic(pcPic, pcListPic);
const Window &conf = pcPic->getConformanceWindow();
const Window defDisp = m_respectDefDispWindow ? pcPic->getDefDisplayWindow() : Window();
m_cTVideoIOYuvPostFile.write( pcPic->getPicYuvPostRec(),
m_outputColourSpaceConvert,
conf.getWindowLeftOffset() + defDisp.getWindowLeftOffset(),
conf.getWindowRightOffset() + defDisp.getWindowRightOffset(),
conf.getWindowTopOffset() + defDisp.getWindowTopOffset(),
conf.getWindowBottomOffset() + defDisp.getWindowBottomOffset(),
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range );
}
#endif
if (!m_colourRemapSEIFileName.empty())
{
xOutputColourRemapPic(pcPic);
......@@ -628,14 +714,29 @@ Void TAppDecTop::xFlushOutput( TComList<TComPic*>* pcListPic )
}
pcPic->setOutputMark(false);
}
#if !SHUTTER_INTERVAL_SEI_PROCESSING
if(pcPic != NULL)
{
pcPic->destroy();
delete pcPic;
pcPic = NULL;
}
#endif
iterPic++;
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
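// picture destruction is deferred to this second pass so that findPrevPicPOC() can still reach the previous-POC picture while pictures are post-filtered above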
while (iterPic != pcListPic->end())
{
pcPic = *(iterPic);
if (pcPic != NULL)
{
pcPic->destroy();
delete pcPic;
pcPic = NULL;
}
iterPic++;
}
#endif
}
pcListPic->clear();
m_iPOCLastDisplay = -MAX_INT;
......
......@@ -62,6 +62,10 @@ private:
// class interface
TDecTop m_cTDecTop; ///< decoder class
TVideoIOYuv m_cTVideoIOYuvReconFile; ///< reconstruction YUV class
#if SHUTTER_INTERVAL_SEI_PROCESSING
bool m_ShutterFilterEnable; ///< enable Post-processing with Shutter Interval SEI
TVideoIOYuv m_cTVideoIOYuvPostFile; ///< post-filtered YUV class
#endif
// for output control
Int m_iPOCLastDisplay; ///< last POC in display order
......@@ -82,6 +86,11 @@ public:
Void decode (); ///< main decoding function
UInt getNumberOfChecksumErrorsDetected() const { return m_cTDecTop.getNumberOfChecksumErrorsDetected(); }
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool getShutterFilterFlag() const { return m_ShutterFilterEnable; }
Void setShutterFilterFlag(Bool value) { m_ShutterFilterEnable = value; }
#endif
protected:
Void xCreateDecLib (); ///< create internal classes
Void xDestroyDecLib (); ///< destroy internal classes
......
......@@ -781,6 +781,9 @@ Bool TAppEncCfg::parseCfg( Int argc, TChar* argv[] )
("InputPathPrefix,-ipp", inputPathPrefix, string(""), "pathname to prepend to input filename")
("BitstreamFile,b", m_bitstreamFileName, string(""), "Bitstream output file name")
("ReconFile,o", m_reconFileName, string(""), "Reconstructed YUV output file name")
#if SHUTTER_INTERVAL_SEI_PROCESSING
("SEIPreFilteringFilename,-sii", m_preFilterVideoFileName, string(""), "File name of Pre-Filtering video. If empty, not output video\n")
#endif
("SourceWidth,-wdt", m_iSourceWidth, 0, "Source picture width")
("SourceHeight,-hgt", m_iSourceHeight, 0, "Source picture height")
("InputBitDepth", m_inputBitDepth[CHANNEL_TYPE_LUMA], 8, "Bit-depth of input file")
......@@ -2002,6 +2005,9 @@ Bool TAppEncCfg::parseCfg( Int argc, TChar* argv[] )
}
}
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_ShutterFilterEnable = false;
#endif
#if SHUTTER_INTERVAL_SEI_MESSAGE
if (m_siiSEIEnabled)
{
......@@ -2022,6 +2028,18 @@ Bool TAppEncCfg::parseCfg( Int argc, TChar* argv[] )
m_siiSEINumUnitsInShutterInterval = cfg_siiSEIInputNumUnitsInSI.values[0];
assert(m_siiSEINumUnitsInShutterInterval >= 0 && m_siiSEINumUnitsInShutterInterval <= MAX_UINT);
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (arraySize > 1 && m_siiSEISubLayerNumUnitsInSI[0] == 2 * m_siiSEISubLayerNumUnitsInSI[arraySize - 1])
{
m_ShutterFilterEnable = true;
const double shutterAngle = 360.0;
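// 360-degree shutter: the shutter is open for the entire frame period, so numUnits = timeScale / frameRate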
double fpsHFR = (double)m_iFrameRate, fpsLFR = (double)m_iFrameRate / 2.0;
UInt numUnitsHFR = (UInt)(((double)m_siiSEITimeScale / fpsHFR) * (shutterAngle / 360.0));
UInt numUnitsLFR = (UInt)(((double)m_siiSEITimeScale / fpsLFR) * (shutterAngle / 360.0));
for (Int i = 0; i < arraySize - 1; i++) m_siiSEISubLayerNumUnitsInSI[i] = numUnitsLFR;
m_siiSEISubLayerNumUnitsInSI[arraySize - 1] = numUnitsHFR;
}
#endif
}
#endif
if(m_timeCodeSEIEnabled)
......@@ -3084,6 +3102,12 @@ Void TAppEncCfg::xPrintParameter()
printf("Input File : %s\n", m_inputFileName.c_str() );
printf("Bitstream File : %s\n", m_bitstreamFileName.c_str() );
printf("Reconstruction File : %s\n", m_reconFileName.c_str() );
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_preFilterVideoFileName.empty())
{
printf("Pre-Filtering File : %s\n", m_preFilterVideoFileName.c_str());
}
#endif
printf("Real Format : %dx%d %gHz\n", m_iSourceWidth - m_confWinLeft - m_confWinRight, m_iSourceHeight - m_confWinTop - m_confWinBottom, (Double)m_iFrameRate/m_temporalSubsampleRatio );
printf("Internal Format : %dx%d %gHz\n", m_iSourceWidth, m_iSourceHeight, (Double)m_iFrameRate/m_temporalSubsampleRatio );
printf("Sequence PSNR output : %s\n", (m_printMSEBasedSequencePSNR ? "Linear average, MSE-based" : "Linear average only") );
......
......@@ -74,6 +74,10 @@ protected:
std::string m_inputFileName; ///< source file name
std::string m_bitstreamFileName; ///< output bitstream file
std::string m_reconFileName; ///< output reconstruction file
#if SHUTTER_INTERVAL_SEI_PROCESSING
bool m_ShutterFilterEnable; ///< enable Pre-Filtering with Shutter Interval SEI
std::string m_preFilterVideoFileName; ///< output Pre-Filtering video
#endif
// Lambda modifiers
Double m_adLambdaModifier[ MAX_TLAYER ]; ///< Lambda modifier array for each temporal layer
......
......@@ -112,6 +112,10 @@ Void TAppEncTop::xInitLibCfg()
m_cTEncTop.setXPSNRWeight ( m_dXPSNRWeight[id], ComponentID(id));
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_cTEncTop.setShutterFilterFlag ( m_ShutterFilterEnable );
#endif
m_cTEncTop.setCabacZeroWordPaddingEnabled ( m_cabacZeroWordPaddingEnabled );
m_cTEncTop.setFrameRate ( m_iFrameRate );
......@@ -545,6 +549,12 @@ Void TAppEncTop::xCreateLib()
{
m_cTVideoIOYuvReconFile.open(m_reconFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth); // write mode
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_preFilterVideoFileName.empty())
{
m_cTVideoIOYuvPreFile.open(m_preFilterVideoFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth); // write mode
}
#endif
// Neo Decoder
m_cTEncTop.create();
......@@ -555,6 +565,12 @@ Void TAppEncTop::xDestroyLib()
// Video I/O
m_cTVideoIOYuvInputFile.close();
m_cTVideoIOYuvReconFile.close();
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_preFilterVideoFileName.empty())
{
m_cTVideoIOYuvPreFile.close();
}
#endif
// Neo Decoder
m_cTEncTop.destroy();
......@@ -679,6 +695,14 @@ Void TAppEncTop::encode()
m_cTEncTop.encode( bEos, flush ? 0 : pcPicYuvOrg, flush ? 0 : &cPicYuvTrueOrg, ipCSC, snrCSC, m_cListPicYuvRec, outputAccessUnits, iNumEncoded );
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_ShutterFilterEnable && !m_preFilterVideoFileName.empty())
{
m_cTVideoIOYuvPreFile.write(pcPicYuvOrg, ipCSC, m_confWinLeft, m_confWinRight, m_confWinTop, m_confWinBottom,
NUM_CHROMA_FORMAT, m_bClipOutputVideoToRec709Range);
}
#endif
// write bitstream to file if necessary
if ( iNumEncoded > 0 )
{
......
......@@ -61,6 +61,9 @@ private:
TEncTop m_cTEncTop; ///< encoder class
TVideoIOYuv m_cTVideoIOYuvInputFile; ///< input YUV file
TVideoIOYuv m_cTVideoIOYuvReconFile; ///< output reconstruction file
#if SHUTTER_INTERVAL_SEI_PROCESSING
TVideoIOYuv m_cTVideoIOYuvPreFile; ///< output pre-filtered file
#endif
TComList<TComPicYuv*> m_cListPicYuvRec; ///< list of reconstruction YUV files
......
......@@ -98,6 +98,9 @@ Void TComPic::create( const TComSPS &sps, const TComPPS &pps, const Bool bIsVirt
{
#endif
m_apcPicYuv[PIC_YUV_REC] = new TComPicYuv; m_apcPicYuv[PIC_YUV_REC]->create( iWidth, iHeight, chromaFormatIDC, uiMaxCuWidth, uiMaxCuHeight, uiMaxDepth, true );
#if SHUTTER_INTERVAL_SEI_PROCESSING
m_apcPicYuv[PIC_YUV_POST_REC] = new TComPicYuv; m_apcPicYuv[PIC_YUV_POST_REC]->create( iWidth, iHeight, chromaFormatIDC, uiMaxCuWidth, uiMaxCuHeight, uiMaxDepth, true );
#endif
#if REDUCED_ENCODER_MEMORY
}
#endif
......@@ -150,6 +153,24 @@ Void TComPic::prepareForReconstruction()
// mark it should be extended
m_apcPicYuv[PIC_YUV_REC]->setBorderExtension(false);
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_apcPicYuv[PIC_YUV_POST_REC] == NULL)
{
const TComSPS &sps = m_picSym.getSPS();
const ChromaFormat chromaFormatIDC = sps.getChromaFormatIdc();
const Int iWidth = sps.getPicWidthInLumaSamples();
const Int iHeight = sps.getPicHeightInLumaSamples();
const UInt uiMaxCuWidth = sps.getMaxCUWidth();
const UInt uiMaxCuHeight = sps.getMaxCUHeight();
const UInt uiMaxDepth = sps.getMaxTotalCUDepth();
m_apcPicYuv[PIC_YUV_POST_REC] = new TComPicYuv; m_apcPicYuv[PIC_YUV_POST_REC]->create(iWidth, iHeight, chromaFormatIDC, uiMaxCuWidth, uiMaxCuHeight, uiMaxDepth, true);
}
// mark it should be extended
m_apcPicYuv[PIC_YUV_POST_REC]->setBorderExtension(false);
#endif
m_picSym.prepareForReconstruction();
}
......@@ -176,6 +197,14 @@ Void TComPic::releaseEncoderSourceImageData()
Void TComPic::releaseAllReconstructionData()
{
#if SHUTTER_INTERVAL_SEI_PROCESSING
if (m_apcPicYuv[PIC_YUV_POST_REC])
{
m_apcPicYuv[PIC_YUV_POST_REC]->destroy();
delete m_apcPicYuv[PIC_YUV_POST_REC];
m_apcPicYuv[PIC_YUV_POST_REC] = NULL;
}
#endif
if (m_apcPicYuv[PIC_YUV_REC ])
{
m_apcPicYuv[PIC_YUV_REC]->destroy();
......@@ -257,5 +286,103 @@ UInt TComPic::getSubstreamForCtuAddr(const UInt ctuAddr, const Bool bAddressInRa
return subStrm;
}
#if SHUTTER_INTERVAL_SEI_PROCESSING
TComPic* TComPic::findPrevPicPOC(TComPic* pcPic, TComList<TComPic*>* pcListPic)
{
TComPic* prevPic = NULL;
TComPic* listPic = NULL;
TComList<TComPic*>::iterator iterListPic = pcListPic->begin();
for (Int i = 0; i < (Int)(pcListPic->size()); i++)
{
listPic = *(iterListPic);
listPic->setCurrSliceIdx(0);
if (listPic->getPOC() == pcPic->getPOC() - 1)
{
prevPic = *(iterListPic);
prevPic->setCurrSliceIdx(0);
}
iterListPic++;
}
return prevPic;
}
Void TComPic::xOutputPostFilteredPic(TComPic* pcPic, TComList<TComPic*>* pcListPic)
{
if (pcPic->getPOC() % 2 == 0)
{
TComPic* prevPic = findPrevPicPOC(pcPic, pcListPic);
if (prevPic)
{
TComPicYuv* currYuv = pcPic->getPicYuvRec();
TComPicYuv* prevYuv = prevPic->getPicYuvRec();
TComPicYuv* postYuv = pcPic->getPicYuvPostRec();
for (Int chan = 0; chan < currYuv->getNumberValidComponents(); chan++)
{
const ComponentID ch = ComponentID(chan);
const ChannelType cType = (ch == COMPONENT_Y) ? CHANNEL_TYPE_LUMA : CHANNEL_TYPE_CHROMA;
const Int bitDepth = pcPic->getSlice(0)->getSPS()->getBitDepth(cType);
const Int maxOutputValue = (1 << bitDepth) - 1;
Pel* currPxl = currYuv->getAddr(ch);
Pel* prevPxl = prevYuv->getAddr(ch);
Pel* postPxl = postYuv->getAddr(ch);
Int iStride = currYuv->getStride(ch);
Int iHeight = currYuv->getHeight(ch);
Int iWidth = currYuv->getWidth(ch);
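// invert the encoder-side two-frame average: post = clip( 2*curr - prev ), recovering the original picture up to rounding and clipping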
for (Int y = 0; y < iHeight; y++)
{
for (Int x = 0; x < iWidth; x++)
{
postPxl[x] = std::min(maxOutputValue, std::max(0, (currPxl[x] << 1) - prevPxl[x]));
}
currPxl += iStride;
prevPxl += iStride;
postPxl += iStride;
}
}
}
else
{
pcPic->getPicYuvRec()->copyToPic(pcPic->getPicYuvPostRec());
}
}
else
{
pcPic->getPicYuvRec()->copyToPic(pcPic->getPicYuvPostRec());
}
}
Void TComPic::xOutputPreFilteredPic(TComPic* pcPic, TComList<TComPic*>* pcListPic)
{
if (pcPic->getPOC() % 2 == 0)
{
TComPic* prevPic = findPrevPicPOC(pcPic, pcListPic);
if (prevPic)
{
TComPicYuv* currYuv = pcPic->getPicYuvOrg();
TComPicYuv* prevYuv = prevPic->getPicYuvOrg();
for (Int chan = 0; chan < currYuv->getNumberValidComponents(); chan++)
{
const ComponentID ch = ComponentID(chan);
const ChannelType cType = (ch == COMPONENT_Y) ? CHANNEL_TYPE_LUMA : CHANNEL_TYPE_CHROMA;
const Int bitDepth = pcPic->getSlice(0)->getSPS()->getBitDepth(cType);
const Int maxOutputValue = (1 << bitDepth) - 1;
Pel* currPxl = currYuv->getAddr(ch);
Pel* prevPxl = prevYuv->getAddr(ch);
Int iStride = currYuv->getStride(ch);
Int iHeight = currYuv->getHeight(ch);
Int iWidth = currYuv->getWidth(ch);
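// blend the even-POC picture with its predecessor in place: curr = clip( (curr + prev) / 2 ), emulating a doubled shutter interval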
for (Int y = 0; y < iHeight; y++)
{
for (Int x = 0; x < iWidth; x++)
{
currPxl[x] = std::min( maxOutputValue, std::max( 0, (currPxl[x] + prevPxl[x]) >> 1) );
}
currPxl += iStride;
prevPxl += iStride;
}
}
}
}
}
#endif
//! \}
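For reference, a minimal standalone sketch of the per-sample pre-/post-filter pair implemented above. The helper names, the 10-bit depth, and the sample values are invented for illustration; the HM functions operate on whole TComPicYuv planes rather than single samples.

#include <algorithm>
#include <cstdio>

static const int kBitDepth = 10;                    // illustrative bit depth
static const int kMaxValue = (1 << kBitDepth) - 1;

// Encoder side (cf. xOutputPreFilteredPic): average the even-POC sample with the previous picture's sample.
static int preFilterSample(int curr, int prev)
{
  return std::min(kMaxValue, std::max(0, (curr + prev) >> 1));
}

// Decoder side (cf. xOutputPostFilteredPic): invert the average, 2*blended - prev, clipped to the valid range.
static int postFilterSample(int blended, int prev)
{
  return std::min(kMaxValue, std::max(0, (blended << 1) - prev));
}

int main()
{
  const int prev = 400, curr = 700;                       // two consecutive higher-frame-rate samples
  const int blended  = preFilterSample(curr, prev);       // 550: the value that gets encoded
  const int restored = postFilterSample(blended, prev);   // 700: original recovered (up to rounding/clipping)
  std::printf("prev=%d curr=%d blended=%d restored=%d\n", prev, curr, blended, restored);
  return 0;
}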
......@@ -56,7 +56,16 @@
class TComPic
{
public:
#if SHUTTER_INTERVAL_SEI_PROCESSING
typedef enum { PIC_YUV_ORG = 0, PIC_YUV_REC = 1, PIC_YUV_TRUE_ORG = 2, PIC_YUV_POST_REC = 3, NUM_PIC_YUV = 4 } PIC_YUV_T;
TComPicYuv* getPicYuvPostRec() { return m_apcPicYuv[PIC_YUV_POST_REC]; }
TComPic* findPrevPicPOC(TComPic* pcPic, TComList<TComPic*>* pcListPic);
Void xOutputPostFilteredPic(TComPic* pcPic, TComList<TComPic*>* pcListPic);
Void xOutputPreFilteredPic(TComPic* pcPic, TComList<TComPic*>* pcListPic);
#else
typedef enum { PIC_YUV_ORG=0, PIC_YUV_REC=1, PIC_YUV_TRUE_ORG=2, NUM_PIC_YUV=3 } PIC_YUV_T;
#endif
// TRUE_ORG is the input file without any pre-encoder colour space conversion (but with possible bit depth increment)
TComPicYuv* getPicYuvTrueOrg() { return m_apcPicYuv[PIC_YUV_TRUE_ORG]; }
......
......@@ -92,6 +92,9 @@
#define SEI_ENCODER_CONTROL 1 ///< add encoder control for the following SEI: film grain characteristics, content light level, ambient viewing environment
#define DPB_ENCODER_USAGE_CHECK 1 ///< Adds DPB encoder usage check.
#if SHUTTER_INTERVAL_SEI_MESSAGE
#define SHUTTER_INTERVAL_SEI_PROCESSING 1 ///< JCTVC-AM0024: pre-/post-processing to use shutter interval SEI
#endif
// ====================================================================================================================
// Tool Switches
// ====================================================================================================================
......
......@@ -147,6 +147,9 @@ protected:
Bool m_bXPSNREnableFlag;
Double m_dXPSNRWeight[MAX_NUM_COMPONENT];
Bool m_cabacZeroWordPaddingEnabled;
#if SHUTTER_INTERVAL_SEI_PROCESSING
bool m_ShutterFilterEnable; ///< enable Pre-Filtering with Shutter Interval SEI
#endif
/* profile & level */
Profile::Name m_profile;
......@@ -577,6 +580,11 @@ public:
Bool getCabacZeroWordPaddingEnabled() const { return m_cabacZeroWordPaddingEnabled; }
Void setCabacZeroWordPaddingEnabled(Bool value) { m_cabacZeroWordPaddingEnabled = value; }
#if SHUTTER_INTERVAL_SEI_PROCESSING
Bool getShutterFilterFlag() const { return m_ShutterFilterEnable; }
Void setShutterFilterFlag(Bool value) { m_ShutterFilterEnable = value; }
#endif
//====== Coding Structure ========
Void setIntraPeriod ( Int i ) { m_uiIntraPeriod = (UInt)i; }
Void setDecodingRefreshType ( Int i ) { m_uiDecodingRefreshType = (UInt)i; }
......
......@@ -342,6 +342,14 @@ Void TEncTop::encode( Bool flush, TComPicYuv* pcPicYuvOrg, TComPicYuv* pcPicYuvT
pcPicYuvOrg->copyToPic( pcPicCurr->getPicYuvOrg() );
pcPicYuvTrueOrg->copyToPic( pcPicCurr->getPicYuvTrueOrg() );
#if SHUTTER_INTERVAL_SEI_PROCESSING
if ( getShutterFilterFlag() )
{
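// pre-filter the source picture in place (even-POC pictures are averaged with their predecessor), then copy it back so the application-level buffer (and hence the pre-filtering output file) sees the filtered picture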
pcPicCurr->xOutputPreFilteredPic(pcPicCurr, &m_cListPic);
pcPicCurr->getPicYuvOrg()->copyToPic(pcPicYuvOrg);
}
#endif
// compute image characteristics
if ( getUseAdaptiveQP() )
{
......