...
 
......@@ -109,6 +109,7 @@ AffineAmvr : 0
LMCSEnable : 1 # LMCS: 0: disable, 1:enable
LMCSSignalType : 0 # Input signal type: 0:SDR, 1:HDR-PQ, 2:HDR-HLG
LMCSUpdateCtrl : 1 # LMCS model update control: 0:RA, 1:AI, 2:LDB/LDP
LMCSOffset : 2 # chroma residual scaling offset
MIP : 1
JointCbCr : 1 # joint coding of chroma residuals (if available): 0: disable, 1: enable
ChromaTS : 1
......
......@@ -137,6 +137,7 @@ AffineAmvr : 0
LMCSEnable : 1 # LMCS: 0: disable, 1:enable
LMCSSignalType : 0 # Input signal type: 0:SDR, 1:HDR-PQ, 2:HDR-HLG
LMCSUpdateCtrl : 2 # LMCS model update control: 0:RA, 1:AI, 2:LDB/LDP
LMCSOffset : 1 # chroma residual scaling offset
MIP : 0
JointCbCr : 1 # joint coding of chroma residuals (if available): 0: disable, 1: enable
PROF : 1
......
#======== File I/O ===============
InputFile : sc_robot_1280x720_30_8bit_300_444.yuv
InputBitDepth : 8 # Input bitdepth
InputChromaFormat : 444 # Ratio of luminance to chrominance samples
FrameRate : 30 # Frame Rate per second
FrameSkip : 0 # Number of frames to be skipped in input
SourceWidth : 1280 # Input frame width
SourceHeight : 720 # Input frame height
FramesToBeEncoded : 300 # Number of frames to be coded
Level : 6.2
#======== File I/O ===============
InputFile : sc_robot_1280x720_30_8bit_300.rgb
InputBitDepth : 8 # Input bitdepth
InputChromaFormat : 444 # Ratio of luminance to chrominance samples
FrameRate : 30 # Frame Rate per second
FrameSkip : 0 # Number of frames to be skipped in input
SourceWidth : 1280 # Input frame width
SourceHeight : 720 # Input frame height
FramesToBeEncoded : 300 # Number of frames to be coded
InputColourSpaceConvert : RGBtoGBR # Non-normative colour space conversion to apply to input video
SNRInternalColourSpace : 1 # Evaluate SNRs in GBR order
OutputInternalColourSpace : 0 # Convert recon output back to RGB order. Use --OutputColourSpaceConvert GBRtoRGB on decoder to produce a matching output file.
Level : 6.2
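For illustration, a minimal encode/decode command pairing for the RGB configuration above (a sketch only: it assumes the usual EncoderApp/DecoderApp binary names, placeholder config and file names, and the -b/-o shorthands for the bitstream and reconstruction files):
EncoderApp -c encoder_intra_vtm.cfg -c sc_robot_rgb.cfg -b str.bin -o rec_gbr.rgb
DecoderApp -b str.bin -o rec.rgb --OutputColourSpaceConvert GBRtoRGB
The decoder-side GBRtoRGB conversion restores the component order so that the output matches the RGB input, as noted in the OutputInternalColourSpace comment above.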
......@@ -603,7 +603,7 @@ Specifies the output coded bit stream file.
\Option{ReconFile (-o)} &
%\ShortOption{-o} &
\Default{\NotSet} &
Specifies the output locally reconstructed video file.
Specifies the output locally reconstructed video file. If more than one layer is encoded (i.e. MaxLayers > 1), a reconstructed file is written for each layer and the layer index is appended as a suffix to ReconFile. If the file name contains one or more dots, the layer index is inserted before the last dot, e.g. 'reconst.yuv' becomes 'reconst0.yuv' for layer index 0 and 'reconst' becomes 'reconst0'.
\\
\Option{SourceWidth (-wdt)}%
......@@ -661,12 +661,10 @@ Note: This option has no effect on the decoding process.
\Option{InputBitDepthC}%
\Option{MSBExtendedBitDepthC}%
\Option{InternalBitDepthC}%
\Option{OutputBitDepthC} &
%\ShortOption{\None} &
\Default{0}%
\Default{0}%
\Default{0}%
\Default{0} &
Specifies the various bit-depths for chroma components. These only need
to be specified if non-equal luma and chroma bit-depth processing is
......@@ -973,7 +971,7 @@ Enables writing of a decoding parameter set. If disabled, no parameter set will
\Option{MaxBitDepthConstraint} &
%\ShortOption{\None} &
\Default{0} &
For --profile=main-RExt, specifies the value to use to derive the general_max_bit_depth constraint flags for RExt profiles; when 0, use $\max(InternalBitDepth, InternalBitDepthC)$
For --profile=main-RExt, specifies the value used to derive the general_max_bit_depth constraint flags for RExt profiles; when 0, InternalBitDepth is used.
\\
\Option{MaxChromaFormatConstraint} &
......@@ -1031,7 +1029,7 @@ Specifies the value of general_frame_only_constraint_flag
%% Layer parameters
%%
\begin{OptionTableNoShorthand}{Layer parameters}{tab:unit}
\begin{OptionTableNoShorthand}{Layer parameters}{tab:layer}
\Option{MaxLayers} &
%\ShortOption{\None} &
\Default{1} &
......@@ -1687,6 +1685,12 @@ If ScalingList is set to 2 and this parameter is an empty string, information on
is output and the encoder stops.
\\
\Option{DisableScalingMatrixForLFNST} &
%\ShortOption{\None} &
\Default{true} &
Specifies whether scaling matrices are disabled for blocks coded with LFNST; when true, no scaling matrix is applied to LFNST-coded blocks.
\\
\Option{MaxCUChromaQpAdjustmentDepth} &
%\ShortOption{\None} &
\Default{-1} &
......@@ -2216,6 +2220,30 @@ Enables signaling the below parameters either in PPS or for each slice according
\end{tabular}
\\
\Option{SliceLevelRpl} &
%\ShortOption{\None} &
\Default{true} &
Code reference picture lists in the slice headers rather than in the picture header.
\\
\Option{SliceLevelDblk} &
%\ShortOption{\None} &
\Default{true} &
Code deblocking filter parameters in the slice headers rather than in the picture header.
\\
\Option{SliceLevelSao} &
%\ShortOption{\None} &
\Default{true} &
Code SAO parameters in the slice headers rather than in the picture header.
\\
\Option{SliceLevelAlf} &
%\ShortOption{\None} &
\Default{true} &
Code ALF parameters in the slice headers rather than in the picture header; see the configuration sketch below.
\\
\Option{TransformSkip} &
%\ShortOption{\None} &
\Default{false} &
......@@ -2337,6 +2365,12 @@ Adaptive LMCS mapping derivation options: Options 1 to 4 are for experimental te
LMCS initial total codeword (valid values [$0 - 1023$]) to be used in LMCS mapping derivation when LMCSAdpOption is not equal to 0.
\\
\Option{ColorTransform} &
%\ShortOption{\None} &
\Default{false} &
Enables or disables the use of adaptive color transform (ACT).
\\
\end{OptionTableNoShorthand}
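For illustration, a minimal configuration fragment collecting the options documented above (a sketch using the documented option names; the values shown flip the defaults):
SliceLevelRpl        : 0        # signal reference picture lists in the picture header
SliceLevelDblk       : 0        # signal deblocking filter parameters in the picture header
SliceLevelSao        : 0        # signal SAO parameters in the picture header
SliceLevelAlf        : 0        # signal ALF parameters in the picture header
ColorTransform       : 1        # enable adaptive colour transform (ACT)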
%%
......@@ -3454,7 +3488,7 @@ Defines the input bit stream file name.
\Option{ReconFile (-o)} &
%\ShortOption{-o} &
\Default{\NotSet} &
Defines reconstructed YUV file name. If empty, no file is generated. For layered coding bitstream, each layer reconstructed YUV file name is formed by adding the layer index suffix to ReconFile.
Defines the reconstructed video file name. If empty, no file is generated. If the bitstream contains multiple layers and no single target layer is specified (i.e. TargetLayer=-1), a reconstructed file is written for each layer and the layer index is appended as a suffix to ReconFile. If the file name contains one or more dots, the layer index is inserted before the last dot, e.g. 'decoded.yuv' becomes 'decoded0.yuv' for layer index 0 and 'decoded' becomes 'decoded0'.
\\
\Option{SkipFrames (-s)} &
......
......@@ -118,6 +118,48 @@ uint32_t DecApp::decode()
while (!!bitstreamFile)
{
#if JVET_P1006_PICTURE_HEADER
InputNALUnit nalu;
nalu.m_nalUnitType = NAL_UNIT_INVALID;
// determine if next NAL unit will be the first one from a new picture
bool bNewPicture = isNewPicture(&bitstreamFile, &bytestream);
if(!bNewPicture)
{
AnnexBStats stats = AnnexBStats();
// find next NAL unit in stream
byteStreamNALUnit(bytestream, nalu.getBitstream().getFifo(), stats);
if (nalu.getBitstream().getFifo().empty())
{
/* this can happen if the following occur:
* - empty input file
* - two back-to-back start_code_prefixes
* - start_code_prefix immediately followed by EOF
*/
msg( ERROR, "Warning: Attempt to decode an empty NAL unit\n");
}
else
{
// read NAL unit header
read(nalu);
// flush output for first slice of an IDR picture
if(m_cDecLib.getFirstSliceInPicture() &&
(nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_W_RADL ||
nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_N_LP))
{
xFlushOutput(pcListPic);
}
// parse NAL unit syntax if within target decoding layer
if ((m_iMaxTemporalLayer < 0 || nalu.m_temporalId <= m_iMaxTemporalLayer) && isNaluWithinTargetDecLayerIdSet(&nalu) && isNaluTheTargetLayer(&nalu))
{
m_cDecLib.decode(nalu, m_iSkipFrame, m_iPOCLastDisplay);
}
}
}
#else
/* location serves to work around a design fault in the decoder, whereby
* the process of reading a new slice that is the first slice of a new frame
* requires the DecApp::decode() method to be called again with the same
......@@ -154,6 +196,9 @@ uint32_t DecApp::decode()
else
{
read(nalu);
#if JVET_P0366_NUT_CONSTRAINT_FLAGS
m_cDecLib.checkNalUnitConstraints(nalu.m_nalUnitType);
#endif
if(m_cDecLib.getFirstSliceInPicture() &&
(nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_W_RADL ||
......@@ -199,6 +244,7 @@ uint32_t DecApp::decode()
}
}
#endif
if( ( bNewPicture || !bitstreamFile || nalu.m_nalUnitType == NAL_UNIT_EOS ) && !m_cDecLib.getFirstSliceInSequence() )
......@@ -207,8 +253,10 @@ uint32_t DecApp::decode()
{
m_cDecLib.executeLoopFilters();
m_cDecLib.finishPicture( poc, pcListPic );
#if !JVET_P1006_PICTURE_HEADER
#if RExt__DECODER_DEBUG_TOOL_MAX_FRAME_STATS
CodingStatistics::UpdateMaxStat(backupStats);
#endif
#endif
}
loopFiltered = (nalu.m_nalUnitType == NAL_UNIT_EOS);
......@@ -248,9 +296,17 @@ uint32_t DecApp::decode()
#if JVET_N0278_FIXES
std::string reconFileName = m_reconFileName;
if( m_reconFileName.compare( "/dev/null" ) )
if( m_reconFileName.compare( "/dev/null" ) && (m_cDecLib.getVPS() != nullptr) && (m_cDecLib.getVPS()->getMaxLayers() > 1) && (m_iTargetLayer == -1) )
{
reconFileName.insert( reconFileName.size() - 4, std::to_string( nalu.m_nuhLayerId ) );
size_t pos = reconFileName.find_last_of('.');
if (pos != string::npos)
{
reconFileName.insert( pos, std::to_string( nalu.m_nuhLayerId ) );
}
else
{
reconFileName.append( std::to_string( nalu.m_nuhLayerId ) );
}
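// e.g. for layer id 0, "decoded.yuv" becomes "decoded0.yuv" and "decoded" becomes "decoded0"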
}
m_cVideoIOYuvReconFile[nalu.m_nuhLayerId].open( reconFileName, true, m_outputBitDepth, m_outputBitDepth, bitDepths.recon ); // write mode
#else
......@@ -279,8 +335,17 @@ uint32_t DecApp::decode()
xWriteOutput( pcListPic, nalu.m_temporalId );
}
}
#if JVET_P1006_PICTURE_HEADER
if(bNewPicture)
{
m_cDecLib.resetAccessUnitNals();
m_cDecLib.resetAccessUnitApsNals();
}
#endif
#if !JVET_P1006_PICTURE_HEADER
#if RExt__DECODER_DEBUG_STATISTICS
delete backupStats;
#endif
#endif
}
......@@ -303,6 +368,118 @@ uint32_t DecApp::decode()
return nRet;
}
#if JVET_P1006_PICTURE_HEADER
/**
- look ahead through the next NAL units to determine whether the current NAL unit is the first NAL unit of a new picture
*/
bool DecApp::isNewPicture(ifstream *bitstreamFile, class InputByteStream *bytestream)
{
bool ret = false;
bool finished = false;
// cannot be a new picture if there haven't been any slices yet
if(m_cDecLib.getFirstSliceInPicture())
{
return false;
}
// save stream position for backup
#if RExt__DECODER_DEBUG_STATISTICS
CodingStatistics::CodingStatisticsData* backupStats = new CodingStatistics::CodingStatisticsData(CodingStatistics::GetStatistics());
streampos location = bitstreamFile->tellg() - streampos(bytestream->GetNumBufferedBytes());
#else
streampos location = bitstreamFile->tellg();
#endif
// look ahead until picture start location is determined
while (!finished && !!(*bitstreamFile))
{
AnnexBStats stats = AnnexBStats();
InputNALUnit nalu;
byteStreamNALUnit(*bytestream, nalu.getBitstream().getFifo(), stats);
if (nalu.getBitstream().getFifo().empty())
{
msg( ERROR, "Warning: Attempt to decode an empty NAL unit\n");
}
else
{
// get next NAL unit type
read(nalu);
switch( nalu.m_nalUnitType ) {
// NUT that indicate the start of a new picture
case NAL_UNIT_ACCESS_UNIT_DELIMITER:
case NAL_UNIT_DPS:
case NAL_UNIT_VPS:
case NAL_UNIT_SPS:
case NAL_UNIT_PPS:
case NAL_UNIT_PH:
ret = true;
finished = true;
break;
// NUT that are not the start of a new picture
case NAL_UNIT_CODED_SLICE_TRAIL:
case NAL_UNIT_CODED_SLICE_STSA:
case NAL_UNIT_CODED_SLICE_RASL:
case NAL_UNIT_CODED_SLICE_RADL:
case NAL_UNIT_RESERVED_VCL_4:
case NAL_UNIT_RESERVED_VCL_5:
case NAL_UNIT_RESERVED_VCL_6:
case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
case NAL_UNIT_CODED_SLICE_IDR_N_LP:
case NAL_UNIT_CODED_SLICE_CRA:
case NAL_UNIT_CODED_SLICE_GDR:
case NAL_UNIT_RESERVED_IRAP_VCL_11:
case NAL_UNIT_RESERVED_IRAP_VCL_12:
case NAL_UNIT_EOS:
case NAL_UNIT_EOB:
#if JVET_P0588_SUFFIX_APS
case NAL_UNIT_SUFFIX_APS:
#endif
case NAL_UNIT_SUFFIX_SEI:
case NAL_UNIT_FD:
ret = false;
finished = true;
break;
// NUT that might indicate the start of a new picture - keep looking
#if JVET_P0588_SUFFIX_APS
case NAL_UNIT_PREFIX_APS:
#else
case NAL_UNIT_APS:
#endif
case NAL_UNIT_PREFIX_SEI:
case NAL_UNIT_RESERVED_NVCL_26:
case NAL_UNIT_RESERVED_NVCL_27:
case NAL_UNIT_UNSPECIFIED_28:
case NAL_UNIT_UNSPECIFIED_29:
case NAL_UNIT_UNSPECIFIED_30:
case NAL_UNIT_UNSPECIFIED_31:
default:
break;
}
}
}
// restore previous stream location - minus 3 due to the need for the annexB parser to read three extra bytes
#if RExt__DECODER_DEBUG_BIT_STATISTICS
bitstreamFile->clear();
bitstreamFile->seekg(location);
bytestream->reset();
CodingStatistics::SetStatistics(*backupStats);
delete backupStats;
#else
bitstreamFile->clear();
bitstreamFile->seekg(location-streamoff(3));
bytestream->reset();
#endif
// return TRUE if next NAL unit is the start of a new picture
return ret;
}
#endif
// ====================================================================================================================
// Protected member functions
// ====================================================================================================================
......@@ -703,6 +880,9 @@ void DecApp::xFlushOutput( PicList* pcListPic )
pcPic->destroy();
delete pcPic;
pcPic = NULL;
#if JVET_N0278_FIXES
*iterPic = nullptr;
#endif
}
iterPic++;
}
......@@ -711,13 +891,7 @@ void DecApp::xFlushOutput( PicList* pcListPic )
#if JVET_N0278_FIXES
if( layerId != NOT_VALID )
{
for( iterPic = pcListPic->begin(); iterPic != pcListPic->end(); iterPic++ )
{
if( *iterPic == nullptr )
{
pcListPic->erase( iterPic );
}
}
pcListPic->remove_if([](Picture* p) { return p == nullptr; });
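// remove_if drops the entries nulled above in a single pass; erasing inside the iteration loop (as the replaced code did) would invalidate the iterator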
}
else
#endif
......
......@@ -91,6 +91,9 @@ private:
#endif
bool isNaluWithinTargetDecLayerIdSet ( InputNALUnit* nalu ); ///< check whether given Nalu is within targetDecLayerIdSet
bool isNaluTheTargetLayer(InputNALUnit* nalu); ///< check whether given Nalu belongs to the target decoding layer
#if JVET_P1006_PICTURE_HEADER
bool isNewPicture(ifstream *bitstreamFile, class InputByteStream *bytestream); ///< check if next NAL unit will be the first NAL unit from a new picture
#endif
};
//! \}
......
......@@ -173,6 +173,16 @@ void EncApp::xInitLibCfg()
m_cEncLib.setNoQpDeltaConstraintFlag ( m_bNoQpDeltaConstraintFlag );
m_cEncLib.setNoDepQuantConstraintFlag ( !m_depQuantEnabledFlag);
m_cEncLib.setNoSignDataHidingConstraintFlag ( !m_signDataHidingEnabledFlag );
#if JVET_P0366_NUT_CONSTRAINT_FLAGS
m_cEncLib.setNoTrailConstraintFlag ( m_iIntraPeriod == 1 );
m_cEncLib.setNoStsaConstraintFlag ( m_iIntraPeriod == 1 || !xHasNonZeroTemporalID() );
m_cEncLib.setNoRaslConstraintFlag ( m_iIntraPeriod == 1 || !xHasLeadingPicture() );
m_cEncLib.setNoRadlConstraintFlag ( m_iIntraPeriod == 1 || !xHasLeadingPicture() );
m_cEncLib.setNoIdrConstraintFlag ( false ); // Not yet possible to encode bitstream starting with a GDR picture
m_cEncLib.setNoCraConstraintFlag ( m_iDecodingRefreshType != 1 );
m_cEncLib.setNoGdrConstraintFlag ( false ); // Not yet possible to encode GDR using config parameters
m_cEncLib.setNoApsConstraintFlag ( !m_alf && !m_lumaReshapeEnable && m_useScalingListId == SCALING_LIST_OFF);
#endif
//====== Coding Structure ========
m_cEncLib.setIntraPeriod ( m_iIntraPeriod );
......@@ -335,6 +345,10 @@ void EncApp::xInitLibCfg()
m_cEncLib.setDMVR ( m_DMVR );
m_cEncLib.setMMVD ( m_MMVD );
m_cEncLib.setMmvdDisNum (m_MmvdDisNum);
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
m_cEncLib.setRGBFormatFlag(m_rgbFormat);
m_cEncLib.setUseColorTrans(m_useColorTrans);
#endif
m_cEncLib.setPLTMode ( m_PLTMode );
m_cEncLib.setJointCbCr ( m_JointCbCrMode );
m_cEncLib.setIBCMode ( m_IBCMode );
......@@ -572,6 +586,12 @@ void EncApp::xInitLibCfg()
m_cEncLib.setLFCrossTileBoundaryFlag ( m_bLFCrossTileBoundaryFlag );
m_cEncLib.setEntropyCodingSyncEnabledFlag ( m_entropyCodingSyncEnabledFlag );
m_cEncLib.setTMVPModeId ( m_TMVPModeId );
#if JVET_P1006_PICTURE_HEADER
m_cEncLib.setSliceLevelRpl ( m_sliceLevelRpl );
m_cEncLib.setSliceLevelDblk ( m_sliceLevelDblk );
m_cEncLib.setSliceLevelSao ( m_sliceLevelSao );
m_cEncLib.setSliceLevelAlf ( m_sliceLevelAlf );
#endif
m_cEncLib.setConstantSliceHeaderParamsEnabledFlag ( m_constantSliceHeaderParamsEnabledFlag );
m_cEncLib.setPPSDepQuantEnabledIdc ( m_PPSDepQuantEnabledIdc );
m_cEncLib.setPPSRefPicListSPSIdc0 ( m_PPSRefPicListSPSIdc0 );
......@@ -588,6 +608,9 @@ void EncApp::xInitLibCfg()
m_cEncLib.setPPSMaxNumMergeCandMinusMaxNumTriangleCandPlus1 ( m_PPSMaxNumMergeCandMinusMaxNumTriangleCandPlus1 );
m_cEncLib.setUseScalingListId ( m_useScalingListId );
m_cEncLib.setScalingListFileName ( m_scalingListFileName );
#if JVET_P0365_SCALING_MATRIX_LFNST
m_cEncLib.setDisableScalingMatrixForLfnstBlks ( m_disableScalingMatrixForLfnstBlks);
#endif
m_cEncLib.setDepQuantEnabledFlag ( m_depQuantEnabledFlag);
m_cEncLib.setSignDataHidingEnabledFlag ( m_signDataHidingEnabledFlag);
m_cEncLib.setUseRateCtrl ( m_RCEnableRateControl );
......@@ -715,9 +738,17 @@ void EncApp::xCreateLib( std::list<PelUnitBuf*>& recBufList )
#if JVET_N0278_FIXES
std::string reconFileName = m_reconFileName;
if( m_reconFileName.compare( "/dev/null" ) )
if( m_reconFileName.compare( "/dev/null" ) && (m_maxLayers > 1) )
{
reconFileName.insert( reconFileName.size() - 4, std::to_string( layerId ) );
size_t pos = reconFileName.find_last_of('.');
if (pos != string::npos)
{
reconFileName.insert( pos, std::to_string( layerId ) );
}
else
{
reconFileName.append( std::to_string( layerId ) );
}
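// e.g. for layer id 0, "rec.yuv" becomes "rec0.yuv" and "rec" becomes "rec0"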
}
m_cVideoIOYuvReconFile.open( reconFileName, true, m_outputBitDepth, m_outputBitDepth, m_internalBitDepth ); // write mode
#else
......@@ -1170,7 +1201,15 @@ void EncApp::rateStatsAccum(const AccessUnit& au, const std::vector<uint32_t>& a
case NAL_UNIT_VPS:
case NAL_UNIT_SPS:
case NAL_UNIT_PPS:
#if JVET_P1006_PICTURE_HEADER
case NAL_UNIT_PH:
#endif
#if JVET_P0588_SUFFIX_APS
case NAL_UNIT_PREFIX_APS:
case NAL_UNIT_SUFFIX_APS:
#else
case NAL_UNIT_APS:
#endif
m_essentialBytes += *it_stats;
break;
default:
......
......@@ -163,6 +163,16 @@ protected:
bool m_bNoQpDeltaConstraintFlag;
bool m_bNoDepQuantConstraintFlag;
bool m_bNoSignDataHidingConstraintFlag;
#if JVET_P0366_NUT_CONSTRAINT_FLAGS
bool m_noTrailConstraintFlag;
bool m_noStsaConstraintFlag;
bool m_noRaslConstraintFlag;
bool m_noRadlConstraintFlag;
bool m_noIdrConstraintFlag;
bool m_noCraConstraintFlag;
bool m_noGdrConstraintFlag;
bool m_noApsConstraintFlag;
#endif
// profile/level
Profile::Name m_profile;
......@@ -307,6 +317,10 @@ protected:
bool m_DMVR;
bool m_MMVD;
int m_MmvdDisNum;
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
bool m_rgbFormat;
bool m_useColorTrans;
#endif
unsigned m_PLTMode;
bool m_JointCbCrMode;
#if JVET_P0058_CHROMA_TS
......@@ -542,6 +556,12 @@ protected:
uint32_t m_maxNumTriangleCand;
uint32_t m_maxNumIBCMergeCand; ///< Max number of IBC merge candidates
#if JVET_P1006_PICTURE_HEADER
bool m_sliceLevelRpl; ///< code reference picture lists in slice headers rather than picture header
bool m_sliceLevelDblk; ///< code deblocking filter parameters in slice headers rather than picture header
bool m_sliceLevelSao; ///< code SAO parameters in slice headers rather than picture header
bool m_sliceLevelAlf; ///< code ALF parameters in slice headers rather than picture header
#endif
int m_TMVPModeId;
int m_PPSorSliceMode;
bool m_constantSliceHeaderParamsEnabledFlag;
......@@ -574,6 +594,9 @@ protected:
#endif
ScalingListMode m_useScalingListId; ///< using quantization matrix
std::string m_scalingListFileName; ///< quantization matrix file name
#if JVET_P0365_SCALING_MATRIX_LFNST
bool m_disableScalingMatrixForLfnstBlks;
#endif
bool m_TransquantBypassEnabledFlag; ///< transquant_bypass_enabled_flag setting in PPS.
bool m_CUTransquantBypassFlagForce; ///< if transquant_bypass_enabled_flag, then, if true, all CU transquant bypass flags will be set to true.
CostMode m_costMode; ///< Cost mode to use
......@@ -671,6 +694,10 @@ protected:
bool xCheckParameter (); ///< check validity of configuration values
void xPrintParameter (); ///< print configuration values
void xPrintUsage (); ///< print usage
#if JVET_P0366_NUT_CONSTRAINT_FLAGS
bool xHasNonZeroTemporalID(); ///< check for the presence of non-zero temporal IDs in the GOP structure
bool xHasLeadingPicture(); ///< check presence of leading pictures in GOP structure
#endif
public:
EncAppCfg();
virtual ~EncAppCfg();
......
......@@ -48,9 +48,20 @@
class ParcatHLSyntaxReader : public VLCReader
{
public:
#if JVET_P1006_PICTURE_HEADER
void parseSliceHeaderUpToPoc ( ParameterSetManager *parameterSetManager );
#else
bool parseSliceHeaderUpToPoc ( ParameterSetManager *parameterSetManager, bool isRapPic );
#endif
};
#if JVET_P1006_PICTURE_HEADER
void ParcatHLSyntaxReader::parseSliceHeaderUpToPoc ( ParameterSetManager *parameterSetManager )
{
// POC is first syntax element in slice header
return;
}
#else
bool ParcatHLSyntaxReader::parseSliceHeaderUpToPoc ( ParameterSetManager *parameterSetManager, bool isRapPic )
{
uint32_t uiCode;
......@@ -120,6 +131,7 @@ bool ParcatHLSyntaxReader::parseSliceHeaderUpToPoc ( ParameterSetManager *parame
return firstSliceSegmentInPic;
}
#endif
/**
Find the beginning and end of a NAL (Network Abstraction Layer) unit in a byte buffer containing an Annex B bitstream.
......@@ -200,7 +212,12 @@ const char * NALU_TYPE[] =
"NAL_UNIT_VPS",
"NAL_UNIT_SPS",
"NAL_UNIT_PPS",
#if JVET_P0588_SUFFIX_APS
"NAL_UNIT_PREFIX_APS",
"NAL_UNIT_SUFFIX_APS",
#else
"NAL_UNIT_APS",
#endif
"NAL_UNIT_PH",
"NAL_UNIT_ACCESS_UNIT_DELIMITER",
"NAL_UNIT_EOS",
......@@ -291,6 +308,9 @@ std::vector<uint8_t> filter_segment(const std::vector<uint8_t> & v, int idx, int
int bits_for_poc = 8;
bool skip_next_sei = false;
#if JVET_P1006_PICTURE_HEADER
bool first_slice_segment_in_pic_flag = false;
#endif
while(find_nal_unit(p, sz, &nal_start, &nal_end) > 0)
{
......@@ -337,19 +357,33 @@ std::vector<uint8_t> filter_segment(const std::vector<uint8_t> & v, int idx, int
HLSReader.parsePPS( pps, &parameterSetManager );
parameterSetManager.storePPS( pps, inp_nalu.getBitstream().getFifo() );
}
#if JVET_P1006_PICTURE_HEADER
if( inp_nalu.m_nalUnitType == NAL_UNIT_PH )
{
first_slice_segment_in_pic_flag = true;
}
#endif
if(nalu_type == NAL_UNIT_CODED_SLICE_IDR_W_RADL || nalu_type == NAL_UNIT_CODED_SLICE_IDR_N_LP)
{
poc = 0;
new_poc = *poc_base + poc;
#if JVET_P1006_PICTURE_HEADER
first_slice_segment_in_pic_flag = false;
#endif
}
#if JVET_P0363_CLEANUP_NUT_TABLE
if((nalu_type < NAL_UNIT_CODED_SLICE_IDR_W_RADL) || (nalu_type > NAL_UNIT_CODED_SLICE_IDR_N_LP && nalu_type < NAL_UNIT_RESERVED_IRAP_VCL_12) )
if((nalu_type < NAL_UNIT_CODED_SLICE_IDR_W_RADL) || (nalu_type > NAL_UNIT_CODED_SLICE_IDR_N_LP && nalu_type <= NAL_UNIT_RESERVED_IRAP_VCL_12) )
#else
if((nalu_type < 7) || (nalu_type > 9 && nalu_type < 15) )
#endif
{
parcatHLSReader.setBitstream( &inp_nalu.getBitstream() );
#if JVET_P1006_PICTURE_HEADER
// beginning of slice header parsing, taken from VLCReader
parcatHLSReader.parseSliceHeaderUpToPoc( &parameterSetManager );
#else
bool isRapPic =
inp_nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_W_RADL
|| inp_nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR_N_LP
......@@ -357,6 +391,7 @@ std::vector<uint8_t> filter_segment(const std::vector<uint8_t> & v, int idx, int
// beginning of slice header parsing, taken from VLCReader
bool first_slice_segment_in_pic_flag = parcatHLSReader.parseSliceHeaderUpToPoc( &parameterSetManager, isRapPic);
#endif
int num_bits_up_to_poc_lsb = parcatHLSReader.getBitstream()->getNumBitsRead();
int offset = num_bits_up_to_poc_lsb;
......@@ -371,7 +406,11 @@ std::vector<uint8_t> filter_segment(const std::vector<uint8_t> & v, int idx, int
// int picOrderCntLSB = (pcSlice->getPOC()-pcSlice->getLastIDR()+(1<<pcSlice->getSPS()->getBitsForPOC())) & ((1<<pcSlice->getSPS()->getBitsForPOC())-1);
unsigned picOrderCntLSB = (new_poc - *last_idr_poc +(1 << bits_for_poc)) & ((1<<bits_for_poc)-1);
#if JVET_P1006_PICTURE_HEADER
int low = data & ((1 << low_bits) - 1);
#else
int low = data & ((1 << (low_bits + 1)) - 1);
#endif
int hi = data >> (16 - hi_bits);
data = (hi << (16 - hi_bits)) | (picOrderCntLSB << low_bits) | low;
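// splice the new POC LSB into the 16-bit window: the hi_bits header bits stay above it and the low_bits trailing bits stay below it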
......@@ -384,6 +423,9 @@ std::vector<uint8_t> filter_segment(const std::vector<uint8_t> & v, int idx, int
std::cout << "Changed poc " << poc << " to " << new_poc << std::endl;
#endif
++cnt;
#if JVET_P1006_PICTURE_HEADER
first_slice_segment_in_pic_flag = false;
#endif
}
}
......@@ -393,7 +435,19 @@ std::vector<uint8_t> filter_segment(const std::vector<uint8_t> & v, int idx, int
idr_found = true;
}
#if JVET_P1006_PICTURE_HEADER
#if JVET_P0588_SUFFIX_APS
if( ( idx > 1 && ( nalu_type == NAL_UNIT_CODED_SLICE_IDR_W_RADL || nalu_type == NAL_UNIT_CODED_SLICE_IDR_N_LP ) ) || ( ( idx > 1 && !idr_found ) && ( nalu_type == NAL_UNIT_DPS || nalu_type == NAL_UNIT_VPS || nalu_type == NAL_UNIT_SPS || nalu_type == NAL_UNIT_PPS || nalu_type == NAL_UNIT_PREFIX_APS || nalu_type == NAL_UNIT_SUFFIX_APS || nalu_type == NAL_UNIT_PH || nalu_type == NAL_UNIT_ACCESS_UNIT_DELIMITER ) )
#else
if((idx > 1 && (nalu_type == NAL_UNIT_CODED_SLICE_IDR_W_RADL || nalu_type == NAL_UNIT_CODED_SLICE_IDR_N_LP)) || ((idx > 1 && !idr_found) && (nalu_type == NAL_UNIT_DPS || nalu_type == NAL_UNIT_VPS ||nalu_type == NAL_UNIT_SPS || nalu_type == NAL_UNIT_PPS || nalu_type == NAL_UNIT_APS || nalu_type == NAL_UNIT_PH || nalu_type == NAL_UNIT_ACCESS_UNIT_DELIMITER))
#endif
#else
#if JVET_P0588_SUFFIX_APS
if( ( idx > 1 && ( nalu_type == NAL_UNIT_CODED_SLICE_IDR_W_RADL || nalu_type == NAL_UNIT_CODED_SLICE_IDR_N_LP ) ) || ( ( idx > 1 && !idr_found ) && ( nalu_type == NAL_UNIT_DPS || nalu_type == NAL_UNIT_VPS || nalu_type == NAL_UNIT_SPS || nalu_type == NAL_UNIT_PPS || nalu_type == NAL_UNIT_PREFIX_APS || nalu_type == NAL_UNIT_SUFFIX_APS || nalu_type == NAL_UNIT_ACCESS_UNIT_DELIMITER ) )
#else
if((idx > 1 && (nalu_type == NAL_UNIT_CODED_SLICE_IDR_W_RADL || nalu_type == NAL_UNIT_CODED_SLICE_IDR_N_LP)) || ((idx > 1 && !idr_found) && (nalu_type == NAL_UNIT_DPS || nalu_type == NAL_UNIT_VPS ||nalu_type == NAL_UNIT_SPS || nalu_type == NAL_UNIT_PPS || nalu_type == NAL_UNIT_APS || nalu_type == NAL_UNIT_ACCESS_UNIT_DELIMITER))
#endif
#endif
|| (nalu_type == NAL_UNIT_SUFFIX_SEI && skip_next_sei))
{
}
......
......@@ -78,6 +78,9 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
{
const Slice& slice = *( cs.slice );
const PPS& pps = *( cs.pps );
#if JVET_P1006_PICTURE_HEADER
const PicHeader& picHeader = *( cs.picHeader );
#endif
int ctuSize = slice.getSPS()->getCTUSize();
const Position currCtuPos( posX, posY );
const CodingUnit *currCtu = cs.getCU( currCtuPos, CHANNEL_TYPE_LUMA );
......@@ -91,7 +94,11 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
const Position prevCtuPos( posX, posY - ctuSize );
const CodingUnit *prevCtu = cs.getCU( prevCtuPos, CHANNEL_TYPE_LUMA );
#if JVET_P1006_PICTURE_HEADER
if ( !pps.getLoopFilterAcrossSlicesEnabledFlag() && !CU::isSameSlice( *currCtu, *prevCtu ) )
#else
if ( !slice.getLFCrossSliceBoundaryFlag() && !CU::isSameSlice( *currCtu, *prevCtu ) )
#endif
{
topBry = posY;
}
......@@ -108,7 +115,11 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
const Position nextCtuPos( posX, posY + ctuSize );
const CodingUnit *nextCtu = cs.getCU( nextCtuPos, CHANNEL_TYPE_LUMA );
#if JVET_P1006_PICTURE_HEADER
if ( !pps.getLoopFilterAcrossSlicesEnabledFlag() && !CU::isSameSlice( *currCtu, *nextCtu ) )
#else
if ( !slice.getLFCrossSliceBoundaryFlag() && !CU::isSameSlice( *currCtu, *nextCtu ) )
#endif
{
botBry = posY + ctuSize;
}
......@@ -125,7 +136,11 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
const Position prevCtuPos( posX - ctuSize, posY );
const CodingUnit *prevCtu = cs.getCU( prevCtuPos, CHANNEL_TYPE_LUMA );
#if JVET_P1006_PICTURE_HEADER
if ( !pps.getLoopFilterAcrossSlicesEnabledFlag() && !CU::isSameSlice( *currCtu, *prevCtu ) )
#else
if ( !slice.getLFCrossSliceBoundaryFlag() && !CU::isSameSlice( *currCtu, *prevCtu ) )
#endif
{
leftBry = posX;
}
......@@ -142,7 +157,11 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
const Position nextCtuPos( posX + ctuSize, posY );
const CodingUnit *nextCtu = cs.getCU( nextCtuPos, CHANNEL_TYPE_LUMA );
#if JVET_P1006_PICTURE_HEADER
if ( !pps.getLoopFilterAcrossSlicesEnabledFlag() && !CU::isSameSlice( *currCtu, *nextCtu ) )
#else
if ( !slice.getLFCrossSliceBoundaryFlag() && !CU::isSameSlice( *currCtu, *nextCtu ) )
#endif
{
rightBry = posX + ctuSize;
}
......@@ -153,6 +172,34 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
}
}
#if JVET_P1006_PICTURE_HEADER
if( picHeader.getLoopFilterAcrossVirtualBoundariesDisabledFlag() )
{
for( int i = 0; i < picHeader.getNumHorVirtualBoundaries(); i++ )
{
if( picHeader.getVirtualBoundariesPosY( i ) == posY )
{
topBry = posY;
}
else if( picHeader.getVirtualBoundariesPosY( i ) == posY + ctuSize )
{
botBry = posY + ctuSize;
}
}
for( int i = 0; i < picHeader.getNumVerVirtualBoundaries(); i++ )
{
if( picHeader.getVirtualBoundariesPosX( i ) == posX )
{
leftBry = posX;
}
else if( picHeader.getVirtualBoundariesPosX( i ) == posX + ctuSize )
{
rightBry = posX + ctuSize;
}
}
}
#else
if( pps.getLoopFilterAcrossVirtualBoundariesDisabledFlag() )
{
for( int i = 0; i < pps.getNumHorVirtualBoundaries(); i++ )
......@@ -179,8 +226,37 @@ void AdaptiveLoopFilter::getAlfBoundary( const CodingStructure& cs, int posX, in
}
}
}
#endif
}
#if JVET_P1006_PICTURE_HEADER
bool AdaptiveLoopFilter::isCrossedByVirtualBoundaries( const CodingStructure& cs, const int xPos, const int yPos, const int width, const int height, int &topBry, int &botBry, int &leftBry, int &rightBry, int& numHorVirBndry, int& numVerVirBndry, int horVirBndryPos[], int verVirBndryPos[], const PicHeader* picHeader)
{
numHorVirBndry = 0; numVerVirBndry = 0;
if( picHeader->getLoopFilterAcrossVirtualBoundariesDisabledFlag() )
{
for( int i = 0; i < picHeader->getNumHorVirtualBoundaries(); i++ )
{
if( yPos < picHeader->getVirtualBoundariesPosY(i) && picHeader->getVirtualBoundariesPosY(i) < yPos + height )
{
horVirBndryPos[numHorVirBndry++] = picHeader->getVirtualBoundariesPosY(i);
}
}
for( int i = 0; i < picHeader->getNumVerVirtualBoundaries(); i++ )
{
if( xPos < picHeader->getVirtualBoundariesPosX(i) && picHeader->getVirtualBoundariesPosX(i) < xPos + width )
{
verVirBndryPos[numVerVirBndry++] = picHeader->getVirtualBoundariesPosX(i);
}
}
}
getAlfBoundary( cs, xPos, yPos, topBry, botBry, leftBry, rightBry );
return numHorVirBndry > 0 || numVerVirBndry > 0 || ( topBry != ALF_NONE_BOUNDARY ) || ( botBry != ALF_NONE_BOUNDARY ) || ( leftBry != ALF_NONE_BOUNDARY ) || ( rightBry != ALF_NONE_BOUNDARY );
}
#else
bool AdaptiveLoopFilter::isCrossedByVirtualBoundaries( const CodingStructure& cs, const int xPos, const int yPos, const int width, const int height, int &topBry, int &botBry, int &leftBry, int &rightBry, int& numHorVirBndry, int& numVerVirBndry, int horVirBndryPos[], int verVirBndryPos[], const PPS* pps)
{
numHorVirBndry = 0; numVerVirBndry = 0;
......@@ -207,6 +283,7 @@ bool AdaptiveLoopFilter::isCrossedByVirtualBoundaries( const CodingStructure& cs
return numHorVirBndry > 0 || numVerVirBndry > 0 || ( topBry != ALF_NONE_BOUNDARY ) || ( botBry != ALF_NONE_BOUNDARY ) || ( leftBry != ALF_NONE_BOUNDARY ) || ( rightBry != ALF_NONE_BOUNDARY );
}
#endif
#else
bool AdaptiveLoopFilter::isCrossedByVirtualBoundaries( const int xPos, const int yPos, const int width, const int height, bool& clipTop, bool& clipBottom, bool& clipLeft, bool& clipRight, int& numHorVirBndry, int& numVerVirBndry, int horVirBndryPos[], int verVirBndryPos[], const PPS* pps)
{
......@@ -386,7 +463,11 @@ void AdaptiveLoopFilter::ALFProcess(CodingStructure& cs)
ctuEnableFlag |= m_ctuEnableFlag[compIdx][ctuIdx] > 0;
}
#if JVET_O0625_ALF_PADDING
#if JVET_P1006_PICTURE_HEADER
if( ctuEnableFlag && isCrossedByVirtualBoundaries( cs, xPos, yPos, width, height, alfBryList[0], alfBryList[1], alfBryList[2], alfBryList[3], numHorVirBndry, numVerVirBndry, horVirBndryPos, verVirBndryPos, cs.picHeader ) )
#else
if( ctuEnableFlag && isCrossedByVirtualBoundaries( cs, xPos, yPos, width, height, alfBryList[0], alfBryList[1], alfBryList[2], alfBryList[3], numHorVirBndry, numVerVirBndry, horVirBndryPos, verVirBndryPos, cs.slice->getPPS() ) )
#endif
#else
if( ctuEnableFlag && isCrossedByVirtualBoundaries( xPos, yPos, width, height, clipTop, clipBottom, clipLeft, clipRight, numHorVirBndry, numVerVirBndry, horVirBndryPos, verVirBndryPos, cs.slice->getPPS() ) )
#endif
......
......@@ -145,10 +145,14 @@ public:
#endif
protected:
#if JVET_P1006_PICTURE_HEADER
bool isCrossedByVirtualBoundaries( const CodingStructure& cs, const int xPos, const int yPos, const int width, const int height, int &topBry, int &botBry, int &leftBry, int &rightBry, int& numHorVirBndry, int& numVerVirBndry, int horVirBndryPos[], int verVirBndryPos[], const PicHeader* picHeader );
#else
#if JVET_O0625_ALF_PADDING
bool isCrossedByVirtualBoundaries( const CodingStructure& cs, const int xPos, const int yPos, const int width, const int height, int &topBry, int &botBry, int &leftBry, int &rightBry, int& numHorVirBndry, int& numVerVirBndry, int horVirBndryPos[], int verVirBndryPos[], const PPS* pps );
#else
bool isCrossedByVirtualBoundaries( const int xPos, const int yPos, const int width, const int height, bool& clipTop, bool& clipBottom, bool& clipLeft, bool& clipRight, int& numHorVirBndry, int& numVerVirBndry, int horVirBndryPos[], int verVirBndryPos[], const PPS* pps );
#endif
#endif
static const int m_classToFilterMapping[NUM_FIXED_FILTER_SETS][MAX_NUM_ALF_CLASSES];
static const int m_fixedFilterSetCoeff[ALF_FIXED_FILTER_NUM][MAX_NUM_ALF_LUMA_COEFF];
......
......@@ -920,3 +920,76 @@ const CPelUnitBuf PelStorage::getBuf( const UnitArea &unit ) const
return ( chromaFormat == CHROMA_400 ) ? CPelUnitBuf( chromaFormat, getBuf( unit.Y() ) ) : CPelUnitBuf( chromaFormat, getBuf( unit.Y() ), getBuf( unit.Cb() ), getBuf( unit.Cr() ) );
}
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
template<>
void UnitBuf<Pel>::colorSpaceConvert(const UnitBuf<Pel> &other, const bool forward)
{
const Pel* pOrg0 = bufs[COMPONENT_Y].buf;
const Pel* pOrg1 = bufs[COMPONENT_Cb].buf;
const Pel* pOrg2 = bufs[COMPONENT_Cr].buf;
const int strideOrg = bufs[COMPONENT_Y].stride;
Pel* pDst0 = other.bufs[COMPONENT_Y].buf;
Pel* pDst1 = other.bufs[COMPONENT_Cb].buf;
Pel* pDst2 = other.bufs[COMPONENT_Cr].buf;
const int strideDst = other.bufs[COMPONENT_Y].stride;
int width = bufs[COMPONENT_Y].width;
int height = bufs[COMPONENT_Y].height;
int r, g, b;
int y0, cg, co;
CHECK(bufs[COMPONENT_Y].stride != bufs[COMPONENT_Cb].stride || bufs[COMPONENT_Y].stride != bufs[COMPONENT_Cr].stride, "unequal stride for 444 content");
CHECK(other.bufs[COMPONENT_Y].stride != other.bufs[COMPONENT_Cb].stride || other.bufs[COMPONENT_Y].stride != other.bufs[COMPONENT_Cr].stride, "unequal stride for 444 content");
CHECK(bufs[COMPONENT_Y].width != other.bufs[COMPONENT_Y].width || bufs[COMPONENT_Y].height != other.bufs[COMPONENT_Y].height, "unequal block size")
if (forward)
{
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
r = pOrg2[x];
g = pOrg0[x];
b = pOrg1[x];
pDst0[x] = (g << 1) + r + b;
pDst1[x] = (g << 1) - r - b;
pDst2[x] = ((r - b) << 1);
pDst0[x] = (pDst0[x] + 2) >> 2;
pDst1[x] = (pDst1[x] + 2) >> 2;
pDst2[x] = (pDst2[x] + 2) >> 2;
}
pOrg0 += strideOrg;
pOrg1 += strideOrg;
pOrg2 += strideOrg;
pDst0 += strideDst;
pDst1 += strideDst;
pDst2 += strideDst;
}
}
else
{
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
y0 = pOrg0[x];
cg = pOrg1[x];
co = pOrg2[x];
pDst0[x] = (y0 + cg);
pDst1[x] = (y0 - cg - co);
pDst2[x] = (y0 - cg + co);
}
pOrg0 += strideOrg;
pOrg1 += strideOrg;
pOrg2 += strideOrg;
pDst0 += strideDst;
pDst1 += strideDst;
pDst2 += strideDst;
}
}
}
#endif
\ No newline at end of file
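For reference, the conversion implemented above is the rounded YCgCo transform applied to GBR-ordered 4:4:4 planes; the following is a sketch of the mapping as read from the code, not normative text:
\[ Y = (2G + R + B + 2) \gg 2, \quad C_g = (2G - R - B + 2) \gg 2, \quad C_o = (2(R - B) + 2) \gg 2 \]
\[ G = Y + C_g, \quad B = Y - C_g - C_o, \quad R = Y - C_g + C_o \]
Because of the rounding in the forward direction, this pair is not an exact integer-reversible transform.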
......@@ -758,6 +758,9 @@ struct UnitBuf
UnitBuf< T> subBuf (const UnitArea& subArea);
const UnitBuf<const T> subBuf (const UnitArea& subArea) const;
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
void colorSpaceConvert(const UnitBuf<T> &other, const bool forward);
#endif
};
typedef UnitBuf< Pel> PelUnitBuf;
......@@ -873,6 +876,17 @@ void UnitBuf<T>::addAvg(const UnitBuf<const T> &other1, const UnitBuf<const T> &
}
}
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
template<typename T>
void UnitBuf<T>::colorSpaceConvert(const UnitBuf<T> &other, const bool forward)
{
THROW("Type not supported");
}
template<>
void UnitBuf<Pel>::colorSpaceConvert(const UnitBuf<Pel> &other, const bool forward);
#endif
template<typename T>
void UnitBuf<T>::extendSingleBorderPel()
{
......
......@@ -68,6 +68,10 @@ CodingStructure::CodingStructure(CUCache& cuCache, PUCache& puCache, TUCache& tu
, m_puCache ( puCache )
, m_tuCache ( tuCache )
, bestParent ( nullptr )
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
, tmpColorSpaceCost(MAX_DOUBLE)
, firstColorSpaceSelected(true)
#endif
, resetIBCBuffer (false)
{
for( uint32_t i = 0; i < MAX_NUM_COMPONENT; i++ )
......@@ -93,6 +97,11 @@ CodingStructure::CodingStructure(CUCache& cuCache, PUCache& puCache, TUCache& tu
features.resize( NUM_ENC_FEATURES );
treeType = TREE_D;
modeType = MODE_TYPE_ALL;
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
tmpColorSpaceIntraCost[0] = MAX_DOUBLE;
tmpColorSpaceIntraCost[1] = MAX_DOUBLE;
firstColorSpaceTestOnly = false;
#endif
}
void CodingStructure::destroy()
......@@ -1015,6 +1024,9 @@ void CodingStructure::initSubStructure( CodingStructure& subStruct, const Channe
subStruct.sps = sps;
subStruct.vps = vps;
subStruct.pps = pps;
#if JVET_P1006_PICTURE_HEADER
subStruct.picHeader = picHeader;
#endif
memcpy(subStruct.alfApss, alfApss, sizeof(alfApss));
subStruct.lmcsAps = lmcsAps;
......
......@@ -92,6 +92,9 @@ public:
bool isLossless;
const SPS *sps;
const PPS *pps;
#if JVET_P1006_PICTURE_HEADER
PicHeader *picHeader;
#endif
APS* alfApss[ALF_CTB_MAX_NUM_APS];
APS * lmcsAps;
APS * scalinglistAps;
......@@ -240,6 +243,12 @@ private:
public:
CodingStructure *bestParent;
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
double tmpColorSpaceCost;
bool firstColorSpaceSelected;
double tmpColorSpaceIntraCost[2];
bool firstColorSpaceTestOnly;
#endif
bool resetIBCBuffer;
MotionBuf getMotionBuf( const Area& _area );
......
......@@ -285,6 +285,9 @@ static const int LUMA_INTERPOLATION_FILTER_SUB_SAMPLE_POSITIONS_SIGNAL = 1 << MV
static const int LUMA_INTERPOLATION_FILTER_SUB_SAMPLE_POSITIONS = 1 << MV_FRACTIONAL_BITS_INTERNAL;
static const int CHROMA_INTERPOLATION_FILTER_SUB_SAMPLE_POSITIONS = 1 << (MV_FRACTIONAL_BITS_INTERNAL + 1);
#if JVET_P1006_PICTURE_HEADER
static const int MAX_NUM_SUB_PICS = 255;
#endif
static const int MAX_NUM_LONG_TERM_REF_PICS = 33;
static const int NUM_LONG_TERM_REF_PIC_SPS = 0;
......@@ -493,6 +496,10 @@ static const int ENC_PPS_ID_RPR = 3;
static const int SCALE_RATIO_BITS = 14;
static const int MAX_SCALING_RATIO = 8; // max scaling ratio allowed in the software; it is used to allocate an internal buffer in the rescaling
static const std::pair<int, int> SCALE_1X = std::pair<int, int>( 1 << SCALE_RATIO_BITS, 1 << SCALE_RATIO_BITS ); // scale ratio 1x
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
static const int DELTA_QP_FOR_Y_Cg = -5;
static const int DELTA_QP_FOR_Co = -3;
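// QP offsets applied when ACT is enabled: -5 for the Y and Cg components, -3 for Co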
#endif
// ====================================================================================================================
// Macro functions
......
......@@ -372,7 +372,11 @@ void MergeCtx::setMmvdMergeCandiInfo(PredictionUnit& pu, int candIdx)
fPosStep = tempIdx / 4;
fPosPosition = tempIdx - fPosStep * (4);
int offset = refMvdCands[fPosStep];
#if JVET_P1006_PICTURE_HEADER
if ( pu.cu->slice->getPicHeader()->getDisFracMMVD() )
#else
if ( pu.cu->slice->getDisFracMMVD() )
#endif
{
offset <<= 2;
}
......
......@@ -451,6 +451,16 @@ const CtxSet ContextSetCfg::QtRootCbf = ContextSetCfg::addCtxSet
{ 4, },
});
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
const CtxSet ContextSetCfg::ACTFlag = ContextSetCfg::addCtxSet
({
{ CNU, },
{ CNU, },
{ CNU, },
{ DWS, },
});
#endif
const CtxSet ContextSetCfg::QtCbf[] =
{
ContextSetCfg::addCtxSet
......@@ -1216,6 +1226,16 @@ const CtxSet ContextSetCfg::QtRootCbf = ContextSetCfg::addCtxSet
{ 4, },
});
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
const CtxSet ContextSetCfg::ACTFlag = ContextSetCfg::addCtxSet
({
{ CNU, },
{ CNU, },
{ CNU, },
{ DWS, },
});
#endif
const CtxSet ContextSetCfg::QtCbf[] =
{
ContextSetCfg::addCtxSet
......
......@@ -226,6 +226,9 @@ public:
static const CtxSet Mvd;
static const CtxSet BDPCMMode;
static const CtxSet QtRootCbf;
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
static const CtxSet ACTFlag;
#endif
static const CtxSet QtCbf [3]; // [ channel ]
static const CtxSet SigCoeffGroup [2]; // [ ChannelType ]
static const CtxSet LastX [2]; // [ ChannelType ]
......
......@@ -1618,7 +1618,11 @@ DepQuant::~DepQuant()
void DepQuant::quant( TransformUnit &tu, const ComponentID &compID, const CCoeffBuf &pSrc, TCoeff &uiAbsSum, const QpParam &cQP, const Ctx& ctx )
{
#if JVET_P0058_CHROMA_TS
#if JVET_P1006_PICTURE_HEADER
if ( tu.cs->picHeader->getDepQuantEnabledFlag() && (tu.mtsIdx[compID] != MTS_SKIP) )
#else
if ( tu.cs->slice->getDepQuantEnabledFlag() && (tu.mtsIdx[compID] != MTS_SKIP) )
#endif
#else
#if JVET_P0059_CHROMA_BDPCM
if ((tu.cs->slice->getDepQuantEnabledFlag() && (tu.mtsIdx != MTS_SKIP || !isLuma(compID))) &&
......@@ -1649,9 +1653,27 @@ void DepQuant::quant( TransformUnit &tu, const ComponentID &compID, const CCoeff
const uint32_t log2TrWidth = floorLog2(width);
const uint32_t log2TrHeight = floorLog2(height);
#if JVET_P0058_CHROMA_TS
#if JVET_P0365_SCALING_MATRIX_LFNST
#if JVET_P1006_PICTURE_HEADER
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->picHeader->getScalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#else
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->slice->getscalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#endif
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx[compID] == MTS_SKIP), tu.cu->lfnstIdx > 0, disableSMForLFNST);
#else
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx[compID] == MTS_SKIP));
#endif
#else
#if JVET_P0365_SCALING_MATRIX_LFNST
#if JVET_P1006_PICTURE_HEADER
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->picHeader->getScalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#else
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->slice->getscalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#endif
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx == MTS_SKIP && isLuma(compID)), tu.cu->lfnstIdx > 0, disableSMForLFNST);
#else
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx == MTS_SKIP && isLuma(compID)));
#endif
#endif
static_cast<DQIntern::DepQuant*>(p)->quant( tu, pSrc, compID, cQP, Quant::m_dLambda, ctx, uiAbsSum, enableScalingLists, Quant::getQuantCoeff(scalingListType, qpRem, log2TrWidth, log2TrHeight) );
}
......@@ -1664,7 +1686,11 @@ void DepQuant::quant( TransformUnit &tu, const ComponentID &compID, const CCoeff
void DepQuant::dequant( const TransformUnit &tu, CoeffBuf &dstCoeff, const ComponentID &compID, const QpParam &cQP )
{
#if JVET_P0058_CHROMA_TS
#if JVET_P1006_PICTURE_HEADER
if( tu.cs->picHeader->getDepQuantEnabledFlag() && (tu.mtsIdx[compID] != MTS_SKIP))
#else
if( tu.cs->slice->getDepQuantEnabledFlag() && (tu.mtsIdx[compID] != MTS_SKIP))
#endif
#else
#if JVET_P0059_CHROMA_BDPCM
if ((tu.cs->slice->getDepQuantEnabledFlag() && (tu.mtsIdx != MTS_SKIP || !isLuma(compID))) &&
......@@ -1694,9 +1720,27 @@ void DepQuant::dequant( const TransformUnit &tu, CoeffBuf &dstCoeff, const Compo
const uint32_t log2TrWidth = floorLog2(width);
const uint32_t log2TrHeight = floorLog2(height);
#if JVET_P0058_CHROMA_TS
#if JVET_P0365_SCALING_MATRIX_LFNST
#if JVET_P1006_PICTURE_HEADER
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->picHeader->getScalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#else
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->slice->getscalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#endif
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx[compID] == MTS_SKIP), tu.cu->lfnstIdx > 0, disableSMForLFNST);
#else
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx[compID] == MTS_SKIP));
#endif
#else
#if JVET_P0365_SCALING_MATRIX_LFNST
#if JVET_P1006_PICTURE_HEADER
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->picHeader->getScalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#else
const bool disableSMForLFNST = tu.cs->sps->getScalingListFlag() ? tu.cs->slice->getscalingListAPS()->getScalingList().getDisableScalingMatrixForLfnstBlks() : false;
#endif
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx == MTS_SKIP && isLuma(compID)), tu.cu->lfnstIdx > 0, disableSMForLFNST);
#else
const bool enableScalingLists = getUseScalingList(width, height, (tu.mtsIdx == MTS_SKIP && isLuma(compID)));
#endif
#endif
static_cast<DQIntern::DepQuant*>(p)->dequant( tu, dstCoeff, compID, cQP, enableScalingLists, Quant::getDequantCoeff(scalingListType, qpRem, log2TrWidth, log2TrHeight) );
}
......
......@@ -127,6 +127,11 @@ void InterPrediction::destroy()
}
m_triangleBuf.destroy();
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
m_colorTransResiBuf[0].destroy();
m_colorTransResiBuf[1].destroy();
m_colorTransResiBuf[2].destroy();
#endif
if (m_storedMv != nullptr)
{
......@@ -190,6 +195,11 @@ void InterPrediction::init( RdCost* pcRdCost, ChromaFormat chromaFormatIDC, cons
}
m_triangleBuf.create(UnitArea(chromaFormatIDC, Area(0, 0, MAX_CU_SIZE, MAX_CU_SIZE)));
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
m_colorTransResiBuf[0].create(UnitArea(chromaFormatIDC, Area(0, 0, MAX_CU_SIZE, MAX_CU_SIZE)));
m_colorTransResiBuf[1].create(UnitArea(chromaFormatIDC, Area(0, 0, MAX_CU_SIZE, MAX_CU_SIZE)));
m_colorTransResiBuf[2].create(UnitArea(chromaFormatIDC, Area(0, 0, MAX_CU_SIZE, MAX_CU_SIZE)));
#endif
m_iRefListIdx = -1;
......@@ -492,7 +502,11 @@ void InterPrediction::xPredInterBi(PredictionUnit& pu, PelUnitBuf &pcYuvPred, Pe
pu.cs->slice->getWpScaling(REF_PIC_LIST_1, refIdx1, wp1);
bool bioApplied = false;
#if JVET_P1006_PICTURE_HEADER
if (pu.cs->sps->getBDOFEnabledFlag() && (!pu.cs->picHeader->getDisBdofDmvrFlag()))
#else
if (pu.cs->sps->getBDOFEnabledFlag() && (!pu.cs->slice->getDisBdofDmvrFlag()))
#endif
{
if (pu.cu->affine || m_subPuMC)
{
......@@ -1812,7 +1826,11 @@ void InterPrediction::motionCompensation( PredictionUnit &pu, PelUnitBuf &predBu
pu.cs->slice->getWpScaling(REF_PIC_LIST_1, refIdx1, wp1);
bool bioApplied = false;
const Slice &slice = *pu.cs->slice;
#if JVET_P1006_PICTURE_HEADER
if (pu.cs->sps->getBDOFEnabledFlag() && (!pu.cs->picHeader->getDisBdofDmvrFlag()))
#else
if (pu.cs->sps->getBDOFEnabledFlag() && (!pu.cs->slice->getDisBdofDmvrFlag()))
#endif
{
if (pu.cu->affine || m_subPuMC)
......
......@@ -180,6 +180,10 @@ protected:
#if JVET_J0090_MEMORY_BANDWITH_MEASURE
CacheModel *m_cacheModel;
#endif
#if JVET_P0517_ADAPTIVE_COLOR_TRANSFORM
PelStorage m_colorTransResiBuf[3]; // 0-org; 1-act; 2-tmp
#endif
public:
InterPrediction();
virtual ~InterPrediction();
......
......@@ -268,7 +268,11 @@ void LoopFilter::xDeblockCU( CodingUnit& cu, const DeblockEdgeDir edgeDir )
int horVirBndryPos[] = { 0, 0, 0 };
int verVirBndryPos[] = { 0, 0, 0 };
#if JVET_P1006_PICTURE_HEADER
bool isCuCrossedByVirtualBoundaries = isCrossedByVirtualBoundaries( area.x, area.y, area.width, area.height, numHorVirBndry, numVerVirBndry, horVirBndryPos, verVirBndryPos, cu.cs->picHeader );
#else