diff --git a/source/App/DecoderApp/DecApp.cpp b/source/App/DecoderApp/DecApp.cpp index 5877018920f3a8bc3e96abd8cb38654b17a02827..4a5cbadfa4258fc2893e407d50f39d4383582a9a 100644 --- a/source/App/DecoderApp/DecApp.cpp +++ b/source/App/DecoderApp/DecApp.cpp @@ -745,6 +745,9 @@ void DecApp::xCreateDecLib() #if JVET_AH0080_TRANS_INPUT m_cDecLib.setUseNnlfTransInput(m_nnlfTransInput); #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + m_cDecLib.setNnfuOutputFileStem(m_nnfuOutputFileStem); +#endif #endif if (!m_outputDecodedSEIMessagesFilename.empty()) diff --git a/source/App/DecoderApp/DecAppCfg.cpp b/source/App/DecoderApp/DecAppCfg.cpp index aeeff0fcd513da84d20fa5a69e6d04e1434b8d1f..1c03f10dbfe3872b05d8bd829a95df14d87442d4 100644 --- a/source/App/DecoderApp/DecAppCfg.cpp +++ b/source/App/DecoderApp/DecAppCfg.cpp @@ -91,6 +91,9 @@ bool DecAppCfg::parseCfg( int argc, char* argv[] ) #if NN_HOP_UNIFIED_TEMPORAL_FILTERING ("NnlfHopTemporalModelName", m_nnModel[NnModel::HOP_UNIFIED_TEMPORA_FILTER], string("models/nnlf_hop_temporal_model_int16.sadl"), "HOP temporal loop filter model name\n") #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + ("NnfuOutputFileStem", m_nnfuOutputFileStem, string("/tmp/sequence_qp"), "Output file name stem (path prefix) for the NNR bitstream and the reconstructed SADL model\n") +#endif #endif @@ -313,6 +316,7 @@ bool DecAppCfg::parseCfg( int argc, char* argv[] ) return false; } #endif + return true; } diff --git a/source/App/DecoderApp/DecAppCfg.h b/source/App/DecoderApp/DecAppCfg.h index a84d6f5495c89cf240f11dfc6589fef6f734b521..76bc244bf6e34143e15208ee355e614f25b744d4 100644 --- a/source/App/DecoderApp/DecAppCfg.h +++ b/source/App/DecoderApp/DecAppCfg.h @@ -70,6 +70,9 @@ protected: #if JVET_AH0080_TRANS_INPUT bool m_nnlfTransInput; #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + std::string m_nnfuOutputFileStem; ///< File name stem (path prefix) used to save the NNR bitstream and the reconstructed model +#endif #endif #if NNVC_DUMP_DATA diff --git a/source/App/EncoderApp/EncApp.cpp b/source/App/EncoderApp/EncApp.cpp index 5f9370ecbe59a61fdaf823a5ed111ff938af30e4..bcef89d4df4588b61247d6f4e29f4efeda9b23b4 100644 --- a/source/App/EncoderApp/EncApp.cpp +++ b/source/App/EncoderApp/EncApp.cpp @@ -1213,6 +1213,15 @@ void EncApp::xInitLibCfg() #if JVET_AH0080_TRANS_INPUT m_cEncLib.setUseNnlfTransInput (m_nnlfTransInput); #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + m_cEncLib.setUseNnfu(m_nnfuEnabled); + m_cEncLib.setNumNnfus(m_numNnfus); + if (m_cEncLib.getUseNnfu() && m_cEncLib.getNumNnfus() > 0) + { + m_cEncLib.setNnfuModelFileNames(m_nnfuModelFileNames); + m_cEncLib.setNnfuPayloads(m_nnfuPayloads); + } +#endif #endif #if NN_FILTERING_SET_0 diff --git a/source/App/EncoderApp/EncAppCfg.cpp b/source/App/EncoderApp/EncAppCfg.cpp index 856d031d89b96df338d41454718597d8d03b2760..3807e5ceb8032923eb07b5b3e4a67482d185515e 100644 --- a/source/App/EncoderApp/EncAppCfg.cpp +++ b/source/App/EncoderApp/EncAppCfg.cpp @@ -717,6 +717,11 @@ bool EncAppCfg::parseCfg( int argc, char* argv[] ) SMultiValueInput<Level::Name> cfg_sliRefLevels(Level::NONE, Level::LEVEL15_5, 0, 8 * MAX_VPS_SUBLAYERS); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + std::vector<std::string> cfg_nnfuModelFileNames(MAX_NUM_NNFUS, ""); + std::vector<std::string> cfg_nnfuPayloadFileNames(MAX_NUM_NNFUS, ""); +#endif + #if NN_POST_FILTERING std::vector<SMultiValueInput<uint32_t>> cfg_nnpfcInterpolatedPics; std::vector<SMultiValueInput<bool>> cfg_nnpfcInputPicOutputFlag; @@ -1607,6 +1612,21 @@ bool EncAppCfg::parseCfg( int argc, char* argv[] ) opts.addOptions()(cOSS2.str(), m_olsPtlIdx[i], 0); }
+#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + opts.addOptions()("NnfuEnabled", m_nnfuEnabled, false, "Enable NNFU" ); + opts.addOptions()("NumNnfus", m_numNnfus, 0, "Number of NNFUs"); + for (int i = 0; i < MAX_NUM_NNFUS; i++) + { + std::ostringstream nnfuPayloadFileName; + nnfuPayloadFileName << "NnfuPayloadFileName" << i; + opts.addOptions()(nnfuPayloadFileName.str(), cfg_nnfuPayloadFileNames[i], string(""), "NN loop filter update payload file name"); + + std::ostringstream nnfuModelFileName; + nnfuModelFileName << "NnfuModelFileName" << i; + opts.addOptions()(nnfuModelFileName.str(), cfg_nnfuModelFileNames[i], string(""), "NN loop filter update model file name"); + } +#endif + #if NN_POST_FILTERING opts.addOptions()("NnpfModelPath", m_nnModel[NnModel::POST_FILTER], string(""), "path to the over-fitted model\n"); opts.addOptions()( @@ -1906,7 +1926,7 @@ bool EncAppCfg::parseCfg( int argc, char* argv[] ) m_scalingRatioHor = 1.0; m_scalingRatioVer = 1.0; } - CHECK(m_nnsrOption && m_nnModel[NnModel::SUPER_RESOLUTION] != "default_rpr" && + CHECK(m_nnsrOption && m_nnModel[NnModel::SUPER_RESOLUTION] != "default_rpr" && (m_scalingRatioHor != 2.0 || m_scalingRatioVer != 2.0), "NNSR does not support this specified scaling ratio."); } #else @@ -2425,6 +2445,17 @@ bool EncAppCfg::parseCfg( int argc, char* argv[] ) } } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_nnfuEnabled && m_numNnfus > 0) + { + for (int i = 0; i < m_numNnfus; i++) + { + m_nnfuPayloadFileNames.push_back(cfg_nnfuPayloadFileNames[i]); + m_nnfuModelFileNames.push_back(cfg_nnfuModelFileNames[i]); + } + } +#endif + #if NN_POST_FILTERING if (m_nnpfcSEIEnabled) { @@ -4260,6 +4291,29 @@ bool EncAppCfg::xCheckParameter() xConfirmPara(m_nnModel[NnModel::INTRA_PRED].empty(),"The description of each pair of block height and width being a map key and the path to the output graph of the prediction neural network being its string has to be provided."); #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_nnfuEnabled && m_numNnfus > 0) + { + xConfirmPara(m_nnfuModelFileNames.size() != m_nnfuPayloadFileNames.size(), "There should be the same amount of NNFU models and payloads"); + for (const auto& nnfuFileName : m_nnfuPayloadFileNames) + { + std::ifstream file(nnfuFileName, std::ios::binary); + if (!file) + { + cerr << "[ERROR] unable to open NNR/NNC bitstream " << nnfuFileName << endl; + exit(-1); + } + file.seekg(0, std::ios::end); + size_t filesize = file.tellg(); + file.seekg(0, std::ios::beg); + std::vector<uint8_t> payload(filesize/sizeof(uint8_t)); + file.read((char *)payload.data(), filesize); + file.close(); + m_nnfuPayloads.push_back(payload); + } + } +#endif + if ( m_RCEnableRateControl ) { if ( m_RCForceIntraQP ) diff --git a/source/App/EncoderApp/EncAppCfg.h b/source/App/EncoderApp/EncAppCfg.h index b1ade2b20a33857dd3547a6a75b269e5e055c375..e1103697459f21556cb3a8f3728883d69ac5066d 100644 --- a/source/App/EncoderApp/EncAppCfg.h +++ b/source/App/EncoderApp/EncAppCfg.h @@ -779,6 +779,13 @@ protected: #if JVET_AH0080_TRANS_INPUT bool m_nnlfTransInput; #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + bool m_nnfuEnabled; + int m_numNnfus; + std::list<std::string> m_nnfuModelFileNames; + std::list<std::string> m_nnfuPayloadFileNames; + std::list<std::vector<uint8_t>> m_nnfuPayloads; +#endif #endif #if NN_FILTERING_SET_0 diff --git a/source/Lib/CommonLib/CommonDef.h b/source/Lib/CommonLib/CommonDef.h index 1cea00436d416f56fa22e2515ec69d6ddb06d526..b93f9c13d342c41eaadb9ce2e4c20a7c288e3332 100644 --- a/source/Lib/CommonLib/CommonDef.h +++ 
b/source/Lib/CommonLib/CommonDef.h @@ -262,6 +262,10 @@ static const int MAX_NUM_APS = 32; //Curren static const int NUM_APS_TYPE_LEN = 3; //Currently APS Type has 3 bits static const int MAX_NUM_APS_TYPE = 8; //Currently APS Type has 3 bits so the max type is 8 +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +static const int MAX_NUM_NNFUS = 16; +#endif + #if JVET_T0065_LEVEL_6_3 static constexpr int MAX_TILE_COLS = 30; // Maximum number of tile columns static constexpr int MAX_TILES = 990; // Maximum number of tiles diff --git a/source/Lib/CommonLib/NNFilterUnified.cpp b/source/Lib/CommonLib/NNFilterUnified.cpp index 77af3f4f0bb9c57bad6660412a8e7ba147ed7c7d..3b8daaedeb487961e14a6f01a44d05da25294efb 100644 --- a/source/Lib/CommonLib/NNFilterUnified.cpp +++ b/source/Lib/CommonLib/NNFilterUnified.cpp @@ -53,6 +53,10 @@ struct Input QPbase, QPSlice, IPB, +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + MultiplierParam, + TargetMultiplierParam, +#endif nbInputs }; }; @@ -70,14 +74,23 @@ struct InputTemporal }; }; #endif + +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +void NNFilterUnified::init(const std::string &filename, int picWidth, int picHeight, ChromaFormat format, int prmNum, bool reloadModel /* false */, bool useInputMultiplierSwitch /* false */) +#else void NNFilterUnified::init(const std::string &filename, int picWidth, int picHeight, ChromaFormat format, int prmNum) +#endif { ifstream file(filename, ios::binary); if (!file) { cerr << "[ERROR] unable to open NNFilter model " << filename << endl; exit(-1); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (!m_model || reloadModel) +#else if (!m_model) +#endif { m_model.reset(new sadl::Model<TypeSadlLFUnified>()); if (!m_model->load(file)) @@ -88,8 +101,14 @@ void NNFilterUnified::init(const std::string &filename, int picWidth, int picHei } // prepare inputs +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + int numInputs = useInputMultiplierSwitch ? 
Input::nbInputs : Input::nbInputs - 2; + m_inputs.resize(numInputs); + resizeInputs(defaultInputSize + defaultBlockExt * 2, defaultInputSize + defaultBlockExt * 2, !m_model || reloadModel, useInputMultiplierSwitch); +#else m_inputs.resize(Input::nbInputs); resizeInputs(defaultInputSize + defaultBlockExt * 2, defaultInputSize + defaultBlockExt * 2); +#endif if (m_filtered.size() > 0 || m_scaled[0].size() > 0) { @@ -148,11 +167,19 @@ void NNFilterUnified::destroy() } // default is square block + extension +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +void NNFilterUnified::resizeInputs(int width, int height, bool modelChange /* false */, bool useInputMultiplierSwitch /* false */) +#else void NNFilterUnified::resizeInputs(int width, int height) +#endif { int sizeW = width; int sizeH = height; +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if ((sizeH == m_blocksize[0] && sizeW == m_blocksize[1]) && !modelChange) +#else if (sizeH == m_blocksize[0] && sizeW == m_blocksize[1]) +#endif { return; } @@ -187,12 +214,32 @@ void NNFilterUnified::resizeInputs(int width, int height) } #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (useInputMultiplierSwitch) + { + m_inputs[Input::MultiplierParam].resize(sadl::Dimensions{1, 1}); + m_inputs[Input::TargetMultiplierParam].resize(sadl::Dimensions{1, 1}); + } +#endif + if (!m_model->init(m_inputs)) { cerr << "[ERROR] issue init model NNFilterUnified " << endl; exit(-1); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (useInputMultiplierSwitch) + { + assert(nb_inputs + 2 == m_inputs.size()); + } + else + { +#endif assert(nb_inputs==m_inputs.size()); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + } + m_input_quantizer.resize(m_inputs.size()); +#endif for(int i=0;i<nb_inputs;++i) m_input_quantizer[i] = m_model->getInputsTemplate()[i].quantizer; @@ -553,7 +600,11 @@ static void extractOutputsTemporal(const Picture &pic, sadl::Model<T> &m, PelUni } #endif void NNFilterUnified::filterBlock(Picture &pic, UnitArea inferArea, int extLeft, int extRight, int extTop, int extBottom, +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + int prmId, bool applyMultiplier) +#else int prmId) +#endif { // get model auto &model = *m_model; @@ -587,6 +638,14 @@ void NNFilterUnified::filterBlock(Picture &pic, UnitArea inferArea, int extLeft, #endif NNInference::prepareInputs<TypeSadlLFUnified>(&pic, inferArea, m_inputs, seqQp, sliceQp, -1 /* sliceType */, listInputData); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_inputs.size() == Input::nbInputs) + { + m_inputs[6](0, 0) = applyMultiplier ? 
1 : 0; + m_inputs[7](0, 0) = 1; + } +#endif + NNInference::infer<TypeSadlLFUnified>(model, m_inputs); UnitArea inferAreaNoExt(inferArea.chromaFormat, Area(inferArea.lx() + extLeft, inferArea.ly() + extTop, inferArea.lwidth() - extLeft - extRight, inferArea.lheight() - extTop - extBottom)); @@ -631,7 +690,12 @@ void NNFilterUnified::filterBlockTemporal(Picture &pic, UnitArea inferArea, int extractOutputsTemporal(pic, model, bufDst, inferArea, extLeft, extRight, extTop, extBottom); } #endif + +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +void NNFilterUnified::filter(Picture &pic, bool applyMultiplier, const bool isDec /* true */) +#else void NNFilterUnified::filter(Picture &pic, const bool isDec) +#endif { const CodingStructure &cs = *pic.cs; const PreCalcValues & pcv = *cs.pcv; @@ -677,7 +741,11 @@ void NNFilterUnified::filter(Picture &pic, const bool isDec) filterBlockTemporal(pic, inferArea, extLeft, extRight, extTop, extBottom, prmId); else #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + filterBlock(pic, inferArea, extLeft, extRight, extTop, extBottom, prmId, applyMultiplier); +#else filterBlock(pic, inferArea, extLeft, extRight, extTop, extBottom, prmId); +#endif const UnitArea inferAreaNoExt(cs.area.chromaFormat, Area(xPos, yPos, width, height)); PelUnitBuf filteredBuf = getFilteredBuf(prmId, inferAreaNoExt); diff --git a/source/Lib/CommonLib/NNFilterUnified.h b/source/Lib/CommonLib/NNFilterUnified.h index 83076d234e0c96b9623ed5112dd67d4c46d987b6..a27f2e930a7564b09d1cb91d4bfc14bf827ece9d 100644 --- a/source/Lib/CommonLib/NNFilterUnified.h +++ b/source/Lib/CommonLib/NNFilterUnified.h @@ -100,15 +100,27 @@ public: static constexpr int scale_candidates[3] = { 0, (max_scale + (max_scale << 1)) >> 2, max_scale >> 1 }; static constexpr int block_sizes[]={128,256}; +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + void init(const std::string &filename, int picWidth, int picHeight, ChromaFormat format, int prmNum, bool reloadModel = false, bool useInputMultiplierSwitch = false); +#else void init(const std::string &filename, int picWidth, int picHeight, ChromaFormat format, int prmNum); +#endif void destroy(); // filter the whole picture with prms +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + void filter(Picture &pic, bool applyMultiplier, const bool isDec = true); +#else void filter(Picture &pic, const bool isDec = true); +#endif // just filter the block, output on log2OutputScale bits void filterBlock(Picture &pic, UnitArea inferArea, int extLeft, int extRight, int extTop, int extBottom, - int prmId); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + int prmId, bool applyMultiplier); +#else + int prmId); +#endif #if NN_HOP_UNIFIED_TEMPORAL_FILTERING void initTemporal(const std::string &filename); void filterBlockTemporal(Picture &pic, UnitArea inferArea, int extLeft, int extRight, int extTop, int extBottom, @@ -145,8 +157,18 @@ public: #endif private: int m_blocksize[2]; // current inputs size +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + std::vector<int> m_input_quantizer; + void resizeInputs(int width, int height, bool modelChange = false, bool useInputMultiplierSwitch = false); +#else int m_input_quantizer[nb_inputs] = {}; void resizeInputs(int width, int height); +#endif + +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + int m_modelLoadedIdx = -1; + bool m_changeModel = false; +#endif std::unique_ptr<sadl::Model<TypeSadlLFUnified>> m_model; std::vector<sadl::Tensor<TypeSadlLFUnified>> m_inputs; #if NN_HOP_UNIFIED_TEMPORAL_FILTERING diff --git a/source/Lib/CommonLib/NNInference.h b/source/Lib/CommonLib/NNInference.h index 
f365490c70f07863c66d3471c2307645850a9d49..f80695d02d4f88ee542f23d40a2ddba0f6198411 100644 --- a/source/Lib/CommonLib/NNInference.h +++ b/source/Lib/CommonLib/NNInference.h @@ -65,7 +65,7 @@ public: static void fillInputFromBuf (const Picture* pic, UnitArea inferArea, sadl::Tensor<T> &input, CPelUnitBuf buf, bool luma, bool chroma, double scale, int shift) { CPelBuf bufY, bufCb, bufCr; - + if (luma) { bufY = buf.get(COMPONENT_Y); @@ -75,7 +75,7 @@ public: bufCb = buf.get(COMPONENT_Cb); bufCr = buf.get(COMPONENT_Cr); } - + int hor, ver; if (luma) { @@ -145,7 +145,7 @@ public: static void fillInputFromBuf (const Picture* pic, UnitArea inferArea, sadl::Tensor<T> &input, CPelUnitBuf buf, bool luma, bool chroma, double scale, int shift, bool flip) { CPelBuf bufY, bufCb, bufCr; - + if (luma) { bufY = buf.get(COMPONENT_Y); @@ -155,7 +155,7 @@ public: bufCb = buf.get(COMPONENT_Cb); bufCr = buf.get(COMPONENT_Cr); } - + int hor, ver; if (luma) { diff --git a/source/Lib/CommonLib/ParameterSetManager.h b/source/Lib/CommonLib/ParameterSetManager.h index 6f3a0b615e46e93054a0a610f5028fbdd0e85095..8c4cbcb9bb3a2565c63c021404cdf501f6e03628 100644 --- a/source/Lib/CommonLib/ParameterSetManager.h +++ b/source/Lib/CommonLib/ParameterSetManager.h @@ -160,6 +160,12 @@ public: { CHECK( samePU && sameNalUnitType && aps->getScalingList() != existedAPS->getScalingList(), "All APS NAL units with a particular value of nal_unit_type, a particular value of aps_adaptation_parameter_set_id, and a particular value of aps_params_type within a PU shall have the same content" ); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + else if (aps->getAPSType() == NN_UPDATE_APS) + { + CHECK(samePU && sameNalUnitType && aps->getNnLoopFilterUpdate() != existedAPS->getNnLoopFilterUpdate(), "All APS nal units with a particular value of nal_unit_type, a particular value of aps_adaptation_parameter_set_id, and a particular value of aps_params_type within a PU shall have the same content"); + } +#endif else { CHECK( true, "Wrong APS type" ); diff --git a/source/Lib/CommonLib/Slice.h b/source/Lib/CommonLib/Slice.h index f616bd59210b7d08fe4c4d980628139118c97bf2..413c251f2fa9624e932272ee2004ce899d643470 100644 --- a/source/Lib/CommonLib/Slice.h +++ b/source/Lib/CommonLib/Slice.h @@ -237,6 +237,25 @@ private: bool m_chromaScalingListPresentFlag; }; +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +class NnLoopFilterUpdate +{ +public: + NnLoopFilterUpdate() { } + virtual ~NnLoopFilterUpdate() { } + + std::vector<uint8_t>& getPayload() { return m_nnfuPayload; } + void setPayload(std::vector<uint8_t>& payload) { m_nnfuPayload = payload; } + + bool operator==(const NnLoopFilterUpdate& other) { return m_nnfuPayload == other.m_nnfuPayload; } + bool operator!=(const NnLoopFilterUpdate& other) { return !(*this == other); } + + +private: + std::vector<uint8_t> m_nnfuPayload; +}; +#endif + class ConstraintInfo { bool m_gciPresentFlag; @@ -2344,6 +2363,9 @@ private: SliceReshapeInfo m_reshapeAPSInfo; ScalingList m_scalingListApsInfo; CcAlfFilterParam m_ccAlfAPSParam; +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + NnLoopFilterUpdate m_nnfuInfo; +#endif bool m_hasPrefixNalUnitType; public: @@ -2371,6 +2393,10 @@ public: ScalingList& getScalingList() { return m_scalingListApsInfo; } void setCcAlfAPSParam(CcAlfFilterParam& ccAlfAPSParam) { m_ccAlfAPSParam = ccAlfAPSParam; } CcAlfFilterParam& getCcAlfAPSParam() { return m_ccAlfAPSParam; } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + void setNnLoopFilterUpdate(NnLoopFilterUpdate& nnfuInfo) { m_nnfuInfo = nnfuInfo; } + NnLoopFilterUpdate& 
getNnLoopFilterUpdate() { return m_nnfuInfo; } +#endif void setHasPrefixNalUnitType( bool b ) { m_hasPrefixNalUnitType = b; } bool getHasPrefixNalUnitType() const { return m_hasPrefixNalUnitType; } bool chromaPresentFlag; diff --git a/source/Lib/CommonLib/TypeDef.h b/source/Lib/CommonLib/TypeDef.h index 65010d032690523ff7e372d8e9b5d3b6d5f30a03..39bf5cbfc2e451b273bc68ad63e9f46412dbb5d6 100644 --- a/source/Lib/CommonLib/TypeDef.h +++ b/source/Lib/CommonLib/TypeDef.h @@ -60,8 +60,9 @@ using TypeSadlLFUnified=int16_t; #define NN_HOP_RDO 1 // hop rdo #define JVET_AF0043_AF0205_PADDING 1 // JVET-AF0043/JVET-AF0205: padding zero at picture boundary for LOP and HOP #define JVET_AF0085_RESIDUAL_ADJ 1 // JVET-AF0085: residual offset adjustment for LOP and HOP -#define JVET_AF0193_DECODER_OPTIMIZATION 1 // JVET-AF0193: decoder complexity optimization for NNVC in-loop filter +#define JVET_AF0193_DECODER_OPTIMIZATION 1 // JVET-AF0193: decoder complexity optimization for NNVC in-loop filter #define JVET_AH0080_TRANS_INPUT 1 // JVET-AH0080: joint LOP model with inputs transformed +#define JVET_AH0096_CONTENT_ADAPTIVE_LOP 1 // JVET-AH0096: Content-adaptive loop-filter #endif @@ -439,6 +440,9 @@ enum ApsType ALF_APS = 0, LMCS_APS = 1, SCALING_LIST_APS = 2, +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + NN_UPDATE_APS = 3, +#endif }; enum QuantFlags @@ -1602,7 +1606,7 @@ enum NnModel #endif #if NN_FILTERING_SET_0 FILTER_SET_0, ///< model path -#endif +#endif #if NN_FILTERING_SET_1 FILTER_SET_1_INTRA_LUMA, ///< intra luma nnlf set1 model FILTER_SET_1_INTRA_CHROMA, ///< intra chroma nnlf set1 model @@ -1619,7 +1623,7 @@ enum NnModel POST_FILTER, ///< Post filter model path #endif #if JVET_AB0149_INTRA_PRED - INTRA_PRED, + INTRA_PRED, INTRA_PRED_PREFIX, #endif #if NN_FILTERING_SET_LC diff --git a/source/Lib/DecoderLib/DecLib.cpp b/source/Lib/DecoderLib/DecLib.cpp index 10caf06b8e71adc4cfd2aa99b82293c8e5338685..3ead65ac16ca2a4c4360c26fba09adaf23d7b94a 100644 --- a/source/Lib/DecoderLib/DecLib.cpp +++ b/source/Lib/DecoderLib/DecLib.cpp @@ -62,7 +62,7 @@ #include <sadl/model.h> #endif -bool tryDecodePicture( Picture* pcEncPic, const int expectedPoc, const std::string& bitstreamFileName, +bool tryDecodePicture( Picture* pcEncPic, const int expectedPoc, const std::string& bitstreamFileName, #if NN_COMMON_API const std::array<std::string, NnModel::Num>* nnModel, #endif @@ -457,6 +457,10 @@ DecLib::DecLib() , m_seiReader() , m_cLoopFilter() , m_cSAO() +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + , m_newNnfu(false) + , m_nnfuIntraPeriod(0) +#endif , m_cReshaper() #if JVET_J0090_MEMORY_BANDWITH_MEASURE , m_cacheModel() @@ -694,7 +698,7 @@ void DecLib::executeLoopFilters() { m_pcNNFilterSet1.create(cs.pcv->lumaWidth, cs.pcv->lumaHeight, cs.pcv->chrFormat, cs.sps->getNnlfSet1MaxNumParams()); #if JVET_AC0177_MULTI_FRAME - m_pcNNFilterSet1.init(getNnModel(NnModel::FILTER_SET_1_INTER_LUMA), getNnModel(NnModel::FILTER_SET_1_INTER_CHROMA), + m_pcNNFilterSet1.init(getNnModel(NnModel::FILTER_SET_1_INTER_LUMA), getNnModel(NnModel::FILTER_SET_1_INTER_CHROMA), getNnModel(NnModel::FILTER_SET_1_INTRA_LUMA), getNnModel(NnModel::FILTER_SET_1_INTRA_CHROMA), getNnModel(NnModel::FILTER_SET_1_ALT_INTER_LUMA)); #else m_pcNNFilterSet1.init(getNnModel(NnModel::FILTER_SET_1_INTER_LUMA), getNnModel(NnModel::FILTER_SET_1_INTER_CHROMA), @@ -789,7 +793,83 @@ void DecLib::executeLoopFilters() #if JVET_AH0080_TRANS_INPUT m_nnfilterUnified.nnlfTransInput(getUseNnlfTransInput() == 1 ? 
true : false); #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + int picIntraPeriod = m_nnfuIntraPeriod == 0 ? 0 : int(cs.slice->getPOC() / m_nnfuIntraPeriod); + + if (m_newNnfu) + { + int apsId = 0; + APS *nnfuAPS = m_parameterSetManager.getAPS(apsId, NN_UPDATE_APS); + CHECK(nnfuAPS == nullptr, "No NNFU APS present"); + if (false == m_parameterSetManager.activateAPS(apsId, NN_UPDATE_APS)) + { + THROW("NNFU APS activation failed!"); + } + + std::ostringstream payloadStream; + payloadStream << m_nnfuOutputFileStem << "_payload_id_" << apsId << "_poc_" << cs.slice->getPOC() << ".nnr"; + string payloadFileName = payloadStream.str(); + std::ofstream fs(payloadFileName, std::ios::binary); + fs.write((char *) nnfuAPS->getNnLoopFilterUpdate().getPayload().data(), nnfuAPS->getNnLoopFilterUpdate().getPayload().size() * sizeof(uint8_t)); + fs.close(); + + std::ostringstream recoModelStream; + recoModelStream << m_nnfuOutputFileStem << "_reco_model_id_" << apsId << "_poc_" << cs.slice->getPOC() << ".sadl"; + string recoModelFileName = recoModelStream.str(); + + std::ostringstream cmdStream; + cmdStream << "cd $WU_CODE && python wu_decoding.py --arch lop2 --base_model " << m_nnModel[NnModel::LOP_UNIFIED_FILTER] << " --nnr_bitstream " << payloadFileName << " --reco_model " << recoModelFileName; + string cmd = cmdStream.str(); + + char buffer[128]; + int exitCode = 0; + std::string result = ""; +#if _WIN32 + FILE* pipe = _popen(cmd.c_str(), "r"); +#else + FILE* pipe = popen(cmd.c_str(), "r"); +#endif + + if (!pipe) + { + throw std::runtime_error("popen() failed!"); + } + try + { + while (fgets(buffer, sizeof buffer, pipe) != NULL) + { + result += buffer; + } +#if _WIN32 + exitCode = _pclose(pipe); +#else + exitCode = pclose(pipe); +#endif + } + catch (...) + { +#if _WIN32 + exitCode = _pclose(pipe); +#else + exitCode = pclose(pipe); +#endif + } + + if (exitCode != 0) + { + throw std::runtime_error("Model reconstruction failed"); + } + + m_nnfilterUnified.init(recoModelFileName, cs.sps->getMaxPicWidthInLumaSamples(), cs.sps->getMaxPicHeightInLumaSamples(), cs.sps->getChromaFormatIdc(), cs.sps->getNnlfUnifiedMaxNumPrms(), true, true); + m_newNnfu = false; + m_nnfuIpUsesNnfu[picIntraPeriod] = true; + } + + bool applyMultiplier = cs.slice->getSliceType() != I_SLICE && m_nnfuIpUsesNnfu.find(picIntraPeriod) != m_nnfuIpUsesNnfu.end(); + m_nnfilterUnified.filter(*m_pcPic, applyMultiplier); +#else m_nnfilterUnified.filter(*m_pcPic); +#endif } #endif @@ -1853,7 +1933,14 @@ void DecLib::xActivateParameterSets( const InputNALUnit nalu ) #if NN_LF_UNIFIED if (sps->getNnlfUnifiedEnabledFlag()) { +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (pSlice->getPOC() == 0) + { +#endif m_nnfilterUnified.init(m_nnModel[NnModel::LOP_UNIFIED_FILTER], sps->getMaxPicWidthInLumaSamples(), sps->getMaxPicHeightInLumaSamples(), sps->getChromaFormatIdc(), sps->getNnlfUnifiedMaxNumPrms()); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + } +#endif NnlfUnifiedInferGranularity unifiedInferGranularity = NNLF_UNIFIED_INFER_GRANULARITY_BASE; if (pSlice->getSliceType() == I_SLICE) @@ -1961,6 +2048,13 @@ void DecLib::xActivateParameterSets( const InputNALUnit nalu ) } } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_nnfuIntraPeriod == 0 && m_pcPic->cs->slice->isIRAP() && m_pcPic->cs->slice->getPOC() != 0) + { + m_nnfuIntraPeriod = m_pcPic->cs->slice->getPOC(); + } +#endif + xCheckParameterSetConstraints(layerId); } void DecLib::xCheckParameterSetConstraints(const int layerId) @@ -3127,6 +3221,13 @@ void DecLib::xDecodeAPS(InputNALUnit& nalu) // aps will be 
deleted if it was already stored (and did not changed), // thus, storing it must be last action. m_parameterSetManager.storeAPS(aps, nalu.getBitstream().getFifo()); + +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (aps->getAPSType() == NN_UPDATE_APS) + { + m_newNnfu = true; + } +#endif } #if NN_POST_FILTERING bool DecLib::decode(InputNALUnit& nalu, int& iSkipFrame, int& iPOCLastDisplay, int iTargetOlsIdx, bool isEosPresentInLastPu) diff --git a/source/Lib/DecoderLib/DecLib.h b/source/Lib/DecoderLib/DecLib.h index 118392027db456da0ef6f4015bbc535cb135f930..79d077de0b039d0e808062cadfbb4fe1fa12d0c9 100644 --- a/source/Lib/DecoderLib/DecLib.h +++ b/source/Lib/DecoderLib/DecLib.h @@ -150,6 +150,12 @@ private: #if JVET_AH0080_TRANS_INPUT bool m_nnlfTransInput; #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + bool m_newNnfu = false; + std::map<int, bool> m_nnfuIpUsesNnfu; + int m_nnfuIntraPeriod = 0; + std::string m_nnfuOutputFileStem; +#endif #endif #if NN_FILTERING_SET_0 NNFilterSet0 m_cCNNLF; @@ -288,6 +294,10 @@ public: void setUseNnlfTransInput(bool b) { m_nnlfTransInput = b; } int getUseNnlfTransInput() const { return m_nnlfTransInput; } #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + std::string getNnfuOutputFileStem() { return m_nnfuOutputFileStem; } + void setNnfuOutputFileStem(std::string stem) { m_nnfuOutputFileStem = stem; } +#endif #endif void setDecodedPictureHashSEIEnabled(int enabled) { m_decodedPictureHashSEIEnabled=enabled; } diff --git a/source/Lib/DecoderLib/VLCReader.cpp b/source/Lib/DecoderLib/VLCReader.cpp index eeb5e130fc483b8aeb078ed915b334d6c02c0b94..eedc3c6f1d3abf5e00d6149f111f214815c770d8 100644 --- a/source/Lib/DecoderLib/VLCReader.cpp +++ b/source/Lib/DecoderLib/VLCReader.cpp @@ -940,6 +940,12 @@ void HLSyntaxReader::parseAPS( APS* aps ) { parseScalingListAps( aps ); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + else if (apsType == NN_UPDATE_APS) + { + parseNnUpdateAps(aps); + } +#endif READ_FLAG(code, "aps_extension_flag"); if (code) { @@ -1132,6 +1136,25 @@ void HLSyntaxReader::parseScalingListAps( APS* aps ) parseScalingList(&info, aps->chromaPresentFlag); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +void HLSyntaxReader::parseNnUpdateAps(APS *aps) +{ + NnLoopFilterUpdate& nnfu = aps->getNnLoopFilterUpdate(); + + uint32_t bitstreamSizeMinus1; + std::vector<uint8_t> payload; + READ_CODE(16, bitstreamSizeMinus1, "nnfu_bitstream_size_minus1"); + + for (int i = 0; i <= bitstreamSizeMinus1; i++) + { + uint32_t code; + READ_CODE(8, code, "nnfu_payload_byte[i]"); + payload.push_back((uint8_t)code); + } + nnfu.setPayload(payload); +} +#endif + void HLSyntaxReader::parseVUI(VUI* pcVUI, SPS *pcSPS) { #if ENABLE_TRACING diff --git a/source/Lib/DecoderLib/VLCReader.h b/source/Lib/DecoderLib/VLCReader.h index f477520db095ea0bd948b953d03466d4a28ed5e7..3b9815092eb1bfa1e2891d9bd651d4858fde435e 100644 --- a/source/Lib/DecoderLib/VLCReader.h +++ b/source/Lib/DecoderLib/VLCReader.h @@ -185,6 +185,9 @@ public: void parseAlfAps ( APS* pcAPS ); void parseLmcsAps ( APS* pcAPS ); void parseScalingListAps ( APS* pcAPS ); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + void parseNnUpdateAps ( APS* pcAPS ); +#endif void parseVUI ( VUI* pcVUI, SPS* pcSPS ); void parseConstraintInfo (ConstraintInfo *cinfo); void parseProfileTierLevel(ProfileTierLevel *ptl, bool profileTierPresentFlag, int maxNumSubLayersMinus1); diff --git a/source/Lib/EncoderLib/EncCfg.h b/source/Lib/EncoderLib/EncCfg.h index 9d1a7ee5d9a62a93cba077b6b05a3a2e769e00ab..52d67a3874cc4ec7c20010c8ee3df7ab5bf79099 100644 --- a/source/Lib/EncoderLib/EncCfg.h +++ b/source/Lib/EncoderLib/EncCfg.h @@ 
-829,6 +829,12 @@ protected: #if NN_HOP_UNIFIED_TEMPORAL_FILTERING bool m_nnlfHopTemporalFiltering; #endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + bool m_nnfuEnabled; + int m_numNnfus; + std::list<std::vector<uint8_t>> m_nnfuPayloads; + std::list<std::string> m_nnfuModelFileNames; +#endif #endif @@ -2254,6 +2260,17 @@ public: bool getUseNnlfTransInput() const { return m_nnlfTransInput; }; void setUseNnlfTransInput(bool b) { m_nnlfTransInput = b; }; #endif + +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + void setUseNnfu(bool useNnfu) { m_nnfuEnabled = useNnfu; } + bool getUseNnfu() const { return m_nnfuEnabled; } + void setNumNnfus(int numNnfus) { m_numNnfus = numNnfus; } + int getNumNnfus() const { return m_numNnfus; } + void setNnfuModelFileNames(std::list<std::string>& modelFileNames) { m_nnfuModelFileNames = modelFileNames; } + std::list<std::string>& getNnfuModelFileNames() { return m_nnfuModelFileNames; } + std::list<std::vector<uint8_t>>& getNnfuPayloads() { return m_nnfuPayloads; } + void setNnfuPayloads(std::list<std::vector<uint8_t>>& nnfuPayloads) { m_nnfuPayloads = nnfuPayloads; } +#endif #endif #if NN_FILTERING_SET_0 diff --git a/source/Lib/EncoderLib/EncGOP.cpp b/source/Lib/EncoderLib/EncGOP.cpp index 37b3cf0da4a82ec0da5e61cc67c265c6352f5266..8c0cf22028ee759101fa2f4a2f0571bcf99c030d 100644 --- a/source/Lib/EncoderLib/EncGOP.cpp +++ b/source/Lib/EncoderLib/EncGOP.cpp @@ -2844,6 +2844,33 @@ void EncGOP::compressGOP( int iPOCLast, int iNumPicRcvd, PicList& rcListPic, m_nnfilterUnified.nnlfTransInput(m_pcCfg->getUseNnlfTransInput() == 1 ? true : false); #endif pcPic->initPicprms(*pcSlice); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_pcCfg->getUseNnfu() && m_pcCfg->getNumNnfus() > 0) + { + int picIntraPeriod = int(pcSlice->getPOC() / m_pcCfg->getIntraPeriod()); + m_newNnfu = pcSlice->getSliceType() != I_SLICE && picIntraPeriod != m_nnfuCurrIntraPeriod; + if (m_newNnfu) + { + int apsId = 0; // same ID + ParameterSetMap<APS> *apsMap = m_pcEncLib->getApsMap(); + APS* nnfuAPS = apsMap->getPS((apsId << NUM_APS_TYPE_LEN) + NN_UPDATE_APS); + + if (nullptr == nnfuAPS) // same ID + { + nnfuAPS = apsMap->allocatePS((apsId << NUM_APS_TYPE_LEN) + NN_UPDATE_APS); + } + + nnfuAPS->setTemporalId(pcSlice->getTLayer()); + nnfuAPS->setAPSId(apsId); + nnfuAPS->setAPSType(NN_UPDATE_APS); + nnfuAPS->chromaPresentFlag = false; + nnfuAPS->getNnLoopFilterUpdate().setPayload(m_pcCfg->getNnfuPayloads().front()); + m_pcCfg->getNnfuPayloads().pop_front(); + m_pcEncLib->getApsMap()->setChangedFlag((nnfuAPS->getAPSId() << NUM_APS_TYPE_LEN) + NN_UPDATE_APS); + m_nnfuCurrIntraPeriod = picIntraPeriod; + } + } +#endif } #endif @@ -3225,6 +3252,13 @@ void EncGOP::compressGOP( int iPOCLast, int iNumPicRcvd, PicList& rcListPic, #if NN_LF_FORCE_USE m_nnfilterUnified.forceIntraType(m_pcCfg->getNnlfDebugOption() == 2 ? 
true : false); if (m_pcCfg->getNnlfDebugOption() != 1 || pcSlice->isIntra()) // when m_nnlfOption == 1, skip NN filtering for inter slice (note m_picprm.sprm.mode = -1 by default) +#endif +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_pcCfg->getUseNnfu() && m_pcCfg->getNumNnfus() > 0 && m_newNnfu) + { + m_nnfilterUnified.init(m_pcCfg->getNnfuModelFileNames().front(), m_pcCfg->getSourceWidth(), m_pcCfg->getSourceHeight(), m_pcCfg->getChromaFormatIdc(), m_pcCfg->getNnlfUnifiedMaxNumPrms(), true, true); + m_pcCfg->getNnfuModelFileNames().pop_front(); + } #endif m_nnfilterUnified.chooseParameters(*pcPic); pcSlice->setNnlfUnifiedParameters(m_nnfilterUnified.getSliceprms()); @@ -3632,6 +3666,22 @@ void EncGOP::compressGOP( int iPOCLast, int iNumPicRcvd, PicList& rcListPic, m_bSeqFirst = false; } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + if (m_pcCfg->getUseNnfu() && m_pcCfg->getNumNnfus() > 0 && m_newNnfu) + { + int apsId = 0; + ParameterSetMap<APS> *apsMap = m_pcEncLib->getApsMap(); + APS* aps = apsMap->getPS((apsId << NUM_APS_TYPE_LEN) + NN_UPDATE_APS); + bool writeAPS = aps && apsMap->getChangedFlag((apsId << NUM_APS_TYPE_LEN) + NN_UPDATE_APS); + if (writeAPS) + { + aps->chromaPresentFlag = pcSlice->getSPS()->getChromaFormatIdc() != CHROMA_400; + actualTotalBits += xWriteAPS( accessUnit, aps, m_pcEncLib->getLayerId(), true ); + apsMap->clearChangedFlag((apsId << NUM_APS_TYPE_LEN) + NN_UPDATE_APS); + } + } +#endif + //send LMCS APS when LMCSModel is updated. It can be updated even current slice does not enable reshaper. //For example, in RA, update is on intra slice, but intra slice may not use reshaper if (pcSlice->getSPS()->getUseLmcs()) diff --git a/source/Lib/EncoderLib/EncGOP.h b/source/Lib/EncoderLib/EncGOP.h index ec03c48f782f82a515efcf69e8e1ef468e43ddb8..334af76f803f393ec2bd85b9104e4ab6eaf60935 100644 --- a/source/Lib/EncoderLib/EncGOP.h +++ b/source/Lib/EncoderLib/EncGOP.h @@ -164,6 +164,10 @@ private: PicList* m_pcListPic; #if NN_LF_UNIFIED EncNNFilterUnified m_nnfilterUnified; +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + bool m_newNnfu; + int m_nnfuCurrIntraPeriod = -1; +#endif #endif #if NN_FILTERING_SET_1 diff --git a/source/Lib/EncoderLib/EncNNFilterUnified.cpp b/source/Lib/EncoderLib/EncNNFilterUnified.cpp index 9667e9d744e29e42a53219e546ddc915afd5c8fd..bd0a2220afa0a1a47174d50a3768c954924a84d1 100644 --- a/source/Lib/EncoderLib/EncNNFilterUnified.cpp +++ b/source/Lib/EncoderLib/EncNNFilterUnified.cpp @@ -135,10 +135,17 @@ void EncNNFilterUnified::chooseParameters(Picture &pic) FilterParameters& picprms = getPicprms(); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + bool applyMultiplier = cs.slice->getSliceType() != I_SLICE; +#endif for (int prmId = 0; prmId < picprms.prmNum; prmId++ ) { fill(picprms.prmId.begin(), picprms.prmId.end(), prmId); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + filter(pic, applyMultiplier, false); +#else filter(pic, false); +#endif for (int scaleId = 1; scaleId < 3; scaleId++) { getScaledBuf(scaleId, prmId).copyFrom(getScaledBuf(0, prmId)); diff --git a/source/Lib/EncoderLib/VLCWriter.cpp b/source/Lib/EncoderLib/VLCWriter.cpp index 89e529808e065b52b19b2dbfac0e0cf79556307d..c5adef0f0df774928eef81da5d82f53b2d0a8869 100644 --- a/source/Lib/EncoderLib/VLCWriter.cpp +++ b/source/Lib/EncoderLib/VLCWriter.cpp @@ -542,6 +542,12 @@ void HLSWriter::codeAPS( APS* pcAPS ) { codeScalingListAps( pcAPS ); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + else if (pcAPS->getAPSType() == NN_UPDATE_APS) + { + codeNnUpdateAps(pcAPS); + } +#endif WRITE_FLAG(0, "aps_extension_flag"); //Implementation 
when this flag is equal to 1 should be added when it is needed. Currently in the spec we don't have case when this flag is equal to 1 xWriteRbspTrailingBits(); } @@ -673,6 +679,19 @@ void HLSWriter::codeScalingListAps( APS* pcAPS ) codeScalingList(param, pcAPS->chromaPresentFlag); } +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP +void HLSWriter::codeNnUpdateAps(APS *aps) +{ + const std::vector<uint8_t>& payload = aps->getNnLoopFilterUpdate().getPayload(); + + WRITE_CODE((uint32_t)payload.size() - 1, 16, "nnfu_bitstream_size_minus1"); + for (auto code: payload) + { + WRITE_CODE(code, 8, "nnfu_payload_byte[i]"); + } +} +#endif + void HLSWriter::codeVUI( const VUI *pcVUI, const SPS* pcSPS ) { #if ENABLE_TRACING diff --git a/source/Lib/EncoderLib/VLCWriter.h b/source/Lib/EncoderLib/VLCWriter.h index f0374b525749c0fc21001c84960af296cc5bd946..48b6c4161f9e6ffdb12496a24f0a61a3323dc0e4 100644 --- a/source/Lib/EncoderLib/VLCWriter.h +++ b/source/Lib/EncoderLib/VLCWriter.h @@ -149,6 +149,9 @@ public: void codeAlfAps ( APS* pcAPS ); void codeLmcsAps ( APS* pcAPS ); void codeScalingListAps ( APS* pcAPS ); +#if JVET_AH0096_CONTENT_ADAPTIVE_LOP + void codeNnUpdateAps ( APS* aps ); +#endif void codeVPS ( const VPS* pcVPS ); void codeDCI ( const DCI* dci ); void codePictureHeader ( PicHeader* picHeader, bool writeRbspTrailingBits, Slice *slice = 0 );
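
Usage note (not part of the patch; added for review convenience): a minimal sketch of how the new options introduced above could be wired together. Only the option keys (NnfuEnabled, NumNnfus, NnfuPayloadFileName0..N, NnfuModelFileName0..N, NnfuOutputFileStem) come from this change; the file names and the one-update-per-intra-period pairing shown here are illustrative assumptions.

# Encoder side (EncAppCfg): one over-fitted SADL model / NNR payload pair per loop-filter update
NnfuEnabled          : 1
NumNnfus             : 2
NnfuPayloadFileName0 : update0.nnr       # hypothetical file names
NnfuModelFileName0   : update0.sadl
NnfuPayloadFileName1 : update1.nnr
NnfuModelFileName1   : update1.sadl

# Decoder side (DecAppCfg): path prefix used when dumping the received NNR payload and the reconstructed model
NnfuOutputFileStem   : /tmp/sequence_qp

The decoder reconstructs the updated model by shelling out to wu_decoding.py via the WU_CODE environment variable (see the popen() call added in DecLib::executeLoopFilters), so WU_CODE must point to the directory containing that script for the content-adaptive path to run.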