HEVC Code Tracing (4.4)

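This part continues TEncGOP::compressGOP() to its end: the interlaced PSNR for field coding, output of the decoded picture hash, the rate-control update after the picture, derivation of the HRD sub-picture (decoding unit) timing together with the picture timing / decoding unit info SEI messages, and the final per-picture bookkeeping.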
    //In case of field coding, compute the interlaced PSNR for both fields
    if (isField && ((!pcPic->isTopField() && isTff) || (pcPic->isTopField() && !isTff)) && (pcPic->getPOC()%m_iGopSize != 1))
    {
      //get complementary top field
      TComPic* pcPicTop;
      TComList<TComPic*>::iterator   iterPic = rcListPic.begin();
      while ((*iterPic)->getPOC() != pcPic->getPOC()-1)
      {
        iterPic ++;
      }
      pcPicTop = *(iterPic);
      xCalculateInterlacedAddPSNR(pcPicTop, pcPic, pcPicTop->getPicYuvRec(), pcPic->getPicYuvRec(), accessUnit, dEncTime );
    }
    else if (isField && pcPic->getPOC()!= 0 && (pcPic->getPOC()%m_iGopSize == 0))
    {
      //get complementary bottom field
      TComPic* pcPicBottom;
      TComList<TComPic*>::iterator   iterPic = rcListPic.begin();
      while ((*iterPic)->getPOC() != pcPic->getPOC()+1)
      {
        iterPic ++;
      }
      pcPicBottom = *(iterPic);
      xCalculateInterlacedAddPSNR(pcPic, pcPicBottom, pcPic->getPicYuvRec(), pcPicBottom->getPicYuvRec(), accessUnit, dEncTime );
    }

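    // Append the decoded picture hash carried in the SEI to the per-picture log line (config value 1: MD5, 2: CRC, 3: Checksum)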
    if (digestStr)
    {
      if(m_pcCfg->getDecodedPictureHashSEIEnabled() == 1)
      {
        printf(" [MD5:%s]", digestStr);
      }
      else if(m_pcCfg->getDecodedPictureHashSEIEnabled() == 2)
      {
        printf(" [CRC:%s]", digestStr);
      }
      else if(m_pcCfg->getDecodedPictureHashSEIEnabled() == 3)
      {
        printf(" [Checksum:%s]", digestStr);
      }
    }
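    // Rate control: feed the actual header/total bits, average QP and average lambda of this picture back into the R-lambda model, then charge the coded bits against the sequence- and GOP-level budgets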
    if ( m_pcCfg->getUseRateCtrl() )
    {
      Double avgQP     = m_pcRateCtrl->getRCPic()->calAverageQP();
      Double avgLambda = m_pcRateCtrl->getRCPic()->calAverageLambda();
      if ( avgLambda < 0.0 )
      {
        avgLambda = lambda;
      }
      m_pcRateCtrl->getRCPic()->updateAfterPicture( actualHeadBits, actualTotalBits, avgQP, avgLambda, pcSlice->getSliceType());
      m_pcRateCtrl->getRCPic()->addToPictureLsit( m_pcRateCtrl->getPicList() );

      m_pcRateCtrl->getRCSeq()->updateAfterPic( actualTotalBits );
      if ( pcSlice->getSliceType() != I_SLICE )
      {
        m_pcRateCtrl->getRCGOP()->updateAfterPicture( actualTotalBits );
      }
      else    // for intra picture, the estimated bits are used to update the current status in the GOP
      {
        m_pcRateCtrl->getRCGOP()->updateAfterPicture( estimatedBits );
      }
    }

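    // HRD timing: when picture timing / decoding unit info SEI messages are enabled and NAL or VCL HRD parameters are present in the VUI, derive the per-DU timing fields from the accumulated NALU counts (accumNalsDU) and bits (accumBitsDU), then insert the SEI messages into the access unit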
    if( ( m_pcCfg->getPictureTimingSEIEnabled() || m_pcCfg->getDecodingUnitInfoSEIEnabled() ) &&
       ( pcSlice->getSPS()->getVuiParametersPresentFlag() ) &&
       ( ( pcSlice->getSPS()->getVuiParameters()->getHrdParameters()->getNalHrdParametersPresentFlag() )
        || ( pcSlice->getSPS()->getVuiParameters()->getHrdParameters()->getVclHrdParametersPresentFlag() ) ) )
    {
      TComVUI *vui = pcSlice->getSPS()->getVuiParameters();
      TComHRD *hrd = vui->getHrdParameters();

      if( hrd->getSubPicCpbParamsPresentFlag() )
      {
        Int i;
        UInt64 ui64Tmp;
        UInt uiPrev = 0;
        UInt numDU = ( pictureTimingSEI.m_numDecodingUnitsMinus1 + 1 );
        UInt *pCRD = &pictureTimingSEI.m_duCpbRemovalDelayMinus1[0];
        UInt maxDiff = ( hrd->getTickDivisorMinus2() + 2 ) - 1;

        for( i = 0; i < numDU; i ++ )
        {
          pictureTimingSEI.m_numNalusInDuMinus1[ i ]       = ( i == 0 ) ? ( accumNalsDU[ i ] - 1 ) : ( accumNalsDU[ i ] - accumNalsDU[ i - 1] - 1 );
        }

        if( numDU == 1 )
        {
          pCRD[ 0 ] = 0; /* don't care */
        }
        else
        {
          pCRD[ numDU - 1 ] = 0;/* by definition */
          UInt tmp = 0;
          UInt accum = 0;

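          /* ui64Tmp below estimates how many sub-picture clock ticks are needed to send the
           * bits of the DUs that follow DU i at the target bitrate:
           *   (remaining bits / bitrate) [s] * (time_scale / num_units_in_tick) [clock ticks/s]
           *   * (tick_divisor_minus2 + 2) [sub-ticks per clock tick].
           * Illustrative numbers (assumed, not taken from the encoder): 100000 remaining bits,
           * a 30 Hz clock tick rate, 10 sub-ticks per tick and a 1 Mbit/s target bitrate give
           * 0.1 s * 30 * 10 = 30 sub-ticks; values above maxDiff (9 in this example) are
           * clamped by the second pass over the DUs. */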
          for( i = ( numDU - 2 ); i >= 0; i -- )
          {
            ui64Tmp = ( ( ( accumBitsDU[ numDU - 1 ]  - accumBitsDU[ i ] ) * ( vui->getTimingInfo()->getTimeScale() / vui->getTimingInfo()->getNumUnitsInTick() ) * ( hrd->getTickDivisorMinus2() + 2 ) ) / ( m_pcCfg->getTargetBitrate() ) );
            if( (UInt)ui64Tmp > maxDiff )
            {
              tmp ++;
            }
          }
          uiPrev = 0;

          UInt flag = 0;
          for( i = ( numDU - 2 ); i >= 0; i -- )
          {
            flag = 0;
            ui64Tmp = ( ( ( accumBitsDU[ numDU - 1 ]  - accumBitsDU[ i ] ) * ( vui->getTimingInfo()->getTimeScale() / vui->getTimingInfo()->getNumUnitsInTick() ) * ( hrd->getTickDivisorMinus2() + 2 ) ) / ( m_pcCfg->getTargetBitrate() ) );

            if( (UInt)ui64Tmp > maxDiff )
            {
              if(uiPrev >= maxDiff - tmp)
              {
                ui64Tmp = uiPrev + 1;
                flag = 1;
              }
              else
              {
                ui64Tmp = maxDiff - tmp + 1;
              }
            }
            pCRD[ i ] = (UInt)ui64Tmp - uiPrev - 1;
            if( (Int)pCRD[ i ] < 0 )
            {
              pCRD[ i ] = 0;
            }
            else if (tmp > 0 && flag == 1)
            {
              tmp --;
            }
            accum += pCRD[ i ] + 1;
            uiPrev = accum;
          }
        }
      }
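      // Write the picture timing SEI as a prefix SEI NALU and insert it right after the active parameter sets and buffering period SEIs; pic_struct signals top field (1), bottom field (2) or frame (0). If scalable nesting is enabled, the same message is additionally wrapped in a scalable nesting SEI.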
      if( m_pcCfg->getPictureTimingSEIEnabled() )
      {
        {
          OutputNALUnit nalu(NAL_UNIT_PREFIX_SEI, pcSlice->getTLayer());
          m_pcEntropyCoder->setEntropyCoder(m_pcCavlcCoder, pcSlice);
          pictureTimingSEI.m_picStruct = (isField && pcSlice->getPic()->isTopField())? 1 : isField? 2 : 0;
          m_seiWriter.writeSEImessage(nalu.m_Bitstream, pictureTimingSEI, pcSlice->getSPS());
          writeRBSPTrailingBits(nalu.m_Bitstream);
          UInt seiPositionInAu = xGetFirstSeiLocation(accessUnit);
          UInt offsetPosition = m_activeParameterSetSEIPresentInAU
          + m_bufferingPeriodSEIPresentInAU;    // Insert PT SEI after APS and BP SEI
          AccessUnit::iterator it;
          for(j = 0, it = accessUnit.begin(); j < seiPositionInAu + offsetPosition; j++)
          {
            it++;
          }
          accessUnit.insert(it, new NALUnitEBSP(nalu));
          m_pictureTimingSEIPresentInAU = true;
        }
        if ( m_pcCfg->getScalableNestingSEIEnabled() ) // put picture timing SEI into scalable nesting SEI
        {
          OutputNALUnit nalu(NAL_UNIT_PREFIX_SEI, pcSlice->getTLayer());
          m_pcEntropyCoder->setEntropyCoder(m_pcCavlcCoder, pcSlice);
          scalableNestingSEI.m_nestedSEIs.clear();
          scalableNestingSEI.m_nestedSEIs.push_back(&pictureTimingSEI);
          m_seiWriter.writeSEImessage(nalu.m_Bitstream, scalableNestingSEI, pcSlice->getSPS());
          writeRBSPTrailingBits(nalu.m_Bitstream);
          UInt seiPositionInAu = xGetFirstSeiLocation(accessUnit);
          UInt offsetPosition = m_activeParameterSetSEIPresentInAU
          + m_bufferingPeriodSEIPresentInAU + m_pictureTimingSEIPresentInAU + m_nestedBufferingPeriodSEIPresentInAU;    // Insert PT SEI after APS and BP SEI
          AccessUnit::iterator it;
          for(j = 0, it = accessUnit.begin(); j < seiPositionInAu + offsetPosition; j++)
          {
            it++;
          }
          accessUnit.insert(it, new NALUnitEBSP(nalu));
          m_nestedPictureTimingSEIPresentInAU = true;
        }
      }
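      // One decoding unit info SEI per DU: the first is placed before the first slice (after the AP/BP/PT SEIs); the following ones are inserted before the NALU indicated by accumNalsDU[i-1], counting non-SEI NAL units only.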
      if( m_pcCfg->getDecodingUnitInfoSEIEnabled() && hrd->getSubPicCpbParamsPresentFlag() )
      {
        m_pcEntropyCoder->setEntropyCoder(m_pcCavlcCoder, pcSlice);
        for( Int i = 0; i < ( pictureTimingSEI.m_numDecodingUnitsMinus1 + 1 ); i ++ )
        {
          OutputNALUnit nalu(NAL_UNIT_PREFIX_SEI, pcSlice->getTLayer());

          SEIDecodingUnitInfo tempSEI;
          tempSEI.m_decodingUnitIdx = i;
          tempSEI.m_duSptCpbRemovalDelay = pictureTimingSEI.m_duCpbRemovalDelayMinus1[i] + 1;
          tempSEI.m_dpbOutputDuDelayPresentFlag = false;
          tempSEI.m_picSptDpbOutputDuDelay = picSptDpbOutputDuDelay;

          AccessUnit::iterator it;
          // Insert the first one in the right location, before the first slice
          if(i == 0)
          {
            // Insert before the first slice.
            m_seiWriter.writeSEImessage(nalu.m_Bitstream, tempSEI, pcSlice->getSPS());
            writeRBSPTrailingBits(nalu.m_Bitstream);

            UInt seiPositionInAu = xGetFirstSeiLocation(accessUnit);
            UInt offsetPosition = m_activeParameterSetSEIPresentInAU
            + m_bufferingPeriodSEIPresentInAU
            + m_pictureTimingSEIPresentInAU;  // Insert DU info SEI after APS, BP and PT SEI
            for(j = 0, it = accessUnit.begin(); j < seiPositionInAu + offsetPosition; j++)
            {
              it++;
            }
            accessUnit.insert(it, new NALUnitEBSP(nalu));
          }
          else
          {
            Int ctr;
            // For the second decoding unit onwards we know how many NALUs are present
            for (ctr = 0, it = accessUnit.begin(); it != accessUnit.end(); it++)
            {
              if(ctr == accumNalsDU[ i - 1 ])
              {
                // Insert before the first slice.
                m_seiWriter.writeSEImessage(nalu.m_Bitstream, tempSEI, pcSlice->getSPS());
                writeRBSPTrailingBits(nalu.m_Bitstream);

                accessUnit.insert(it, new NALUnitEBSP(nalu));
                break;
              }
              if ((*it)->m_nalUnitType != NAL_UNIT_PREFIX_SEI && (*it)->m_nalUnitType != NAL_UNIT_SUFFIX_SEI)
              {
                ctr++;
              }
            }
          }
        }
      }
    }

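    // Per-picture bookkeeping: reset the SEI-present flags for the next access unit, copy the reconstruction to the output list, mark the picture as reconstructed, update the coded-picture counters, finish the console log line and release the substream buffers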
    xResetNonNestedSEIPresentFlags();
    xResetNestedSEIPresentFlags();
    pcPic->getPicYuvRec()->copyToPic(pcPicYuvRecOut);

    pcPic->setReconMark   ( true );
    m_bFirst = false;
    m_iNumPicCoded++;
    m_totalCoded ++;
    /* logging: insert a newline at end of picture period */
    printf("\n");
    fflush(stdout);

    delete[] pcSubstreamsOut;

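    // EFFICIENT_FIELD_IRAP: adjust iGOPid to account for the IRAP field reordering applied at the top of the GOP loop (see part 4.1), so both the IRAP field and the field it swapped with are coded exactly once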
#if EFFICIENT_FIELD_IRAP
    if(IRAPtoReorder)
    {
      if(swapIRAPForward)
      {
        if(iGOPid == IRAPGOPid)
        {
          iGOPid = IRAPGOPid +1;
          IRAPtoReorder = false;
        }
        else if(iGOPid == IRAPGOPid +1)
        {
          iGOPid --;
        }
      }
      else
      {
        if(iGOPid == IRAPGOPid)
        {
          iGOPid = IRAPGOPid -1;
        }
        else if(iGOPid == IRAPGOPid -1)
        {
          iGOPid = IRAPGOPid;
          IRAPtoReorder = false;
        }
      }
    }
#endif