360Lib: WS-PSNR
WS-PSNR is an objective quality metric for 360-degree video adopted by 360Lib. The idea is that when the projected 2D picture is mapped back onto the spherical viewing surface, samples at different latitudes cover different areas of the sphere. The metric therefore evaluates the 360-degree video by weighting each sample of the projected 2D picture according to its position on the sphere.
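In formula form (this is the standard WS-PSNR definition, written in my own notation; w(i,j) is the spherical-area weight of sample (i,j) and MAX is the peak sample value), the metric computed below is:

$$\mathrm{WS\text{-}PSNR} = 10\log_{10}\frac{MAX^2}{\mathrm{WMSE}},\qquad \mathrm{WMSE} = \frac{\sum_{i,j} w(i,j)\,\bigl(I_{\mathrm{ref}}(i,j)-I_{\mathrm{rec}}(i,j)\bigr)^2}{\sum_{i,j} w(i,j)}$$

In the 360Lib code below, the weight tables are pre-normalized in createTable, and xCalculateWSPSNR accumulates both the weighted SSD and the sum of the weights it actually uses, which together match the definition above.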
The TWSPSNRMetric class
Let's look at the WS-PSNR class TWSPSNRMetric defined in 360Lib. Its two key functions are createTable (builds the weight table for the coding projection format) and xCalculateWSPSNR (computes the WS-PSNR).
class TWSPSNRMetric
{
private:
  Bool      m_bEnabled;
  Double    m_dWSPSNR[3];
  Int       m_outputBitDepth[MAX_NUM_CHANNEL_TYPE];     ///< bit-depth of output file
  Int       m_referenceBitDepth[MAX_NUM_CHANNEL_TYPE];  ///< bit-depth of reference file
  // Weight tables for each projection format; _Y: luma weights, _C: chroma weights
  Double*   m_fErpWeight_Y;
  Double*   m_fErpWeight_C;
  Double*   m_fCubeWeight_Y;
  Double*   m_fCubeWeight_C;
  Double*   m_fEapWeight_Y;
  Double*   m_fEapWeight_C;
  Double*   m_fOctaWeight_Y;
  Double*   m_fOctaWeight_C;
  Double*   m_fIcoWeight_Y;
  Double*   m_fIcoWeight_C;
#if SVIDEO_WSPSNR_SSP
  Double*   m_fSspWeight_Y;
  Double*   m_fSspWeight_C;
#endif
  Int       m_codingGeoType;        // coding geometry (projection) type
  Int       m_iCodingFaceWidth;     // coding face width
  Int       m_iCodingFaceHeight;    // coding face height
  Int       m_iChromaSampleLocType; // chroma sample location type
#if SVIDEO_WSPSNR_E2E
  //for E2E WS-PSNR calculation;
#if !SVIDEO_E2E_METRICS
  TVideoIOYuv  *m_pcTVideoIOYuvInputFile;  //note: reference;
  TGeometry    *m_pRefGeometry;
  TGeometry    *m_pRecGeometry;
  TComPicYuv   *m_pcOrgPicYuv;
  TComPicYuv   *m_pcRecPicYuv;             //in original geometry domain;
#endif
#if !SVIDEO_E2E_METRICS
  Int          m_iLastFrmPOC;
  UInt         m_temporalSubsampleRatio;
  Int          m_iInputWidth;
  Int          m_iInputHeight;
  ChromaFormat m_inputChromaFomat;
#endif
#endif
public:
  TWSPSNRMetric();
  virtual ~TWSPSNRMetric();
  Bool    getWSPSNREnabled()                      { return m_bEnabled; }          // get m_bEnabled
  Void    setWSPSNREnabledFlag(Bool bEnabledFlag) { m_bEnabled = bEnabledFlag; }  // set m_bEnabled
  Void    setOutputBitDepth(Int iOutputBitDepth[MAX_NUM_CHANNEL_TYPE]);           // bit depth of the output picture
  Void    setReferenceBitDepth(Int iReferenceBitDepth[MAX_NUM_CHANNEL_TYPE]);     // bit depth of the reference picture
  // set the coding geometry information
  Void    setCodingGeoInfo(SVideoInfo& sVidInfo, Int iChromaSampleLocType) { m_codingGeoType = sVidInfo.geoType; m_iCodingFaceWidth = sVidInfo.iFaceWidth; m_iCodingFaceHeight = sVidInfo.iFaceHeight; m_iChromaSampleLocType = iChromaSampleLocType; }
#if SVIDEO_WSPSNR_E2E
#if SVIDEO_E2E_METRICS
  Void    setCodingGeoInfo2(SVideoInfo& sRefVideoInfo, SVideoInfo& sRecVideoInfo, InputGeoParam *pInGeoParam);
  Void    xCalculateE2EWSPSNR(TComPicYuv *pcRecPicYuv, TComPicYuv *pcOrigPicYuv);
#else
  Void    setCodingGeoInfo2(SVideoInfo& sRefVideoInfo, SVideoInfo& sRecVideoInfo, InputGeoParam *pInGeoParam, TVideoIOYuv& yuvInputFile, Int iInputWidth, Int iInputHeight, UInt tempSubsampleRatio);
  Void    xCalculateE2EWSPSNR(TComPicYuv *pcPicD, Int iPOC);
#endif
#endif
  Double* getWSPSNR() { return m_dWSPSNR; }  // get the WS-PSNR results
  Void    createTable(TComPicYuv* pcPicD, TGeometry *pcCodingGeomtry);    // build the weight table for the projection format
  Void    xCalculateWSPSNR(TComPicYuv* pcOrgPicYuv, TComPicYuv* pcPicD);  // compute WS-PSNR
  //inline Int round(POSType t) { return (Int)(t+ (t>=0? 0.5 :-0.5)); };
};
createTable
createTable builds the weight table for the coding projection format. Only the branches for the two most common formats, ERP and CMP, are shown here. Each projection format has its own weight table: for ERP the weight depends only on the row position (latitude), while for CMP it depends on the (x, y) position within each face. Consequently, the reference picture and the reconstructed picture must be in the same projection format; WS-PSNR cannot compare quality across different projection formats.
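Concretely, reading off the code below, the unnormalized weights are (H is the luma picture height for ERP; for CMP each face is W×W samples with r = W/2 and the face centre at (W/2, W/2)):

$$w_{\mathrm{ERP}}(y)=\cos\!\Bigl(\bigl(y+0.5-\tfrac{H}{2}\bigr)\tfrac{\pi}{H}\Bigr),\qquad w_{\mathrm{CMP}}(x,y)=\Bigl(1+\tfrac{d^2}{r^2}\Bigr)^{-3/2},\quad d^2=\bigl(x+0.5-\tfrac{W}{2}\bigr)^2+\bigl(y+0.5-\tfrac{W}{2}\bigr)^2$$

Each table is then normalized so that the per-sample weights over the whole picture (all 6 faces in the CMP case) sum to 1.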
// Build the weight table for the coding projection format
void TWSPSNRMetric::createTable(TComPicYuv* pcPicD, TGeometry *pcCodingGeomtry)
{
  if (!m_bEnabled)
  {
    return;
  }
  SVideoInfo *pCodingSVideoInfo = pcCodingGeomtry->getSVideoInfo();
  Int iFaceWidth  = pCodingSVideoInfo->iFaceWidth;
  Int iFaceHeight = pCodingSVideoInfo->iFaceHeight;
  Int iScaleX = pcPicD->getComponentScaleX(COMPONENT_Cb);
  Int iScaleY = pcPicD->getComponentScaleY(COMPONENT_Cb);
  Double dChromaOffset[2] = {0.0, 0.0}; //[0: X; 1: Y];
  if (pcPicD->getChromaFormat() == CHROMA_420)
  {
    dChromaOffset[0] = (m_iChromaSampleLocType == 0 || m_iChromaSampleLocType == 2) ? 0 : 0.5;
    dChromaOffset[1] = (m_iChromaSampleLocType == 2 || m_iChromaSampleLocType == 3) ? 0 : 0.5;
  }
  // ERP (equirectangular) projection
  if (pcCodingGeomtry->getType() == SVIDEO_EQUIRECT)
  {
    Double fWeightSum_Y = 0;
    Double fWeightSum_C = 0;
    Int iWidth   = pcPicD->getWidth (COMPONENT_Y);   // luma width
    Int iHeight  = pcPicD->getHeight(COMPONENT_Y);   // luma height
    Int iWidthC  = pcPicD->getWidth (COMPONENT_Cb);  // chroma width
    Int iHeightC = pcPicD->getHeight(COMPONENT_Cb);  // chroma height
    m_fErpWeight_Y = (Double*)malloc(iHeight *sizeof(Double));  // luma weights depend only on the row
    m_fErpWeight_C = (Double*)malloc(iHeightC*sizeof(Double));  // chroma weights depend only on the row
    // compute the latitude weights for luma and chroma
    for (Int y = 0; y < iHeight; y++)
    {
      m_fErpWeight_Y[y] = scos((y - (iHeight/2 - 0.5)) * S_PI / iHeight);
      fWeightSum_Y += m_fErpWeight_Y[y];
    }
    for (Int y = 0; y < iHeightC; y++)
    {
      m_fErpWeight_C[y] = scos(((y<<iScaleY) + dChromaOffset[1] + 0.5 - iHeight/2) * S_PI / iHeight);
      fWeightSum_C += m_fErpWeight_C[y];
    }
    // normalize so that the weights over the whole picture sum to 1
    for (Int y = 0; y < iHeight; y++)
    {
      m_fErpWeight_Y[y] = m_fErpWeight_Y[y] / fWeightSum_Y / iWidth;
    }
    for (Int y = 0; y < iHeightC; y++)
    {
      m_fErpWeight_C[y] = m_fErpWeight_C[y] / fWeightSum_C / (iWidthC);
    }
  }
  // CMP (cubemap) projection
  else if (pcCodingGeomtry->getType() == SVIDEO_CUBEMAP)
  {
    Double fWeightSum_Y = 0;
    Double fWeightSum_C = 0;
    // luma/chroma weights depend on both x and y within each face
    m_fCubeWeight_Y = (Double*)malloc(iFaceHeight * iFaceWidth * sizeof(Double));
    m_fCubeWeight_C = (Double*)malloc((iFaceHeight >> iScaleY) * (iFaceWidth >> iScaleX) * sizeof(Double));
    // compute the luma and chroma weights
    for (Int y = 0; y < iFaceHeight; y++)
    {
      for (Int x = 0; x < iFaceWidth; x++)
      {
        Int ci, cj, r2;
        Double d2;
        ci = iFaceWidth/2;
        cj = iFaceHeight/2;
        d2 = (x+0.5-ci)*(x+0.5-ci) + (y+0.5-cj)*(y+0.5-cj);
        r2 = (iFaceWidth/2)*(iFaceWidth/2);
        Double weight = 1.0/((1+d2/r2)*ssqrt(1.0*(1+d2/r2)));
        m_fCubeWeight_Y[iFaceWidth*y+x] = weight;
        fWeightSum_Y += weight;
      }
    }
    // normalize; each of the 6 faces contributes 1/6 of the total weight
    for (Int y = 0; y < iFaceHeight; y++)
    {
      for (Int x = 0; x < iFaceWidth; x++)
      {
        m_fCubeWeight_Y[iFaceHeight*y+x] = (m_fCubeWeight_Y[iFaceHeight*y+x]) / fWeightSum_Y / 6.0;
      }
    }
    for (Int y = 0; y < (iFaceHeight>>iScaleY); y++)
    {
      for (Int x = 0; x < (iFaceWidth>>iScaleX); x++)
      {
        Int ci, cj, r2;
        Double d2;
        ci = iFaceWidth/2;
        cj = iFaceHeight/2;
        d2 = (x*(1<<iScaleX)+dChromaOffset[0]+0.5 - ci)*(x*(1<<iScaleX)+dChromaOffset[0]+0.5 - ci) + (y*(1<<iScaleY)+dChromaOffset[1]+0.5 - cj)*(y*(1<<iScaleY)+dChromaOffset[1]+0.5 - cj);
        r2 = (iFaceWidth/2)*(iFaceWidth/2);
        Double weight = 1.0/((1+d2/r2)*sqrt(1.0*(1+d2/r2)));
        m_fCubeWeight_C[(iFaceWidth>>iScaleX)*y+x] = weight;
        fWeightSum_C += weight;
      }
    }
    for (Int y = 0; y < (iFaceHeight>>iScaleY); y++)
    {
      for (Int x = 0; x < (iFaceWidth>>iScaleX); x++)
      {
        m_fCubeWeight_C[(iFaceWidth>>iScaleX)*y+x] = (m_fCubeWeight_C[(iFaceWidth>>iScaleX)*y+x]) / fWeightSum_C / 6.0;
      }
    }
  }
}
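As a quick sanity check on the normalization above, here is a small standalone program (not part of 360Lib; the 4096x2048 resolution is just an example) that rebuilds the ERP luma weights the same way and verifies that the per-sample weights over the whole picture add up to 1:

#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
  const int iWidth = 4096, iHeight = 2048;        // example ERP resolution (assumption)
  const double PI = 3.14159265358979323846;
  std::vector<double> w(iHeight);
  double fWeightSum = 0.0;
  for (int y = 0; y < iHeight; y++)
  {
    // same latitude weight as createTable: cos((y + 0.5 - H/2) * PI / H)
    w[y] = cos((y - (iHeight / 2 - 0.5)) * PI / iHeight);
    fWeightSum += w[y];
  }
  double total = 0.0;
  for (int y = 0; y < iHeight; y++)
  {
    w[y] = w[y] / fWeightSum / iWidth;            // same normalization as createTable
    total += w[y] * iWidth;                       // each row has iWidth samples
  }
  printf("sum of per-sample weights = %f\n", total);  // expected: 1.000000
  return 0;
}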
xCalculateWSPSNR
xCalculateWSPSNR computes the WS-PSNR. Again, only the ERP and CMP branches are shown. It uses the precomputed weight table to accumulate a weighted SSD and then converts it into a PSNR value.
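In terms of what the function actually accumulates, the value reported per component is (B is the bit depth used for the PSNR calculation, i.e. the larger of the reference and output bit depths; both inputs are left-shifted to B before the difference is taken):

$$\mathrm{WS\text{-}PSNR}=10\log_{10}\frac{\bigl(255\cdot 2^{\,B-8}\bigr)^2\sum_{i,j} w(i,j)}{\sum_{i,j} w(i,j)\,\bigl(I_{\mathrm{ref}}(i,j)-I_{\mathrm{rec}}(i,j)\bigr)^2}$$

with 999.99 reported when the weighted SSD is zero.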
// Compute WS-PSNR
Void TWSPSNRMetric::xCalculateWSPSNR( TComPicYuv* pcOrgPicYuv, TComPicYuv* pcPicD )
{
  // align the luma/chroma bit depths of the reference and output pictures
  Int iBitDepthForPSNRCalc[MAX_NUM_CHANNEL_TYPE];
  Int iReferenceBitShift[MAX_NUM_CHANNEL_TYPE];
  Int iOutputBitShift[MAX_NUM_CHANNEL_TYPE];
  iBitDepthForPSNRCalc[CHANNEL_TYPE_LUMA]   = std::max(m_outputBitDepth[CHANNEL_TYPE_LUMA],   m_referenceBitDepth[CHANNEL_TYPE_LUMA]);
  iBitDepthForPSNRCalc[CHANNEL_TYPE_CHROMA] = std::max(m_outputBitDepth[CHANNEL_TYPE_CHROMA], m_referenceBitDepth[CHANNEL_TYPE_CHROMA]);
  iReferenceBitShift[CHANNEL_TYPE_LUMA]     = iBitDepthForPSNRCalc[CHANNEL_TYPE_LUMA]   - m_referenceBitDepth[CHANNEL_TYPE_LUMA];
  iReferenceBitShift[CHANNEL_TYPE_CHROMA]   = iBitDepthForPSNRCalc[CHANNEL_TYPE_CHROMA] - m_referenceBitDepth[CHANNEL_TYPE_CHROMA];
  iOutputBitShift[CHANNEL_TYPE_LUMA]        = iBitDepthForPSNRCalc[CHANNEL_TYPE_LUMA]   - m_outputBitDepth[CHANNEL_TYPE_LUMA];
  iOutputBitShift[CHANNEL_TYPE_CHROMA]      = iBitDepthForPSNRCalc[CHANNEL_TYPE_CHROMA] - m_outputBitDepth[CHANNEL_TYPE_CHROMA];
  memset(m_dWSPSNR, 0, sizeof(Double)*3);
  TComPicYuv &picd = *pcPicD;
  //Double SSDspsnr[3]={0, 0 ,0};
  //ChromaFormat chromaFormat = pcPicD->getChromaFormat();
  for (Int chan = 0; chan < pcPicD->getNumberValidComponents(); chan++)
  {
    const ComponentID ch = ComponentID(chan);
    const Pel* pOrg       = pcOrgPicYuv->getAddr(ch);
    const Int  iOrgStride = pcOrgPicYuv->getStride(ch);
    const Pel* pRec       = picd.getAddr(ch);
    const Int  iRecStride = picd.getStride(ch);
    const Int  iWidth     = pcPicD->getWidth (ch);
    const Int  iHeight    = pcPicD->getHeight(ch);
    Double fWeight    = 1;
    Double fWeightSum = 0;
    //Int iSize = iWidth*iHeight;
    Double SSDwpsnr = 0;
    //WS-PSNR
    for (Int y = 0; y < iHeight; y++)
    {
      // for ERP the weight depends only on the row
      if (m_codingGeoType == SVIDEO_EQUIRECT)
      {
        if (!chan)  // luma
        {
          fWeight = m_fErpWeight_Y[y];
        }
        else        // chroma
        {
          fWeight = m_fErpWeight_C[y];
        }
      }
      for (Int x = 0; x < iWidth; x++)
      {
        // difference between reference and output samples, after bit-depth alignment
        Intermediate_Int iDiff = (Intermediate_Int)( (pOrg[x]<<iReferenceBitShift[toChannelType(ch)]) - (pRec[x]<<iOutputBitShift[toChannelType(ch)]) );
        // for CMP the weight depends on both x and y
        if (m_codingGeoType == SVIDEO_CUBEMAP)
        {
          if (!chan)
          {
            // 4x3 frame packing: the inactive regions get zero weight
            if (iWidth/4 == iHeight/3 && x >= iWidth/4 && (y < iHeight/3 || y >= 2*iHeight/3))
            {
              fWeight = 0;
            }
            else
            {
              fWeight = m_fCubeWeight_Y[(m_iCodingFaceWidth)*(y%(m_iCodingFaceHeight)) + (x%(m_iCodingFaceWidth))];
            }
          }
          else
          {
            if (iWidth/4 == iHeight/3 && x >= iWidth/4 && (y < iHeight/3 || y >= 2*iHeight/3))
            {
              fWeight = 0;
            }
            else
            {
              fWeight = m_fCubeWeight_C[(m_iCodingFaceWidth>>(pcPicD->getComponentScaleX(COMPONENT_Cb)))*(y%(m_iCodingFaceHeight>>(pcPicD->getComponentScaleY(COMPONENT_Cb)))) + (x%(m_iCodingFaceWidth>>(pcPicD->getComponentScaleX(COMPONENT_Cb))))];
            }
          }
        }
        if (fWeight > 0)
          fWeightSum += fWeight;
        SSDwpsnr += iDiff * iDiff * fWeight;  // weighted SSD
      }
      pOrg += iOrgStride;
      pRec += iRecStride;
    }
    const Int maxval = 255 << (iBitDepthForPSNRCalc[toChannelType(ch)] - 8);
    //const Double fRefValue = (Double) maxval * maxval * iSize;
    m_dWSPSNR[ch] = ( SSDwpsnr ? 10.0 * log10( (maxval * maxval * fWeightSum) / (Double)SSDwpsnr ) : 999.99 );  // compute PSNR from the weighted SSD
  }
}
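To round things off, a minimal usage sketch of the class (not taken from 360Lib's encoder application; the picture, geometry and parameter objects are assumed to exist already and their names are illustrative only):

// Minimal usage sketch; pcOrgPicYuv, pcRecPicYuv, pcCodingGeometry, sVideoInfo,
// aiOutputBitDepth, aiReferenceBitDepth and iChromaSampleLocType are assumed to
// be set up elsewhere (hypothetical names, not 360Lib's actual driver code).
TWSPSNRMetric cWSPSNR;
cWSPSNR.setWSPSNREnabledFlag(true);
cWSPSNR.setOutputBitDepth(aiOutputBitDepth);         // e.g. {10, 10}
cWSPSNR.setReferenceBitDepth(aiReferenceBitDepth);   // e.g. {10, 10}
cWSPSNR.setCodingGeoInfo(sVideoInfo, iChromaSampleLocType);

cWSPSNR.createTable(pcRecPicYuv, pcCodingGeometry);  // once, when the geometry is known
cWSPSNR.xCalculateWSPSNR(pcOrgPicYuv, pcRecPicYuv);  // once per frame

Double* pdWSPSNR = cWSPSNR.getWSPSNR();              // [0] = Y, [1] = Cb, [2] = Cr
printf("WS-PSNR  Y %.4lf  U %.4lf  V %.4lf\n", pdWSPSNR[0], pdWSPSNR[1], pdWSPSNR[2]);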