fix merge

Anthony Minessale 2016-09-27 16:44:25 -05:00
commit f38dde9fec
1137 changed files with 118714 additions and 203850 deletions

View File

@ -286,6 +286,7 @@ library_include_HEADERS = \
src/include/switch_utils.h \
src/include/switch_rtp.h \
src/include/switch_jitterbuffer.h \
src/include/switch_estimators.h \
src/include/switch_rtcp_frame.h \
src/include/switch_stun.h \
src/include/switch_nat.h \
@ -352,6 +353,7 @@ libfreeswitch_la_SOURCES = \
src/switch_regex.c \
src/switch_rtp.c \
src/switch_jitterbuffer.c \
src/switch_estimators.c \
src/switch_ivr_bridge.c \
src/switch_ivr_originate.c \
src/switch_ivr_async.c \

View File

@ -1,4 +1,4 @@
JSFILES=src/jquery.FSRTC.js src/jquery.jsonrpcclient.js src/jquery.verto.js
JSFILES=src/jquery.FSRTC.js src/jquery.jsonrpcclient.js src/jquery.verto.js src/vendor/adapter-latest.js
all: jsmin verto-min.js

View File

@ -100,22 +100,10 @@
candidateList: []
};
if (moz) {
this.constraints = {
offerToReceiveAudio: this.options.useSpeak === "none" ? false : true,
offerToReceiveVideo: this.options.useVideo ? true : false,
};
} else {
this.constraints = {
optional: [{
'DtlsSrtpKeyAgreement': 'true'
}],mandatory: {
OfferToReceiveAudio: this.options.useSpeak === "none" ? false : true,
OfferToReceiveVideo: this.options.useVideo ? true : false,
}
};
}
this.constraints = {
offerToReceiveAudio: this.options.useSpeak === "none" ? false : true,
offerToReceiveVideo: this.options.useVideo ? true : false,
};
if (self.options.useVideo) {
self.options.useVideo.style.display = 'none';
@ -133,19 +121,11 @@
if (obj) {
self.options.useVideo = obj;
self.options.localVideo = local;
if (moz) {
self.constraints.offerToReceiveVideo = true;
} else {
self.constraints.mandatory.OfferToReceiveVideo = true;
}
self.constraints.offerToReceiveVideo = true;
} else {
self.options.useVideo = null;
self.options.localVideo = null;
if (moz) {
self.constraints.offerToReceiveVideo = false;
} else {
self.constraints.mandatory.OfferToReceiveVideo = false;
}
self.constraints.offerToReceiveVideo = false;
}
if (self.options.useVideo) {
@ -193,18 +173,9 @@
};
function setCompat() {
$.FSRTC.moz = !!navigator.mozGetUserMedia;
//navigator.getUserMedia || (navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia || navigator.msGetUserMedia);
if (!navigator.getUserMedia) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia || navigator.msGetUserMedia;
}
}
function checkCompat() {
if (!navigator.getUserMedia) {
alert('This application cannot function in this browser.');
return false;
}
return true;
}
@ -258,6 +229,21 @@
doCallback(self, "onICESDP", msg);
}
FSRTCattachMediaStream = function(element, stream) {
if (element && element.id && attachMediaStream) {
attachMediaStream(element, stream);
} else {
if (typeof element.srcObject !== 'undefined') {
element.srcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
} else {
console.error('Error attaching stream to element.');
}
}
}
function onRemoteStream(self, stream) {
if (self.options.useVideo) {
self.options.useVideo.style.display = 'block';
@ -266,16 +252,8 @@
var element = self.options.useAudio;
console.log("REMOTE STREAM", stream, element);
if (typeof element.srcObject !== 'undefined') {
element.srcObject = stream;
} else if (typeof element.mozSrcObject !== 'undefined') {
element.mozSrcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
} else {
console.error('Error attaching stream to element.');
}
FSRTCattachMediaStream(element, stream);
self.options.useAudio.play();
self.remoteStream = stream;
}
@ -306,11 +284,7 @@
if (self.options.useVideo) {
self.options.useVideo.style.display = 'none';
if (moz) {
self.options.useVideo['mozSrcObject'] = null;
} else {
self.options.useVideo['src'] = '';
}
self.options.useVideo['src'] = '';
}
if (self.localStream) {
@ -331,11 +305,7 @@
if (self.options.localVideo) {
self.options.localVideo.style.display = 'none';
if (moz) {
self.options.localVideo['mozSrcObject'] = null;
} else {
self.options.localVideo['src'] = '';
}
self.options.localVideo['src'] = '';
}
if (self.options.localVideoStream) {
@ -426,7 +396,7 @@
function onSuccess(stream) {
self.localStream = stream;
self.peer = RTCPeerConnection({
self.peer = FSRTCPeerConnection({
type: self.type,
attachStream: self.localStream,
onICE: function(candidate) {
@ -469,8 +439,8 @@
constraints: {
audio: false,
video: {
mandatory: self.options.videoParams,
optional: []
//mandatory: self.options.videoParams,
//optional: []
},
},
localVideo: self.options.localVideo,
@ -501,31 +471,29 @@
console.log("Microphone Disabled");
audio = false;
} else if (obj.options.videoParams && obj.options.screenShare) {//obj.options.videoParams.chromeMediaSource == 'desktop') {
//obj.options.videoParams = {
// chromeMediaSource: 'screen',
// maxWidth:screen.width,
// maxHeight:screen.height
// chromeMediaSourceId = sourceId;
// };
console.error("SCREEN SHARE");
console.error("SCREEN SHARE", obj.options.videoParams);
audio = false;
} else {
audio = {
mandatory: {},
optional: []
//mandatory: {},
//optional: []
advanced: []
};
if (obj.options.useMic !== "any") {
audio.optional = [{sourceId: obj.options.useMic}]
//audio.optional = [{sourceId: obj.options.useMic}]
audio.deviceId = {exact: obj.options.useMic};
}
//FIXME
if (obj.options.audioParams) {
for (var key in obj.options.audioParams) {
var con = {};
con[key] = obj.options.audioParams[key];
audio.optional.push(con);
//con[key] = obj.options.audioParams[key];
if (obj.options.audioParams[key]) {
con.exact = key;
audio.advanced.push(con);
}
}
}
@ -536,10 +504,8 @@
getUserMedia({
constraints: {
audio: false,
video: {
mandatory: obj.options.videoParams,
optional: []
},
video: obj.options.videoParams
},
localVideo: obj.options.localVideo,
onsuccess: function(e) {self.options.localVideoStream = e; console.log("local video ready");},
@ -549,33 +515,57 @@
var video = {};
var bestFrameRate = obj.options.videoParams.vertoBestFrameRate;
var minFrameRate = obj.options.videoParams.minFrameRate || 15;
delete obj.options.videoParams.vertoBestFrameRate;
video = {
mandatory: obj.options.videoParams,
optional: []
}
var useVideo = obj.options.useVideo;
if (useVideo && obj.options.useCamera && obj.options.useCamera !== "none") {
if (!video.optional) {
video.optional = [];
}
if (obj.options.useCamera !== "any") {
video.optional.push({sourceId: obj.options.useCamera});
}
if (obj.options.screenShare) {
// Fix so Chrome keeps working for now; will need to change once we figure out how to do this with a non-mandatory-style constraint.
var opt = [];
opt.push({sourceId: obj.options.useCamera});
if (bestFrameRate) {
video.optional.push({minFrameRate: bestFrameRate});
video.optional.push({maxFrameRate: bestFrameRate});
opt.push({minFrameRate: bestFrameRate});
opt.push({maxFrameRate: bestFrameRate});
}
video = {
mandatory: obj.options.videoParams,
optional: opt
};
} else {
console.log("Camera Disabled");
video = false;
useVideo = false;
video = {
//mandatory: obj.options.videoParams,
width: {min: obj.options.videoParams.minWidth, max: obj.options.videoParams.maxWidth},
height: {min: obj.options.videoParams.minHeight, max: obj.options.videoParams.maxHeight}
};
var useVideo = obj.options.useVideo;
if (useVideo && obj.options.useCamera && obj.options.useCamera !== "none") {
//if (!video.optional) {
//video.optional = [];
//}
if (obj.options.useCamera !== "any") {
//video.optional.push({sourceId: obj.options.useCamera});
video.deviceId = obj.options.useCamera;
}
if (bestFrameRate) {
//video.optional.push({minFrameRate: bestFrameRate});
//video.optional.push({maxFrameRate: bestFrameRate});
video.frameRate = {ideal: bestFrameRate, min: minFrameRate, max: 30};
}
} else {
console.log("Camera Disabled");
video = false;
useVideo = false;
}
}
return {audio: audio, video: video, useVideo: useVideo};
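
For reference, a minimal standalone sketch (not part of this commit) of the spec-style constraint shapes the rewritten getMediaParams() now builds, passed here to the promise-based navigator.mediaDevices.getUserMedia; the device IDs and the echoCancellation flag are placeholders:

// Illustrative only: constraints in the shape produced by the new getMediaParams().
var constraints = {
    audio: {
        deviceId: { exact: "default-mic-id" },       // placeholder; taken from options.useMic
        advanced: [ { echoCancellation: true } ]     // derived from options.audioParams flags
    },
    video: {
        deviceId: "front-camera-id",                 // placeholder; taken from options.useCamera
        width:  { min: 320, max: 1920 },             // videoParams.minWidth / maxWidth
        height: { min: 180, max: 1080 },             // videoParams.minHeight / maxHeight
        frameRate: { ideal: 15, min: 15, max: 30 }   // vertoBestFrameRate / minFrameRate
    }
};
navigator.mediaDevices.getUserMedia(constraints)
    .then(function(stream) { console.log("got tracks", stream.getTracks()); })
    .catch(function(err) { console.error("getUserMedia failed", err); });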
@ -597,14 +587,10 @@
self.localStream = stream;
if (screen) {
if (moz) {
self.constraints.OfferToReceiveVideo = false;
} else {
self.constraints.mandatory.OfferToReceiveVideo = false;
}
self.constraints.offerToReceiveVideo = false;
}
self.peer = RTCPeerConnection({
self.peer = FSRTCPeerConnection({
type: self.type,
attachStream: self.localStream,
onICE: function(candidate) {
@ -671,57 +657,23 @@
// 2013, @muazkh - github.com/muaz-khan
// MIT License - https://www.webrtc-experiment.com/licence/
// Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/RTCPeerConnection
window.moz = !!navigator.mozGetUserMedia;
function RTCPeerConnection(options) {
function FSRTCPeerConnection(options) {
var gathering = false, done = false;
var w = window,
PeerConnection = w.mozRTCPeerConnection || w.webkitRTCPeerConnection,
SessionDescription = w.mozRTCSessionDescription || w.RTCSessionDescription,
IceCandidate = w.mozRTCIceCandidate || w.RTCIceCandidate;
var STUN = {
url: !moz ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121'
};
var iceServers = null;
var config = {};
var default_ice = {
urls: ['stun:stun.l.google.com:19302']
};
if (options.iceServers) {
var tmp = options.iceServers;
if (typeof(tmp) === "boolean") {
tmp = null;
}
if (tmp && !(typeof(tmp) == "object" && tmp.constructor === Array)) {
console.warn("iceServers must be an array, reverting to default ice servers");
tmp = null;
}
iceServers = {
iceServers: tmp || [STUN]
};
if (!moz && !tmp) {
iceServers.iceServers = [STUN];
}
if (typeof(options.iceServers) === "boolean") {
config.iceServers = [default_ice];
} else {
config.iceServers = options.iceServers;
}
}
var optional = {
optional: []
};
if (!moz) {
optional.optional = [{
DtlsSrtpKeyAgreement: true
},
{
RtpDataChannels: options.onChannelMessage ? true : false
}];
}
var peer = new PeerConnection(iceServers, optional);
var peer = new window.RTCPeerConnection(config);
openOffererChannel();
var x = 0;
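
A condensed sketch (not from this commit) of the unprefixed WebRTC calls FSRTCPeerConnection now relies on, kept in the legacy callback style the function still uses; the STUN URL mirrors the default_ice above:

// Illustrative only: standard constructor plus legacy-callback createOffer.
var pc = new window.RTCPeerConnection({
    iceServers: [{ urls: ['stun:stun.l.google.com:19302'] }]
});
pc.createOffer(function(sessionDescription) {
    pc.setLocalDescription(sessionDescription); // then ship the SDP to the far end
}, function(err) {
    console.error('createOffer failed', err);
}, { offerToReceiveAudio: true, offerToReceiveVideo: true });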
@ -736,34 +688,10 @@
}
if (options.type == "offer") {
if ((!moz || (!options.sentICESDP && peer.localDescription.sdp.match(/a=candidate/)) && !x && options.onICESDP)) {
options.onICESDP(peer.localDescription);
//x = 1;
/*
x = 1;
peer.createOffer(function(sessionDescription) {
sessionDescription.sdp = serializeSdp(sessionDescription.sdp);
peer.setLocalDescription(sessionDescription);
if (options.onICESDP) {
options.onICESDP(sessionDescription);
}
}, onSdpError, constraints);
*/
}
options.onICESDP(peer.localDescription);
} else {
if (!x && options.onICESDP) {
options.onICESDP(peer.localDescription);
//x = 1;
/*
x = 1;
peer.createAnswer(function(sessionDescription) {
sessionDescription.sdp = serializeSdp(sessionDescription.sdp);
peer.setLocalDescription(sessionDescription);
if (options.onICESDP) {
options.onICESDP(sessionDescription);
}
}, onSdpError, constraints);
*/
}
}
}
@ -821,10 +749,10 @@
//console.debug('on:add:stream', remoteMediaStream);
};
var constraints = options.constraints || {
offerToReceiveAudio: true,
offerToReceiveVideo: true
};
//var constraints = options.constraints || {
// offerToReceiveAudio: true,
//offerToReceiveVideo: true
//};
// onOfferSDP(RTCSessionDescription)
function createOffer() {
@ -834,13 +762,8 @@
sessionDescription.sdp = serializeSdp(sessionDescription.sdp);
peer.setLocalDescription(sessionDescription);
options.onOfferSDP(sessionDescription);
/* old mozilla behaviour the SDP was already great right away */
if (moz && options.onICESDP && sessionDescription.sdp.match(/a=candidate/)) {
options.onICESDP(sessionDescription);
options.sentICESDP = 1;
}
},
onSdpError, constraints);
onSdpError, options.constraints);
}
// onAnswerSDP(RTCSessionDescription)
@ -848,7 +771,7 @@
if (options.type != "answer") return;
//options.offerSDP.sdp = addStereo(options.offerSDP.sdp);
peer.setRemoteDescription(new SessionDescription(options.offerSDP), onSdpSuccess, onSdpError);
peer.setRemoteDescription(new window.RTCSessionDescription(options.offerSDP), onSdpSuccess, onSdpError);
peer.createAnswer(function(sessionDescription) {
sessionDescription.sdp = serializeSdp(sessionDescription.sdp);
peer.setLocalDescription(sessionDescription);
@ -856,11 +779,11 @@
options.onAnswerSDP(sessionDescription);
}
},
onSdpError, constraints);
onSdpError);
}
// if Mozilla Firefox & DataChannel; offer/answer will be created later
if ((options.onChannelMessage && !moz) || !options.onChannelMessage) {
if ((options.onChannelMessage) || !options.onChannelMessage) {
createOffer();
createAnswer();
}
@ -899,9 +822,6 @@
}
function serializeSdp(sdp) {
//if (!moz) sdp = setBandwidth(sdp);
//sdp = getInteropSDP(sdp);
//console.debug(sdp);
return sdp;
}
@ -909,29 +829,18 @@
var channel;
function openOffererChannel() {
if (!options.onChannelMessage || (moz && !options.onOfferSDP)) return;
if (!options.onChannelMessage) return;
_openOffererChannel();
if (!moz) return;
navigator.mozGetUserMedia({
audio: true,
fake: true
},
function(stream) {
peer.addStream(stream);
createOffer();
},
useless);
return;
}
function _openOffererChannel() {
channel = peer.createDataChannel(options.channel || 'RTCDataChannel', moz ? {} : {
channel = peer.createDataChannel(options.channel || 'RTCDataChannel', {
reliable: false
});
if (moz) channel.binaryType = 'blob';
setChannelEvents();
}
@ -955,8 +864,6 @@
};
}
if (options.onAnswerSDP && moz && options.onChannelMessage) openAnswererChannel();
function openAnswererChannel() {
peer.ondatachannel = function(event) {
channel = event.channel;
@ -964,16 +871,7 @@
setChannelEvents();
};
if (!moz) return;
navigator.mozGetUserMedia({
audio: true,
fake: true
},
function(stream) {
peer.addStream(stream);
createAnswer();
},
useless);
return;
}
// fake:true is also available on chrome under a flag!
@ -993,10 +891,10 @@
return {
addAnswerSDP: function(sdp, cbSuccess, cbError) {
peer.setRemoteDescription(new SessionDescription(sdp), cbSuccess ? cbSuccess : onSdpSuccess, cbError ? cbError : onSdpError);
peer.setRemoteDescription(new window.RTCSessionDescription(sdp), cbSuccess ? cbSuccess : onSdpSuccess, cbError ? cbError : onSdpError);
},
addICE: function(candidate) {
peer.addIceCandidate(new IceCandidate({
peer.addIceCandidate(new window.RTCIceCandidate({
sdpMLineIndex: candidate.sdpMLineIndex,
candidate: candidate.candidate
}));
@ -1026,14 +924,14 @@
// getUserMedia
var video_constraints = {
mandatory: {},
optional: []
//mandatory: {},
//optional: []
};
function getUserMedia(options) {
var n = navigator,
media;
n.getMedia = n.webkitGetUserMedia || n.mozGetUserMedia;
n.getMedia = n.getUserMedia;
n.getMedia(options.constraints || {
audio: true,
video: video_constraints
@ -1044,15 +942,8 @@
});
function streaming(stream) {
//var video = options.video;
//var localVideo = options.localVideo;
//if (video) {
// video[moz ? 'mozSrcObject' : 'src'] = moz ? stream : window.webkitURL.createObjectURL(stream);
//video.play();
//}
if (options.localVideo) {
options.localVideo[moz ? 'mozSrcObject' : 'src'] = moz ? stream : window.webkitURL.createObjectURL(stream);
options.localVideo['src'] = window.URL.createObjectURL(stream);
options.localVideo.style.display = 'block';
}
@ -1108,23 +999,26 @@
}
var video = {
mandatory: {},
optional: []
//mandatory: {},
//optional: []
}
//FIXME
if (cam) {
video.optional = [{sourceId: cam}];
//video.optional = [{sourceId: cam}];
video.deviceId = {exact: cam};
}
w = resList[resI][0];
h = resList[resI][1];
resI++;
video.mandatory = {
"minWidth": w,
"minHeight": h,
"maxWidth": w,
"maxHeight": h
video = {
width: w,
height: h
//"minWidth": w,
//"minHeight": h,
//"maxWidth": w,
//"maxHeight": h
};
getUserMedia({

File diff suppressed because it is too large

View File

@ -108,6 +108,7 @@
<script type="text/javascript" src="../js/src/jquery.jsonrpcclient.js"></script>
<script type="text/javascript" src="../js/src/jquery.FSRTC.js"></script>
<script type="text/javascript" src="../js/src/jquery.verto.js"></script>
<script type="text/javascript" src="../js/src/vendor/adapter-latest.js"></script>
<script type="text/javascript" src="js/3rd-party/getScreenId.js"></script>
<script type="text/javascript" src="js/3rd-party/md5.min.js"></script>

View File

@ -150,5 +150,6 @@
"CHAT_GAIN_PLUS": "Gain +",
"LANGUAGE": "Language:",
"BROWSER_LANGUAGE": "Browser Language",
"FACTORY_RESET_SETTINGS": "Factory Reset Settings"
"FACTORY_RESET_SETTINGS": "Factory Reset Settings",
"AUTOGAIN_CONTROL": "Auto Gain Control"
}

View File

@ -148,5 +148,6 @@
"CHAT_VOL_PLUS": "Vol +",
"CHAT_GAIN_MINUS": "Ganho -",
"CHAT_GAIN_PLUS": "Ganho +",
"FACTORY_RESET_SETTINGS": "Redefinir configurações"
"FACTORY_RESET_SETTINGS": "Redefinir configurações",
"AUTOGAIN_CONTROL": "Controle de Ganho Automático (AGC)"
}

View File

@ -188,7 +188,8 @@
<div class="chat-message-input">
<form ng-submit="send()" >
<div class="chat-message-input-group">
<textarea ng-model="message" ng-keydown="($event.keyCode == 13 && $event.shiftKey !== true) && send($event)" required="required" class="form-control input-sm" placeholder="{{ 'CHAT_TYPE_MESSAGE' | translate }}"></textarea>
<textarea ng-model="message" ng-keydown="($event.keyCode == 13 && $event.shiftKey !== true) && send($event)" required="required" class="form-control input-sm" placeholder="{{ 'CHAT_TYPE_MESSAGE' | translate }}"
ng-focus="disableOnKeydownDtmf()" ng-blur="enableOnKeydownDtmf()"></textarea>
<button class="btn btn-success btn-sm" type="submit">
{{ 'CHAT_SEND_MESSAGE' | translate }}
<span class="mdi-navigation-arrow-forward chat-message-input-group-icon-button"></span>

View File

@ -1,25 +1,25 @@
<div class="panel panel-default shadow-z-2" ng-class="{'shadow-z-2': !call_history}">
<form name="form">
<div class="input-group dialpad-display">
<input name="dialpadnumber" type="text" class="form-control text-center" placeholder="" ng-model="dialpadNumber" />
<input name="dtmfHistory" type="text" class="form-control text-center" placeholder="" ng-model="dtmfHistory.value" />
</div>
<div class="panel-body">
<div class="dialpad-numbers">
<div class="row">
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(1)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(1)">
<h2 class="dialpad-number">1</h2>
<span class="dialpad-alpha">./@</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(2)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(2)">
<h2 class="dialpad-number">2</h2>
<span class="dialpad-alpha">ABC</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(3)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(3)">
<h2 class="dialpad-number">3</h2>
<span class="dialpad-alpha">DEF</span>
</a>
@ -27,19 +27,19 @@
</div>
<div class="row">
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(4)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(4)">
<h2 class="dialpad-number">4</h2>
<span class="dialpad-alpha">GHI</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(5)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(5)">
<h2 class="dialpad-number">5</h2>
<span class="dialpad-alpha">JKL</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(6)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(6)">
<h2 class="dialpad-number">6</h2>
<span class="dialpad-alpha">MNO</span>
</a>
@ -47,19 +47,19 @@
</div>
<div class="row">
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(7)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(7)">
<h2 class="dialpad-number">7</h2>
<span class="dialpad-alpha">PQRS</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(8)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(8)">
<h2 class="dialpad-number">8</h2>
<span class="dialpad-alpha">TUV</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(9)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(9)">
<h2 class="dialpad-number">9</h2>
<span class="dialpad-alpha">WXYZ</span>
</a>
@ -67,19 +67,19 @@
</div>
<div class="row">
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf('*')">
<a class="btn btn-block" href="" ng-click="dtmfWidget('*')">
<h2 class="dialpad-number dialpad-star">*</h2>
<span class="dialpad-alpha"></span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf(0)">
<a class="btn btn-block" href="" ng-click="dtmfWidget(0)">
<h2 class="dialpad-number">0</h2>
<span class="dialpad-alpha">+</span>
</a>
</div>
<div class="col-md-4 col-xs-4">
<a class="btn btn-block" href="" ng-click="dtmf('#')">
<a class="btn btn-block" href="" ng-click="dtmfWidget('#')">
<h2 class="dialpad-number dialpad-pound">#</h2>
<span class="dialpad-alpha"></span>
</a>

View File

@ -7,7 +7,7 @@
<div class="panel-body">
<div class="preview-wrapper">
<video id="videopreview" muted autoplay style="width: 100%;"></video>
<div id="mic-meter">
<div id="mic-meter" ng-if="audioContext">
<div class="volumes">
<div class="volume-segment"></div>
<div class="volume-segment"></div>

View File

@ -107,6 +107,12 @@
<span ng-bind="'HIGHPASS_FILTER' | translate"></span>
</label>
</div>
<div class="checkbox">
<label>
<input type="checkbox" name="googAutoGainControl" value="mydata.googAutoGainControl" ng-model="mydata.googAutoGainControl">
<span ng-bind="'AUTOGAIN_CONTROL' | translate"></span>
</label>
</div>
</div>
</div>
</div>

View File

@ -99,6 +99,9 @@
</div>
</div>
<div class="col-md-6 col-xs-6 text-right">
<button ng-if="!verto.data.conf" tooltips="" tooltip-title="Hold" tooltip-side="bottom" tooltip-lazy="false" class="btn btn-material-900" ng-click="hold()">
<i class="big-icon" ng-class="{'mdi-av-pause':!storage.data.onHold, 'mdi-av-play-arrow': storage.data.onHold}"></i>
</button>
<button class="btn btn-danger" ng-click="hangup()" translate>
<i class="mdi-communication-call-end"></i>
{{ watcher ? 'BUTTON_CLOSE' : 'BUTTON_END_CALL' }}

View File

@ -14,11 +14,7 @@
'status': 'success',
'message': $translate.instant('BROWSER_COMPATIBILITY')
};
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia;
if (!navigator.getUserMedia) {
if (!navigator.mediaDevices.getUserMedia) {
result['status'] = 'error';
result['message'] = $translate.instant('BROWSER_WITHOUT_WEBRTC');
reject(result);

View File

@ -23,6 +23,7 @@
mutedMic: false,
preview: true,
selectedVideo: null,
selectedVideoName: null,
selectedAudio: null,
selectedShare: null,
selectedSpeaker: null,
@ -37,6 +38,7 @@
googNoiseSuppression: true,
googHighpassFilter: true,
googEchoCancellation: true,
googAutoGainControl: true,
autoBand: true,
testSpeedJoin: true,
bestFrameRate: "15",

View File

@ -4,8 +4,8 @@
angular
.module('vertoControllers')
.controller('DialPadController', ['$rootScope', '$scope',
'$http', '$location', 'toastr', 'verto', 'storage', 'CallHistory', 'eventQueue',
function($rootScope, $scope, $http, $location, toastr, verto, storage, CallHistory, eventQueue) {
'$http', '$location', 'toastr', 'verto', 'storage', 'CallHistory', 'eventQueue', '$timeout',
function($rootScope, $scope, $http, $location, toastr, verto, storage, CallHistory, eventQueue, $timeout) {
console.debug('Executing DialPadController.');
eventQueue.process();

View File

@ -4,9 +4,11 @@
angular
.module('vertoControllers')
.controller('MainController',
function($scope, $rootScope, $location, $modal, $timeout, $q, verto, storage, CallHistory, toastr, Fullscreen, prompt, eventQueue, $translate) {
function($scope, $rootScope, $location, $modal, $timeout, $q, verto, storage, CallHistory, toastr, Fullscreen, prompt, eventQueue, $translate, $window) {
console.debug('Executing MainController.');
$rootScope.dtmfHistory = { value: '' };
$rootScope.onKeydownDtmfEnabled = true;
if (storage.data.language && storage.data.language !== 'browser') {
$translate.use(storage.data.language);
@ -239,7 +241,8 @@
* @param {String} number - New touched number.
*/
$rootScope.dtmf = function(number) {
$rootScope.dialpadNumber = $scope.dialpadNumber + number;
console.log('dtmf', number);
$rootScope.dialpadNumber = $rootScope.dialpadNumber + number;
if (verto.data.call) {
verto.dtmf(number);
}
@ -555,6 +558,38 @@
}
}
$rootScope.dtmfWidget = function(number) {
$rootScope.dtmfHistory.value = $rootScope.dtmfHistory.value + number;
if (verto.data.call) {
verto.dtmf(number);
}
}
$rootScope.disableOnKeydownDtmf = function() {
$rootScope.onKeydownDtmfEnabled = false;
};
$rootScope.enableOnKeydownDtmf = function() {
$rootScope.onKeydownDtmfEnabled = true;
};
$rootScope.$on("$routeChangeStart", function(event, next, current) {
if (next.$$route.originalPath === '/incall') {
$rootScope.dtmfHistory.value = '';
angular.element($window).bind('keydown', onKeydownDtmfHistory);
} else {
angular.element($window).unbind('keydown', onKeydownDtmfHistory);
}
});
function onKeydownDtmfHistory(event) {
var caracter = event.key;
if ($rootScope.onKeydownDtmfEnabled && caracter.match(/^(\*|\#|[0-9a-dA-D])$/g)) {
$rootScope.dtmfWidget(caracter);
$scope.$applyAsync();
}
}
}
);

View File

@ -11,7 +11,10 @@
$scope.storage = storage;
console.debug('Executing PreviewController.');
var localVideo = document.getElementById('videopreview');
var volumes = document.querySelector('#mic-meter .volumes').children;
var volumes = document.querySelector('#mic-meter .volumes');
if (volumes) {
volumes = volumes.children;
}
$scope.localVideo = function() {
var constraints = {
@ -31,10 +34,13 @@
});
};
var audioContext = null;
if (typeof AudioContext !== "undefined") {
audioContext = new AudioContext();
}
var audioContext = new AudioContext();
var mediaStreamSource = null;
var meter;
var meter = null;
var streamObj = {};
function stopMedia(stream) {
@ -55,13 +61,12 @@
}
streamObj = stream;
localVideo.src = window.URL.createObjectURL(stream);
mediaStreamSource = audioContext.createMediaStreamSource(stream);
meter = createAudioMeter(audioContext);
mediaStreamSource.connect(meter);
renderMic();
FSRTCattachMediaStream(localVideo, stream);
if (audioContext) {
mediaStreamSource = audioContext.createMediaStreamSource(stream);
meter = createAudioMeter(audioContext);
mediaStreamSource.connect(meter);
};
}
function renderMic() {
@ -109,8 +114,10 @@
$scope.endPreview = function() {
localVideo.src = null;
meter.shutdown();
meter.onaudioprocess = null;
if (audioContext) {
meter.shutdown();
meter.onaudioprocess = null;
};
stopMedia(streamObj);
$location.path('/dialpad');
storage.data.preview = false;

View File

@ -33,6 +33,15 @@
});
$scope.ok = function() {
console.log('Camera Selected is', $scope.mydata.selectedVideo, $scope.verto.data.videoDevices);
angular.forEach(verto.data.videoDevices, function(video) {
console.log('checking video ', video);
if (video.id == $scope.mydata.selectedVideo) {
$scope.mydata.selectedVideoName = video.label;
console.log('Setting selectedVideoName to ', video.label);
}
})
if ($scope.mydata.selectedSpeaker != storage.data.selectedSpeaker) {
$rootScope.$emit('changedSpeaker', $scope.mydata.selectedSpeaker);
}

View File

@ -74,7 +74,7 @@
$rootScope.$on('progress.complete', function(ev, current_progress) {
$scope.message = 'Complete';
if(verto.data.connected) {
if (storage.data.preview) {
if (0 && storage.data.preview) {
$location.path('/preview');
}
else {

View File

@ -13,17 +13,35 @@
.module('vertoDirectives')
.directive('videoTag',
function() {
function link(scope, element, attrs) {
// Moving the video tag to the new place inside the incall page.
console.log('Moving the video to element.');
jQuery('video').removeClass('hide').appendTo(element);
jQuery('video').css('display', 'block');
var videoElem = jQuery('#webcam');
var newParent = document.getElementsByClassName('video-tag-wrapper');
newParent[0].appendChild(document.getElementById('webcam'));
$("#webcam").resize(function() {
updateVideoSize();
});
$(window).resize(function() {
updateVideoSize();
});
updateVideoSize();
videoElem.removeClass('hide');
videoElem.css('display', 'block');
scope.callActive("", {useVideo: true});
element.on('$destroy', function() {
// Move the video back to the body.
console.log('Moving the video back to body.');
jQuery('video').addClass('hide').appendTo(jQuery('body'));
videoElem.addClass('hide').appendTo(jQuery('body'));
$(window).unbind('resize');
});
}

View File

@ -113,6 +113,35 @@ var framerate = [{
label: '30 FPS'
}, ];
var updateReq;
var updateVideoSize = function(ms) {
if (!ms) ms = 500;
clearTimeout(updateReq);
updateReq = setTimeout(function () {
var videoElem = jQuery('#webcam');
videoElem.width("");
videoElem.height("");
var w = videoElem.width();
var h = videoElem.height();
var new_w, new_h;
var aspect = 1920 / 1080;
var videoContainer = jQuery('div.video-wrapper');
if (w > h) {
new_w = videoContainer.width();
new_h = Math.round(videoContainer.width() / aspect);
} else {
new_h = videoContainer.height();
new_w = Math.round(videoContainer.height() / aspect);
}
videoElem.width(new_w);
videoElem.height(new_h);
console.log('Setting video size to ' + new_w + '/' + new_h);
}, ms);
}
var vertoService = angular.module('vertoService', ['ngCookies']);
vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'storage',
@ -317,7 +346,13 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
// Verify if selected devices are valid
var videoFlag = data.videoDevices.some(function(device) {
return device.id == storage.data.selectedVideo;
console.log('Evaluating device ', device);
if (device.label == storage.data.selectedVideoName) {
console.log('Matched video selection by name: ', device.label);
storage.data.selectedVideo = device.id;
return true;
}
return device.id == storage.data.selectedVideo && storage.data.selectedVideo !== "none";
});
var shareFlag = data.shareDevices.some(function(device) {
@ -332,7 +367,10 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
return device.id == storage.data.selectedSpeaker;
});
if (!videoFlag) storage.data.selectedVideo = data.videoDevices[0].id;
console.log('Storage Video: ', storage.data.selectedVideo);
console.log('Video Flag: ', videoFlag)
if (!videoFlag) storage.data.selectedVideo = data.videoDevices[data.videoDevices.length - 1].id;
if (!shareFlag) storage.data.selectedShare = data.shareDevices[0].id;
if (!audioFlag) storage.data.selectedAudio = data.audioDevices[0].id;
if (!speakerFlag && data.speakerDevices.length > 0) storage.data.selectedSpeaker = data.speakerDevices[0].id;
@ -484,7 +522,6 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
data.liveArray.onChange = function(obj, args) {
// console.log('liveArray.onChange', obj, args);
switch (args.action) {
case 'bootObj':
$rootScope.$emit('members.boot', args.data);
@ -560,6 +597,7 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
console.log("conference-liveArray-join");
stopConference();
startConference(v, dialog, params.pvtData);
updateVideoSize();
}
break;
case "conference-liveArray-part":
@ -616,6 +654,7 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
console.debug('Talking to:', d.cidString());
data.callState = 'active';
callActive(d.lastState.name, d.params);
updateVideoSize();
break;
case "hangup":
console.debug('Call ended with cause: ' + d.cause);
@ -672,9 +711,11 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
ringFile: "sounds/bell_ring2.wav",
// TODO: Add options for this.
audioParams: {
googEchoCancellation: storage.data.googEchoCancellation || true,
googNoiseSuppression: storage.data.googNoiseSuppression || true,
googHighpassFilter: storage.data.googHighpassFilter || true
googEchoCancellation: storage.data.googEchoCancellation === undefined ? true : storage.data.googEchoCancellation,
googNoiseSuppression: storage.data.googNoiseSuppression === undefined ? true : storage.data.googNoiseSuppression,
googHighpassFilter: storage.data.googHighpassFilter === undefined ? true : storage.data.googHighpassFilter,
googAutoGainControl: storage.data.googAutoGainControl === undefined ? true : storage.data.googAutoGainControl,
googAutoGainControl2: storage.data.googAutoGainControl === undefined ? true : storage.data.googAutoGainControl
},
sessid: sessid,
iceServers: storage.data.useSTUN
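
A quick illustration (not from this commit) of why the old audioParams defaults above changed: with "x || true" a stored false is falsy and the option is silently re-enabled, while the explicit undefined check keeps the user's choice:

var stored = false;                                // user switched the option off
console.log(stored || true);                       // true  -- old pattern ignores the saved value
console.log(stored === undefined ? true : stored); // false -- new pattern preserves it
var unset;                                         // nothing saved yet
console.log(unset === undefined ? true : unset);   // true  -- default still applies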
@ -783,9 +824,63 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
},
screenshare: function(destination, callback) {
console.log('share screen video');
var that = this;
var that = this;
if (storage.data.selectedShare !== "screen") {
console.log('share screen from device ' + storage.data.selectedShare);
var call = data.instance.newCall({
destination_number: destination + "-screen",
caller_id_name: data.name + " (Screen)",
caller_id_number: data.login + " (screen)",
outgoingBandwidth: storage.data.outgoingBandwidth,
incomingBandwidth: storage.data.incomingBandwidth,
useCamera: storage.data.selectedShare,
useVideo: true,
screenShare: true,
dedEnc: storage.data.useDedenc,
mirrorInput: storage.data.mirrorInput,
userVariables: {
email : storage.data.email,
avatar: "http://gravatar.com/avatar/" + md5(storage.data.email) + ".png?s=600"
}
});
// Override onStream callback in $.FSRTC instance
call.rtc.options.callbacks.onStream = function(rtc, stream) {
if(stream) {
var StreamTrack = stream.getVideoTracks()[0];
StreamTrack.addEventListener('ended', stopSharing);
// (stream.getVideoTracks()[0]).onended = stopSharing;
}
console.log("screenshare started");
function stopSharing() {
if(that.data.shareCall) {
that.screenshareHangup();
console.log("screenshare ended");
}
}
};
data.shareCall = call;
console.log('shareCall', data);
data.mutedMic = false;
data.mutedVideo = false;
that.refreshDevices();
return;
}
console.log('share screen from plugin');
getScreenId(function(error, sourceId, screen_constraints) {
@ -801,7 +896,7 @@ vertoService.service('verto', ['$rootScope', '$cookieStore', '$location', 'stora
outgoingBandwidth: storage.data.outgoingBandwidth,
incomingBandwidth: storage.data.incomingBandwidth,
videoParams: screen_constraints.video.mandatory,
useVideo: storage.data.useVideo,
useVideo: true,
screenShare: true,
dedEnc: storage.data.useDedenc,
mirrorInput: storage.data.mirrorInput,

View File

@ -6,15 +6,13 @@ function getCodecPayloadType(sdpLine){var pattern=new RegExp('a=rtpmap:(\\d+) \\
function setDefaultCodec(mLine,payload){var elements=mLine.split(' ');var newLine=[];var index=0;for(var i=0;i<elements.length;i++){if(index===3){newLine[index++]=payload;}
if(elements[i]!==payload)newLine[index++]=elements[i];}
return newLine.join(' ');}
$.FSRTC=function(options){this.options=$.extend({useVideo:null,useStereo:false,userData:null,localVideo:null,screenShare:false,useCamera:"any",iceServers:false,videoParams:{},audioParams:{},callbacks:{onICEComplete:function(){},onICE:function(){},onOfferSDP:function(){}},},options);this.audioEnabled=true;this.videoEnabled=true;this.mediaData={SDP:null,profile:{},candidateList:[]};if(moz){this.constraints={offerToReceiveAudio:this.options.useSpeak==="none"?false:true,offerToReceiveVideo:this.options.useVideo?true:false,};}else{this.constraints={optional:[{'DtlsSrtpKeyAgreement':'true'}],mandatory:{OfferToReceiveAudio:this.options.useSpeak==="none"?false:true,OfferToReceiveVideo:this.options.useVideo?true:false,}};}
if(self.options.useVideo){self.options.useVideo.style.display='none';}
setCompat();checkCompat();};$.FSRTC.validRes=[];$.FSRTC.prototype.useVideo=function(obj,local){var self=this;if(obj){self.options.useVideo=obj;self.options.localVideo=local;if(moz){self.constraints.offerToReceiveVideo=true;}else{self.constraints.mandatory.OfferToReceiveVideo=true;}}else{self.options.useVideo=null;self.options.localVideo=null;if(moz){self.constraints.offerToReceiveVideo=false;}else{self.constraints.mandatory.OfferToReceiveVideo=false;}}
$.FSRTC=function(options){this.options=$.extend({useVideo:null,useStereo:false,userData:null,localVideo:null,screenShare:false,useCamera:"any",iceServers:false,videoParams:{},audioParams:{},callbacks:{onICEComplete:function(){},onICE:function(){},onOfferSDP:function(){}},},options);this.audioEnabled=true;this.videoEnabled=true;this.mediaData={SDP:null,profile:{},candidateList:[]};this.constraints={offerToReceiveAudio:this.options.useSpeak==="none"?false:true,offerToReceiveVideo:this.options.useVideo?true:false,};if(self.options.useVideo){self.options.useVideo.style.display='none';}
setCompat();checkCompat();};$.FSRTC.validRes=[];$.FSRTC.prototype.useVideo=function(obj,local){var self=this;if(obj){self.options.useVideo=obj;self.options.localVideo=local;self.constraints.offerToReceiveVideo=true;}else{self.options.useVideo=null;self.options.localVideo=null;self.constraints.offerToReceiveVideo=false;}
if(self.options.useVideo){self.options.useVideo.style.display='none';}};$.FSRTC.prototype.useStereo=function(on){var self=this;self.options.useStereo=on;};$.FSRTC.prototype.stereoHack=function(sdp){var self=this;if(!self.options.useStereo){return sdp;}
var sdpLines=sdp.split('\r\n');var opusIndex=findLine(sdpLines,'a=rtpmap','opus/48000'),opusPayload;if(!opusIndex){return sdp;}else{opusPayload=getCodecPayloadType(sdpLines[opusIndex]);}
var fmtpLineIndex=findLine(sdpLines,'a=fmtp:'+opusPayload.toString());if(fmtpLineIndex===null){sdpLines[opusIndex]=sdpLines[opusIndex]+'\r\na=fmtp:'+opusPayload.toString()+" stereo=1; sprop-stereo=1"}else{sdpLines[fmtpLineIndex]=sdpLines[fmtpLineIndex].concat('; stereo=1; sprop-stereo=1');}
sdp=sdpLines.join('\r\n');return sdp;};function setCompat(){$.FSRTC.moz=!!navigator.mozGetUserMedia;if(!navigator.getUserMedia){navigator.getUserMedia=navigator.mozGetUserMedia||navigator.webkitGetUserMedia||navigator.msGetUserMedia;}}
function checkCompat(){if(!navigator.getUserMedia){alert('This application cannot function in this browser.');return false;}
return true;}
sdp=sdpLines.join('\r\n');return sdp;};function setCompat(){}
function checkCompat(){return true;}
function onStreamError(self,e){console.log('There has been a problem retrieving the streams - did you allow access? Check Device Resolution',e);doCallback(self,"onError",e);}
function onStreamSuccess(self,stream){console.log("Stream Success");doCallback(self,"onStream",stream);}
function onICE(self,candidate){self.mediaData.candidate=candidate;self.mediaData.candidateList.push(self.mediaData.candidate);doCallback(self,"onICE");}
@ -24,15 +22,15 @@ function onChannelError(self,e){console.error("Channel Error",e);doCallback(self
function onICESDP(self,sdp){self.mediaData.SDP=self.stereoHack(sdp.sdp);console.log("ICE SDP");doCallback(self,"onICESDP");}
function onAnswerSDP(self,sdp){self.answer.SDP=self.stereoHack(sdp.sdp);console.log("ICE ANSWER SDP");doCallback(self,"onAnswerSDP",self.answer.SDP);}
function onMessage(self,msg){console.log("Message");doCallback(self,"onICESDP",msg);}
FSRTCattachMediaStream=function(element,stream){if(element&&element.id&&attachMediaStream){attachMediaStream(element,stream);}else{if(typeof element.srcObject!=='undefined'){element.srcObject=stream;}else if(typeof element.src!=='undefined'){element.src=URL.createObjectURL(stream);}else{console.error('Error attaching stream to element.');}}}
function onRemoteStream(self,stream){if(self.options.useVideo){self.options.useVideo.style.display='block';}
var element=self.options.useAudio;console.log("REMOTE STREAM",stream,element);if(typeof element.srcObject!=='undefined'){element.srcObject=stream;}else if(typeof element.mozSrcObject!=='undefined'){element.mozSrcObject=stream;}else if(typeof element.src!=='undefined'){element.src=URL.createObjectURL(stream);}else{console.error('Error attaching stream to element.');}
self.options.useAudio.play();self.remoteStream=stream;}
var element=self.options.useAudio;console.log("REMOTE STREAM",stream,element);FSRTCattachMediaStream(element,stream);self.options.useAudio.play();self.remoteStream=stream;}
function onOfferSDP(self,sdp){self.mediaData.SDP=self.stereoHack(sdp.sdp);console.log("Offer SDP");doCallback(self,"onOfferSDP");}
$.FSRTC.prototype.answer=function(sdp,onSuccess,onError){this.peer.addAnswerSDP({type:"answer",sdp:sdp},onSuccess,onError);};$.FSRTC.prototype.stopPeer=function(){if(self.peer){console.log("stopping peer");self.peer.stop();}}
$.FSRTC.prototype.stop=function(){var self=this;if(self.options.useVideo){self.options.useVideo.style.display='none';if(moz){self.options.useVideo['mozSrcObject']=null;}else{self.options.useVideo['src']='';}}
$.FSRTC.prototype.stop=function(){var self=this;if(self.options.useVideo){self.options.useVideo.style.display='none';self.options.useVideo['src']='';}
if(self.localStream){if(typeof self.localStream.stop=='function'){self.localStream.stop();}else{if(self.localStream.active){var tracks=self.localStream.getTracks();console.error(tracks);tracks.forEach(function(track,index){console.log(track);track.stop();})}}
self.localStream=null;}
if(self.options.localVideo){self.options.localVideo.style.display='none';if(moz){self.options.localVideo['mozSrcObject']=null;}else{self.options.localVideo['src']='';}}
if(self.options.localVideo){self.options.localVideo.style.display='none';self.options.localVideo['src']='';}
if(self.options.localVideoStream){if(typeof self.options.localVideoStream.stop=='function'){self.options.localVideoStream.stop();}else{if(self.options.localVideoStream.active){var tracks=self.options.localVideoStream.getTracks();console.error(tracks);tracks.forEach(function(track,index){console.log(track);track.stop();})}}}
if(self.peer){console.log("stopping peer");self.peer.stop();}};$.FSRTC.prototype.getMute=function(){var self=this;return self.audioEnabled;}
$.FSRTC.prototype.setMute=function(what){var self=this;var audioTracks=self.localStream.getAudioTracks();for(var i=0,len=audioTracks.length;i<len;i++){switch(what){case"on":audioTracks[i].enabled=true;break;case"off":audioTracks[i].enabled=false;break;case"toggle":audioTracks[i].enabled=!audioTracks[i].enabled;default:break;}
@ -42,48 +40,44 @@ $.FSRTC.prototype.getVideoMute=function(){var self=this;return self.videoEnabled
$.FSRTC.prototype.setVideoMute=function(what){var self=this;var videoTracks=self.localStream.getVideoTracks();for(var i=0,len=videoTracks.length;i<len;i++){switch(what){case"on":videoTracks[i].enabled=true;break;case"off":videoTracks[i].enabled=false;break;case"toggle":videoTracks[i].enabled=!videoTracks[i].enabled;default:break;}
self.videoEnabled=videoTracks[i].enabled;}
return!self.videoEnabled;}
$.FSRTC.prototype.createAnswer=function(params){var self=this;self.type="answer";self.remoteSDP=params.sdp;console.debug("inbound sdp: ",params.sdp);function onSuccess(stream){self.localStream=stream;self.peer=RTCPeerConnection({type:self.type,attachStream:self.localStream,onICE:function(candidate){return onICE(self,candidate);},onICEComplete:function(){return onICEComplete(self);},onRemoteStream:function(stream){return onRemoteStream(self,stream);},onICESDP:function(sdp){return onICESDP(self,sdp);},onChannelError:function(e){return onChannelError(self,e);},constraints:self.constraints,iceServers:self.options.iceServers,offerSDP:{type:"offer",sdp:self.remoteSDP}});onStreamSuccess(self);}
$.FSRTC.prototype.createAnswer=function(params){var self=this;self.type="answer";self.remoteSDP=params.sdp;console.debug("inbound sdp: ",params.sdp);function onSuccess(stream){self.localStream=stream;self.peer=FSRTCPeerConnection({type:self.type,attachStream:self.localStream,onICE:function(candidate){return onICE(self,candidate);},onICEComplete:function(){return onICEComplete(self);},onRemoteStream:function(stream){return onRemoteStream(self,stream);},onICESDP:function(sdp){return onICESDP(self,sdp);},onChannelError:function(e){return onChannelError(self,e);},constraints:self.constraints,iceServers:self.options.iceServers,offerSDP:{type:"offer",sdp:self.remoteSDP}});onStreamSuccess(self);}
function onError(e){onStreamError(self,e);}
var mediaParams=getMediaParams(self);console.log("Audio constraints",mediaParams.audio);console.log("Video constraints",mediaParams.video);if(self.options.useVideo&&self.options.localVideo){getUserMedia({constraints:{audio:false,video:{mandatory:self.options.videoParams,optional:[]},},localVideo:self.options.localVideo,onsuccess:function(e){self.options.localVideoStream=e;console.log("local video ready");},onerror:function(e){console.error("local video error!");}});}
getUserMedia({constraints:{audio:mediaParams.audio,video:mediaParams.video},video:mediaParams.useVideo,onsuccess:onSuccess,onerror:onError});};function getMediaParams(obj){var audio;if(obj.options.useMic&&obj.options.useMic==="none"){console.log("Microphone Disabled");audio=false;}else if(obj.options.videoParams&&obj.options.screenShare){console.error("SCREEN SHARE");audio=false;}else{audio={mandatory:{},optional:[]};if(obj.options.useMic!=="any"){audio.optional=[{sourceId:obj.options.useMic}]}
if(obj.options.audioParams){for(var key in obj.options.audioParams){var con={};con[key]=obj.options.audioParams[key];audio.optional.push(con);}}}
if(obj.options.useVideo&&obj.options.localVideo){getUserMedia({constraints:{audio:false,video:{mandatory:obj.options.videoParams,optional:[]},},localVideo:obj.options.localVideo,onsuccess:function(e){self.options.localVideoStream=e;console.log("local video ready");},onerror:function(e){console.error("local video error!");}});}
var video={};var bestFrameRate=obj.options.videoParams.vertoBestFrameRate;delete obj.options.videoParams.vertoBestFrameRate;video={mandatory:obj.options.videoParams,optional:[]}
var useVideo=obj.options.useVideo;if(useVideo&&obj.options.useCamera&&obj.options.useCamera!=="none"){if(!video.optional){video.optional=[];}
if(obj.options.useCamera!=="any"){video.optional.push({sourceId:obj.options.useCamera});}
if(bestFrameRate){video.optional.push({minFrameRate:bestFrameRate});video.optional.push({maxFrameRate:bestFrameRate});}}else{console.log("Camera Disabled");video=false;useVideo=false;}
var mediaParams=getMediaParams(self);console.log("Audio constraints",mediaParams.audio);console.log("Video constraints",mediaParams.video);if(self.options.useVideo&&self.options.localVideo){getUserMedia({constraints:{audio:false,video:{},},localVideo:self.options.localVideo,onsuccess:function(e){self.options.localVideoStream=e;console.log("local video ready");},onerror:function(e){console.error("local video error!");}});}
getUserMedia({constraints:{audio:mediaParams.audio,video:mediaParams.video},video:mediaParams.useVideo,onsuccess:onSuccess,onerror:onError});};function getMediaParams(obj){var audio;if(obj.options.useMic&&obj.options.useMic==="none"){console.log("Microphone Disabled");audio=false;}else if(obj.options.videoParams&&obj.options.screenShare){console.error("SCREEN SHARE",obj.options.videoParams);audio=false;}else{audio={advanced:[]};if(obj.options.useMic!=="any"){audio.deviceId={exact:obj.options.useMic};}
if(obj.options.audioParams){for(var key in obj.options.audioParams){var con={};if(obj.options.audioParams[key]){con.exact=key;audio.advanced.push(con);}}}}
if(obj.options.useVideo&&obj.options.localVideo){getUserMedia({constraints:{audio:false,video:obj.options.videoParams},localVideo:obj.options.localVideo,onsuccess:function(e){self.options.localVideoStream=e;console.log("local video ready");},onerror:function(e){console.error("local video error!");}});}
var video={};var bestFrameRate=obj.options.videoParams.vertoBestFrameRate;var minFrameRate=obj.options.videoParams.minFrameRate||15;delete obj.options.videoParams.vertoBestFrameRate;if(obj.options.screenShare){var opt=[];opt.push({sourceId:obj.options.useCamera});if(bestFrameRate){opt.push({minFrameRate:bestFrameRate});opt.push({maxFrameRate:bestFrameRate});}
video={mandatory:obj.options.videoParams,optional:opt};}else{video={width:{min:obj.options.videoParams.minWidth,max:obj.options.videoParams.maxWidth},height:{min:obj.options.videoParams.minHeight,max:obj.options.videoParams.maxHeight}};var useVideo=obj.options.useVideo;if(useVideo&&obj.options.useCamera&&obj.options.useCamera!=="none"){if(obj.options.useCamera!=="any"){video.deviceId=obj.options.useCamera;}
if(bestFrameRate){video.frameRate={ideal:bestFrameRate,min:minFrameRate,max:30};}}else{console.log("Camera Disabled");video=false;useVideo=false;}}
return{audio:audio,video:video,useVideo:useVideo};}
$.FSRTC.prototype.call=function(profile){checkCompat();var self=this;var screen=false;self.type="offer";if(self.options.videoParams&&self.options.screenShare){screen=true;}
function onSuccess(stream){self.localStream=stream;if(screen){if(moz){self.constraints.OfferToReceiveVideo=false;}else{self.constraints.mandatory.OfferToReceiveVideo=false;}}
self.peer=RTCPeerConnection({type:self.type,attachStream:self.localStream,onICE:function(candidate){return onICE(self,candidate);},onICEComplete:function(){return onICEComplete(self);},onRemoteStream:screen?function(stream){}:function(stream){return onRemoteStream(self,stream);},onOfferSDP:function(sdp){return onOfferSDP(self,sdp);},onICESDP:function(sdp){return onICESDP(self,sdp);},onChannelError:function(e){return onChannelError(self,e);},constraints:self.constraints,iceServers:self.options.iceServers,});onStreamSuccess(self,stream);}
function onSuccess(stream){self.localStream=stream;if(screen){self.constraints.offerToReceiveVideo=false;}
self.peer=FSRTCPeerConnection({type:self.type,attachStream:self.localStream,onICE:function(candidate){return onICE(self,candidate);},onICEComplete:function(){return onICEComplete(self);},onRemoteStream:screen?function(stream){}:function(stream){return onRemoteStream(self,stream);},onOfferSDP:function(sdp){return onOfferSDP(self,sdp);},onICESDP:function(sdp){return onICESDP(self,sdp);},onChannelError:function(e){return onChannelError(self,e);},constraints:self.constraints,iceServers:self.options.iceServers,});onStreamSuccess(self,stream);}
function onError(e){onStreamError(self,e);}
var mediaParams=getMediaParams(self);console.log("Audio constraints",mediaParams.audio);console.log("Video constraints",mediaParams.video);if(mediaParams.audio||mediaParams.video){getUserMedia({constraints:{audio:mediaParams.audio,video:mediaParams.video},video:mediaParams.useVideo,onsuccess:onSuccess,onerror:onError});}else{onSuccess(null);}};window.moz=!!navigator.mozGetUserMedia;function RTCPeerConnection(options){var gathering=false,done=false;var w=window,PeerConnection=w.mozRTCPeerConnection||w.webkitRTCPeerConnection,SessionDescription=w.mozRTCSessionDescription||w.RTCSessionDescription,IceCandidate=w.mozRTCIceCandidate||w.RTCIceCandidate;var STUN={url:!moz?'stun:stun.l.google.com:19302':'stun:23.21.150.121'};var iceServers=null;if(options.iceServers){var tmp=options.iceServers;if(typeof(tmp)==="boolean"){tmp=null;}
if(tmp&&!(typeof(tmp)=="object"&&tmp.constructor===Array)){console.warn("iceServers must be an array, reverting to default ice servers");tmp=null;}
iceServers={iceServers:tmp||[STUN]};if(!moz&&!tmp){iceServers.iceServers=[STUN];}}
var optional={optional:[]};if(!moz){optional.optional=[{DtlsSrtpKeyAgreement:true},{RtpDataChannels:options.onChannelMessage?true:false}];}
var peer=new PeerConnection(iceServers,optional);openOffererChannel();var x=0;function ice_handler(){done=true;gathering=null;if(options.onICEComplete){options.onICEComplete();}
if(options.type=="offer"){if((!moz||(!options.sentICESDP&&peer.localDescription.sdp.match(/a=candidate/))&&!x&&options.onICESDP)){options.onICESDP(peer.localDescription);}}else{if(!x&&options.onICESDP){options.onICESDP(peer.localDescription);}}}
var mediaParams=getMediaParams(self);console.log("Audio constraints",mediaParams.audio);console.log("Video constraints",mediaParams.video);if(mediaParams.audio||mediaParams.video){getUserMedia({constraints:{audio:mediaParams.audio,video:mediaParams.video},video:mediaParams.useVideo,onsuccess:onSuccess,onerror:onError});}else{onSuccess(null);}};function FSRTCPeerConnection(options){var gathering=false,done=false;var config={};var default_ice={urls:['stun:stun.l.google.com:19302']};if(options.iceServers){if(typeof(options.iceServers)==="boolean"){config.iceServers=[default_ice];}else{config.iceServers=options.iceServers;}}
var peer=new window.RTCPeerConnection(config);openOffererChannel();var x=0;function ice_handler(){done=true;gathering=null;if(options.onICEComplete){options.onICEComplete();}
if(options.type=="offer"){options.onICESDP(peer.localDescription);}else{if(!x&&options.onICESDP){options.onICESDP(peer.localDescription);}}}
peer.onicecandidate=function(event){if(done){return;}
if(!gathering){gathering=setTimeout(ice_handler,1000);}
if(event){if(event.candidate){options.onICE(event.candidate);}}else{done=true;if(gathering){clearTimeout(gathering);gathering=null;}
ice_handler();}};if(options.attachStream)peer.addStream(options.attachStream);if(options.attachStreams&&options.attachStream.length){var streams=options.attachStreams;for(var i=0;i<streams.length;i++){peer.addStream(streams[i]);}}
peer.onaddstream=function(event){var remoteMediaStream=event.stream;remoteMediaStream.onended=function(){if(options.onRemoteStreamEnded)options.onRemoteStreamEnded(remoteMediaStream);};if(options.onRemoteStream)options.onRemoteStream(remoteMediaStream);};var constraints=options.constraints||{offerToReceiveAudio:true,offerToReceiveVideo:true};function createOffer(){if(!options.onOfferSDP)return;peer.createOffer(function(sessionDescription){sessionDescription.sdp=serializeSdp(sessionDescription.sdp);peer.setLocalDescription(sessionDescription);options.onOfferSDP(sessionDescription);if(moz&&options.onICESDP&&sessionDescription.sdp.match(/a=candidate/)){options.onICESDP(sessionDescription);options.sentICESDP=1;}},onSdpError,constraints);}
function createAnswer(){if(options.type!="answer")return;peer.setRemoteDescription(new SessionDescription(options.offerSDP),onSdpSuccess,onSdpError);peer.createAnswer(function(sessionDescription){sessionDescription.sdp=serializeSdp(sessionDescription.sdp);peer.setLocalDescription(sessionDescription);if(options.onAnswerSDP){options.onAnswerSDP(sessionDescription);}},onSdpError,constraints);}
if((options.onChannelMessage&&!moz)||!options.onChannelMessage){createOffer();createAnswer();}
peer.onaddstream=function(event){var remoteMediaStream=event.stream;remoteMediaStream.onended=function(){if(options.onRemoteStreamEnded)options.onRemoteStreamEnded(remoteMediaStream);};if(options.onRemoteStream)options.onRemoteStream(remoteMediaStream);};function createOffer(){if(!options.onOfferSDP)return;peer.createOffer(function(sessionDescription){sessionDescription.sdp=serializeSdp(sessionDescription.sdp);peer.setLocalDescription(sessionDescription);options.onOfferSDP(sessionDescription);},onSdpError,options.constraints);}
function createAnswer(){if(options.type!="answer")return;peer.setRemoteDescription(new window.RTCSessionDescription(options.offerSDP),onSdpSuccess,onSdpError);peer.createAnswer(function(sessionDescription){sessionDescription.sdp=serializeSdp(sessionDescription.sdp);peer.setLocalDescription(sessionDescription);if(options.onAnswerSDP){options.onAnswerSDP(sessionDescription);}},onSdpError);}
if((options.onChannelMessage)||!options.onChannelMessage){createOffer();createAnswer();}
function setBandwidth(sdp){sdp=sdp.replace(/b=AS([^\r\n]+\r\n)/g,'');sdp=sdp.replace(/a=mid:data\r\n/g,'a=mid:data\r\nb=AS:1638400\r\n');return sdp;}
function getInteropSDP(sdp){var chars='ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split(''),extractedChars='';function getChars(){extractedChars+=chars[parseInt(Math.random()*40)]||'';if(extractedChars.length<40)getChars();return extractedChars;}
if(options.onAnswerSDP)sdp=sdp.replace(/(a=crypto:0 AES_CM_128_HMAC_SHA1_32)(.*?)(\r\n)/g,'');var inline=getChars()+'\r\n'+(extractedChars='');sdp=sdp.indexOf('a=crypto')==-1?sdp.replace(/c=IN/g,'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:'+inline+'c=IN'):sdp;return sdp;}
function serializeSdp(sdp){return sdp;}
var channel;function openOffererChannel(){if(!options.onChannelMessage||(moz&&!options.onOfferSDP))return;_openOffererChannel();if(!moz)return;navigator.mozGetUserMedia({audio:true,fake:true},function(stream){peer.addStream(stream);createOffer();},useless);}
function _openOffererChannel(){channel=peer.createDataChannel(options.channel||'RTCDataChannel',moz?{}:{reliable:false});if(moz)channel.binaryType='blob';setChannelEvents();}
var channel;function openOffererChannel(){if(!options.onChannelMessage)return;_openOffererChannel();return;}
function _openOffererChannel(){channel=peer.createDataChannel(options.channel||'RTCDataChannel',{reliable:false});setChannelEvents();}
function setChannelEvents(){channel.onmessage=function(event){if(options.onChannelMessage)options.onChannelMessage(event);};channel.onopen=function(){if(options.onChannelOpened)options.onChannelOpened(channel);};channel.onclose=function(event){if(options.onChannelClosed)options.onChannelClosed(event);console.warn('WebRTC DataChannel closed',event);};channel.onerror=function(event){if(options.onChannelError)options.onChannelError(event);console.error('WebRTC DataChannel error',event);};}
if(options.onAnswerSDP&&moz&&options.onChannelMessage)openAnswererChannel();function openAnswererChannel(){peer.ondatachannel=function(event){channel=event.channel;channel.binaryType='blob';setChannelEvents();};if(!moz)return;navigator.mozGetUserMedia({audio:true,fake:true},function(stream){peer.addStream(stream);createAnswer();},useless);}
function openAnswererChannel(){peer.ondatachannel=function(event){channel=event.channel;channel.binaryType='blob';setChannelEvents();};return;}
function useless(){log('Error in fake:true');}
function onSdpSuccess(){}
function onSdpError(e){if(options.onChannelError){options.onChannelError(e);}
console.error('sdp error:',e);}
return{addAnswerSDP:function(sdp,cbSuccess,cbError){peer.setRemoteDescription(new SessionDescription(sdp),cbSuccess?cbSuccess:onSdpSuccess,cbError?cbError:onSdpError);},addICE:function(candidate){peer.addIceCandidate(new IceCandidate({sdpMLineIndex:candidate.sdpMLineIndex,candidate:candidate.candidate}));},peer:peer,channel:channel,sendData:function(message){if(channel){channel.send(message);}},stop:function(){peer.close();if(options.attachStream){if(typeof options.attachStream.stop=='function'){options.attachStream.stop();}else{options.attachStream.active=false;}}}};}
var video_constraints={mandatory:{},optional:[]};function getUserMedia(options){var n=navigator,media;n.getMedia=n.webkitGetUserMedia||n.mozGetUserMedia;n.getMedia(options.constraints||{audio:true,video:video_constraints},streaming,options.onerror||function(e){console.error(e);});function streaming(stream){if(options.localVideo){options.localVideo[moz?'mozSrcObject':'src']=moz?stream:window.webkitURL.createObjectURL(stream);options.localVideo.style.display='block';}
return{addAnswerSDP:function(sdp,cbSuccess,cbError){peer.setRemoteDescription(new window.RTCSessionDescription(sdp),cbSuccess?cbSuccess:onSdpSuccess,cbError?cbError:onSdpError);},addICE:function(candidate){peer.addIceCandidate(new window.RTCIceCandidate({sdpMLineIndex:candidate.sdpMLineIndex,candidate:candidate.candidate}));},peer:peer,channel:channel,sendData:function(message){if(channel){channel.send(message);}},stop:function(){peer.close();if(options.attachStream){if(typeof options.attachStream.stop=='function'){options.attachStream.stop();}else{options.attachStream.active=false;}}}};}
var video_constraints={};function getUserMedia(options){var n=navigator,media;n.getMedia=n.getUserMedia;n.getMedia(options.constraints||{audio:true,video:video_constraints},streaming,options.onerror||function(e){console.error(e);});function streaming(stream){if(options.localVideo){options.localVideo['src']=window.URL.createObjectURL(stream);options.localVideo.style.display='block';}
if(options.onsuccess){options.onsuccess(stream);}
media=stream;}
return media;}
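// --- Illustrative sketch (not part of this commit) of how the getUserMedia()
// wrapper defined above is driven: a constraints object plus onsuccess/onerror
// callbacks and an optional localVideo element, the same shape that checkRes()
// passes below. The element id 'localVideo' is an assumed placeholder.
getUserMedia({
    constraints: { audio: true, video: { width: 1280, height: 720 } },
    localVideo: document.getElementById('localVideo'),  // assumed element id
    onsuccess: function (stream) { console.log('got local stream', stream); },
    onerror: function (err) { console.error('getUserMedia failed', err); }
});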
@ -91,10 +85,10 @@ $.FSRTC.resSupported=function(w,h){for(var i in $.FSRTC.validRes){if($.FSRTC.val
return false;}
$.FSRTC.bestResSupported=function(){var w=0,h=0;for(var i in $.FSRTC.validRes){if($.FSRTC.validRes[i][0]>w&&$.FSRTC.validRes[i][1]>h){w=$.FSRTC.validRes[i][0];h=$.FSRTC.validRes[i][1];}}
return[w,h];}
var resList=[[320,180],[320,240],[640,360],[640,480],[1280,720],[1920,1080]];var resI=0;var ttl=0;var checkRes=function(cam,func){if(resI>=resList.length){var res={'validRes':$.FSRTC.validRes,'bestResSupported':$.FSRTC.bestResSupported()};localStorage.setItem("res_"+cam,$.toJSON(res));if(func)return func(res);return;}
var video={mandatory:{},optional:[]}
if(cam){video.optional=[{sourceId:cam}];}
w=resList[resI][0];h=resList[resI][1];resI++;video.mandatory={"minWidth":w,"minHeight":h,"maxWidth":w,"maxHeight":h};getUserMedia({constraints:{audio:ttl++==0,video:video},onsuccess:function(e){e.getTracks().forEach(function(track){track.stop();});console.info(w+"x"+h+" supported.");$.FSRTC.validRes.push([w,h]);checkRes(cam,func);},onerror:function(e){console.error(w+"x"+h+" not supported.");checkRes(cam,func);}});}
var resList=[[160,120],[320,180],[320,240],[640,360],[640,480],[1280,720],[1920,1080]];var resI=0;var ttl=0;var checkRes=function(cam,func){if(resI>=resList.length){var res={'validRes':$.FSRTC.validRes,'bestResSupported':$.FSRTC.bestResSupported()};localStorage.setItem("res_"+cam,$.toJSON(res));if(func)return func(res);return;}
var video={}
if(cam){video.deviceId={exact:cam};}
w=resList[resI][0];h=resList[resI][1];resI++;video.width=w;video.height=h;getUserMedia({constraints:{audio:ttl++==0,video:video},onsuccess:function(e){e.getTracks().forEach(function(track){track.stop();});console.info(w+"x"+h+" supported.");$.FSRTC.validRes.push([w,h]);checkRes(cam,func);},onerror:function(e){console.error(w+"x"+h+" not supported.");checkRes(cam,func);}});}
$.FSRTC.getValidRes=function(cam,func){var used=[];var cached=localStorage.getItem("res_"+cam);if(cached){var cache=$.parseJSON(cached);if(cache){$.FSRTC.validRes=cache.validRes;console.log("CACHED RES FOR CAM "+cam,cache);}else{console.error("INVALID CACHE");}
return func?func(cache):null;}
$.FSRTC.validRes=[];resI=0;checkRes(cam,func);}
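// --- Illustrative usage (not part of this commit): probing a camera's supported
// resolutions with $.FSRTC.getValidRes(). The deviceId value is a placeholder;
// the callback receives the same {validRes, bestResSupported} object that
// checkRes() caches in localStorage above.
$.FSRTC.getValidRes('default', function (res) {
    console.log('supported resolutions', res.validRes);
    console.log('best resolution', res.bestResSupported); // e.g. [1280, 720]
});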
@ -254,7 +248,9 @@ if(!dialog.params.remote_caller_id_name){dialog.params.remote_caller_id_name="No
if(!dialog.params.remote_caller_id_number){dialog.params.remote_caller_id_number="UNKNOWN";}
RTCcallbacks.onMessage=function(rtc,msg){console.debug(msg);};RTCcallbacks.onAnswerSDP=function(rtc,sdp){console.error("answer sdp",sdp);};}else{dialog.params.remote_caller_id_name="Outbound Call";dialog.params.remote_caller_id_number=dialog.params.destination_number;}
RTCcallbacks.onICESDP=function(rtc){console.log("RECV "+rtc.type+" SDP",rtc.mediaData.SDP);if(dialog.state==$.verto.enum.state.requesting||dialog.state==$.verto.enum.state.answering||dialog.state==$.verto.enum.state.active){location.reload();return;}
if(rtc.type=="offer"){if(dialog.state==$.verto.enum.state.active){dialog.setState($.verto.enum.state.requesting);dialog.sendMethod("verto.attach",{sdp:rtc.mediaData.SDP});}else{dialog.setState($.verto.enum.state.requesting);dialog.sendMethod("verto.invite",{sdp:rtc.mediaData.SDP});}}else{dialog.setState($.verto.enum.state.answering);dialog.sendMethod(dialog.attach?"verto.attach":"verto.answer",{sdp:dialog.rtc.mediaData.SDP});}};RTCcallbacks.onICE=function(rtc){if(rtc.type=="offer"){console.log("offer",rtc.mediaData.candidate);return;}};RTCcallbacks.onStream=function(rtc,stream){console.log("stream started");};RTCcallbacks.onError=function(e){console.error("ERROR:",e);dialog.hangup({cause:"Device or Permission Error"});};dialog.rtc=new $.FSRTC({callbacks:RTCcallbacks,localVideo:dialog.screenShare?null:dialog.localVideo,useVideo:dialog.params.useVideo?dialog.videoStream:null,useAudio:dialog.audioStream,useStereo:dialog.params.useStereo,videoParams:dialog.params.videoParams,audioParams:verto.options.audioParams,iceServers:verto.options.iceServers,screenShare:dialog.screenShare,useCamera:dialog.useCamera,useMic:dialog.useMic,useSpeak:dialog.useSpeak});dialog.rtc.verto=dialog.verto;if(dialog.direction==$.verto.enum.direction.inbound){if(dialog.attach){dialog.answer();}else{dialog.ring();}}};$.verto.dialog.prototype.invite=function(){var dialog=this;dialog.rtc.call();};$.verto.dialog.prototype.sendMethod=function(method,obj){var dialog=this;obj.dialogParams={};for(var i in dialog.params){if(i=="sdp"&&method!="verto.invite"&&method!="verto.attach"){continue;}
if(rtc.type=="offer"){if(dialog.state==$.verto.enum.state.active){dialog.setState($.verto.enum.state.requesting);dialog.sendMethod("verto.attach",{sdp:rtc.mediaData.SDP});}else{dialog.setState($.verto.enum.state.requesting);dialog.sendMethod("verto.invite",{sdp:rtc.mediaData.SDP});}}else{dialog.setState($.verto.enum.state.answering);dialog.sendMethod(dialog.attach?"verto.attach":"verto.answer",{sdp:dialog.rtc.mediaData.SDP});}};RTCcallbacks.onICE=function(rtc){if(rtc.type=="offer"){console.log("offer",rtc.mediaData.candidate);return;}};RTCcallbacks.onStream=function(rtc,stream){if(dialog.verto.options.permissionCallback&&typeof dialog.verto.options.permissionCallback.onGranted==='function'){dialog.verto.options.permissionCallback.onGranted();}
console.log("stream started");};RTCcallbacks.onError=function(e){if(dialog.verto.options.permissionCallback&&typeof dialog.verto.options.permissionCallback.onDenied==='function'){dialog.verto.options.permissionCallback.onDenied();}
console.error("ERROR:",e);dialog.hangup({cause:"Device or Permission Error"});};dialog.rtc=new $.FSRTC({callbacks:RTCcallbacks,localVideo:dialog.screenShare?null:dialog.localVideo,useVideo:dialog.params.useVideo?dialog.videoStream:null,useAudio:dialog.audioStream,useStereo:dialog.params.useStereo,videoParams:dialog.params.videoParams,audioParams:verto.options.audioParams,iceServers:verto.options.iceServers,screenShare:dialog.screenShare,useCamera:dialog.useCamera,useMic:dialog.useMic,useSpeak:dialog.useSpeak});dialog.rtc.verto=dialog.verto;if(dialog.direction==$.verto.enum.direction.inbound){if(dialog.attach){dialog.answer();}else{dialog.ring();}}};$.verto.dialog.prototype.invite=function(){var dialog=this;dialog.rtc.call();};$.verto.dialog.prototype.sendMethod=function(method,obj){var dialog=this;obj.dialogParams={};for(var i in dialog.params){if(i=="sdp"&&method!="verto.invite"&&method!="verto.attach"){continue;}
if((obj.noDialogParams&&i!="callID")){continue;}
obj.dialogParams[i]=dialog.params[i];}
delete obj.noDialogParams;dialog.verto.rpcClient.call(method,obj,function(e){dialog.processReply(method,true,e);},function(e){dialog.processReply(method,false,e);});};function checkStateChange(oldS,newS){if(newS==$.verto.enum.state.purge||$.verto.enum.states[oldS.name][newS.name]){return true;}
@ -301,4 +297,218 @@ $.verto.videoDevices=vid;$.verto.audioInDevices=aud_in;console.info("Audio Devic
navigator.mediaDevices.enumerateDevices().then(function(devices){devices.forEach(function(device){console.log(device);console.log(device.kind+": "+device.label+" id = "+device.deviceId);if(device.kind==="videoinput"){vid.push({id:device.deviceId,kind:"video",label:device.label});}else if(device.kind==="audioinput"){aud_in.push({id:device.deviceId,kind:"audio_in",label:device.label});}else if(device.kind==="audiooutput"){aud_out.push({id:device.deviceId,kind:"audio_out",label:device.label});}});$.verto.videoDevices=vid;$.verto.audioInDevices=aud_in;$.verto.audioOutDevices=aud_out;console.info("Audio IN Devices",$.verto.audioInDevices);console.info("Audio Out Devices",$.verto.audioOutDevices);console.info("Video Devices",$.verto.videoDevices);runtime(true);}).catch(function(err){console.log(" Device Enumeration ERROR: "+err.name+": "+err.message);runtime(false);});}};$.verto.refreshDevices=function(runtime){checkDevices(runtime);}
$.verto.init=function(obj,runtime){if(!obj){obj={};}
if(!obj.skipPermCheck&&!obj.skipDeviceCheck){$.FSRTC.checkPerms(function(status){checkDevices(runtime);},true,true);}else if(obj.skipPermCheck&&!obj.skipDeviceCheck){checkDevices(runtime);}else if(!obj.skipPermCheck&&obj.skipDeviceCheck){$.FSRTC.checkPerms(function(status){runtime(status);},true,true);}else{runtime(null);}}
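// --- Illustrative usage (not part of this commit): bootstrapping with the
// $.verto.init() entry point defined above, then (hypothetically) constructing a
// client that supplies the permissionCallback hooks consumed by the dialog code
// above (onGranted/onDenied). All option values are placeholders, and any option
// name other than permissionCallback/skipPermCheck/skipDeviceCheck is an
// assumption about the wider verto API rather than something shown in this diff.
$.verto.init({ skipPermCheck: false, skipDeviceCheck: false }, function (status) {
    console.log('device/permission check finished', status);
    var verto = new $.verto({
        socketUrl: 'wss://pbx.example.com:8082',   // placeholder
        login: '1008@pbx.example.com',             // placeholder
        passwd: 'secret',                          // placeholder
        permissionCallback: {
            onGranted: function () { console.log('media permission granted'); },
            onDenied: function () { console.warn('media permission denied'); }
        }
    });
});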
$.verto.genUUID=function(){return generateGUID();}})(jQuery);
$.verto.genUUID=function(){return generateGUID();}})(jQuery);var AdapterJS=AdapterJS||{};if(typeof exports!=='undefined'){module.exports=AdapterJS;}
AdapterJS.options=AdapterJS.options||{};AdapterJS.VERSION='0.13.3';AdapterJS.onwebrtcready=AdapterJS.onwebrtcready||function(isUsingPlugin){};AdapterJS._onwebrtcreadies=[];AdapterJS.webRTCReady=function(callback){if(typeof callback!=='function'){throw new Error('Callback provided is not a function');}
if(true===AdapterJS.onwebrtcreadyDone){callback(null!==AdapterJS.WebRTCPlugin.plugin);}else{AdapterJS._onwebrtcreadies.push(callback);}};AdapterJS.WebRTCPlugin=AdapterJS.WebRTCPlugin||{};AdapterJS.WebRTCPlugin.pluginInfo={prefix:'Tem',plugName:'TemWebRTCPlugin',pluginId:'plugin0',type:'application/x-temwebrtcplugin',onload:'__TemWebRTCReady0',portalLink:'http://skylink.io/plugin/',downloadLink:null,companyName:'Temasys'};if(!!navigator.platform.match(/^Mac/i)){AdapterJS.WebRTCPlugin.pluginInfo.downloadLink='http://bit.ly/1n77hco';}
else if(!!navigator.platform.match(/^Win/i)){AdapterJS.WebRTCPlugin.pluginInfo.downloadLink='http://bit.ly/1kkS4FN';}
AdapterJS.WebRTCPlugin.TAGS={NONE:'none',AUDIO:'audio',VIDEO:'video'};AdapterJS.WebRTCPlugin.pageId=Math.random().toString(36).slice(2);AdapterJS.WebRTCPlugin.plugin=null;AdapterJS.WebRTCPlugin.setLogLevel=null;AdapterJS.WebRTCPlugin.defineWebRTCInterface=null;AdapterJS.WebRTCPlugin.isPluginInstalled=null;AdapterJS.WebRTCPlugin.pluginInjectionInterval=null;AdapterJS.WebRTCPlugin.injectPlugin=null;AdapterJS.WebRTCPlugin.PLUGIN_STATES={NONE:0,INITIALIZING:1,INJECTING:2,INJECTED:3,READY:4};AdapterJS.WebRTCPlugin.pluginState=AdapterJS.WebRTCPlugin.PLUGIN_STATES.NONE;AdapterJS.onwebrtcreadyDone=false;AdapterJS.WebRTCPlugin.PLUGIN_LOG_LEVELS={NONE:'NONE',ERROR:'ERROR',WARNING:'WARNING',INFO:'INFO',VERBOSE:'VERBOSE',SENSITIVE:'SENSITIVE'};AdapterJS.WebRTCPlugin.WaitForPluginReady=null;AdapterJS.WebRTCPlugin.callWhenPluginReady=null;__TemWebRTCReady0=function(){if(document.readyState==='complete'){AdapterJS.WebRTCPlugin.pluginState=AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY;AdapterJS.maybeThroughWebRTCReady();}else{var timer=setInterval(function(){if(document.readyState==='complete'){clearInterval(timer);AdapterJS.WebRTCPlugin.pluginState=AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY;AdapterJS.maybeThroughWebRTCReady();}},100);}};AdapterJS.maybeThroughWebRTCReady=function(){if(!AdapterJS.onwebrtcreadyDone){AdapterJS.onwebrtcreadyDone=true;if(AdapterJS._onwebrtcreadies.length){AdapterJS._onwebrtcreadies.forEach(function(callback){if(typeof(callback)==='function'){callback(AdapterJS.WebRTCPlugin.plugin!==null);}});}else if(typeof(AdapterJS.onwebrtcready)==='function'){AdapterJS.onwebrtcready(AdapterJS.WebRTCPlugin.plugin!==null);}}};AdapterJS.TEXT={PLUGIN:{REQUIRE_INSTALLATION:'This website requires you to install a WebRTC-enabling plugin '+'to work on this browser.',NOT_SUPPORTED:'Your browser does not support WebRTC.',BUTTON:'Install Now'},REFRESH:{REQUIRE_REFRESH:'Please refresh page',BUTTON:'Refresh Page'}};AdapterJS._iceConnectionStates={starting:'starting',checking:'checking',connected:'connected',completed:'connected',done:'completed',disconnected:'disconnected',failed:'failed',closed:'closed'};AdapterJS._iceConnectionFiredStates=[];AdapterJS.isDefined=null;AdapterJS.parseWebrtcDetectedBrowser=function(){var hasMatch=null;if((!!window.opr&&!!opr.addons)||!!window.opera||navigator.userAgent.indexOf(' OPR/')>=0){webrtcDetectedBrowser='opera';webrtcDetectedType='webkit';webrtcMinimumVersion=26;hasMatch=/OPR\/(\d+)/i.exec(navigator.userAgent)||[];webrtcDetectedVersion=parseInt(hasMatch[1],10);}else if(typeof InstallTrigger!=='undefined'){webrtcDetectedType='moz';}else if(Object.prototype.toString.call(window.HTMLElement).indexOf('Constructor')>0){webrtcDetectedBrowser='safari';webrtcDetectedType='plugin';webrtcMinimumVersion=7;hasMatch=/version\/(\d+)/i.exec(navigator.userAgent)||[];webrtcDetectedVersion=parseInt(hasMatch[1],10);}else if(false||!!document.documentMode){webrtcDetectedBrowser='IE';webrtcDetectedType='plugin';webrtcMinimumVersion=9;hasMatch=/\brv[ :]+(\d+)/g.exec(navigator.userAgent)||[];webrtcDetectedVersion=parseInt(hasMatch[1]||'0',10);if(!webrtcDetectedVersion){hasMatch=/\bMSIE[ :]+(\d+)/g.exec(navigator.userAgent)||[];webrtcDetectedVersion=parseInt(hasMatch[1]||'0',10);}}else if(!!window.StyleMedia){webrtcDetectedType='';}else if(!!window.chrome&&!!window.chrome.webstore){webrtcDetectedType='webkit';}else if((webrtcDetectedBrowser==='chrome'||webrtcDetectedBrowser==='opera')&&!!window.CSS){webrtcDetectedBrowser='blink';}
window.webrtcDetectedBrowser=webrtcDetectedBrowser;window.webrtcDetectedVersion=webrtcDetectedVersion;window.webrtcMinimumVersion=webrtcMinimumVersion;};AdapterJS.addEvent=function(elem,evnt,func){if(elem.addEventListener){elem.addEventListener(evnt,func,false);}else if(elem.attachEvent){elem.attachEvent('on'+evnt,func);}else{elem[evnt]=func;}};AdapterJS.renderNotificationBar=function(text,buttonText,buttonLink,openNewTab,displayRefreshBar){if(document.readyState!=='complete'){return;}
var w=window;var i=document.createElement('iframe');i.name='adapterjs-alert';i.style.position='fixed';i.style.top='-41px';i.style.left=0;i.style.right=0;i.style.width='100%';i.style.height='40px';i.style.backgroundColor='#ffffe1';i.style.border='none';i.style.borderBottom='1px solid #888888';i.style.zIndex='9999999';if(typeof i.style.webkitTransition==='string'){i.style.webkitTransition='all .5s ease-out';}else if(typeof i.style.transition==='string'){i.style.transition='all .5s ease-out';}
document.body.appendChild(i);var c=(i.contentWindow)?i.contentWindow:(i.contentDocument.document)?i.contentDocument.document:i.contentDocument;c.document.open();c.document.write('<span style="display: inline-block; font-family: Helvetica, Arial,'+'sans-serif; font-size: .9rem; padding: 4px; vertical-align: '+'middle; cursor: default;">'+text+'</span>');if(buttonText&&buttonLink){c.document.write('<button id="okay">'+buttonText+'</button><button id="cancel">Cancel</button>');c.document.close();AdapterJS.addEvent(c.document.getElementById('okay'),'click',function(e){if(!!displayRefreshBar){AdapterJS.renderNotificationBar(AdapterJS.TEXT.EXTENSION?AdapterJS.TEXT.EXTENSION.REQUIRE_REFRESH:AdapterJS.TEXT.REFRESH.REQUIRE_REFRESH,AdapterJS.TEXT.REFRESH.BUTTON,'javascript:location.reload()');}
window.open(buttonLink,!!openNewTab?'_blank':'_top');e.preventDefault();try{e.cancelBubble=true;}catch(error){}
var pluginInstallInterval=setInterval(function(){if(!isIE){navigator.plugins.refresh(false);}
AdapterJS.WebRTCPlugin.isPluginInstalled(AdapterJS.WebRTCPlugin.pluginInfo.prefix,AdapterJS.WebRTCPlugin.pluginInfo.plugName,function(){clearInterval(pluginInstallInterval);AdapterJS.WebRTCPlugin.defineWebRTCInterface();},function(){});},500);});AdapterJS.addEvent(c.document.getElementById('cancel'),'click',function(e){w.document.body.removeChild(i);});}else{c.document.close();}
setTimeout(function(){if(typeof i.style.webkitTransform==='string'){i.style.webkitTransform='translateY(40px)';}else if(typeof i.style.transform==='string'){i.style.transform='translateY(40px)';}else{i.style.top='0px';}},300);};webrtcDetectedType=null;checkMediaDataChannelSettings=function(peerBrowserAgent,peerBrowserVersion,callback,constraints){if(typeof callback!=='function'){return;}
var beOfferer=true;var isLocalFirefox=webrtcDetectedBrowser==='firefox';var isLocalFirefoxInterop=webrtcDetectedType==='moz'&&webrtcDetectedVersion>30;var isPeerFirefox=peerBrowserAgent==='firefox';var isPeerFirefoxInterop=peerBrowserAgent==='firefox'&&((peerBrowserVersion)?(peerBrowserVersion>30):false);if((isLocalFirefox&&isPeerFirefox)||(isLocalFirefoxInterop)){try{delete constraints.mandatory.MozDontOfferDataChannel;}catch(error){console.error('Failed deleting MozDontOfferDataChannel');console.error(error);}}else if((isLocalFirefox&&!isPeerFirefox)){constraints.mandatory.MozDontOfferDataChannel=true;}
if(!isLocalFirefox){for(var prop in constraints.mandatory){if(constraints.mandatory.hasOwnProperty(prop)){if(prop.indexOf('Moz')!==-1){delete constraints.mandatory[prop];}}}}
if(isLocalFirefox&&!isPeerFirefox&&!isLocalFirefoxInterop){beOfferer=false;}
callback(beOfferer,constraints);};checkIceConnectionState=function(peerId,iceConnectionState,callback){if(typeof callback!=='function'){console.warn('No callback specified in checkIceConnectionState. Aborted.');return;}
peerId=(peerId)?peerId:'peer';if(!AdapterJS._iceConnectionFiredStates[peerId]||iceConnectionState===AdapterJS._iceConnectionStates.disconnected||iceConnectionState===AdapterJS._iceConnectionStates.failed||iceConnectionState===AdapterJS._iceConnectionStates.closed){AdapterJS._iceConnectionFiredStates[peerId]=[];}
iceConnectionState=AdapterJS._iceConnectionStates[iceConnectionState];if(AdapterJS._iceConnectionFiredStates[peerId].indexOf(iceConnectionState)<0){AdapterJS._iceConnectionFiredStates[peerId].push(iceConnectionState);if(iceConnectionState===AdapterJS._iceConnectionStates.connected){setTimeout(function(){AdapterJS._iceConnectionFiredStates[peerId].push(AdapterJS._iceConnectionStates.done);callback(AdapterJS._iceConnectionStates.done);},1000);}
callback(iceConnectionState);}
return;};createIceServer=null;createIceServers=null;RTCPeerConnection=null;RTCSessionDescription=(typeof RTCSessionDescription==='function')?RTCSessionDescription:null;RTCIceCandidate=(typeof RTCIceCandidate==='function')?RTCIceCandidate:null;getUserMedia=null;attachMediaStream=null;reattachMediaStream=null;webrtcDetectedBrowser=null;webrtcDetectedVersion=null;webrtcMinimumVersion=null;if(navigator.mozGetUserMedia||navigator.webkitGetUserMedia||(navigator.mediaDevices&&navigator.userAgent.match(/Edge\/(\d+).(\d+)$/))){(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.adapter=f()}})(function(){var define,module,exports;return(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){'use strict';var SDPUtils={};SDPUtils.generateIdentifier=function(){return Math.random().toString(36).substr(2,10);};SDPUtils.localCName=SDPUtils.generateIdentifier();SDPUtils.splitLines=function(blob){return blob.trim().split('\n').map(function(line){return line.trim();});};SDPUtils.splitSections=function(blob){var parts=blob.split('\nm=');return parts.map(function(part,index){return(index>0?'m='+part:part).trim()+'\r\n';});};SDPUtils.matchPrefix=function(blob,prefix){return SDPUtils.splitLines(blob).filter(function(line){return line.indexOf(prefix)===0;});};SDPUtils.parseCandidate=function(line){var parts;if(line.indexOf('a=candidate:')===0){parts=line.substring(12).split(' ');}else{parts=line.substring(10).split(' ');}
var candidate={foundation:parts[0],component:parts[1],protocol:parts[2].toLowerCase(),priority:parseInt(parts[3],10),ip:parts[4],port:parseInt(parts[5],10),type:parts[7]};for(var i=8;i<parts.length;i+=2){switch(parts[i]){case'raddr':candidate.relatedAddress=parts[i+1];break;case'rport':candidate.relatedPort=parseInt(parts[i+1],10);break;case'tcptype':candidate.tcpType=parts[i+1];break;default:break;}}
return candidate;};SDPUtils.writeCandidate=function(candidate){var sdp=[];sdp.push(candidate.foundation);sdp.push(candidate.component);sdp.push(candidate.protocol.toUpperCase());sdp.push(candidate.priority);sdp.push(candidate.ip);sdp.push(candidate.port);var type=candidate.type;sdp.push('typ');sdp.push(type);if(type!=='host'&&candidate.relatedAddress&&candidate.relatedPort){sdp.push('raddr');sdp.push(candidate.relatedAddress);sdp.push('rport');sdp.push(candidate.relatedPort);}
if(candidate.tcpType&&candidate.protocol.toLowerCase()==='tcp'){sdp.push('tcptype');sdp.push(candidate.tcpType);}
return'candidate:'+sdp.join(' ');};SDPUtils.parseRtpMap=function(line){var parts=line.substr(9).split(' ');var parsed={payloadType:parseInt(parts.shift(),10)};parts=parts[0].split('/');parsed.name=parts[0];parsed.clockRate=parseInt(parts[1],10);parsed.numChannels=parts.length===3?parseInt(parts[2],10):1;return parsed;};SDPUtils.writeRtpMap=function(codec){var pt=codec.payloadType;if(codec.preferredPayloadType!==undefined){pt=codec.preferredPayloadType;}
return'a=rtpmap:'+pt+' '+codec.name+'/'+codec.clockRate+
(codec.numChannels!==1?'/'+codec.numChannels:'')+'\r\n';};SDPUtils.parseExtmap=function(line){var parts=line.substr(9).split(' ');return{id:parseInt(parts[0],10),uri:parts[1]};};SDPUtils.writeExtmap=function(headerExtension){return'a=extmap:'+(headerExtension.id||headerExtension.preferredId)+' '+headerExtension.uri+'\r\n';};SDPUtils.parseFmtp=function(line){var parsed={};var kv;var parts=line.substr(line.indexOf(' ')+1).split(';');for(var j=0;j<parts.length;j++){kv=parts[j].trim().split('=');parsed[kv[0].trim()]=kv[1];}
return parsed;};SDPUtils.writeFmtp=function(codec){var line='';var pt=codec.payloadType;if(codec.preferredPayloadType!==undefined){pt=codec.preferredPayloadType;}
if(codec.parameters&&Object.keys(codec.parameters).length){var params=[];Object.keys(codec.parameters).forEach(function(param){params.push(param+'='+codec.parameters[param]);});line+='a=fmtp:'+pt+' '+params.join(';')+'\r\n';}
return line;};SDPUtils.parseRtcpFb=function(line){var parts=line.substr(line.indexOf(' ')+1).split(' ');return{type:parts.shift(),parameter:parts.join(' ')};};SDPUtils.writeRtcpFb=function(codec){var lines='';var pt=codec.payloadType;if(codec.preferredPayloadType!==undefined){pt=codec.preferredPayloadType;}
if(codec.rtcpFeedback&&codec.rtcpFeedback.length){codec.rtcpFeedback.forEach(function(fb){lines+='a=rtcp-fb:'+pt+' '+fb.type+
(fb.parameter&&fb.parameter.length?' '+fb.parameter:'')+'\r\n';});}
return lines;};SDPUtils.parseSsrcMedia=function(line){var sp=line.indexOf(' ');var parts={ssrc:parseInt(line.substr(7,sp-7),10)};var colon=line.indexOf(':',sp);if(colon>-1){parts.attribute=line.substr(sp+1,colon-sp-1);parts.value=line.substr(colon+1);}else{parts.attribute=line.substr(sp+1);}
return parts;};SDPUtils.getDtlsParameters=function(mediaSection,sessionpart){var lines=SDPUtils.splitLines(mediaSection);lines=lines.concat(SDPUtils.splitLines(sessionpart));var fpLine=lines.filter(function(line){return line.indexOf('a=fingerprint:')===0;})[0].substr(14);var dtlsParameters={role:'auto',fingerprints:[{algorithm:fpLine.split(' ')[0],value:fpLine.split(' ')[1]}]};return dtlsParameters;};SDPUtils.writeDtlsParameters=function(params,setupType){var sdp='a=setup:'+setupType+'\r\n';params.fingerprints.forEach(function(fp){sdp+='a=fingerprint:'+fp.algorithm+' '+fp.value+'\r\n';});return sdp;};SDPUtils.getIceParameters=function(mediaSection,sessionpart){var lines=SDPUtils.splitLines(mediaSection);lines=lines.concat(SDPUtils.splitLines(sessionpart));var iceParameters={usernameFragment:lines.filter(function(line){return line.indexOf('a=ice-ufrag:')===0;})[0].substr(12),password:lines.filter(function(line){return line.indexOf('a=ice-pwd:')===0;})[0].substr(10)};return iceParameters;};SDPUtils.writeIceParameters=function(params){return'a=ice-ufrag:'+params.usernameFragment+'\r\n'+'a=ice-pwd:'+params.password+'\r\n';};SDPUtils.parseRtpParameters=function(mediaSection){var description={codecs:[],headerExtensions:[],fecMechanisms:[],rtcp:[]};var lines=SDPUtils.splitLines(mediaSection);var mline=lines[0].split(' ');for(var i=3;i<mline.length;i++){var pt=mline[i];var rtpmapline=SDPUtils.matchPrefix(mediaSection,'a=rtpmap:'+pt+' ')[0];if(rtpmapline){var codec=SDPUtils.parseRtpMap(rtpmapline);var fmtps=SDPUtils.matchPrefix(mediaSection,'a=fmtp:'+pt+' ');codec.parameters=fmtps.length?SDPUtils.parseFmtp(fmtps[0]):{};codec.rtcpFeedback=SDPUtils.matchPrefix(mediaSection,'a=rtcp-fb:'+pt+' ').map(SDPUtils.parseRtcpFb);description.codecs.push(codec);switch(codec.name.toUpperCase()){case'RED':case'ULPFEC':description.fecMechanisms.push(codec.name.toUpperCase());break;default:break;}}}
SDPUtils.matchPrefix(mediaSection,'a=extmap:').forEach(function(line){description.headerExtensions.push(SDPUtils.parseExtmap(line));});return description;};SDPUtils.writeRtpDescription=function(kind,caps){var sdp='';sdp+='m='+kind+' ';sdp+=caps.codecs.length>0?'9':'0';sdp+=' UDP/TLS/RTP/SAVPF ';sdp+=caps.codecs.map(function(codec){if(codec.preferredPayloadType!==undefined){return codec.preferredPayloadType;}
return codec.payloadType;}).join(' ')+'\r\n';sdp+='c=IN IP4 0.0.0.0\r\n';sdp+='a=rtcp:9 IN IP4 0.0.0.0\r\n';caps.codecs.forEach(function(codec){sdp+=SDPUtils.writeRtpMap(codec);sdp+=SDPUtils.writeFmtp(codec);sdp+=SDPUtils.writeRtcpFb(codec);});sdp+='a=rtcp-mux\r\n';return sdp;};SDPUtils.parseRtpEncodingParameters=function(mediaSection){var encodingParameters=[];var description=SDPUtils.parseRtpParameters(mediaSection);var hasRed=description.fecMechanisms.indexOf('RED')!==-1;var hasUlpfec=description.fecMechanisms.indexOf('ULPFEC')!==-1;var ssrcs=SDPUtils.matchPrefix(mediaSection,'a=ssrc:').map(function(line){return SDPUtils.parseSsrcMedia(line);}).filter(function(parts){return parts.attribute==='cname';});var primarySsrc=ssrcs.length>0&&ssrcs[0].ssrc;var secondarySsrc;var flows=SDPUtils.matchPrefix(mediaSection,'a=ssrc-group:FID').map(function(line){var parts=line.split(' ');parts.shift();return parts.map(function(part){return parseInt(part,10);});});if(flows.length>0&&flows[0].length>1&&flows[0][0]===primarySsrc){secondarySsrc=flows[0][1];}
description.codecs.forEach(function(codec){if(codec.name.toUpperCase()==='RTX'&&codec.parameters.apt){var encParam={ssrc:primarySsrc,codecPayloadType:parseInt(codec.parameters.apt,10),rtx:{payloadType:codec.payloadType,ssrc:secondarySsrc}};encodingParameters.push(encParam);if(hasRed){encParam=JSON.parse(JSON.stringify(encParam));encParam.fec={ssrc:secondarySsrc,mechanism:hasUlpfec?'red+ulpfec':'red'};encodingParameters.push(encParam);}}});if(encodingParameters.length===0&&primarySsrc){encodingParameters.push({ssrc:primarySsrc});}
var bandwidth=SDPUtils.matchPrefix(mediaSection,'b=');if(bandwidth.length){if(bandwidth[0].indexOf('b=TIAS:')===0){bandwidth=parseInt(bandwidth[0].substr(7),10);}else if(bandwidth[0].indexOf('b=AS:')===0){bandwidth=parseInt(bandwidth[0].substr(5),10);}
encodingParameters.forEach(function(params){params.maxBitrate=bandwidth;});}
return encodingParameters;};SDPUtils.writeSessionBoilerplate=function(){return'v=0\r\n'+'o=thisisadapterortc 8169639915646943137 2 IN IP4 127.0.0.1\r\n'+'s=-\r\n'+'t=0 0\r\n';};SDPUtils.writeMediaSection=function(transceiver,caps,type,stream){var sdp=SDPUtils.writeRtpDescription(transceiver.kind,caps);sdp+=SDPUtils.writeIceParameters(transceiver.iceGatherer.getLocalParameters());sdp+=SDPUtils.writeDtlsParameters(transceiver.dtlsTransport.getLocalParameters(),type==='offer'?'actpass':'active');sdp+='a=mid:'+transceiver.mid+'\r\n';if(transceiver.rtpSender&&transceiver.rtpReceiver){sdp+='a=sendrecv\r\n';}else if(transceiver.rtpSender){sdp+='a=sendonly\r\n';}else if(transceiver.rtpReceiver){sdp+='a=recvonly\r\n';}else{sdp+='a=inactive\r\n';}
if(transceiver.rtpSender){var msid='msid:'+stream.id+' '+
transceiver.rtpSender.track.id+'\r\n';sdp+='a='+msid;sdp+='a=ssrc:'+transceiver.sendEncodingParameters[0].ssrc+' '+msid;}
sdp+='a=ssrc:'+transceiver.sendEncodingParameters[0].ssrc+' cname:'+SDPUtils.localCName+'\r\n';return sdp;};SDPUtils.getDirection=function(mediaSection,sessionpart){var lines=SDPUtils.splitLines(mediaSection);for(var i=0;i<lines.length;i++){switch(lines[i]){case'a=sendrecv':case'a=sendonly':case'a=recvonly':case'a=inactive':return lines[i].substr(2);default:}}
if(sessionpart){return SDPUtils.getDirection(sessionpart);}
return'sendrecv';};module.exports=SDPUtils;},{}],2:[function(require,module,exports){'use strict';(function(){var logging=require('./utils').log;var browserDetails=require('./utils').browserDetails;module.exports.browserDetails=browserDetails;module.exports.extractVersion=require('./utils').extractVersion;module.exports.disableLog=require('./utils').disableLog;var chromeShim=require('./chrome/chrome_shim')||null;var edgeShim=require('./edge/edge_shim')||null;var firefoxShim=require('./firefox/firefox_shim')||null;var safariShim=require('./safari/safari_shim')||null;switch(browserDetails.browser){case'opera':case'chrome':if(!chromeShim||!chromeShim.shimPeerConnection){logging('Chrome shim is not included in this adapter release.');return;}
logging('adapter.js shimming chrome.');module.exports.browserShim=chromeShim;chromeShim.shimGetUserMedia();chromeShim.shimMediaStream();chromeShim.shimSourceObject();chromeShim.shimPeerConnection();chromeShim.shimOnTrack();break;case'firefox':if(!firefoxShim||!firefoxShim.shimPeerConnection){logging('Firefox shim is not included in this adapter release.');return;}
logging('adapter.js shimming firefox.');module.exports.browserShim=firefoxShim;firefoxShim.shimGetUserMedia();firefoxShim.shimSourceObject();firefoxShim.shimPeerConnection();firefoxShim.shimOnTrack();break;case'edge':if(!edgeShim||!edgeShim.shimPeerConnection){logging('MS edge shim is not included in this adapter release.');return;}
logging('adapter.js shimming edge.');module.exports.browserShim=edgeShim;edgeShim.shimGetUserMedia();edgeShim.shimPeerConnection();break;case'safari':if(!safariShim){logging('Safari shim is not included in this adapter release.');return;}
logging('adapter.js shimming safari.');module.exports.browserShim=safariShim;safariShim.shimGetUserMedia();break;default:logging('Unsupported browser!');}})();},{"./chrome/chrome_shim":3,"./edge/edge_shim":5,"./firefox/firefox_shim":7,"./safari/safari_shim":9,"./utils":10}],3:[function(require,module,exports){'use strict';var logging=require('../utils.js').log;var browserDetails=require('../utils.js').browserDetails;var chromeShim={shimMediaStream:function(){window.MediaStream=window.MediaStream||window.webkitMediaStream;},shimOnTrack:function(){if(typeof window==='object'&&window.RTCPeerConnection&&!('ontrack'in
window.RTCPeerConnection.prototype)){Object.defineProperty(window.RTCPeerConnection.prototype,'ontrack',{get:function(){return this._ontrack;},set:function(f){var self=this;if(this._ontrack){this.removeEventListener('track',this._ontrack);this.removeEventListener('addstream',this._ontrackpoly);}
this.addEventListener('track',this._ontrack=f);this.addEventListener('addstream',this._ontrackpoly=function(e){e.stream.addEventListener('addtrack',function(te){var event=new Event('track');event.track=te.track;event.receiver={track:te.track};event.streams=[e.stream];self.dispatchEvent(event);});e.stream.getTracks().forEach(function(track){var event=new Event('track');event.track=track;event.receiver={track:track};event.streams=[e.stream];this.dispatchEvent(event);}.bind(this));}.bind(this));}});}},shimSourceObject:function(){if(typeof window==='object'){if(window.HTMLMediaElement&&!('srcObject'in window.HTMLMediaElement.prototype)){Object.defineProperty(window.HTMLMediaElement.prototype,'srcObject',{get:function(){return this._srcObject;},set:function(stream){var self=this;this._srcObject=stream;if(this.src){URL.revokeObjectURL(this.src);}
if(!stream){this.src='';return;}
this.src=URL.createObjectURL(stream);stream.addEventListener('addtrack',function(){if(self.src){URL.revokeObjectURL(self.src);}
self.src=URL.createObjectURL(stream);});stream.addEventListener('removetrack',function(){if(self.src){URL.revokeObjectURL(self.src);}
self.src=URL.createObjectURL(stream);});}});}}},shimPeerConnection:function(){window.RTCPeerConnection=function(pcConfig,pcConstraints){logging('PeerConnection');if(pcConfig&&pcConfig.iceTransportPolicy){pcConfig.iceTransports=pcConfig.iceTransportPolicy;}
var pc=new webkitRTCPeerConnection(pcConfig,pcConstraints);var origGetStats=pc.getStats.bind(pc);pc.getStats=function(selector,successCallback,errorCallback){var self=this;var args=arguments;if(arguments.length>0&&typeof selector==='function'){return origGetStats(selector,successCallback);}
var fixChromeStats_=function(response){var standardReport={};var reports=response.result();reports.forEach(function(report){var standardStats={id:report.id,timestamp:report.timestamp,type:report.type};report.names().forEach(function(name){standardStats[name]=report.stat(name);});standardReport[standardStats.id]=standardStats;});return standardReport;};var makeMapStats=function(stats,legacyStats){var map=new Map(Object.keys(stats).map(function(key){return[key,stats[key]];}));legacyStats=legacyStats||stats;Object.keys(legacyStats).forEach(function(key){map[key]=legacyStats[key];});return map;};if(arguments.length>=2){var successCallbackWrapper_=function(response){args[1](makeMapStats(fixChromeStats_(response)));};return origGetStats.apply(this,[successCallbackWrapper_,arguments[0]]);}
return new Promise(function(resolve,reject){if(args.length===1&&typeof selector==='object'){origGetStats.apply(self,[function(response){resolve(makeMapStats(fixChromeStats_(response)));},reject]);}else{origGetStats.apply(self,[function(response){resolve(makeMapStats(fixChromeStats_(response),response.result()));},reject]);}}).then(successCallback,errorCallback);};return pc;};window.RTCPeerConnection.prototype=webkitRTCPeerConnection.prototype;if(webkitRTCPeerConnection.generateCertificate){Object.defineProperty(window.RTCPeerConnection,'generateCertificate',{get:function(){return webkitRTCPeerConnection.generateCertificate;}});}
['createOffer','createAnswer'].forEach(function(method){var nativeMethod=webkitRTCPeerConnection.prototype[method];webkitRTCPeerConnection.prototype[method]=function(){var self=this;if(arguments.length<1||(arguments.length===1&&typeof arguments[0]==='object')){var opts=arguments.length===1?arguments[0]:undefined;return new Promise(function(resolve,reject){nativeMethod.apply(self,[resolve,reject,opts]);});}
return nativeMethod.apply(this,arguments);};});if(browserDetails.version<51){['setLocalDescription','setRemoteDescription','addIceCandidate'].forEach(function(method){var nativeMethod=webkitRTCPeerConnection.prototype[method];webkitRTCPeerConnection.prototype[method]=function(){var args=arguments;var self=this;var promise=new Promise(function(resolve,reject){nativeMethod.apply(self,[args[0],resolve,reject]);});if(args.length<2){return promise;}
return promise.then(function(){args[1].apply(null,[]);},function(err){if(args.length>=3){args[2].apply(null,[err]);}});};});}
['setLocalDescription','setRemoteDescription','addIceCandidate'].forEach(function(method){var nativeMethod=webkitRTCPeerConnection.prototype[method];webkitRTCPeerConnection.prototype[method]=function(){arguments[0]=new((method==='addIceCandidate')?RTCIceCandidate:RTCSessionDescription)(arguments[0]);return nativeMethod.apply(this,arguments);};});var nativeAddIceCandidate=RTCPeerConnection.prototype.addIceCandidate;RTCPeerConnection.prototype.addIceCandidate=function(){return arguments[0]===null?Promise.resolve():nativeAddIceCandidate.apply(this,arguments);};}};module.exports={shimMediaStream:chromeShim.shimMediaStream,shimOnTrack:chromeShim.shimOnTrack,shimSourceObject:chromeShim.shimSourceObject,shimPeerConnection:chromeShim.shimPeerConnection,shimGetUserMedia:require('./getusermedia')};},{"../utils.js":10,"./getusermedia":4}],4:[function(require,module,exports){'use strict';var logging=require('../utils.js').log;module.exports=function(){var constraintsToChrome_=function(c){if(typeof c!=='object'||c.mandatory||c.optional){return c;}
var cc={};Object.keys(c).forEach(function(key){if(key==='require'||key==='advanced'||key==='mediaSource'){return;}
var r=(typeof c[key]==='object')?c[key]:{ideal:c[key]};if(r.exact!==undefined&&typeof r.exact==='number'){r.min=r.max=r.exact;}
var oldname_=function(prefix,name){if(prefix){return prefix+name.charAt(0).toUpperCase()+name.slice(1);}
return(name==='deviceId')?'sourceId':name;};if(r.ideal!==undefined){cc.optional=cc.optional||[];var oc={};if(typeof r.ideal==='number'){oc[oldname_('min',key)]=r.ideal;cc.optional.push(oc);oc={};oc[oldname_('max',key)]=r.ideal;cc.optional.push(oc);}else{oc[oldname_('',key)]=r.ideal;cc.optional.push(oc);}}
if(r.exact!==undefined&&typeof r.exact!=='number'){cc.mandatory=cc.mandatory||{};cc.mandatory[oldname_('',key)]=r.exact;}else{['min','max'].forEach(function(mix){if(r[mix]!==undefined){cc.mandatory=cc.mandatory||{};cc.mandatory[oldname_(mix,key)]=r[mix];}});}});if(c.advanced){cc.optional=(cc.optional||[]).concat(c.advanced);}
return cc;};var shimConstraints_=function(constraints,func){constraints=JSON.parse(JSON.stringify(constraints));if(constraints&&constraints.audio){constraints.audio=constraintsToChrome_(constraints.audio);}
if(constraints&&typeof constraints.video==='object'){var face=constraints.video.facingMode;face=face&&((typeof face==='object')?face:{ideal:face});if((face&&(face.exact==='user'||face.exact==='environment'||face.ideal==='user'||face.ideal==='environment'))&&!(navigator.mediaDevices.getSupportedConstraints&&navigator.mediaDevices.getSupportedConstraints().facingMode)){delete constraints.video.facingMode;if(face.exact==='environment'||face.ideal==='environment'){return navigator.mediaDevices.enumerateDevices().then(function(devices){devices=devices.filter(function(d){return d.kind==='videoinput';});var back=devices.find(function(d){return d.label.toLowerCase().indexOf('back')!==-1;})||(devices.length&&devices[devices.length-1]);if(back){constraints.video.deviceId=face.exact?{exact:back.deviceId}:{ideal:back.deviceId};}
constraints.video=constraintsToChrome_(constraints.video);logging('chrome: '+JSON.stringify(constraints));return func(constraints);});}}
constraints.video=constraintsToChrome_(constraints.video);}
logging('chrome: '+JSON.stringify(constraints));return func(constraints);};var shimError_=function(e){return{name:{PermissionDeniedError:'NotAllowedError',ConstraintNotSatisfiedError:'OverconstrainedError'}[e.name]||e.name,message:e.message,constraint:e.constraintName,toString:function(){return this.name+(this.message&&': ')+this.message;}};};var getUserMedia_=function(constraints,onSuccess,onError){shimConstraints_(constraints,function(c){navigator.webkitGetUserMedia(c,onSuccess,function(e){onError(shimError_(e));});});};navigator.getUserMedia=getUserMedia_;var getUserMediaPromise_=function(constraints){return new Promise(function(resolve,reject){navigator.getUserMedia(constraints,resolve,reject);});};if(!navigator.mediaDevices){navigator.mediaDevices={getUserMedia:getUserMediaPromise_,enumerateDevices:function(){return new Promise(function(resolve){var kinds={audio:'audioinput',video:'videoinput'};return MediaStreamTrack.getSources(function(devices){resolve(devices.map(function(device){return{label:device.label,kind:kinds[device.kind],deviceId:device.id,groupId:''};}));});});}};}
if(!navigator.mediaDevices.getUserMedia){navigator.mediaDevices.getUserMedia=function(constraints){return getUserMediaPromise_(constraints);};}else{var origGetUserMedia=navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);navigator.mediaDevices.getUserMedia=function(cs){return shimConstraints_(cs,function(c){return origGetUserMedia(c).catch(function(e){return Promise.reject(shimError_(e));});});};}
if(typeof navigator.mediaDevices.addEventListener==='undefined'){navigator.mediaDevices.addEventListener=function(){logging('Dummy mediaDevices.addEventListener called.');};}
if(typeof navigator.mediaDevices.removeEventListener==='undefined'){navigator.mediaDevices.removeEventListener=function(){logging('Dummy mediaDevices.removeEventListener called.');};}};},{"../utils.js":10}],5:[function(require,module,exports){'use strict';var SDPUtils=require('sdp');var edgeShim={shimPeerConnection:function(){if(window.RTCIceGatherer){if(!window.RTCIceCandidate){window.RTCIceCandidate=function(args){return args;};}
if(!window.RTCSessionDescription){window.RTCSessionDescription=function(args){return args;};}}
window.RTCPeerConnection=function(config){var self=this;var _eventTarget=document.createDocumentFragment();['addEventListener','removeEventListener','dispatchEvent'].forEach(function(method){self[method]=_eventTarget[method].bind(_eventTarget);});this.onicecandidate=null;this.onaddstream=null;this.ontrack=null;this.onremovestream=null;this.onsignalingstatechange=null;this.oniceconnectionstatechange=null;this.onnegotiationneeded=null;this.ondatachannel=null;this.localStreams=[];this.remoteStreams=[];this.getLocalStreams=function(){return self.localStreams;};this.getRemoteStreams=function(){return self.remoteStreams;};this.localDescription=new RTCSessionDescription({type:'',sdp:''});this.remoteDescription=new RTCSessionDescription({type:'',sdp:''});this.signalingState='stable';this.iceConnectionState='new';this.iceGatheringState='new';this.iceOptions={gatherPolicy:'all',iceServers:[]};if(config&&config.iceTransportPolicy){switch(config.iceTransportPolicy){case'all':case'relay':this.iceOptions.gatherPolicy=config.iceTransportPolicy;break;case'none':throw new TypeError('iceTransportPolicy "none" not supported');default:break;}}
this.usingBundle=config&&config.bundlePolicy==='max-bundle';if(config&&config.iceServers){var iceServers=JSON.parse(JSON.stringify(config.iceServers));this.iceOptions.iceServers=iceServers.filter(function(server){if(server&&server.urls){var urls=server.urls;if(typeof urls==='string'){urls=[urls];}
urls=urls.filter(function(url){return url.indexOf('turn:')===0&&url.indexOf('transport=udp')!==-1&&url.indexOf('turn:[')===-1;})[0];return!!urls;}
return false;});}
this.transceivers=[];this._localIceCandidatesBuffer=[];};window.RTCPeerConnection.prototype._emitBufferedCandidates=function(){var self=this;var sections=SDPUtils.splitSections(self.localDescription.sdp);this._localIceCandidatesBuffer.forEach(function(event){var end=!event.candidate||Object.keys(event.candidate).length===0;if(end){for(var j=1;j<sections.length;j++){if(sections[j].indexOf('\r\na=end-of-candidates\r\n')===-1){sections[j]+='a=end-of-candidates\r\n';}}}else if(event.candidate.candidate.indexOf('typ endOfCandidates')===-1){sections[event.candidate.sdpMLineIndex+1]+='a='+event.candidate.candidate+'\r\n';}
self.localDescription.sdp=sections.join('');self.dispatchEvent(event);if(self.onicecandidate!==null){self.onicecandidate(event);}
if(!event.candidate&&self.iceGatheringState!=='complete'){var complete=self.transceivers.every(function(transceiver){return transceiver.iceGatherer&&transceiver.iceGatherer.state==='completed';});if(complete){self.iceGatheringState='complete';}}});this._localIceCandidatesBuffer=[];};window.RTCPeerConnection.prototype.addStream=function(stream){this.localStreams.push(stream.clone());this._maybeFireNegotiationNeeded();};window.RTCPeerConnection.prototype.removeStream=function(stream){var idx=this.localStreams.indexOf(stream);if(idx>-1){this.localStreams.splice(idx,1);this._maybeFireNegotiationNeeded();}};window.RTCPeerConnection.prototype.getSenders=function(){return this.transceivers.filter(function(transceiver){return!!transceiver.rtpSender;}).map(function(transceiver){return transceiver.rtpSender;});};window.RTCPeerConnection.prototype.getReceivers=function(){return this.transceivers.filter(function(transceiver){return!!transceiver.rtpReceiver;}).map(function(transceiver){return transceiver.rtpReceiver;});};window.RTCPeerConnection.prototype._getCommonCapabilities=function(localCapabilities,remoteCapabilities){var commonCapabilities={codecs:[],headerExtensions:[],fecMechanisms:[]};localCapabilities.codecs.forEach(function(lCodec){for(var i=0;i<remoteCapabilities.codecs.length;i++){var rCodec=remoteCapabilities.codecs[i];if(lCodec.name.toLowerCase()===rCodec.name.toLowerCase()&&lCodec.clockRate===rCodec.clockRate&&lCodec.numChannels===rCodec.numChannels){commonCapabilities.codecs.push(rCodec);rCodec.rtcpFeedback=rCodec.rtcpFeedback.filter(function(fb){for(var j=0;j<lCodec.rtcpFeedback.length;j++){if(lCodec.rtcpFeedback[j].type===fb.type&&lCodec.rtcpFeedback[j].parameter===fb.parameter){return true;}}
return false;});break;}}});localCapabilities.headerExtensions.forEach(function(lHeaderExtension){for(var i=0;i<remoteCapabilities.headerExtensions.length;i++){var rHeaderExtension=remoteCapabilities.headerExtensions[i];if(lHeaderExtension.uri===rHeaderExtension.uri){commonCapabilities.headerExtensions.push(rHeaderExtension);break;}}});return commonCapabilities;};window.RTCPeerConnection.prototype._createIceAndDtlsTransports=function(mid,sdpMLineIndex){var self=this;var iceGatherer=new RTCIceGatherer(self.iceOptions);var iceTransport=new RTCIceTransport(iceGatherer);iceGatherer.onlocalcandidate=function(evt){var event=new Event('icecandidate');event.candidate={sdpMid:mid,sdpMLineIndex:sdpMLineIndex};var cand=evt.candidate;var end=!cand||Object.keys(cand).length===0;if(end){if(iceGatherer.state===undefined){iceGatherer.state='completed';}
event.candidate.candidate='candidate:1 1 udp 1 0.0.0.0 9 typ endOfCandidates';}else{cand.component=iceTransport.component==='RTCP'?2:1;event.candidate.candidate=SDPUtils.writeCandidate(cand);}
var sections=SDPUtils.splitSections(self.localDescription.sdp);if(event.candidate.candidate.indexOf('typ endOfCandidates')===-1){sections[event.candidate.sdpMLineIndex+1]+='a='+event.candidate.candidate+'\r\n';}else{sections[event.candidate.sdpMLineIndex+1]+='a=end-of-candidates\r\n';}
self.localDescription.sdp=sections.join('');var complete=self.transceivers.every(function(transceiver){return transceiver.iceGatherer&&transceiver.iceGatherer.state==='completed';});switch(self.iceGatheringState){case'new':self._localIceCandidatesBuffer.push(event);if(end&&complete){self._localIceCandidatesBuffer.push(new Event('icecandidate'));}
break;case'gathering':self._emitBufferedCandidates();self.dispatchEvent(event);if(self.onicecandidate!==null){self.onicecandidate(event);}
if(complete){self.dispatchEvent(new Event('icecandidate'));if(self.onicecandidate!==null){self.onicecandidate(new Event('icecandidate'));}
self.iceGatheringState='complete';}
break;case'complete':break;default:break;}};iceTransport.onicestatechange=function(){self._updateConnectionState();};var dtlsTransport=new RTCDtlsTransport(iceTransport);dtlsTransport.ondtlsstatechange=function(){self._updateConnectionState();};dtlsTransport.onerror=function(){dtlsTransport.state='failed';self._updateConnectionState();};return{iceGatherer:iceGatherer,iceTransport:iceTransport,dtlsTransport:dtlsTransport};};window.RTCPeerConnection.prototype._transceive=function(transceiver,send,recv){var params=this._getCommonCapabilities(transceiver.localCapabilities,transceiver.remoteCapabilities);if(send&&transceiver.rtpSender){params.encodings=transceiver.sendEncodingParameters;params.rtcp={cname:SDPUtils.localCName};if(transceiver.recvEncodingParameters.length){params.rtcp.ssrc=transceiver.recvEncodingParameters[0].ssrc;}
transceiver.rtpSender.send(params);}
if(recv&&transceiver.rtpReceiver){params.encodings=transceiver.recvEncodingParameters;params.rtcp={cname:transceiver.cname};if(transceiver.sendEncodingParameters.length){params.rtcp.ssrc=transceiver.sendEncodingParameters[0].ssrc;}
transceiver.rtpReceiver.receive(params);}};window.RTCPeerConnection.prototype.setLocalDescription=function(description){var self=this;var sections;var sessionpart;if(description.type==='offer'){if(this._pendingOffer){sections=SDPUtils.splitSections(description.sdp);sessionpart=sections.shift();sections.forEach(function(mediaSection,sdpMLineIndex){var caps=SDPUtils.parseRtpParameters(mediaSection);self._pendingOffer[sdpMLineIndex].localCapabilities=caps;});this.transceivers=this._pendingOffer;delete this._pendingOffer;}}else if(description.type==='answer'){sections=SDPUtils.splitSections(self.remoteDescription.sdp);sessionpart=sections.shift();var isIceLite=SDPUtils.matchPrefix(sessionpart,'a=ice-lite').length>0;sections.forEach(function(mediaSection,sdpMLineIndex){var transceiver=self.transceivers[sdpMLineIndex];var iceGatherer=transceiver.iceGatherer;var iceTransport=transceiver.iceTransport;var dtlsTransport=transceiver.dtlsTransport;var localCapabilities=transceiver.localCapabilities;var remoteCapabilities=transceiver.remoteCapabilities;var rejected=mediaSection.split('\n',1)[0].split(' ',2)[1]==='0';if(!rejected){var remoteIceParameters=SDPUtils.getIceParameters(mediaSection,sessionpart);if(isIceLite){var cands=SDPUtils.matchPrefix(mediaSection,'a=candidate:').map(function(cand){return SDPUtils.parseCandidate(cand);}).filter(function(cand){return cand.component==='1';});if(cands.length){iceTransport.setRemoteCandidates(cands);}}
var remoteDtlsParameters=SDPUtils.getDtlsParameters(mediaSection,sessionpart);if(isIceLite){remoteDtlsParameters.role='server';}
if(!self.usingBundle||sdpMLineIndex===0){iceTransport.start(iceGatherer,remoteIceParameters,isIceLite?'controlling':'controlled');dtlsTransport.start(remoteDtlsParameters);}
var params=self._getCommonCapabilities(localCapabilities,remoteCapabilities);self._transceive(transceiver,params.codecs.length>0,false);}});}
this.localDescription={type:description.type,sdp:description.sdp};switch(description.type){case'offer':this._updateSignalingState('have-local-offer');break;case'answer':this._updateSignalingState('stable');break;default:throw new TypeError('unsupported type "'+description.type+'"');}
var hasCallback=arguments.length>1&&typeof arguments[1]==='function';if(hasCallback){var cb=arguments[1];window.setTimeout(function(){cb();if(self.iceGatheringState==='new'){self.iceGatheringState='gathering';}
self._emitBufferedCandidates();},0);}
var p=Promise.resolve();p.then(function(){if(!hasCallback){if(self.iceGatheringState==='new'){self.iceGatheringState='gathering';}
window.setTimeout(self._emitBufferedCandidates.bind(self),500);}});return p;};window.RTCPeerConnection.prototype.setRemoteDescription=function(description){var self=this;var stream=new MediaStream();var receiverList=[];var sections=SDPUtils.splitSections(description.sdp);var sessionpart=sections.shift();var isIceLite=SDPUtils.matchPrefix(sessionpart,'a=ice-lite').length>0;this.usingBundle=SDPUtils.matchPrefix(sessionpart,'a=group:BUNDLE ').length>0;sections.forEach(function(mediaSection,sdpMLineIndex){var lines=SDPUtils.splitLines(mediaSection);var mline=lines[0].substr(2).split(' ');var kind=mline[0];var rejected=mline[1]==='0';var direction=SDPUtils.getDirection(mediaSection,sessionpart);var transceiver;var iceGatherer;var iceTransport;var dtlsTransport;var rtpSender;var rtpReceiver;var sendEncodingParameters;var recvEncodingParameters;var localCapabilities;var track;var remoteCapabilities=SDPUtils.parseRtpParameters(mediaSection);var remoteIceParameters;var remoteDtlsParameters;if(!rejected){remoteIceParameters=SDPUtils.getIceParameters(mediaSection,sessionpart);remoteDtlsParameters=SDPUtils.getDtlsParameters(mediaSection,sessionpart);remoteDtlsParameters.role='client';}
recvEncodingParameters=SDPUtils.parseRtpEncodingParameters(mediaSection);var mid=SDPUtils.matchPrefix(mediaSection,'a=mid:');if(mid.length){mid=mid[0].substr(6);}else{mid=SDPUtils.generateIdentifier();}
var cname;var remoteSsrc=SDPUtils.matchPrefix(mediaSection,'a=ssrc:').map(function(line){return SDPUtils.parseSsrcMedia(line);}).filter(function(obj){return obj.attribute==='cname';})[0];if(remoteSsrc){cname=remoteSsrc.value;}
var isComplete=SDPUtils.matchPrefix(mediaSection,'a=end-of-candidates',sessionpart).length>0;var cands=SDPUtils.matchPrefix(mediaSection,'a=candidate:').map(function(cand){return SDPUtils.parseCandidate(cand);}).filter(function(cand){return cand.component==='1';});if(description.type==='offer'&&!rejected){var transports=self.usingBundle&&sdpMLineIndex>0?{iceGatherer:self.transceivers[0].iceGatherer,iceTransport:self.transceivers[0].iceTransport,dtlsTransport:self.transceivers[0].dtlsTransport}:self._createIceAndDtlsTransports(mid,sdpMLineIndex);if(isComplete){transports.iceTransport.setRemoteCandidates(cands);}
localCapabilities=RTCRtpReceiver.getCapabilities(kind);sendEncodingParameters=[{ssrc:(2*sdpMLineIndex+2)*1001}];rtpReceiver=new RTCRtpReceiver(transports.dtlsTransport,kind);track=rtpReceiver.track;receiverList.push([track,rtpReceiver]);stream.addTrack(track);if(self.localStreams.length>0&&self.localStreams[0].getTracks().length>=sdpMLineIndex){var localTrack;if(kind==='audio'){localTrack=self.localStreams[0].getAudioTracks()[0];}else if(kind==='video'){localTrack=self.localStreams[0].getVideoTracks()[0];}
if(localTrack){rtpSender=new RTCRtpSender(localTrack,transports.dtlsTransport);}}
self.transceivers[sdpMLineIndex]={iceGatherer:transports.iceGatherer,iceTransport:transports.iceTransport,dtlsTransport:transports.dtlsTransport,localCapabilities:localCapabilities,remoteCapabilities:remoteCapabilities,rtpSender:rtpSender,rtpReceiver:rtpReceiver,kind:kind,mid:mid,cname:cname,sendEncodingParameters:sendEncodingParameters,recvEncodingParameters:recvEncodingParameters};self._transceive(self.transceivers[sdpMLineIndex],false,direction==='sendrecv'||direction==='sendonly');}else if(description.type==='answer'&&!rejected){transceiver=self.transceivers[sdpMLineIndex];iceGatherer=transceiver.iceGatherer;iceTransport=transceiver.iceTransport;dtlsTransport=transceiver.dtlsTransport;rtpSender=transceiver.rtpSender;rtpReceiver=transceiver.rtpReceiver;sendEncodingParameters=transceiver.sendEncodingParameters;localCapabilities=transceiver.localCapabilities;self.transceivers[sdpMLineIndex].recvEncodingParameters=recvEncodingParameters;self.transceivers[sdpMLineIndex].remoteCapabilities=remoteCapabilities;self.transceivers[sdpMLineIndex].cname=cname;if((isIceLite||isComplete)&&cands.length){iceTransport.setRemoteCandidates(cands);}
if(!self.usingBundle||sdpMLineIndex===0){iceTransport.start(iceGatherer,remoteIceParameters,'controlling');dtlsTransport.start(remoteDtlsParameters);}
self._transceive(transceiver,direction==='sendrecv'||direction==='recvonly',direction==='sendrecv'||direction==='sendonly');if(rtpReceiver&&(direction==='sendrecv'||direction==='sendonly')){track=rtpReceiver.track;receiverList.push([track,rtpReceiver]);stream.addTrack(track);}else{delete transceiver.rtpReceiver;}}});this.remoteDescription={type:description.type,sdp:description.sdp};switch(description.type){case'offer':this._updateSignalingState('have-remote-offer');break;case'answer':this._updateSignalingState('stable');break;default:throw new TypeError('unsupported type "'+description.type+'"');}
if(stream.getTracks().length){self.remoteStreams.push(stream);window.setTimeout(function(){var event=new Event('addstream');event.stream=stream;self.dispatchEvent(event);if(self.onaddstream!==null){window.setTimeout(function(){self.onaddstream(event);},0);}
receiverList.forEach(function(item){var track=item[0];var receiver=item[1];var trackEvent=new Event('track');trackEvent.track=track;trackEvent.receiver=receiver;trackEvent.streams=[stream];self.dispatchEvent(event);if(self.ontrack!==null){window.setTimeout(function(){self.ontrack(trackEvent);},0);}});},0);}
if(arguments.length>1&&typeof arguments[1]==='function'){window.setTimeout(arguments[1],0);}
return Promise.resolve();};window.RTCPeerConnection.prototype.close=function(){this.transceivers.forEach(function(transceiver){if(transceiver.iceTransport){transceiver.iceTransport.stop();}
if(transceiver.dtlsTransport){transceiver.dtlsTransport.stop();}
if(transceiver.rtpSender){transceiver.rtpSender.stop();}
if(transceiver.rtpReceiver){transceiver.rtpReceiver.stop();}});this._updateSignalingState('closed');};window.RTCPeerConnection.prototype._updateSignalingState=function(newState){this.signalingState=newState;var event=new Event('signalingstatechange');this.dispatchEvent(event);if(this.onsignalingstatechange!==null){this.onsignalingstatechange(event);}};window.RTCPeerConnection.prototype._maybeFireNegotiationNeeded=function(){var event=new Event('negotiationneeded');this.dispatchEvent(event);if(this.onnegotiationneeded!==null){this.onnegotiationneeded(event);}};window.RTCPeerConnection.prototype._updateConnectionState=function(){var self=this;var newState;var states={'new':0,closed:0,connecting:0,checking:0,connected:0,completed:0,failed:0};this.transceivers.forEach(function(transceiver){states[transceiver.iceTransport.state]++;states[transceiver.dtlsTransport.state]++;});states.connected+=states.completed;newState='new';if(states.failed>0){newState='failed';}else if(states.connecting>0||states.checking>0){newState='connecting';}else if(states.disconnected>0){newState='disconnected';}else if(states.new>0){newState='new';}else if(states.connected>0||states.completed>0){newState='connected';}
if(newState!==self.iceConnectionState){self.iceConnectionState=newState;var event=new Event('iceconnectionstatechange');this.dispatchEvent(event);if(this.oniceconnectionstatechange!==null){this.oniceconnectionstatechange(event);}}};window.RTCPeerConnection.prototype.createOffer=function(){var self=this;if(this._pendingOffer){throw new Error('createOffer called while there is a pending offer.');}
var offerOptions;if(arguments.length===1&&typeof arguments[0]!=='function'){offerOptions=arguments[0];}else if(arguments.length===3){offerOptions=arguments[2];}
var tracks=[];var numAudioTracks=0;var numVideoTracks=0;if(this.localStreams.length){numAudioTracks=this.localStreams[0].getAudioTracks().length;numVideoTracks=this.localStreams[0].getVideoTracks().length;}
if(offerOptions){if(offerOptions.mandatory||offerOptions.optional){throw new TypeError('Legacy mandatory/optional constraints not supported.');}
if(offerOptions.offerToReceiveAudio!==undefined){numAudioTracks=offerOptions.offerToReceiveAudio;}
if(offerOptions.offerToReceiveVideo!==undefined){numVideoTracks=offerOptions.offerToReceiveVideo;}}
if(this.localStreams.length){this.localStreams[0].getTracks().forEach(function(track){tracks.push({kind:track.kind,track:track,wantReceive:track.kind==='audio'?numAudioTracks>0:numVideoTracks>0});if(track.kind==='audio'){numAudioTracks--;}else if(track.kind==='video'){numVideoTracks--;}});}
while(numAudioTracks>0||numVideoTracks>0){if(numAudioTracks>0){tracks.push({kind:'audio',wantReceive:true});numAudioTracks--;}
if(numVideoTracks>0){tracks.push({kind:'video',wantReceive:true});numVideoTracks--;}}
var sdp=SDPUtils.writeSessionBoilerplate();var transceivers=[];tracks.forEach(function(mline,sdpMLineIndex){var track=mline.track;var kind=mline.kind;var mid=SDPUtils.generateIdentifier();var transports=self.usingBundle&&sdpMLineIndex>0?{iceGatherer:transceivers[0].iceGatherer,iceTransport:transceivers[0].iceTransport,dtlsTransport:transceivers[0].dtlsTransport}:self._createIceAndDtlsTransports(mid,sdpMLineIndex);var localCapabilities=RTCRtpSender.getCapabilities(kind);var rtpSender;var rtpReceiver;var sendEncodingParameters=[{ssrc:(2*sdpMLineIndex+1)*1001}];if(track){rtpSender=new RTCRtpSender(track,transports.dtlsTransport);}
if(mline.wantReceive){rtpReceiver=new RTCRtpReceiver(transports.dtlsTransport,kind);}
transceivers[sdpMLineIndex]={iceGatherer:transports.iceGatherer,iceTransport:transports.iceTransport,dtlsTransport:transports.dtlsTransport,localCapabilities:localCapabilities,remoteCapabilities:null,rtpSender:rtpSender,rtpReceiver:rtpReceiver,kind:kind,mid:mid,sendEncodingParameters:sendEncodingParameters,recvEncodingParameters:null};});if(this.usingBundle){sdp+='a=group:BUNDLE '+transceivers.map(function(t){return t.mid;}).join(' ')+'\r\n';}
tracks.forEach(function(mline,sdpMLineIndex){var transceiver=transceivers[sdpMLineIndex];sdp+=SDPUtils.writeMediaSection(transceiver,transceiver.localCapabilities,'offer',self.localStreams[0]);});this._pendingOffer=transceivers;var desc=new RTCSessionDescription({type:'offer',sdp:sdp});if(arguments.length&&typeof arguments[0]==='function'){window.setTimeout(arguments[0],0,desc);}
return Promise.resolve(desc);};window.RTCPeerConnection.prototype.createAnswer=function(){var self=this;var sdp=SDPUtils.writeSessionBoilerplate();if(this.usingBundle){sdp+='a=group:BUNDLE '+this.transceivers.map(function(t){return t.mid;}).join(' ')+'\r\n';}
this.transceivers.forEach(function(transceiver){var commonCapabilities=self._getCommonCapabilities(transceiver.localCapabilities,transceiver.remoteCapabilities);sdp+=SDPUtils.writeMediaSection(transceiver,commonCapabilities,'answer',self.localStreams[0]);});var desc=new RTCSessionDescription({type:'answer',sdp:sdp});if(arguments.length&&typeof arguments[0]==='function'){window.setTimeout(arguments[0],0,desc);}
return Promise.resolve(desc);};window.RTCPeerConnection.prototype.addIceCandidate=function(candidate){if(candidate===null){this.transceivers.forEach(function(transceiver){transceiver.iceTransport.addRemoteCandidate({});});}else{var mLineIndex=candidate.sdpMLineIndex;if(candidate.sdpMid){for(var i=0;i<this.transceivers.length;i++){if(this.transceivers[i].mid===candidate.sdpMid){mLineIndex=i;break;}}}
var transceiver=this.transceivers[mLineIndex];if(transceiver){var cand=Object.keys(candidate.candidate).length>0?SDPUtils.parseCandidate(candidate.candidate):{};if(cand.protocol==='tcp'&&(cand.port===0||cand.port===9)){return;}
if(cand.component!=='1'){return;}
if(cand.type==='endOfCandidates'){cand={};}
transceiver.iceTransport.addRemoteCandidate(cand);var sections=SDPUtils.splitSections(this.remoteDescription.sdp);sections[mLineIndex+1]+=(cand.type?candidate.candidate.trim():'a=end-of-candidates')+'\r\n';this.remoteDescription.sdp=sections.join('');}}
if(arguments.length>1&&typeof arguments[1]==='function'){window.setTimeout(arguments[1],0);}
return Promise.resolve();};window.RTCPeerConnection.prototype.getStats=function(){var promises=[];this.transceivers.forEach(function(transceiver){['rtpSender','rtpReceiver','iceGatherer','iceTransport','dtlsTransport'].forEach(function(method){if(transceiver[method]){promises.push(transceiver[method].getStats());}});});var cb=arguments.length>1&&typeof arguments[1]==='function'&&arguments[1];return new Promise(function(resolve){var results=new Map();Promise.all(promises).then(function(res){res.forEach(function(result){Object.keys(result).forEach(function(id){results.set(id,result[id]);results[id]=result[id];});});if(cb){window.setTimeout(cb,0,results);}
resolve(results);});});};}};module.exports={shimPeerConnection:edgeShim.shimPeerConnection,shimGetUserMedia:require('./getusermedia')};},{"./getusermedia":6,"sdp":1}],6:[function(require,module,exports){'use strict';module.exports=function(){var shimError_=function(e){return{name:{PermissionDeniedError:'NotAllowedError'}[e.name]||e.name,message:e.message,constraint:e.constraint,toString:function(){return this.name;}};};var origGetUserMedia=navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);navigator.mediaDevices.getUserMedia=function(c){return origGetUserMedia(c).catch(function(e){return Promise.reject(shimError_(e));});};};},{}],7:[function(require,module,exports){'use strict';var browserDetails=require('../utils').browserDetails;var firefoxShim={shimOnTrack:function(){if(typeof window==='object'&&window.RTCPeerConnection&&!('ontrack'in
window.RTCPeerConnection.prototype)){Object.defineProperty(window.RTCPeerConnection.prototype,'ontrack',{get:function(){return this._ontrack;},set:function(f){if(this._ontrack){this.removeEventListener('track',this._ontrack);this.removeEventListener('addstream',this._ontrackpoly);}
this.addEventListener('track',this._ontrack=f);this.addEventListener('addstream',this._ontrackpoly=function(e){e.stream.getTracks().forEach(function(track){var event=new Event('track');event.track=track;event.receiver={track:track};event.streams=[e.stream];this.dispatchEvent(event);}.bind(this));}.bind(this));}});}},shimSourceObject:function(){if(typeof window==='object'){if(window.HTMLMediaElement&&!('srcObject'in window.HTMLMediaElement.prototype)){Object.defineProperty(window.HTMLMediaElement.prototype,'srcObject',{get:function(){return this.mozSrcObject;},set:function(stream){this.mozSrcObject=stream;}});}}},shimPeerConnection:function(){if(typeof window!=='object'||!(window.RTCPeerConnection||window.mozRTCPeerConnection)){return;}
if(!window.RTCPeerConnection){window.RTCPeerConnection=function(pcConfig,pcConstraints){if(browserDetails.version<38){if(pcConfig&&pcConfig.iceServers){var newIceServers=[];for(var i=0;i<pcConfig.iceServers.length;i++){var server=pcConfig.iceServers[i];if(server.hasOwnProperty('urls')){for(var j=0;j<server.urls.length;j++){var newServer={url:server.urls[j]};if(server.urls[j].indexOf('turn')===0){newServer.username=server.username;newServer.credential=server.credential;}
newIceServers.push(newServer);}}else{newIceServers.push(pcConfig.iceServers[i]);}}
pcConfig.iceServers=newIceServers;}}
return new mozRTCPeerConnection(pcConfig,pcConstraints);};window.RTCPeerConnection.prototype=mozRTCPeerConnection.prototype;if(mozRTCPeerConnection.generateCertificate){Object.defineProperty(window.RTCPeerConnection,'generateCertificate',{get:function(){return mozRTCPeerConnection.generateCertificate;}});}
window.RTCSessionDescription=mozRTCSessionDescription;window.RTCIceCandidate=mozRTCIceCandidate;}
['setLocalDescription','setRemoteDescription','addIceCandidate'].forEach(function(method){var nativeMethod=RTCPeerConnection.prototype[method];RTCPeerConnection.prototype[method]=function(){arguments[0]=new((method==='addIceCandidate')?RTCIceCandidate:RTCSessionDescription)(arguments[0]);return nativeMethod.apply(this,arguments);};});var nativeAddIceCandidate=RTCPeerConnection.prototype.addIceCandidate;RTCPeerConnection.prototype.addIceCandidate=function(){return arguments[0]===null?Promise.resolve():nativeAddIceCandidate.apply(this,arguments);};var makeMapStats=function(stats){var map=new Map();Object.keys(stats).forEach(function(key){map.set(key,stats[key]);map[key]=stats[key];});return map;};var nativeGetStats=RTCPeerConnection.prototype.getStats;RTCPeerConnection.prototype.getStats=function(selector,onSucc,onErr){return nativeGetStats.apply(this,[selector||null]).then(function(stats){return makeMapStats(stats);}).then(onSucc,onErr);};}};module.exports={shimOnTrack:firefoxShim.shimOnTrack,shimSourceObject:firefoxShim.shimSourceObject,shimPeerConnection:firefoxShim.shimPeerConnection,shimGetUserMedia:require('./getusermedia')};},{"../utils":10,"./getusermedia":8}],8:[function(require,module,exports){'use strict';var logging=require('../utils').log;var browserDetails=require('../utils').browserDetails;module.exports=function(){var shimError_=function(e){return{name:{SecurityError:'NotAllowedError',PermissionDeniedError:'NotAllowedError'}[e.name]||e.name,message:{'The operation is insecure.':'The request is not allowed by the '+'user agent or the platform in the current context.'}[e.message]||e.message,constraint:e.constraint,toString:function(){return this.name+(this.message&&': ')+this.message;}};};var getUserMedia_=function(constraints,onSuccess,onError){var constraintsToFF37_=function(c){if(typeof c!=='object'||c.require){return c;}
var require=[];Object.keys(c).forEach(function(key){if(key==='require'||key==='advanced'||key==='mediaSource'){return;}
var r=c[key]=(typeof c[key]==='object')?c[key]:{ideal:c[key]};if(r.min!==undefined||r.max!==undefined||r.exact!==undefined){require.push(key);}
if(r.exact!==undefined){if(typeof r.exact==='number'){r.min=r.max=r.exact;}else{c[key]=r.exact;}
delete r.exact;}
if(r.ideal!==undefined){c.advanced=c.advanced||[];var oc={};if(typeof r.ideal==='number'){oc[key]={min:r.ideal,max:r.ideal};}else{oc[key]=r.ideal;}
c.advanced.push(oc);delete r.ideal;if(!Object.keys(r).length){delete c[key];}}});if(require.length){c.require=require;}
return c;};constraints=JSON.parse(JSON.stringify(constraints));if(browserDetails.version<38){logging('spec: '+JSON.stringify(constraints));if(constraints.audio){constraints.audio=constraintsToFF37_(constraints.audio);}
if(constraints.video){constraints.video=constraintsToFF37_(constraints.video);}
logging('ff37: '+JSON.stringify(constraints));}
return navigator.mozGetUserMedia(constraints,onSuccess,function(e){onError(shimError_(e));});};var getUserMediaPromise_=function(constraints){return new Promise(function(resolve,reject){getUserMedia_(constraints,resolve,reject);});};if(!navigator.mediaDevices){navigator.mediaDevices={getUserMedia:getUserMediaPromise_,addEventListener:function(){},removeEventListener:function(){}};}
navigator.mediaDevices.enumerateDevices=navigator.mediaDevices.enumerateDevices||function(){return new Promise(function(resolve){var infos=[{kind:'audioinput',deviceId:'default',label:'',groupId:''},{kind:'videoinput',deviceId:'default',label:'',groupId:''}];resolve(infos);});};if(browserDetails.version<41){var orgEnumerateDevices=navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);navigator.mediaDevices.enumerateDevices=function(){return orgEnumerateDevices().then(undefined,function(e){if(e.name==='NotFoundError'){return[];}
throw e;});};}
if(browserDetails.version<49){var origGetUserMedia=navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);navigator.mediaDevices.getUserMedia=function(c){return origGetUserMedia(c).catch(function(e){return Promise.reject(shimError_(e));});};}
navigator.getUserMedia=function(constraints,onSuccess,onError){if(browserDetails.version<44){return getUserMedia_(constraints,onSuccess,onError);}
console.warn('navigator.getUserMedia has been replaced by '+'navigator.mediaDevices.getUserMedia');navigator.mediaDevices.getUserMedia(constraints).then(onSuccess,onError);};};},{"../utils":10}],9:[function(require,module,exports){'use strict';var safariShim={shimGetUserMedia:function(){navigator.getUserMedia=navigator.webkitGetUserMedia;}};module.exports={shimGetUserMedia:safariShim.shimGetUserMedia};},{}],10:[function(require,module,exports){'use strict';var logDisabled_=true;var utils={disableLog:function(bool){if(typeof bool!=='boolean'){return new Error('Argument type: '+typeof bool+'. Please use a boolean.');}
logDisabled_=bool;return(bool)?'adapter.js logging disabled':'adapter.js logging enabled';},log:function(){if(typeof window==='object'){if(logDisabled_){return;}
if(typeof console!=='undefined'&&typeof console.log==='function'){console.log.apply(console,arguments);}}},extractVersion:function(uastring,expr,pos){var match=uastring.match(expr);return match&&match.length>=pos&&parseInt(match[pos],10);},detectBrowser:function(){var result={};result.browser=null;result.version=null;if(typeof window==='undefined'||!window.navigator){result.browser='Not a browser.';return result;}
if(navigator.mozGetUserMedia){result.browser='firefox';result.version=this.extractVersion(navigator.userAgent,/Firefox\/([0-9]+)\./,1);}else if(navigator.webkitGetUserMedia){if(window.webkitRTCPeerConnection){result.browser='chrome';result.version=this.extractVersion(navigator.userAgent,/Chrom(e|ium)\/([0-9]+)\./,2);}else{if(navigator.userAgent.match(/Version\/(\d+).(\d+)/)){result.browser='safari';result.version=this.extractVersion(navigator.userAgent,/AppleWebKit\/([0-9]+)\./,1);}else{result.browser='Unsupported webkit-based browser '+'with GUM support but no WebRTC support.';return result;}}}else if(navigator.mediaDevices&&navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)){result.browser='edge';result.version=this.extractVersion(navigator.userAgent,/Edge\/(\d+).(\d+)$/,2);}else{result.browser='Not a supported browser.';return result;}
return result;}};module.exports={log:utils.log,disableLog:utils.disableLog,browserDetails:utils.detectBrowser(),extractVersion:utils.extractVersion};},{}]},{},[2])(2)});AdapterJS.parseWebrtcDetectedBrowser();if(navigator.mozGetUserMedia){MediaStreamTrack.getSources=function(successCb){setTimeout(function(){var infos=[{kind:'audio',id:'default',label:'',facing:''},{kind:'video',id:'default',label:'',facing:''}];successCb(infos);},0);};createIceServer=function(url,username,password){console.warn('createIceServer is deprecated. It should be replaced with an application level implementation.');var iceServer=null;var urlParts=url.split(':');if(urlParts[0].indexOf('stun')===0){iceServer={urls:[url]};}else if(urlParts[0].indexOf('turn')===0){if(webrtcDetectedVersion<27){var turnUrlParts=url.split('?');if(turnUrlParts.length===1||turnUrlParts[1].indexOf('transport=udp')===0){iceServer={urls:[turnUrlParts[0]],credential:password,username:username};}}else{iceServer={urls:[url],credential:password,username:username};}}
return iceServer;};createIceServers=function(urls,username,password){console.warn('createIceServers is deprecated. It should be replaced with an application level implementation.');var iceServers=[];for(i=0;i<urls.length;i++){var iceServer=createIceServer(urls[i],username,password);if(iceServer!==null){iceServers.push(iceServer);}}
return iceServers;};}else if(navigator.webkitGetUserMedia){createIceServer=function(url,username,password){console.warn('createIceServer is deprecated. It should be replaced with an application level implementation.');var iceServer=null;var urlParts=url.split(':');if(urlParts[0].indexOf('stun')===0){iceServer={'url':url};}else if(urlParts[0].indexOf('turn')===0){iceServer={'url':url,'credential':password,'username':username};}
return iceServer;};createIceServers=function(urls,username,password){console.warn('createIceServers is deprecated. It should be replaced with an application level implementation.');var iceServers=[];if(webrtcDetectedVersion>=34){iceServers={'urls':urls,'credential':password,'username':username};}else{for(i=0;i<urls.length;i++){var iceServer=createIceServer(urls[i],username,password);if(iceServer!==null){iceServers.push(iceServer);}}}
return iceServers;};}
if(navigator.mediaDevices&&navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)){getUserMedia=window.getUserMedia=navigator.getUserMedia.bind(navigator);attachMediaStream=function(element,stream){element.srcObject=stream;return element;};reattachMediaStream=function(to,from){to.srcObject=from.srcObject;return to;};}
if(attachMediaStream){attachMediaStream_base=attachMediaStream;attachMediaStream=function(element,stream){if((webrtcDetectedBrowser==='chrome'||webrtcDetectedBrowser==='opera')&&!stream){element.src='';}else{attachMediaStream_base(element,stream);}
return element;};}
if(reattachMediaStream){reattachMediaStream_base=reattachMediaStream;reattachMediaStream=function(to,from){reattachMediaStream_base(to,from);return to;};}
window.attachMediaStream=attachMediaStream;window.reattachMediaStream=reattachMediaStream;window.getUserMedia=getUserMedia;AdapterJS.attachMediaStream=attachMediaStream;AdapterJS.reattachMediaStream=reattachMediaStream;AdapterJS.getUserMedia=getUserMedia;if(typeof Promise==='undefined'){requestUserMedia=null;}
AdapterJS.maybeThroughWebRTCReady();}else{if(typeof console!=='object'||typeof console.log!=='function'){console={}||console;console.log=function(arg){};console.info=function(arg){};console.error=function(arg){};console.dir=function(arg){};console.exception=function(arg){};console.trace=function(arg){};console.warn=function(arg){};console.count=function(arg){};console.debug=function(arg){};console.count=function(arg){};console.time=function(arg){};console.timeEnd=function(arg){};console.group=function(arg){};console.groupCollapsed=function(arg){};console.groupEnd=function(arg){};}
AdapterJS.parseWebrtcDetectedBrowser();isIE=webrtcDetectedBrowser==='IE';AdapterJS.WebRTCPlugin.WaitForPluginReady=function(){while(AdapterJS.WebRTCPlugin.pluginState!==AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY){}};AdapterJS.WebRTCPlugin.callWhenPluginReady=function(callback){if(AdapterJS.WebRTCPlugin.pluginState===AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY){callback();}else{var checkPluginReadyState=setInterval(function(){if(AdapterJS.WebRTCPlugin.pluginState===AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY){clearInterval(checkPluginReadyState);callback();}},100);}};AdapterJS.WebRTCPlugin.setLogLevel=function(logLevel){AdapterJS.WebRTCPlugin.callWhenPluginReady(function(){AdapterJS.WebRTCPlugin.plugin.setLogLevel(logLevel);});};AdapterJS.WebRTCPlugin.injectPlugin=function(){if(document.readyState!=='complete'){return;}
if(AdapterJS.WebRTCPlugin.pluginState!==AdapterJS.WebRTCPlugin.PLUGIN_STATES.INITIALIZING){return;}
AdapterJS.WebRTCPlugin.pluginState=AdapterJS.WebRTCPlugin.PLUGIN_STATES.INJECTING;if(webrtcDetectedBrowser==='IE'&&webrtcDetectedVersion<=10){var frag=document.createDocumentFragment();AdapterJS.WebRTCPlugin.plugin=document.createElement('div');AdapterJS.WebRTCPlugin.plugin.innerHTML='<object id="'+
AdapterJS.WebRTCPlugin.pluginInfo.pluginId+'" type="'+
AdapterJS.WebRTCPlugin.pluginInfo.type+'" '+'width="1" height="1">'+'<param name="pluginId" value="'+
AdapterJS.WebRTCPlugin.pluginInfo.pluginId+'" /> '+'<param name="windowless" value="false" /> '+'<param name="pageId" value="'+AdapterJS.WebRTCPlugin.pageId+'" /> '+'<param name="onload" value="'+AdapterJS.WebRTCPlugin.pluginInfo.onload+'" />'+'<param name="tag" value="'+AdapterJS.WebRTCPlugin.TAGS.NONE+'" />'+
(AdapterJS.options.getAllCams?'<param name="forceGetAllCams" value="True" />':'')+'</object>';while(AdapterJS.WebRTCPlugin.plugin.firstChild){frag.appendChild(AdapterJS.WebRTCPlugin.plugin.firstChild);}
document.body.appendChild(frag);AdapterJS.WebRTCPlugin.plugin=document.getElementById(AdapterJS.WebRTCPlugin.pluginInfo.pluginId);}else{AdapterJS.WebRTCPlugin.plugin=document.createElement('object');AdapterJS.WebRTCPlugin.plugin.id=AdapterJS.WebRTCPlugin.pluginInfo.pluginId;if(isIE){AdapterJS.WebRTCPlugin.plugin.width='1px';AdapterJS.WebRTCPlugin.plugin.height='1px';}else{AdapterJS.WebRTCPlugin.plugin.width='0px';AdapterJS.WebRTCPlugin.plugin.height='0px';}
AdapterJS.WebRTCPlugin.plugin.type=AdapterJS.WebRTCPlugin.pluginInfo.type;AdapterJS.WebRTCPlugin.plugin.innerHTML='<param name="onload" value="'+
AdapterJS.WebRTCPlugin.pluginInfo.onload+'">'+'<param name="pluginId" value="'+
AdapterJS.WebRTCPlugin.pluginInfo.pluginId+'">'+'<param name="windowless" value="false" /> '+
(AdapterJS.options.getAllCams?'<param name="forceGetAllCams" value="True" />':'')+'<param name="pageId" value="'+AdapterJS.WebRTCPlugin.pageId+'">'+'<param name="tag" value="'+AdapterJS.WebRTCPlugin.TAGS.NONE+'" />';document.body.appendChild(AdapterJS.WebRTCPlugin.plugin);}
AdapterJS.WebRTCPlugin.pluginState=AdapterJS.WebRTCPlugin.PLUGIN_STATES.INJECTED;};AdapterJS.WebRTCPlugin.isPluginInstalled=function(comName,plugName,installedCb,notInstalledCb){if(!isIE){var pluginArray=navigator.plugins;for(var i=0;i<pluginArray.length;i++){if(pluginArray[i].name.indexOf(plugName)>=0){installedCb();return;}}
notInstalledCb();}else{try{var axo=new ActiveXObject(comName+'.'+plugName);}catch(e){notInstalledCb();return;}
installedCb();}};AdapterJS.WebRTCPlugin.defineWebRTCInterface=function(){if(AdapterJS.WebRTCPlugin.pluginState===AdapterJS.WebRTCPlugin.PLUGIN_STATES.READY){console.error('AdapterJS - WebRTC interface has already been defined');return;}
AdapterJS.WebRTCPlugin.pluginState=AdapterJS.WebRTCPlugin.PLUGIN_STATES.INITIALIZING;AdapterJS.isDefined=function(variable){return variable!==null&&variable!==undefined;};createIceServer=function(url,username,password){var iceServer=null;var urlParts=url.split(':');if(urlParts[0].indexOf('stun')===0){iceServer={'url':url,'hasCredentials':false};}else if(urlParts[0].indexOf('turn')===0){iceServer={'url':url,'hasCredentials':true,'credential':password,'username':username};}
return iceServer;};createIceServers=function(urls,username,password){var iceServers=[];for(var i=0;i<urls.length;++i){iceServers.push(createIceServer(urls[i],username,password));}
return iceServers;};RTCSessionDescription=function(info){AdapterJS.WebRTCPlugin.WaitForPluginReady();return AdapterJS.WebRTCPlugin.plugin.ConstructSessionDescription(info.type,info.sdp);};RTCPeerConnection=function(servers,constraints){if(!(servers===undefined||servers===null||Array.isArray(servers.iceServers))){throw new Error('Failed to construct \'RTCPeerConnection\': Malformed RTCConfiguration');}
if(typeof constraints!=='undefined'&&constraints!==null){var invalidConstraits=false;invalidConstraits|=typeof constraints!=='object';invalidConstraits|=constraints.hasOwnProperty('mandatory')&&constraints.mandatory!==undefined&&constraints.mandatory!==null&&constraints.mandatory.constructor!==Object;invalidConstraits|=constraints.hasOwnProperty('optional')&&constraints.optional!==undefined&&constraints.optional!==null&&!Array.isArray(constraints.optional);if(invalidConstraits){throw new Error('Failed to construct \'RTCPeerConnection\': Malformed constraints object');}}
AdapterJS.WebRTCPlugin.WaitForPluginReady();var iceServers=null;if(servers&&Array.isArray(servers.iceServers)){iceServers=servers.iceServers;for(var i=0;i<iceServers.length;i++){if(iceServers[i].urls&&!iceServers[i].url){iceServers[i].url=iceServers[i].urls;}
iceServers[i].hasCredentials=AdapterJS.isDefined(iceServers[i].username)&&AdapterJS.isDefined(iceServers[i].credential);}}
if(AdapterJS.WebRTCPlugin.plugin.PEER_CONNECTION_VERSION&&AdapterJS.WebRTCPlugin.plugin.PEER_CONNECTION_VERSION>1){if(iceServers){servers.iceServers=iceServers;}
return AdapterJS.WebRTCPlugin.plugin.PeerConnection(servers);}else{var mandatory=(constraints&&constraints.mandatory)?constraints.mandatory:null;var optional=(constraints&&constraints.optional)?constraints.optional:null;return AdapterJS.WebRTCPlugin.plugin.PeerConnection(AdapterJS.WebRTCPlugin.pageId,iceServers,mandatory,optional);}};MediaStreamTrack=function(){};MediaStreamTrack.getSources=function(callback){AdapterJS.WebRTCPlugin.callWhenPluginReady(function(){AdapterJS.WebRTCPlugin.plugin.GetSources(callback);});};getUserMedia=function(constraints,successCallback,failureCallback){constraints.audio=constraints.audio||false;constraints.video=constraints.video||false;AdapterJS.WebRTCPlugin.callWhenPluginReady(function(){AdapterJS.WebRTCPlugin.plugin.getUserMedia(constraints,successCallback,failureCallback);});};window.navigator.getUserMedia=getUserMedia;if(!navigator.mediaDevices&&typeof Promise!=='undefined'){requestUserMedia=function(constraints){return new Promise(function(resolve,reject){getUserMedia(constraints,resolve,reject);});};navigator.mediaDevices={getUserMedia:requestUserMedia,enumerateDevices:function(){return new Promise(function(resolve){var kinds={audio:'audioinput',video:'videoinput'};return MediaStreamTrack.getSources(function(devices){resolve(devices.map(function(device){return{label:device.label,kind:kinds[device.kind],id:device.id,deviceId:device.id,groupId:''};}));});});}};}
attachMediaStream=function(element,stream){if(!element||!element.parentNode){return;}
var streamId;if(stream===null){streamId='';}else{if(typeof stream.enableSoundTracks!=='undefined'){stream.enableSoundTracks(true);}
streamId=stream.id;}
var elementId=element.id.length===0?Math.random().toString(36).slice(2):element.id;var nodeName=element.nodeName.toLowerCase();if(nodeName!=='object'){var tag;switch(nodeName){case'audio':tag=AdapterJS.WebRTCPlugin.TAGS.AUDIO;break;case'video':tag=AdapterJS.WebRTCPlugin.TAGS.VIDEO;break;default:tag=AdapterJS.WebRTCPlugin.TAGS.NONE;}
var frag=document.createDocumentFragment();var temp=document.createElement('div');var classHTML='';if(element.className){classHTML='class="'+element.className+'" ';}else if(element.attributes&&element.attributes['class']){classHTML='class="'+element.attributes['class'].value+'" ';}
temp.innerHTML='<object id="'+elementId+'" '+classHTML+'type="'+AdapterJS.WebRTCPlugin.pluginInfo.type+'">'+'<param name="pluginId" value="'+elementId+'" /> '+'<param name="pageId" value="'+AdapterJS.WebRTCPlugin.pageId+'" /> '+'<param name="windowless" value="true" /> '+'<param name="streamId" value="'+streamId+'" /> '+'<param name="tag" value="'+tag+'" /> '+'</object>';while(temp.firstChild){frag.appendChild(temp.firstChild);}
var height='';var width='';if(element.clientWidth||element.clientHeight){width=element.clientWidth;height=element.clientHeight;}
else if(element.width||element.height){width=element.width;height=element.height;}
element.parentNode.insertBefore(frag,element);frag=document.getElementById(elementId);frag.width=width;frag.height=height;element.parentNode.removeChild(element);}else{var children=element.children;for(var i=0;i!==children.length;++i){if(children[i].name==='streamId'){children[i].value=streamId;break;}}
element.setStreamId(streamId);}
var newElement=document.getElementById(elementId);AdapterJS.forwardEventHandlers(newElement,element,Object.getPrototypeOf(element));return newElement;};reattachMediaStream=function(to,from){var stream=null;var children=from.children;for(var i=0;i!==children.length;++i){if(children[i].name==='streamId'){AdapterJS.WebRTCPlugin.WaitForPluginReady();stream=AdapterJS.WebRTCPlugin.plugin.getStreamWithId(AdapterJS.WebRTCPlugin.pageId,children[i].value);break;}}
if(stream!==null){return attachMediaStream(to,stream);}else{console.log('Could not find the stream associated with this element');}};window.attachMediaStream=attachMediaStream;window.reattachMediaStream=reattachMediaStream;window.getUserMedia=getUserMedia;AdapterJS.attachMediaStream=attachMediaStream;AdapterJS.reattachMediaStream=reattachMediaStream;AdapterJS.getUserMedia=getUserMedia;AdapterJS.forwardEventHandlers=function(destElem,srcElem,prototype){properties=Object.getOwnPropertyNames(prototype);for(var prop in properties){if(prop){propName=properties[prop];if(typeof propName.slice==='function'&&propName.slice(0,2)==='on'&&typeof srcElem[propName]==='function'){AdapterJS.addEvent(destElem,propName.slice(2),srcElem[propName]);}}}
var subPrototype=Object.getPrototypeOf(prototype);if(!!subPrototype){AdapterJS.forwardEventHandlers(destElem,srcElem,subPrototype);}};RTCIceCandidate=function(candidate){if(!candidate.sdpMid){candidate.sdpMid='';}
AdapterJS.WebRTCPlugin.WaitForPluginReady();return AdapterJS.WebRTCPlugin.plugin.ConstructIceCandidate(candidate.sdpMid,candidate.sdpMLineIndex,candidate.candidate);};AdapterJS.addEvent(document,'readystatechange',AdapterJS.WebRTCPlugin.injectPlugin);AdapterJS.WebRTCPlugin.injectPlugin();};AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCb=AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCb||function(){AdapterJS.addEvent(document,'readystatechange',AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCbPriv);AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCbPriv();};AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCbPriv=function(){if(AdapterJS.options.hidePluginInstallPrompt){return;}
var downloadLink=AdapterJS.WebRTCPlugin.pluginInfo.downloadLink;if(downloadLink){var popupString;if(AdapterJS.WebRTCPlugin.pluginInfo.portalLink){popupString='This website requires you to install the '+' <a href="'+AdapterJS.WebRTCPlugin.pluginInfo.portalLink+'" target="_blank">'+AdapterJS.WebRTCPlugin.pluginInfo.companyName+' WebRTC Plugin</a>'+' to work on this browser.';}else{popupString=AdapterJS.TEXT.PLUGIN.REQUIRE_INSTALLATION;}
AdapterJS.renderNotificationBar(popupString,AdapterJS.TEXT.PLUGIN.BUTTON,downloadLink);}else{AdapterJS.renderNotificationBar(AdapterJS.TEXT.PLUGIN.NOT_SUPPORTED);}};AdapterJS.WebRTCPlugin.isPluginInstalled(AdapterJS.WebRTCPlugin.pluginInfo.prefix,AdapterJS.WebRTCPlugin.pluginInfo.plugName,AdapterJS.WebRTCPlugin.defineWebRTCInterface,AdapterJS.WebRTCPlugin.pluginNeededButNotInstalledCb);}


@ -101,14 +101,14 @@ function full_screen(name) {
}
$("#" + video_screen).resize(function(e) {
console.log("video size changed to " + $("#" + video_screen).width() + "x" + $("#" + video_screen).height());
//console.log("video size changed to " + $("#" + video_screen).width() + "x" + $("#" + video_screen).height());
if ($("#" + video_screen).width() > $(window).width()) {
//if ($("#" + video_screen).width() > $(window).width()) {
//resize(false);
$("#" + video_screen).width("100%");
$("#" + video_screen).height("100%");
}
//$("#" + video_screen).width("100%");
//$("#" + video_screen).height("100%");
//}
real_size();
});
@ -128,13 +128,41 @@ function resize(up) {
}
$( window ).resize(function() {
real_size();
});
function real_size() {
$("#" + video_screen).width("");
$("#" + video_screen).height("");
/* temasys hack */
setTimeout(function() {
$("#" + video_screen).width("");
$("#" + video_screen).height("");
var w = $("#" + video_screen).width();
var h = $("#" + video_screen).height();
var new_w;
var new_h;
var aspect = 1920 / 1080; /* temasys doesn't provide video width; hack aspect to wide screen */

if (w > h) {
new_w = window.innerWidth;
new_h = Math.round(window.innerWidth / aspect);
} else {
new_h = window.innerHeight;
new_w = Math.round(window.innerHeight / aspect);
}
$("#" + video_screen).width(new_w);
$("#" + video_screen).height(new_h);
}, 500);
console.log("video size changed to fit screen");
console.log("video size changed to natural default");
}
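// Worked example (illustrative numbers, not part of the original patch): with the
// hard-coded aspect of 1920/1080 (~1.78), a landscape video in a 1280x800 window
// gets new_w = 1280 and new_h = Math.round(1280 / (1920 / 1080)) = 720, filling the
// width and letterboxing vertically; in the w <= h branch the element instead keeps
// new_h = window.innerHeight and new_w shrinks to Math.round(window.innerHeight / aspect).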
@ -202,8 +230,19 @@ function check_vid() {
return use_vid;
}
var DISABLE_SPEED_TEST = true;
function do_speed_test(fn)
{
if (DISABLE_SPEED_TEST) {
if (fn) {
fn();
}
return;
}
goto_page("bwtest");
vertoHandle.rpcClient.speedTest(1024 * 256, function(e, obj) {
@ -512,6 +551,7 @@ var callbacks = {
check_vid_res();
$("#ansbtn").click(function() {
console.error("WTF", cur_call, d);
cur_call.answer({
useStereo: $("#use_stereo").is(':checked'),
callee_id_name: $("#cidname").val(),
@ -574,7 +614,7 @@ var callbacks = {
}
goto_page("incall");
real_size();
break;
case $.verto.enum.state.hangup:
$("#main_info").html("Call ended with cause: " + d.cause);
@ -1077,6 +1117,13 @@ function refresh_devices()
pop_select("#usecamera","verto_demo_camera_selected", tmp);
}
var tmp;
tmp = $.cookie("verto_demo_share_selected") || "false";
if (tmp) {
$('#useshare option[value=' + tmp + ']').prop('selected', 'selected').change();
pop_select("#useshare","verto_demo_share_selected", tmp);
}
tmp = $.cookie("verto_demo_mic_selected") || "false";
if (tmp) {
$('#usemic option[value=' + tmp + ']').prop('selected', 'selected').change();


@ -24,6 +24,7 @@ changjun.yang <changjun.yang@intel.com>
Charles 'Buck' Krasic <ckrasic@google.com>
chm <chm@rock-chips.com>
Christian Duvivier <cduvivier@google.com>
Daniele Castagna <dcastagna@chromium.org>
Daniel Kang <ddkang@google.com>
Deb Mukherjee <debargha@google.com>
Dim Temp <dimtemp0@gmail.com>
@ -56,10 +57,12 @@ James Zern <jzern@google.com>
Jan Gerber <j@mailb.org>
Jan Kratochvil <jan.kratochvil@redhat.com>
Janne Salonen <jsalonen@google.com>
Jean-Yves Avenard <jyavenard@mozilla.com>
Jeff Faust <jfaust@google.com>
Jeff Muizelaar <jmuizelaar@mozilla.com>
Jeff Petkau <jpet@chromium.org>
Jia Jia <jia.jia@linaro.org>
Jian Zhou <zhoujian@google.com>
Jim Bankoski <jimbankoski@google.com>
Jingning Han <jingning@google.com>
Joey Parrish <joeyparrish@google.com>
@ -74,6 +77,7 @@ Justin Clift <justin@salasaga.org>
Justin Lebar <justin.lebar@gmail.com>
KO Myung-Hun <komh@chollian.net>
Lawrence Velázquez <larryv@macports.org>
Linfeng Zhang <linfengz@google.com>
Lou Quillio <louquillio@google.com>
Luca Barbato <lu_zero@gentoo.org>
Makoto Kato <makoto.kt@gmail.com>
@ -107,9 +111,11 @@ Rob Bradford <rob@linux.intel.com>
Ronald S. Bultje <rsbultje@gmail.com>
Rui Ueyama <ruiu@google.com>
Sami Pietilä <samipietila@google.com>
Sasi Inguva <isasi@google.com>
Scott Graham <scottmg@chromium.org>
Scott LaVarnway <slavarnway@google.com>
Sean McGovern <gseanmcg@gmail.com>
Sergey Kolomenkin <kolomenkin@gmail.com>
Sergey Ulanov <sergeyu@chromium.org>
Shimon Doodkin <helpmepro1@gmail.com>
Shunyao Li <shunyaoli@google.com>
@ -126,8 +132,10 @@ Timothy B. Terriberry <tterribe@xiph.org>
Tom Finegan <tomfinegan@google.com>
Vignesh Venkatasubramanian <vigneshv@google.com>
Yaowu Xu <yaowu@google.com>
Yi Luo <luoyi@google.com>
Yongzhe Wang <yongzhe@google.com>
Yunqing Wang <yunqingwang@google.com>
Yury Gitman <yuryg@google.com>
Zoe Liu <zoeliu@google.com>
Google Inc.
The Mozilla Foundation


@ -1,3 +1,33 @@
2016-07-20 v1.6.0 "Khaki Campbell Duck"
This release improves upon the VP9 encoder and speeds up the encoding and
decoding processes.
- Upgrading:
This release is ABI incompatible with 1.5.0 due to a new 'color_range' enum
in vpx_image and some minor changes to the VP8_COMP structure.
The default key frame interval for VP9 has changed from 128 to 9999.
- Enhancement:
A core focus has been performance for low end Intel processors. SSSE3
instructions such as 'pshufb' have been avoided and instructions have been
reordered to better accommodate the more constrained pipelines.
As a result, devices based on Celeron processors have seen substantial
decoding improvements. From Indian Runner Duck to Javan Whistling Duck,
decoding speed improved between 10 and 30%. Between Javan Whistling Duck
and Khaki Campbell Duck, it improved another 10 to 15%.
While Celeron benefited most, Core-i5 also improved 5% and 10% between the
respective releases.
Realtime performance for WebRTC for both speed and quality has received a
lot of attention.
- Bug Fixes:
A number of fuzzing issues, found variously by Mozilla, Chromium and others,
have been fixed and we strongly recommend updating.
2015-11-09 v1.5.0 "Javan Whistling Duck"
This release improves upon the VP9 encoder and speeds up the encoding and
decoding processes.


@ -1,4 +1,4 @@
README - 23 March 2015
README - 20 July 2016
Welcome to the WebM VP8/VP9 Codec SDK!
@ -47,11 +47,8 @@ COMPILING THE APPLICATIONS/LIBRARIES:
--help output of the configure script. As of this writing, the list of
available targets is:
armv6-darwin-gcc
armv6-linux-rvct
armv6-linux-gcc
armv6-none-rvct
arm64-darwin-gcc
arm64-linux-gcc
armv7-android-gcc
armv7-darwin-gcc
armv7-linux-rvct
@ -61,6 +58,7 @@ COMPILING THE APPLICATIONS/LIBRARIES:
armv7-win32-vs12
armv7-win32-vs14
armv7s-darwin-gcc
armv8-linux-gcc
mips32-linux-gcc
mips64-linux-gcc
sparc-solaris-gcc
@ -74,15 +72,13 @@ COMPILING THE APPLICATIONS/LIBRARIES:
x86-darwin12-gcc
x86-darwin13-gcc
x86-darwin14-gcc
x86-darwin15-gcc
x86-iphonesimulator-gcc
x86-linux-gcc
x86-linux-icc
x86-os2-gcc
x86-solaris-gcc
x86-win32-gcc
x86-win32-vs7
x86-win32-vs8
x86-win32-vs9
x86-win32-vs10
x86-win32-vs11
x86-win32-vs12
@ -94,13 +90,12 @@ COMPILING THE APPLICATIONS/LIBRARIES:
x86_64-darwin12-gcc
x86_64-darwin13-gcc
x86_64-darwin14-gcc
x86_64-darwin15-gcc
x86_64-iphonesimulator-gcc
x86_64-linux-gcc
x86_64-linux-icc
x86_64-solaris-gcc
x86_64-win64-gcc
x86_64-win64-vs8
x86_64-win64-vs9
x86_64-win64-vs10
x86_64-win64-vs11
x86_64-win64-vs12
@ -133,7 +128,22 @@ VP8/VP9 TEST VECTORS:
$ ./configure --enable-unit-tests
$ LIBVPX_TEST_DATA_PATH=../libvpx-test-data make testdata
CODE STYLE:
The coding style used by this project is enforced with clang-format using the
configuration contained in the .clang-format file in the root of the
repository.
Before pushing changes for review you can format your code with:
# Apply clang-format to modified .c, .h and .cc files
$ clang-format -i --style=file \
$(git diff --name-only --diff-filter=ACMR '*.[hc]' '*.cc')
Check the .clang-format file for the version used to generate it if there is
any difference between your local formatting and the review system.
See also: http://clang.llvm.org/docs/ClangFormat.html
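A quick, illustrative way to compare against the version noted there (the
command is standard clang-format, not something added by this change):

  # Print the locally installed clang-format version
  $ clang-format --version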
SUPPORT
This library is an open source project supported by its community. Please
please email webm-discuss@webmproject.org for help.
email webm-discuss@webmproject.org for help.


@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdlib.h>
#include <string.h>
#include <limits.h>
@ -22,42 +21,36 @@ extern void die(const char *fmt, ...) __attribute__((noreturn));
extern void die(const char *fmt, ...);
#endif
struct arg arg_init(char **argv) {
struct arg a;
a.argv = argv;
a.argv = argv;
a.argv_step = 1;
a.name = NULL;
a.val = NULL;
a.def = NULL;
a.name = NULL;
a.val = NULL;
a.def = NULL;
return a;
}
int arg_match(struct arg *arg_, const struct arg_def *def, char **argv) {
struct arg arg;
if (!argv[0] || argv[0][0] != '-')
return 0;
if (!argv[0] || argv[0][0] != '-') return 0;
arg = arg_init(argv);
if (def->short_name
&& strlen(arg.argv[0]) == strlen(def->short_name) + 1
&& !strcmp(arg.argv[0] + 1, def->short_name)) {
if (def->short_name && strlen(arg.argv[0]) == strlen(def->short_name) + 1 &&
!strcmp(arg.argv[0] + 1, def->short_name)) {
arg.name = arg.argv[0] + 1;
arg.val = def->has_val ? arg.argv[1] : NULL;
arg.argv_step = def->has_val ? 2 : 1;
} else if (def->long_name) {
const size_t name_len = strlen(def->long_name);
if (strlen(arg.argv[0]) >= name_len + 2
&& arg.argv[0][1] == '-'
&& !strncmp(arg.argv[0] + 2, def->long_name, name_len)
&& (arg.argv[0][name_len + 2] == '='
|| arg.argv[0][name_len + 2] == '\0')) {
if (strlen(arg.argv[0]) >= name_len + 2 && arg.argv[0][1] == '-' &&
!strncmp(arg.argv[0] + 2, def->long_name, name_len) &&
(arg.argv[0][name_len + 2] == '=' ||
arg.argv[0][name_len + 2] == '\0')) {
arg.name = arg.argv[0] + 2;
arg.val = arg.name[name_len] == '=' ? arg.name + name_len + 1 : NULL;
arg.argv_step = 1;
@ -70,8 +63,7 @@ int arg_match(struct arg *arg_, const struct arg_def *def, char **argv) {
if (arg.name && arg.val && !def->has_val)
die("Error: option %s requires no argument.\n", arg.name);
if (arg.name
&& (arg.val || !def->has_val)) {
if (arg.name && (arg.val || !def->has_val)) {
arg.def = def;
*arg_ = arg;
return 1;
@ -80,15 +72,12 @@ int arg_match(struct arg *arg_, const struct arg_def *def, char **argv) {
return 0;
}
const char *arg_next(struct arg *arg) {
if (arg->argv[0])
arg->argv += arg->argv_step;
if (arg->argv[0]) arg->argv += arg->argv_step;
return *arg->argv;
}
char **argv_dup(int argc, const char **argv) {
char **new_argv = malloc((argc + 1) * sizeof(*argv));
@ -97,9 +86,8 @@ char **argv_dup(int argc, const char **argv) {
return new_argv;
}
void arg_show_usage(FILE *fp, const struct arg_def *const *defs) {
char option_text[40] = {0};
char option_text[40] = { 0 };
for (; *defs; defs++) {
const struct arg_def *def = *defs;
@ -109,15 +97,12 @@ void arg_show_usage(FILE *fp, const struct arg_def *const *defs) {
if (def->short_name && def->long_name) {
char *comma = def->has_val ? "," : ", ";
snprintf(option_text, 37, "-%s%s%s --%s%6s",
def->short_name, short_val, comma,
def->long_name, long_val);
snprintf(option_text, 37, "-%s%s%s --%s%6s", def->short_name, short_val,
comma, def->long_name, long_val);
} else if (def->short_name)
snprintf(option_text, 37, "-%s%s",
def->short_name, short_val);
snprintf(option_text, 37, "-%s%s", def->short_name, short_val);
else if (def->long_name)
snprintf(option_text, 37, " --%s%s",
def->long_name, long_val);
snprintf(option_text, 37, " --%s%s", def->long_name, long_val);
fprintf(fp, " %-37s\t%s\n", option_text, def->desc);
@ -127,110 +112,103 @@ void arg_show_usage(FILE *fp, const struct arg_def *const *defs) {
fprintf(fp, " %-37s\t ", "");
for (listptr = def->enums; listptr->name; listptr++)
fprintf(fp, "%s%s", listptr->name,
listptr[1].name ? ", " : "\n");
fprintf(fp, "%s%s", listptr->name, listptr[1].name ? ", " : "\n");
}
}
}
unsigned int arg_parse_uint(const struct arg *arg) {
long int rawval;
char *endptr;
long int rawval;
char *endptr;
rawval = strtol(arg->val, &endptr, 10);
if (arg->val[0] != '\0' && endptr[0] == '\0') {
if (rawval >= 0 && rawval <= UINT_MAX)
return rawval;
if (rawval >= 0 && rawval <= UINT_MAX) return (unsigned int)rawval;
die("Option %s: Value %ld out of range for unsigned int\n",
arg->name, rawval);
die("Option %s: Value %ld out of range for unsigned int\n", arg->name,
rawval);
}
die("Option %s: Invalid character '%c'\n", arg->name, *endptr);
return 0;
}
int arg_parse_int(const struct arg *arg) {
long int rawval;
char *endptr;
long int rawval;
char *endptr;
rawval = strtol(arg->val, &endptr, 10);
if (arg->val[0] != '\0' && endptr[0] == '\0') {
if (rawval >= INT_MIN && rawval <= INT_MAX)
return rawval;
if (rawval >= INT_MIN && rawval <= INT_MAX) return (int)rawval;
die("Option %s: Value %ld out of range for signed int\n",
arg->name, rawval);
die("Option %s: Value %ld out of range for signed int\n", arg->name,
rawval);
}
die("Option %s: Invalid character '%c'\n", arg->name, *endptr);
return 0;
}
struct vpx_rational {
int num; /**< fraction numerator */
int den; /**< fraction denominator */
};
struct vpx_rational arg_parse_rational(const struct arg *arg) {
long int rawval;
char *endptr;
struct vpx_rational rat;
long int rawval;
char *endptr;
struct vpx_rational rat;
/* parse numerator */
rawval = strtol(arg->val, &endptr, 10);
if (arg->val[0] != '\0' && endptr[0] == '/') {
if (rawval >= INT_MIN && rawval <= INT_MAX)
rat.num = rawval;
else die("Option %s: Value %ld out of range for signed int\n",
arg->name, rawval);
} else die("Option %s: Expected / at '%c'\n", arg->name, *endptr);
rat.num = (int)rawval;
else
die("Option %s: Value %ld out of range for signed int\n", arg->name,
rawval);
} else
die("Option %s: Expected / at '%c'\n", arg->name, *endptr);
/* parse denominator */
rawval = strtol(endptr + 1, &endptr, 10);
if (arg->val[0] != '\0' && endptr[0] == '\0') {
if (rawval >= INT_MIN && rawval <= INT_MAX)
rat.den = rawval;
else die("Option %s: Value %ld out of range for signed int\n",
arg->name, rawval);
} else die("Option %s: Invalid character '%c'\n", arg->name, *endptr);
rat.den = (int)rawval;
else
die("Option %s: Value %ld out of range for signed int\n", arg->name,
rawval);
} else
die("Option %s: Invalid character '%c'\n", arg->name, *endptr);
return rat;
}
int arg_parse_enum(const struct arg *arg) {
const struct arg_enum_list *listptr;
long int rawval;
char *endptr;
long int rawval;
char *endptr;
/* First see if the value can be parsed as a raw value */
rawval = strtol(arg->val, &endptr, 10);
if (arg->val[0] != '\0' && endptr[0] == '\0') {
/* Got a raw value, make sure it's valid */
for (listptr = arg->def->enums; listptr->name; listptr++)
if (listptr->val == rawval)
return rawval;
if (listptr->val == rawval) return (int)rawval;
}
/* Next see if it can be parsed as a string */
for (listptr = arg->def->enums; listptr->name; listptr++)
if (!strcmp(arg->val, listptr->name))
return listptr->val;
if (!strcmp(arg->val, listptr->name)) return listptr->val;
die("Option %s: Invalid value '%s'\n", arg->name, arg->val);
return 0;
}
int arg_parse_enum_or_int(const struct arg *arg) {
if (arg->def->enums)
return arg_parse_enum(arg);
if (arg->def->enums) return arg_parse_enum(arg);
return arg_parse_int(arg);
}


@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ARGS_H_
#define ARGS_H_
#include <stdio.h>
@ -18,29 +17,33 @@ extern "C" {
#endif
struct arg {
char **argv;
const char *name;
const char *val;
unsigned int argv_step;
const struct arg_def *def;
char **argv;
const char *name;
const char *val;
unsigned int argv_step;
const struct arg_def *def;
};
struct arg_enum_list {
const char *name;
int val;
int val;
};
#define ARG_ENUM_LIST_END {0}
#define ARG_ENUM_LIST_END \
{ 0 }
typedef struct arg_def {
const char *short_name;
const char *long_name;
int has_val;
int has_val;
const char *desc;
const struct arg_enum_list *enums;
} arg_def_t;
#define ARG_DEF(s,l,v,d) {s,l,v,d, NULL}
#define ARG_DEF_ENUM(s,l,v,d,e) {s,l,v,d,e}
#define ARG_DEF_LIST_END {0}
#define ARG_DEF(s, l, v, d) \
{ s, l, v, d, NULL }
#define ARG_DEF_ENUM(s, l, v, d, e) \
{ s, l, v, d, e }
#define ARG_DEF_LIST_END \
{ 0 }
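/*
 * Illustrative use of these macros (hypothetical definitions, not taken from
 * this change):
 *
 *   static const arg_def_t help_arg  = ARG_DEF("h", "help", 0, "Show usage");
 *   static const arg_def_t limit_arg = ARG_DEF(NULL, "limit", 1, "Stop after n frames");
 *   static const arg_def_t *my_args[] = { &help_arg, &limit_arg, NULL };
 *
 * arg_match() is then called with &help_arg (or &limit_arg) while walking argv,
 * and arg_show_usage(stderr, my_args) prints one formatted line per definition.
 */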
struct arg arg_init(char **argv);
int arg_match(struct arg *arg_, const struct arg_def *def, char **argv);


@ -29,11 +29,6 @@
# include $(CLEAR_VARS)
# include jni/libvpx/build/make/Android.mk
#
# There are currently two TARGET_ARCH_ABI targets for ARM.
# armeabi and armeabi-v7a. armeabi-v7a is selected by creating an
# Application.mk in the jni directory that contains:
# APP_ABI := armeabi-v7a
#
# By default libvpx will detect at runtime the existence of the NEON extension.
# For this we import the 'cpufeatures' module from the NDK sources.
# libvpx can also be configured without this runtime detection method.
@ -42,13 +37,29 @@
# --disable-neon-asm
# will remove any NEON dependency.
# To change to building armeabi, run ./libvpx/configure again, but with
# --target=armv6-android-gcc and modify the Application.mk file to
# set APP_ABI := armeabi
#
# Running ndk-build will build libvpx and include it in your project.
#
# Alternatively, building the examples and unit tests can be accomplished in the
# following way:
#
# Create a standalone toolchain from the NDK:
# https://developer.android.com/ndk/guides/standalone_toolchain.html
#
# For example - to test on arm64 devices with clang:
# $NDK/build/tools/make_standalone_toolchain.py \
# --arch arm64 --install-dir=/tmp/my-android-toolchain
# export PATH=/tmp/my-android-toolchain/bin:$PATH
# CROSS=aarch64-linux-android- CC=clang CXX=clang++ /path/to/libvpx/configure \
# --target=arm64-android-gcc
#
# Push the resulting binaries to a device and run them:
# adb push test_libvpx /data/tmp/test_libvpx
# adb shell /data/tmp/test_libvpx --gtest_filter=\*Sixtap\*
#
# Make sure to push the test data as well and set LIBVPX_TEST_DATA
CONFIG_DIR := $(LOCAL_PATH)/
LIBVPX_PATH := $(LOCAL_PATH)/libvpx
ASM_CNV_PATH_LOCAL := $(TARGET_ARCH_ABI)/ads2gas
@ -59,9 +70,6 @@ ASM_CNV_PATH := $(LOCAL_PATH)/$(ASM_CNV_PATH_LOCAL)
ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
include $(CONFIG_DIR)libs-armv7-android-gcc.mk
LOCAL_ARM_MODE := arm
else ifeq ($(TARGET_ARCH_ABI),armeabi)
include $(CONFIG_DIR)libs-armv6-android-gcc.mk
LOCAL_ARM_MODE := arm
else ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
include $(CONFIG_DIR)libs-armv8-android-gcc.mk
LOCAL_ARM_MODE := arm
@ -174,9 +182,6 @@ endif
ifeq ($(CONFIG_VP9), yes)
$$(rtcd_dep_template_SRCS): vp9_rtcd.h
endif
ifeq ($(CONFIG_VP10), yes)
$$(rtcd_dep_template_SRCS): vp10_rtcd.h
endif
$$(rtcd_dep_template_SRCS): vpx_scale_rtcd.h
$$(rtcd_dep_template_SRCS): vpx_dsp_rtcd.h


@ -26,7 +26,7 @@ test-no-data-check:: .DEFAULT
testdata:: .DEFAULT
utiltest: .DEFAULT
exampletest-no-data-check utiltest-no-data-check: .DEFAULT
test_%: .DEFAULT ;
# Note: md5sum is not installed on OS X, but openssl is. Openssl may not be
# installed on cygwin, so we need to autodetect here.
@ -119,29 +119,25 @@ utiltest:
test-no-data-check::
exampletest-no-data-check utiltest-no-data-check:
# Add compiler flags for intrinsic files
# Force to realign stack always on OS/2
ifeq ($(TOOLCHAIN), x86-os2-gcc)
STACKREALIGN=-mstackrealign
else
STACKREALIGN=
CFLAGS += -mstackrealign
endif
$(BUILD_PFX)%_mmx.c.d: CFLAGS += -mmmx
$(BUILD_PFX)%_mmx.c.o: CFLAGS += -mmmx
$(BUILD_PFX)%_sse2.c.d: CFLAGS += -msse2 $(STACKREALIGN)
$(BUILD_PFX)%_sse2.c.o: CFLAGS += -msse2 $(STACKREALIGN)
$(BUILD_PFX)%_sse3.c.d: CFLAGS += -msse3 $(STACKREALIGN)
$(BUILD_PFX)%_sse3.c.o: CFLAGS += -msse3 $(STACKREALIGN)
$(BUILD_PFX)%_ssse3.c.d: CFLAGS += -mssse3 $(STACKREALIGN)
$(BUILD_PFX)%_ssse3.c.o: CFLAGS += -mssse3 $(STACKREALIGN)
$(BUILD_PFX)%_sse4.c.d: CFLAGS += -msse4.1 $(STACKREALIGN)
$(BUILD_PFX)%_sse4.c.o: CFLAGS += -msse4.1 $(STACKREALIGN)
$(BUILD_PFX)%_avx.c.d: CFLAGS += -mavx $(STACKREALIGN)
$(BUILD_PFX)%_avx.c.o: CFLAGS += -mavx $(STACKREALIGN)
$(BUILD_PFX)%_avx2.c.d: CFLAGS += -mavx2 $(STACKREALIGN)
$(BUILD_PFX)%_avx2.c.o: CFLAGS += -mavx2 $(STACKREALIGN)
$(BUILD_PFX)%vp9_reconintra.c.d: CFLAGS += $(STACKREALIGN)
$(BUILD_PFX)%vp9_reconintra.c.o: CFLAGS += $(STACKREALIGN)
$(BUILD_PFX)%_sse2.c.d: CFLAGS += -msse2
$(BUILD_PFX)%_sse2.c.o: CFLAGS += -msse2
$(BUILD_PFX)%_sse3.c.d: CFLAGS += -msse3
$(BUILD_PFX)%_sse3.c.o: CFLAGS += -msse3
$(BUILD_PFX)%_ssse3.c.d: CFLAGS += -mssse3
$(BUILD_PFX)%_ssse3.c.o: CFLAGS += -mssse3
$(BUILD_PFX)%_sse4.c.d: CFLAGS += -msse4.1
$(BUILD_PFX)%_sse4.c.o: CFLAGS += -msse4.1
$(BUILD_PFX)%_avx.c.d: CFLAGS += -mavx
$(BUILD_PFX)%_avx.c.o: CFLAGS += -mavx
$(BUILD_PFX)%_avx2.c.d: CFLAGS += -mavx2
$(BUILD_PFX)%_avx2.c.o: CFLAGS += -mavx2
$(BUILD_PFX)%.c.d: %.c
$(if $(quiet),@echo " [DEP] $@")
@ -422,7 +418,6 @@ ifneq ($(call enabled,DIST-SRCS),)
DIST-SRCS-yes += build/make/gen_asm_deps.sh
DIST-SRCS-yes += build/make/Makefile
DIST-SRCS-$(CONFIG_MSVS) += build/make/gen_msvs_def.sh
DIST-SRCS-$(CONFIG_MSVS) += build/make/gen_msvs_proj.sh
DIST-SRCS-$(CONFIG_MSVS) += build/make/gen_msvs_sln.sh
DIST-SRCS-$(CONFIG_MSVS) += build/make/gen_msvs_vcxproj.sh
DIST-SRCS-$(CONFIG_MSVS) += build/make/msvs_common.sh
@ -453,3 +448,5 @@ all: $(BUILD_TARGETS)
install:: $(INSTALL_TARGETS)
dist: $(INSTALL_TARGETS)
test::
.SUFFIXES: # Delete default suffix rules


@ -18,12 +18,6 @@
# Usage: cat inputfile | perl ads2gas_apple.pl > outputfile
#
my $chromium = 0;
foreach my $arg (@ARGV) {
$chromium = 1 if ($arg eq "-chromium");
}
print "@ This file was created from a .asm file\n";
print "@ using the ads2gas_apple.pl script.\n\n";
print "\t.set WIDE_REFERENCE, 0\n";
@ -218,18 +212,5 @@ while (<STDIN>)
s/\bMEND\b/.endm/; # No need to tell it where to stop assembling
next if /^\s*END\s*$/;
# Clang used by Chromium differs slightly from clang in XCode in what it
# will accept in the assembly.
if ($chromium) {
s/qsubaddx/qsax/i;
s/qaddsubx/qasx/i;
s/ldrneb/ldrbne/i;
s/ldrneh/ldrhne/i;
s/(vqshrun\.s16 .*, \#)0$/${1}8/i;
# http://llvm.org/bugs/show_bug.cgi?id=16022
s/\.include/#include/;
}
print;
}


@ -185,6 +185,7 @@ add_extralibs() {
#
# Boolean Manipulation Functions
#
enable_feature(){
set_all yes $*
}
@ -201,6 +202,20 @@ disabled(){
eval test "x\$$1" = "xno"
}
enable_codec(){
enabled "${1}" || echo " enabling ${1}"
enable_feature "${1}"
is_in "${1}" vp8 vp9 && enable_feature "${1}_encoder" "${1}_decoder"
}
disable_codec(){
disabled "${1}" || echo " disabling ${1}"
disable_feature "${1}"
is_in "${1}" vp8 vp9 && disable_feature "${1}_encoder" "${1}_decoder"
}
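# Illustrative calls (not part of this change):
#
#   enable_codec vp9     # enables vp9, vp9_encoder and vp9_decoder
#   disable_codec vp8    # disables vp8, vp8_encoder and vp8_decoder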
# Iterates through positional parameters, checks to confirm the parameter has
# not been explicitly (force) disabled, and enables the setting controlled by
# the parameter when the setting is not disabled.
@ -531,22 +546,20 @@ process_common_cmdline() {
;;
--enable-?*|--disable-?*)
eval `echo "$opt" | sed 's/--/action=/;s/-/ option=/;s/-/_/g'`
if echo "${ARCH_EXT_LIST}" | grep "^ *$option\$" >/dev/null; then
if is_in ${option} ${ARCH_EXT_LIST}; then
[ $action = "disable" ] && RTCD_OPTIONS="${RTCD_OPTIONS}--disable-${option} "
elif [ $action = "disable" ] && ! disabled $option ; then
echo "${CMDLINE_SELECT}" | grep "^ *$option\$" >/dev/null ||
die_unknown $opt
is_in ${option} ${CMDLINE_SELECT} || die_unknown $opt
log_echo " disabling $option"
elif [ $action = "enable" ] && ! enabled $option ; then
echo "${CMDLINE_SELECT}" | grep "^ *$option\$" >/dev/null ||
die_unknown $opt
is_in ${option} ${CMDLINE_SELECT} || die_unknown $opt
log_echo " enabling $option"
fi
${action}_feature $option
;;
--require-?*)
eval `echo "$opt" | sed 's/--/action=/;s/-/ option=/;s/-/_/g'`
if echo "${ARCH_EXT_LIST}" none | grep "^ *$option\$" >/dev/null; then
if is_in ${option} ${ARCH_EXT_LIST}; then
RTCD_OPTIONS="${RTCD_OPTIONS}${opt} "
else
die_unknown $opt
@ -648,14 +661,34 @@ show_darwin_sdk_major_version() {
xcrun --sdk $1 --show-sdk-version 2>/dev/null | cut -d. -f1
}
# Print the Xcode version.
show_xcode_version() {
xcodebuild -version | head -n1 | cut -d' ' -f2
}
# Fails when Xcode version is less than 6.3.
check_xcode_minimum_version() {
xcode_major=$(show_xcode_version | cut -f1 -d.)
xcode_minor=$(show_xcode_version | cut -f2 -d.)
xcode_min_major=6
xcode_min_minor=3
if [ ${xcode_major} -lt ${xcode_min_major} ]; then
return 1
fi
if [ ${xcode_major} -eq ${xcode_min_major} ] \
&& [ ${xcode_minor} -lt ${xcode_min_minor} ]; then
return 1
fi
}
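A quick sanity sketch of the 6.3 cutoff implemented above, with show_xcode_version stubbed out so the comparison can be exercised without Xcode installed (the version strings below are arbitrary):

#!/bin/sh
# Stub: pretend xcodebuild reported ${FAKE_XCODE_VERSION}.
show_xcode_version() { echo "${FAKE_XCODE_VERSION}"; }

check_xcode_minimum_version() {
  xcode_major=$(show_xcode_version | cut -f1 -d.)
  xcode_minor=$(show_xcode_version | cut -f2 -d.)
  if [ "${xcode_major}" -lt 6 ]; then return 1; fi
  if [ "${xcode_major}" -eq 6 ] && [ "${xcode_minor}" -lt 3 ]; then return 1; fi
}

for FAKE_XCODE_VERSION in 6.2 6.3 7.3.1; do
  if check_xcode_minimum_version; then
    echo "Xcode ${FAKE_XCODE_VERSION}: new enough"
  else
    echo "Xcode ${FAKE_XCODE_VERSION}: too old for neon"
  fi
done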
process_common_toolchain() {
if [ -z "$toolchain" ]; then
gcctarget="${CHOST:-$(gcc -dumpmachine 2> /dev/null)}"
# detect tgt_isa
case "$gcctarget" in
armv6*)
tgt_isa=armv6
aarch64*)
tgt_isa=arm64
;;
armv7*-hardfloat* | armv7*-gnueabihf | arm-*-gnueabihf)
tgt_isa=armv7
@ -758,7 +791,14 @@ process_common_toolchain() {
enabled shared && soft_enable pic
# Minimum iOS version for all target platforms (darwin and iphonesimulator).
IOS_VERSION_MIN="6.0"
# Shared library framework builds are only possible on iOS 8 and later.
if enabled shared; then
IOS_VERSION_OPTIONS="--enable-shared"
IOS_VERSION_MIN="8.0"
else
IOS_VERSION_OPTIONS=""
IOS_VERSION_MIN="6.0"
fi
# Handle darwin variants. Newer SDKs allow targeting older
# platforms, so use the newest one available.
@ -850,36 +890,6 @@ process_common_toolchain() {
if disabled neon && enabled neon_asm; then
die "Disabling neon while keeping neon-asm is not supported"
fi
case ${toolchain} in
# Apple iOS SDKs no longer support armv6 as of the version 9
# release (coincides with release of Xcode 7). Only enable media
# when using earlier SDK releases.
*-darwin*)
if [ "$(show_darwin_sdk_major_version iphoneos)" -lt 9 ]; then
soft_enable media
else
soft_disable media
RTCD_OPTIONS="${RTCD_OPTIONS}--disable-media "
fi
;;
*)
soft_enable media
;;
esac
;;
armv6)
case ${toolchain} in
*-darwin*)
if [ "$(show_darwin_sdk_major_version iphoneos)" -lt 9 ]; then
soft_enable media
else
die "Your iOS SDK does not support armv6."
fi
;;
*)
soft_enable media
;;
esac
;;
esac
@ -908,6 +918,9 @@ EOF
check_add_cflags -mfpu=neon #-ftree-vectorize
check_add_asflags -mfpu=neon
fi
elif [ ${tgt_isa} = "arm64" ] || [ ${tgt_isa} = "armv8" ]; then
check_add_cflags -march=armv8-a
check_add_asflags -march=armv8-a
else
check_add_cflags -march=${tgt_isa}
check_add_asflags -march=${tgt_isa}
@ -975,43 +988,50 @@ EOF
;;
android*)
SDK_PATH=${sdk_path}
COMPILER_LOCATION=`find "${SDK_PATH}" \
-name "arm-linux-androideabi-gcc*" -print -quit`
TOOLCHAIN_PATH=${COMPILER_LOCATION%/*}/arm-linux-androideabi-
CC=${TOOLCHAIN_PATH}gcc
CXX=${TOOLCHAIN_PATH}g++
AR=${TOOLCHAIN_PATH}ar
LD=${TOOLCHAIN_PATH}gcc
AS=${TOOLCHAIN_PATH}as
STRIP=${TOOLCHAIN_PATH}strip
NM=${TOOLCHAIN_PATH}nm
if [ -n "${sdk_path}" ]; then
SDK_PATH=${sdk_path}
COMPILER_LOCATION=`find "${SDK_PATH}" \
-name "arm-linux-androideabi-gcc*" -print -quit`
TOOLCHAIN_PATH=${COMPILER_LOCATION%/*}/arm-linux-androideabi-
CC=${TOOLCHAIN_PATH}gcc
CXX=${TOOLCHAIN_PATH}g++
AR=${TOOLCHAIN_PATH}ar
LD=${TOOLCHAIN_PATH}gcc
AS=${TOOLCHAIN_PATH}as
STRIP=${TOOLCHAIN_PATH}strip
NM=${TOOLCHAIN_PATH}nm
if [ -z "${alt_libc}" ]; then
alt_libc=`find "${SDK_PATH}" -name arch-arm -print | \
awk '{n = split($0,a,"/"); \
if [ -z "${alt_libc}" ]; then
alt_libc=`find "${SDK_PATH}" -name arch-arm -print | \
awk '{n = split($0,a,"/"); \
split(a[n-1],b,"-"); \
print $0 " " b[2]}' | \
sort -g -k 2 | \
awk '{ print $1 }' | tail -1`
fi
fi
if [ -d "${alt_libc}" ]; then
add_cflags "--sysroot=${alt_libc}"
add_ldflags "--sysroot=${alt_libc}"
fi
if [ -d "${alt_libc}" ]; then
add_cflags "--sysroot=${alt_libc}"
add_ldflags "--sysroot=${alt_libc}"
fi
# linker flag that routes around a CPU bug in some
# Cortex-A8 implementations (NDK Dev Guide)
add_ldflags "-Wl,--fix-cortex-a8"
# linker flag that routes around a CPU bug in some
# Cortex-A8 implementations (NDK Dev Guide)
add_ldflags "-Wl,--fix-cortex-a8"
enable_feature pic
soft_enable realtime_only
if [ ${tgt_isa} = "armv7" ]; then
soft_enable runtime_cpu_detect
fi
if enabled runtime_cpu_detect; then
add_cflags "-I${SDK_PATH}/sources/android/cpufeatures"
enable_feature pic
soft_enable realtime_only
if [ ${tgt_isa} = "armv7" ]; then
soft_enable runtime_cpu_detect
fi
if enabled runtime_cpu_detect; then
add_cflags "-I${SDK_PATH}/sources/android/cpufeatures"
fi
else
echo "Assuming standalone build with NDK toolchain."
echo "See build/make/Android.mk for details."
check_add_ldflags -static
soft_enable unit_tests
fi
;;
@ -1025,18 +1045,7 @@ EOF
NM="$(${XCRUN_FIND} nm)"
RANLIB="$(${XCRUN_FIND} ranlib)"
AS_SFX=.s
# Special handling of ld for armv6 because libclang_rt.ios.a does
# not contain armv6 support in Apple's clang package:
# Apple LLVM version 5.1 (clang-503.0.40) (based on LLVM 3.4svn).
# TODO(tomfinegan): Remove this. Our minimum iOS version (6.0)
# renders support for armv6 unnecessary because the 3GS and up
# support neon.
if [ "${tgt_isa}" = "armv6" ]; then
LD="$(${XCRUN_FIND} ld)"
else
LD="${CXX:-$(${XCRUN_FIND} ld)}"
fi
LD="${CXX:-$(${XCRUN_FIND} ld)}"
# ASFLAGS is written here instead of using check_add_asflags
# because we need to overwrite all of ASFLAGS and purge the
@ -1062,6 +1071,19 @@ EOF
[ -d "${try_dir}" ] && add_ldflags -L"${try_dir}"
done
case ${tgt_isa} in
armv7|armv7s|armv8|arm64)
if enabled neon && ! check_xcode_minimum_version; then
soft_disable neon
log_echo " neon disabled: upgrade Xcode (need v6.3+)."
if enabled neon_asm; then
soft_disable neon_asm
log_echo " neon_asm disabled: upgrade Xcode (need v6.3+)."
fi
fi
;;
esac
asm_conversion_cmd="${source_path}/build/make/ads2gas_apple.pl"
if [ "$(show_darwin_sdk_major_version iphoneos)" -gt 8 ]; then
@ -1076,7 +1098,7 @@ EOF
if enabled rvct; then
# Check if we have CodeSourcery GCC in PATH. Needed for
# libraries
hash arm-none-linux-gnueabi-gcc 2>&- || \
which arm-none-linux-gnueabi-gcc 2>&- || \
die "Couldn't find CodeSourcery GCC from PATH"
# Use armcc as a linker to enable translation of
@ -1111,13 +1133,13 @@ EOF
if [ -n "${tune_cpu}" ]; then
case ${tune_cpu} in
p5600)
check_add_cflags -mips32r5 -funroll-loops -mload-store-pairs
check_add_cflags -mips32r5 -mload-store-pairs
check_add_cflags -msched-weight -mhard-float -mfp64
check_add_asflags -mips32r5 -mhard-float -mfp64
check_add_ldflags -mfp64
;;
i6400)
check_add_cflags -mips64r6 -mabi=64 -funroll-loops -msched-weight
i6400|p6600)
check_add_cflags -mips64r6 -mabi=64 -msched-weight
check_add_cflags -mload-store-pairs -mhard-float -mfp64
check_add_asflags -mips64r6 -mabi=64 -mhard-float -mfp64
check_add_ldflags -mips64r6 -mabi=64 -mfp64
@ -1197,6 +1219,12 @@ EOF
soft_disable avx2
;;
esac
case $vc_version in
7|8|9)
echo "${tgt_cc} omits stdint.h, disabling webm-io..."
soft_disable webm_io
;;
esac
;;
esac
@ -1348,10 +1376,6 @@ EOF
fi
fi
if [ "${tgt_isa}" = "x86_64" ] || [ "${tgt_isa}" = "x86" ]; then
soft_enable use_x86inc
fi
# Position Independent Code (PIC) support, for building relocatable
# shared objects
enabled gcc && enabled pic && check_add_cflags -fPIC
@ -1381,6 +1405,7 @@ EOF
*-win*-vs*)
;;
*-android-gcc)
# bionic includes basic pthread functionality, obviating -lpthread.
;;
*)
check_header pthread.h && add_extralibs -lpthread

View File

@ -1,490 +0,0 @@
#!/bin/bash
##
## Copyright (c) 2010 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
self=$0
self_basename=${self##*/}
self_dirname=$(dirname "$0")
. "$self_dirname/msvs_common.sh"|| exit 127
show_help() {
cat <<EOF
Usage: ${self_basename} --name=projname [options] file1 [file2 ...]
This script generates a Visual Studio project file from a list of source
code files.
Options:
--help Print this message
--exe Generate a project for building an Application
--lib Generate a project for creating a static library
--dll Generate a project for creating a dll
--static-crt Use the static C runtime (/MT)
--target=isa-os-cc Target specifier (required)
--out=filename Write output to a file [stdout]
--name=project_name Name of the project (required)
--proj-guid=GUID GUID to use for the project
--module-def=filename File containing export definitions (for DLLs)
--ver=version Version (7,8,9) of visual studio to generate for
--src-path-bare=dir Path to root of source tree
-Ipath/to/include Additional include directories
-DFLAG[=value] Preprocessor macros to define
-Lpath/to/lib Additional library search paths
-llibname Library to link against
EOF
exit 1
}
generate_filter() {
local var=$1
local name=$2
local pats=$3
local file_list_sz
local i
local f
local saveIFS="$IFS"
local pack
echo "generating filter '$name' from ${#file_list[@]} files" >&2
IFS=*
open_tag Filter \
Name=$name \
Filter=$pats \
UniqueIdentifier=`generate_uuid` \
file_list_sz=${#file_list[@]}
for i in ${!file_list[@]}; do
f=${file_list[i]}
for pat in ${pats//;/$IFS}; do
if [ "${f##*.}" == "$pat" ]; then
unset file_list[i]
objf=$(echo ${f%.*}.obj \
| sed -e "s,$src_path_bare,," \
-e 's/^[\./]\+//g' -e 's,[:/ ],_,g')
open_tag File RelativePath="$f"
if [ "$pat" == "asm" ] && $asm_use_custom_step; then
# Avoid object file name collisions, i.e. vpx_config.c and
# vpx_config.asm produce the same object file without
# this additional suffix.
objf=${objf%.obj}_asm.obj
for plat in "${platforms[@]}"; do
for cfg in Debug Release; do
open_tag FileConfiguration \
Name="${cfg}|${plat}" \
tag Tool \
Name="VCCustomBuildTool" \
Description="Assembling \$(InputFileName)" \
CommandLine="$(eval echo \$asm_${cfg}_cmdline) -o \$(IntDir)\\$objf" \
Outputs="\$(IntDir)\\$objf" \
close_tag FileConfiguration
done
done
fi
if [ "$pat" == "c" ] || \
[ "$pat" == "cc" ] || [ "$pat" == "cpp" ]; then
for plat in "${platforms[@]}"; do
for cfg in Debug Release; do
open_tag FileConfiguration \
Name="${cfg}|${plat}" \
tag Tool \
Name="VCCLCompilerTool" \
ObjectFile="\$(IntDir)\\$objf" \
close_tag FileConfiguration
done
done
fi
close_tag File
break
fi
done
done
close_tag Filter
IFS="$saveIFS"
}
# Process command line
unset target
for opt in "$@"; do
optval="${opt#*=}"
case "$opt" in
--help|-h) show_help
;;
--target=*) target="${optval}"
;;
--out=*) outfile="$optval"
;;
--name=*) name="${optval}"
;;
--proj-guid=*) guid="${optval}"
;;
--module-def=*) link_opts="${link_opts} ModuleDefinitionFile=${optval}"
;;
--exe) proj_kind="exe"
;;
--dll) proj_kind="dll"
;;
--lib) proj_kind="lib"
;;
--src-path-bare=*)
src_path_bare=$(fix_path "$optval")
src_path_bare=${src_path_bare%/}
;;
--static-crt) use_static_runtime=true
;;
--ver=*)
vs_ver="$optval"
case "$optval" in
[789])
;;
*) die Unrecognized Visual Studio Version in $opt
;;
esac
;;
-I*)
opt=${opt##-I}
opt=$(fix_path "$opt")
opt="${opt%/}"
incs="${incs}${incs:+;}&quot;${opt}&quot;"
yasmincs="${yasmincs} -I&quot;${opt}&quot;"
;;
-D*) defines="${defines}${defines:+;}${opt##-D}"
;;
-L*) # fudge . to $(OutDir)
if [ "${opt##-L}" == "." ]; then
libdirs="${libdirs}${libdirs:+;}&quot;\$(OutDir)&quot;"
else
# Also try directories for this platform/configuration
opt=${opt##-L}
opt=$(fix_path "$opt")
libdirs="${libdirs}${libdirs:+;}&quot;${opt}&quot;"
libdirs="${libdirs}${libdirs:+;}&quot;${opt}/\$(PlatformName)/\$(ConfigurationName)&quot;"
libdirs="${libdirs}${libdirs:+;}&quot;${opt}/\$(PlatformName)&quot;"
fi
;;
-l*) libs="${libs}${libs:+ }${opt##-l}.lib"
;;
-*) die_unknown $opt
;;
*)
# The paths in file_list are fixed outside of the loop.
file_list[${#file_list[@]}]="$opt"
case "$opt" in
*.asm) uses_asm=true
;;
esac
;;
esac
done
# Make one call to fix_path for file_list to improve performance.
fix_file_list
outfile=${outfile:-/dev/stdout}
guid=${guid:-`generate_uuid`}
asm_use_custom_step=false
uses_asm=${uses_asm:-false}
case "${vs_ver:-8}" in
7) vs_ver_id="7.10"
asm_use_custom_step=$uses_asm
warn_64bit='Detect64BitPortabilityProblems=true'
;;
8) vs_ver_id="8.00"
asm_use_custom_step=$uses_asm
warn_64bit='Detect64BitPortabilityProblems=true'
;;
9) vs_ver_id="9.00"
asm_use_custom_step=$uses_asm
warn_64bit='Detect64BitPortabilityProblems=false'
;;
esac
[ -n "$name" ] || die "Project name (--name) must be specified!"
[ -n "$target" ] || die "Target (--target) must be specified!"
if ${use_static_runtime:-false}; then
release_runtime=0
debug_runtime=1
lib_sfx=mt
else
release_runtime=2
debug_runtime=3
lib_sfx=md
fi
# Calculate debug lib names: If a lib ends in ${lib_sfx}.lib, then rename
# it to ${lib_sfx}d.lib. This precludes linking to release libs from a
# debug exe, so this may need to be refactored later.
for lib in ${libs}; do
if [ "$lib" != "${lib%${lib_sfx}.lib}" ]; then
lib=${lib%.lib}d.lib
fi
debug_libs="${debug_libs}${debug_libs:+ }${lib}"
done
# List Keyword for this target
case "$target" in
x86*) keyword="ManagedCProj"
;;
*) die "Unsupported target $target!"
esac
# List of all platforms supported for this target
case "$target" in
x86_64*)
platforms[0]="x64"
asm_Debug_cmdline="yasm -Xvc -g cv8 -f win64 ${yasmincs} &quot;\$(InputPath)&quot;"
asm_Release_cmdline="yasm -Xvc -f win64 ${yasmincs} &quot;\$(InputPath)&quot;"
;;
x86*)
platforms[0]="Win32"
asm_Debug_cmdline="yasm -Xvc -g cv8 -f win32 ${yasmincs} &quot;\$(InputPath)&quot;"
asm_Release_cmdline="yasm -Xvc -f win32 ${yasmincs} &quot;\$(InputPath)&quot;"
;;
*) die "Unsupported target $target!"
;;
esac
generate_vcproj() {
case "$proj_kind" in
exe) vs_ConfigurationType=1
;;
dll) vs_ConfigurationType=2
;;
*) vs_ConfigurationType=4
;;
esac
echo "<?xml version=\"1.0\" encoding=\"Windows-1252\"?>"
open_tag VisualStudioProject \
ProjectType="Visual C++" \
Version="${vs_ver_id}" \
Name="${name}" \
ProjectGUID="{${guid}}" \
RootNamespace="${name}" \
Keyword="${keyword}" \
open_tag Platforms
for plat in "${platforms[@]}"; do
tag Platform Name="$plat"
done
close_tag Platforms
open_tag Configurations
for plat in "${platforms[@]}"; do
plat_no_ws=`echo $plat | sed 's/[^A-Za-z0-9_]/_/g'`
open_tag Configuration \
Name="Debug|$plat" \
OutputDirectory="\$(SolutionDir)$plat_no_ws/\$(ConfigurationName)" \
IntermediateDirectory="$plat_no_ws/\$(ConfigurationName)/${name}" \
ConfigurationType="$vs_ConfigurationType" \
CharacterSet="1" \
case "$target" in
x86*)
case "$name" in
vpx)
tag Tool \
Name="VCCLCompilerTool" \
Optimization="0" \
AdditionalIncludeDirectories="$incs" \
PreprocessorDefinitions="WIN32;_DEBUG;_CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;$defines" \
RuntimeLibrary="$debug_runtime" \
UsePrecompiledHeader="0" \
WarningLevel="3" \
DebugInformationFormat="2" \
$warn_64bit \
$uses_asm && tag Tool Name="YASM" IncludePaths="$incs" Debug="true"
;;
*)
tag Tool \
Name="VCCLCompilerTool" \
Optimization="0" \
AdditionalIncludeDirectories="$incs" \
PreprocessorDefinitions="WIN32;_DEBUG;_CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;$defines" \
RuntimeLibrary="$debug_runtime" \
UsePrecompiledHeader="0" \
WarningLevel="3" \
DebugInformationFormat="2" \
$warn_64bit \
$uses_asm && tag Tool Name="YASM" IncludePaths="$incs" Debug="true"
;;
esac
;;
esac
case "$proj_kind" in
exe)
case "$target" in
x86*)
case "$name" in
*)
tag Tool \
Name="VCLinkerTool" \
AdditionalDependencies="$debug_libs \$(NoInherit)" \
AdditionalLibraryDirectories="$libdirs" \
GenerateDebugInformation="true" \
ProgramDatabaseFile="\$(OutDir)/${name}.pdb" \
;;
esac
;;
esac
;;
lib)
case "$target" in
x86*)
tag Tool \
Name="VCLibrarianTool" \
OutputFile="\$(OutDir)/${name}${lib_sfx}d.lib" \
;;
esac
;;
dll)
tag Tool \
Name="VCLinkerTool" \
AdditionalDependencies="\$(NoInherit)" \
LinkIncremental="2" \
GenerateDebugInformation="true" \
AssemblyDebug="1" \
TargetMachine="1" \
$link_opts \
;;
esac
close_tag Configuration
open_tag Configuration \
Name="Release|$plat" \
OutputDirectory="\$(SolutionDir)$plat_no_ws/\$(ConfigurationName)" \
IntermediateDirectory="$plat_no_ws/\$(ConfigurationName)/${name}" \
ConfigurationType="$vs_ConfigurationType" \
CharacterSet="1" \
WholeProgramOptimization="0" \
case "$target" in
x86*)
case "$name" in
vpx)
tag Tool \
Name="VCCLCompilerTool" \
Optimization="2" \
FavorSizeorSpeed="1" \
AdditionalIncludeDirectories="$incs" \
PreprocessorDefinitions="WIN32;NDEBUG;_CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;$defines" \
RuntimeLibrary="$release_runtime" \
UsePrecompiledHeader="0" \
WarningLevel="3" \
DebugInformationFormat="0" \
$warn_64bit \
$uses_asm && tag Tool Name="YASM" IncludePaths="$incs"
;;
*)
tag Tool \
Name="VCCLCompilerTool" \
AdditionalIncludeDirectories="$incs" \
Optimization="2" \
FavorSizeorSpeed="1" \
PreprocessorDefinitions="WIN32;NDEBUG;_CRT_SECURE_NO_WARNINGS;_CRT_SECURE_NO_DEPRECATE;$defines" \
RuntimeLibrary="$release_runtime" \
UsePrecompiledHeader="0" \
WarningLevel="3" \
DebugInformationFormat="0" \
$warn_64bit \
$uses_asm && tag Tool Name="YASM" IncludePaths="$incs"
;;
esac
;;
esac
case "$proj_kind" in
exe)
case "$target" in
x86*)
case "$name" in
*)
tag Tool \
Name="VCLinkerTool" \
AdditionalDependencies="$libs \$(NoInherit)" \
AdditionalLibraryDirectories="$libdirs" \
;;
esac
;;
esac
;;
lib)
case "$target" in
x86*)
tag Tool \
Name="VCLibrarianTool" \
OutputFile="\$(OutDir)/${name}${lib_sfx}.lib" \
;;
esac
;;
dll) # note differences to debug version: LinkIncremental, AssemblyDebug
tag Tool \
Name="VCLinkerTool" \
AdditionalDependencies="\$(NoInherit)" \
LinkIncremental="1" \
GenerateDebugInformation="true" \
TargetMachine="1" \
$link_opts \
;;
esac
close_tag Configuration
done
close_tag Configurations
open_tag Files
generate_filter srcs "Source Files" "c;cc;cpp;def;odl;idl;hpj;bat;asm;asmx"
generate_filter hdrs "Header Files" "h;hm;inl;inc;xsd"
generate_filter resrcs "Resource Files" "rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
generate_filter resrcs "Build Files" "mk"
close_tag Files
tag Globals
close_tag VisualStudioProject
# This must be done from within the {} subshell
echo "Ignored files list (${#file_list[@]} items) is:" >&2
for f in "${file_list[@]}"; do
echo " $f" >&2
done
}
generate_vcproj |
sed -e '/"/s;\([^ "]\)/;\1\\;g' > ${outfile}
exit
<!--
TODO: Add any files not captured by filters.
<File
RelativePath=".\ReadMe.txt"
>
</File>
-->

View File

@ -55,16 +55,11 @@ indent_pop() {
parse_project() {
local file=$1
if [ "$sfx" = "vcproj" ]; then
local name=`grep Name "$file" | awk 'BEGIN {FS="\""}{if (NR==1) print $2}'`
local guid=`grep ProjectGUID "$file" | awk 'BEGIN {FS="\""}{if (NR==1) print $2}'`
else
local name=`grep RootNamespace "$file" | sed 's,.*<.*>\(.*\)</.*>.*,\1,'`
local guid=`grep ProjectGuid "$file" | sed 's,.*<.*>\(.*\)</.*>.*,\1,'`
fi
local name=`grep RootNamespace "$file" | sed 's,.*<.*>\(.*\)</.*>.*,\1,'`
local guid=`grep ProjectGuid "$file" | sed 's,.*<.*>\(.*\)</.*>.*,\1,'`
# save the project GUID to a variable, normalizing to the basename of the
# vcproj file without the extension
# vcxproj file without the extension
local var
var=${file##*/}
var=${var%%.${sfx}}
@ -72,13 +67,8 @@ parse_project() {
eval "${var}_name=$name"
eval "${var}_guid=$guid"
if [ "$sfx" = "vcproj" ]; then
cur_config_list=`grep -A1 '<Configuration' $file |
grep Name | cut -d\" -f2`
else
cur_config_list=`grep -B1 'Label="Configuration"' $file |
grep Condition | cut -d\' -f4`
fi
cur_config_list=`grep -B1 'Label="Configuration"' $file |
grep Condition | cut -d\' -f4`
new_config_list=$(for i in $config_list $cur_config_list; do
echo $i
done | sort | uniq)
@ -103,25 +93,6 @@ process_project() {
eval "${var}_guid=$guid"
echo "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = \"$name\", \"$file\", \"$guid\""
indent_push
eval "local deps=\"\${${var}_deps}\""
if [ -n "$deps" ] && [ "$sfx" = "vcproj" ]; then
echo "${indent}ProjectSection(ProjectDependencies) = postProject"
indent_push
for dep in $deps; do
eval "local dep_guid=\${${dep}_guid}"
[ -z "${dep_guid}" ] && die "Unknown GUID for $dep (dependency of $var)"
echo "${indent}$dep_guid = $dep_guid"
done
indent_pop
echo "${indent}EndProjectSection"
fi
indent_pop
echo "EndProject"
}
@ -191,11 +162,7 @@ process_makefile() {
IFS=$'\r'$'\n'
local TAB=$'\t'
cat <<EOF
ifeq (\$(CONFIG_VS_VERSION),7)
MSBUILD_TOOL := devenv.com
else
MSBUILD_TOOL := msbuild.exe
endif
found_devenv := \$(shell which \$(MSBUILD_TOOL) >/dev/null 2>&1 && echo yes)
.nodevenv.once:
${TAB}@echo " * \$(MSBUILD_TOOL) not found in path."
@ -204,7 +171,7 @@ ${TAB}@echo " * You will have to build all configurations manually using the"
${TAB}@echo " * Visual Studio IDE. To allow make to build them automatically,"
${TAB}@echo " * add the Common7/IDE directory of your Visual Studio"
${TAB}@echo " * installation to your path, eg:"
${TAB}@echo " * C:\Program Files\Microsoft Visual Studio 8\Common7\IDE"
${TAB}@echo " * C:\Program Files\Microsoft Visual Studio 10.0\Common7\IDE"
${TAB}@echo " * "
${TAB}@touch \$@
CLEAN-OBJS += \$(if \$(found_devenv),,.nodevenv.once)
@ -221,16 +188,9 @@ clean::
${TAB}rm -rf "$platform"/"$config"
.PHONY: $nows_sln_config
ifneq (\$(found_devenv),)
ifeq (\$(CONFIG_VS_VERSION),7)
$nows_sln_config: $outfile
${TAB}\$(MSBUILD_TOOL) $outfile -build "$config"
else
$nows_sln_config: $outfile
${TAB}\$(MSBUILD_TOOL) $outfile -m -t:Build \\
${TAB}${TAB}-p:Configuration="$config" -p:Platform="$platform"
endif
else
$nows_sln_config: $outfile .nodevenv.once
${TAB}@echo " * Skipping build of $sln_config (\$(MSBUILD_TOOL) not in path)."
@ -255,23 +215,12 @@ for opt in "$@"; do
;;
--ver=*) vs_ver="$optval"
case $optval in
[789]|10|11|12|14)
10|11|12|14)
;;
*) die Unrecognized Visual Studio Version in $opt
;;
esac
;;
--ver=*) vs_ver="$optval"
case $optval in
7) sln_vers="8.00"
sln_vers_str="Visual Studio .NET 2003"
;;
[89])
;;
*) die "Unrecognized Visual Studio Version '$optval' in $opt"
;;
esac
;;
--target=*) target="${optval}"
;;
-*) die_unknown $opt
@ -281,16 +230,7 @@ for opt in "$@"; do
done
outfile=${outfile:-/dev/stdout}
mkoutfile=${mkoutfile:-/dev/stdout}
case "${vs_ver:-8}" in
7) sln_vers="8.00"
sln_vers_str="Visual Studio .NET 2003"
;;
8) sln_vers="9.00"
sln_vers_str="Visual Studio 2005"
;;
9) sln_vers="10.00"
sln_vers_str="Visual Studio 2008"
;;
case "${vs_ver:-10}" in
10) sln_vers="11.00"
sln_vers_str="Visual Studio 2010"
;;
@ -304,14 +244,7 @@ case "${vs_ver:-8}" in
sln_vers_str="Visual Studio 2015"
;;
esac
case "${vs_ver:-8}" in
[789])
sfx=vcproj
;;
10|11|12|14)
sfx=vcxproj
;;
esac
sfx=vcxproj
for f in "${file_list[@]}"; do
parse_project $f

View File

@ -211,7 +211,7 @@ for opt in "$@"; do
done
# Make one call to fix_path for file_list to improve performance.
fix_file_list
fix_file_list file_list
outfile=${outfile:-/dev/stdout}
guid=${guid:-`generate_uuid`}

View File

@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>VPX</string>
<key>CFBundleIdentifier</key>
<string>org.webmproject.VPX</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>VPX</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>${VERSION}</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleSupportedPlatforms</key>
<array>
<string>iPhoneOS</string>
</array>
<key>CFBundleVersion</key>
<string>${VERSION}</string>
<key>MinimumOSVersion</key>
<string>${IOS_VERSION_MIN}</string>
<key>UIDeviceFamily</key>
<array>
<integer>1</integer>
<integer>2</integer>
</array>
<key>VPXFullVersion</key>
<string>${FULLVERSION}</string>
</dict>
</plist>

View File

@ -24,6 +24,7 @@ CONFIGURE_ARGS="--disable-docs
--disable-unit-tests"
DIST_DIR="_dist"
FRAMEWORK_DIR="VPX.framework"
FRAMEWORK_LIB="VPX.framework/VPX"
HEADER_DIR="${FRAMEWORK_DIR}/Headers/vpx"
SCRIPT_DIR=$(dirname "$0")
LIBVPX_SOURCE_DIR=$(cd ${SCRIPT_DIR}/../..; pwd)
@ -137,6 +138,44 @@ create_vpx_framework_config_shim() {
printf "#endif // ${include_guard}" >> "${config_file}"
}
# Verifies that $FRAMEWORK_LIB fat library contains requested builds.
verify_framework_targets() {
local requested_cpus=""
local cpu=""
# Extract CPU from full target name.
for target; do
cpu="${target%%-*}"
if [ "${cpu}" = "x86" ]; then
# lipo -info outputs i386 for libvpx x86 targets.
cpu="i386"
fi
requested_cpus="${requested_cpus}${cpu} "
done
# Get target CPUs present in framework library.
local targets_built=$(${LIPO} -info ${FRAMEWORK_LIB})
# $LIPO -info outputs a string like the following:
# Architectures in the fat file: $FRAMEWORK_LIB <architectures>
# Capture only the architecture strings.
targets_built=${targets_built##*: }
# Sort CPU strings to make the next step a simple string compare.
local actual=$(echo ${targets_built} | tr " " "\n" | sort | tr "\n" " ")
local requested=$(echo ${requested_cpus} | tr " " "\n" | sort | tr "\n" " ")
vlog "Requested ${FRAMEWORK_LIB} CPUs: ${requested}"
vlog "Actual ${FRAMEWORK_LIB} CPUs: ${actual}"
if [ "${requested}" != "${actual}" ]; then
elog "Actual ${FRAMEWORK_LIB} targets do not match requested target list."
elog " Requested target CPUs: ${requested}"
elog " Actual target CPUs: ${actual}"
return 1
fi
}
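The ${targets_built##*: } expansion above leans on the shape of lipo -info output; a tiny illustration with a canned output line (file path and architecture list invented for the example):

#!/bin/sh
# lipo -info prints something like:
#   Architectures in the fat file: VPX.framework/VPX are: armv7 arm64 i386 x86_64
targets_built='Architectures in the fat file: VPX.framework/VPX are: armv7 arm64 i386 x86_64'
targets_built=${targets_built##*: }        # strip everything through the last ": "
actual=$(echo ${targets_built} | tr " " "\n" | sort | tr "\n" " ")
echo "${actual}"                           # -> arm64 armv7 i386 x86_64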
# Configures and builds each target specified by $1, and then builds
# VPX.framework.
build_framework() {
@ -157,7 +196,12 @@ build_framework() {
for target in ${targets}; do
build_target "${target}"
target_dist_dir="${BUILD_ROOT}/${target}/${DIST_DIR}"
lib_list="${lib_list} ${target_dist_dir}/lib/libvpx.a"
if [ "${ENABLE_SHARED}" = "yes" ]; then
local suffix="dylib"
else
local suffix="a"
fi
lib_list="${lib_list} ${target_dist_dir}/lib/libvpx.${suffix}"
done
cd "${ORIG_PWD}"
@ -176,13 +220,25 @@ build_framework() {
# Copy in vpx_version.h.
cp -p "${BUILD_ROOT}/${target}/vpx_version.h" "${HEADER_DIR}"
vlog "Created fat library ${FRAMEWORK_DIR}/VPX containing:"
if [ "${ENABLE_SHARED}" = "yes" ]; then
# Adjust the dylib's name so dynamic linking in apps works as expected.
install_name_tool -id '@rpath/VPX.framework/VPX' ${FRAMEWORK_DIR}/VPX
# Copy in Info.plist.
cat "${SCRIPT_DIR}/ios-Info.plist" \
| sed "s/\${FULLVERSION}/${FULLVERSION}/g" \
| sed "s/\${VERSION}/${VERSION}/g" \
| sed "s/\${IOS_VERSION_MIN}/${IOS_VERSION_MIN}/g" \
> "${FRAMEWORK_DIR}/Info.plist"
fi
# Confirm VPX.framework/VPX contains the targets requested.
verify_framework_targets ${targets}
vlog "Created fat library ${FRAMEWORK_LIB} containing:"
for lib in ${lib_list}; do
vlog " $(echo ${lib} | awk -F / '{print $2, $NF}')"
done
# TODO(tomfinegan): Verify that expected targets are included within
# VPX.framework/VPX via lipo -info.
}
# Trap function. Cleans up the subtree used to build all targets contained in
@ -213,6 +269,7 @@ iosbuild_usage() {
cat << EOF
Usage: ${0##*/} [arguments]
--help: Display this message and exit.
--enable-shared: Build a dynamic framework for use on iOS 8 or later.
--extra-configure-args <args>: Extra args to pass when configuring libvpx.
--macosx: Uses darwin15 targets instead of iphonesimulator targets for x86
and x86_64. Allows linking to framework when builds target MacOSX
@ -251,6 +308,9 @@ while [ -n "$1" ]; do
iosbuild_usage
exit
;;
--enable-shared)
ENABLE_SHARED=yes
;;
--preserve-build-output)
PRESERVE_BUILD_OUTPUT=yes
;;
@ -278,6 +338,21 @@ while [ -n "$1" ]; do
shift
done
if [ "${ENABLE_SHARED}" = "yes" ]; then
CONFIGURE_ARGS="--enable-shared ${CONFIGURE_ARGS}"
fi
FULLVERSION=$("${SCRIPT_DIR}"/version.sh --bare "${LIBVPX_SOURCE_DIR}")
VERSION=$(echo "${FULLVERSION}" | sed -E 's/^v([0-9]+\.[0-9]+\.[0-9]+).*$/\1/')
if [ "$ENABLE_SHARED" = "yes" ]; then
IOS_VERSION_OPTIONS="--enable-shared"
IOS_VERSION_MIN="8.0"
else
IOS_VERSION_OPTIONS=""
IOS_VERSION_MIN="6.0"
fi
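For reference, a hedged example of driving the new option end to end (the script path assumes libvpx's usual build/make layout under libs/libvpx in this tree; adjust as needed):

#!/bin/sh
# Builds a dynamic VPX.framework; --enable-shared is the flag added above and
# pushes the deployment target up to iOS 8.0.
cd libs/libvpx
./build/make/iosbuild.sh --enable-shared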
if [ "${VERBOSE}" = "yes" ]; then
cat << EOF
BUILD_ROOT=${BUILD_ROOT}
@ -285,6 +360,7 @@ cat << EOF
CONFIGURE_ARGS=${CONFIGURE_ARGS}
EXTRA_CONFIGURE_ARGS=${EXTRA_CONFIGURE_ARGS}
FRAMEWORK_DIR=${FRAMEWORK_DIR}
FRAMEWORK_LIB=${FRAMEWORK_LIB}
HEADER_DIR=${HEADER_DIR}
LIBVPX_SOURCE_DIR=${LIBVPX_SOURCE_DIR}
LIPO=${LIPO}
@ -292,8 +368,13 @@ cat << EOF
ORIG_PWD=${ORIG_PWD}
PRESERVE_BUILD_OUTPUT=${PRESERVE_BUILD_OUTPUT}
TARGETS="$(print_list "" ${TARGETS})"
ENABLE_SHARED=${ENABLE_SHARED}
OSX_TARGETS="${OSX_TARGETS}"
SIM_TARGETS="${SIM_TARGETS}"
SCRIPT_DIR="${SCRIPT_DIR}"
FULLVERSION="${FULLVERSION}"
VERSION="${VERSION}"
IOS_VERSION_MIN="${IOS_VERSION_MIN}"
EOF
fi

View File

@ -39,11 +39,12 @@ fix_path() {
}
# Corrects the paths in file_list in one pass for efficiency.
# $1 is the name of the array to be modified.
fix_file_list() {
# TODO(jzern): this could be more generic and take the array as a param.
files=$(fix_path "${file_list[@]}")
declare -n array_ref=$1
files=$(fix_path "${array_ref[@]}")
local IFS=$'\n'
file_list=($files)
array_ref=($files)
}
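The rewritten fix_file_list takes the array name and edits it through a bash nameref (declare -n, bash 4.3+), which is why the caller hunk earlier now reads fix_file_list file_list. A small, self-contained illustration of the same pattern (helper and file names here are made up):

#!/bin/bash
# prefix_all rewrites whatever array NAME it is handed, in place.
prefix_all() {
  declare -n ref=$1            # ref aliases the caller's array
  local out=() item
  for item in "${ref[@]}"; do
    out+=("src/${item}")
  done
  ref=("${out[@]}")            # assignment lands in the caller's array
}

file_list=(alpha.c beta.c)
prefix_all file_list           # pass the name, not "${file_list[@]}"
echo "${file_list[@]}"         # -> src/alpha.c src/beta.c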
generate_uuid() {

View File

@ -384,13 +384,8 @@ if ($opts{arch} eq 'x86') {
}
close CONFIG_FILE;
mips;
} elsif ($opts{arch} eq 'armv6') {
@ALL_ARCHS = filter(qw/media/);
arm;
} elsif ($opts{arch} =~ /armv7\w?/) {
@ALL_ARCHS = filter(qw/media neon_asm neon/);
@REQUIRES = filter(keys %required ? keys %required : qw/media/);
&require(@REQUIRES);
@ALL_ARCHS = filter(qw/neon_asm neon/);
arm;
} elsif ($opts{arch} eq 'armv8' || $opts{arch} eq 'arm64' ) {
@ALL_ARCHS = filter(qw/neon/);

View File

@ -24,8 +24,9 @@ out_file=${2}
id=${3:-VERSION_STRING}
git_version_id=""
if [ -d "${source_path}/.git" ]; then
if [ -e "${source_path}/.git" ]; then
# Source Path is a git working copy. Check for local modifications.
# Note that git submodules may have a file as .git, not a directory.
export GIT_DIR="${source_path}/.git"
git_version_id=`git describe --match=v[0-9]* 2>/dev/null`
fi
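The switch from -d to -e matters because a submodule checkout (one way libvpx can be vendored) carries a .git file rather than a .git directory; a throwaway demonstration using scratch paths:

#!/bin/sh
# A submodule's .git is a one-line pointer file, e.g. "gitdir: ../../.git/modules/<name>".
workdir=$(mktemp -d)
printf 'gitdir: ../../.git/modules/libvpx\n' > "${workdir}/.git"
[ -d "${workdir}/.git" ] && echo "-d sees a git checkout" || echo "-d misses it"
[ -e "${workdir}/.git" ] && echo "-e sees a git checkout" || echo "-e misses it"
rm -rf "${workdir}"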

View File

@ -2,3 +2,4 @@
GERRIT_HOST: chromium-review.googlesource.com
GERRIT_PORT: 29418
CODE_REVIEW_SERVER: chromium-review.googlesource.com
GERRIT_SQUASH_UPLOADS: False

libs/libvpx/configure vendored
View File

@ -40,7 +40,6 @@ Advanced options:
hardware decoder compatibility
${toggle_vp8} VP8 codec support
${toggle_vp9} VP9 codec support
${toggle_vp10} VP10 codec support
${toggle_internal_stats} output of encoder internal stats for debug, if supported (encoders)
${toggle_postproc} postprocessing
${toggle_vp9_postproc} vp9 specific postprocessing
@ -98,11 +97,9 @@ EOF
# all_platforms is a list of all supported target platforms. Maintain
# alphabetically by architecture, generic-gnu last.
all_platforms="${all_platforms} armv6-darwin-gcc"
all_platforms="${all_platforms} armv6-linux-rvct"
all_platforms="${all_platforms} armv6-linux-gcc"
all_platforms="${all_platforms} armv6-none-rvct"
all_platforms="${all_platforms} arm64-android-gcc"
all_platforms="${all_platforms} arm64-darwin-gcc"
all_platforms="${all_platforms} arm64-linux-gcc"
all_platforms="${all_platforms} armv7-android-gcc" #neon Cortex-A8
all_platforms="${all_platforms} armv7-darwin-gcc" #neon Cortex-A8
all_platforms="${all_platforms} armv7-linux-rvct" #neon Cortex-A8
@ -112,6 +109,7 @@ all_platforms="${all_platforms} armv7-win32-vs11"
all_platforms="${all_platforms} armv7-win32-vs12"
all_platforms="${all_platforms} armv7-win32-vs14"
all_platforms="${all_platforms} armv7s-darwin-gcc"
all_platforms="${all_platforms} armv8-linux-gcc"
all_platforms="${all_platforms} mips32-linux-gcc"
all_platforms="${all_platforms} mips64-linux-gcc"
all_platforms="${all_platforms} sparc-solaris-gcc"
@ -132,9 +130,6 @@ all_platforms="${all_platforms} x86-linux-icc"
all_platforms="${all_platforms} x86-os2-gcc"
all_platforms="${all_platforms} x86-solaris-gcc"
all_platforms="${all_platforms} x86-win32-gcc"
all_platforms="${all_platforms} x86-win32-vs7"
all_platforms="${all_platforms} x86-win32-vs8"
all_platforms="${all_platforms} x86-win32-vs9"
all_platforms="${all_platforms} x86-win32-vs10"
all_platforms="${all_platforms} x86-win32-vs11"
all_platforms="${all_platforms} x86-win32-vs12"
@ -152,8 +147,6 @@ all_platforms="${all_platforms} x86_64-linux-gcc"
all_platforms="${all_platforms} x86_64-linux-icc"
all_platforms="${all_platforms} x86_64-solaris-gcc"
all_platforms="${all_platforms} x86_64-win64-gcc"
all_platforms="${all_platforms} x86_64-win64-vs8"
all_platforms="${all_platforms} x86_64-win64-vs9"
all_platforms="${all_platforms} x86_64-win64-vs10"
all_platforms="${all_platforms} x86_64-win64-vs11"
all_platforms="${all_platforms} x86_64-win64-vs12"
@ -195,12 +188,8 @@ if [ ${doxy_major:-0} -ge 1 ]; then
fi
# disable codecs when their source directory does not exist
[ -d "${source_path}/vp8" ] || disable_feature vp8
[ -d "${source_path}/vp9" ] || disable_feature vp9
[ -d "${source_path}/vp10" ] || disable_feature vp10
# disable vp10 codec by default
disable_feature vp10
[ -d "${source_path}/vp8" ] || disable_codec vp8
[ -d "${source_path}/vp9" ] || disable_codec vp9
# install everything except the sources, by default. sources will have
# to be enabled when doing dist builds, since that's no longer a common
@ -222,13 +211,10 @@ CODECS="
vp8_decoder
vp9_encoder
vp9_decoder
vp10_encoder
vp10_decoder
"
CODEC_FAMILIES="
vp8
vp9
vp10
"
ARCH_LIST="
@ -248,8 +234,6 @@ ARCH_EXT_LIST_X86="
avx2
"
ARCH_EXT_LIST="
edsp
media
neon
neon_asm
@ -279,7 +263,6 @@ CONFIG_LIST="
install_bins
install_libs
install_srcs
use_x86inc
debug
gprof
gcov
@ -341,7 +324,6 @@ CMDLINE_SELECT="
gprof
gcov
pic
use_x86inc
optimizations
ccache
runtime_cpu_detect
@ -391,15 +373,19 @@ process_cmdline() {
for opt do
optval="${opt#*=}"
case "$opt" in
--disable-codecs) for c in ${CODECS}; do disable_feature $c; done ;;
--disable-codecs)
for c in ${CODEC_FAMILIES}; do disable_codec $c; done
;;
--enable-?*|--disable-?*)
eval `echo "$opt" | sed 's/--/action=/;s/-/ option=/;s/-/_/g'`
if echo "${EXPERIMENT_LIST}" | grep "^ *$option\$" >/dev/null; then
if is_in ${option} ${EXPERIMENT_LIST}; then
if enabled experimental; then
${action}_feature $option
else
log_echo "Ignoring $opt -- not in experimental mode."
fi
elif is_in ${option} "${CODECS} ${CODEC_FAMILIES}"; then
${action}_codec ${option}
else
process_common_cmdline $opt
fi
@ -413,14 +399,6 @@ process_cmdline() {
post_process_cmdline() {
c=""
# If the codec family is disabled, disable all components of that family.
# If the codec family is enabled, enable all components of that family.
log_echo "Configuring selected codecs"
for c in ${CODECS}; do
disabled ${c%%_*} && disable_feature ${c}
enabled ${c%%_*} && enable_feature ${c}
done
# Enable all detected codecs, if they haven't been disabled
for c in ${CODECS}; do soft_enable $c; done
@ -515,13 +493,18 @@ process_detect() {
# Can only build shared libs on a subset of platforms. Doing this check
# here rather than at option parse time because the target auto-detect
# magic happens after the command line has been parsed.
if ! enabled linux && ! enabled os2; then
case "${tgt_os}" in
linux|os2|darwin*|iphonesimulator*)
# Supported platforms
;;
*)
if enabled gnu; then
echo "--enable-shared is only supported on ELF; assuming this is OK"
else
die "--enable-shared only supported on ELF and OS/2 for now"
die "--enable-shared only supported on ELF, OS/2, and Darwin for now"
fi
fi
;;
esac
fi
if [ -z "$CC" ] || enabled external_build; then
echo "Bypassing toolchain for environment detection."
@ -584,26 +567,19 @@ process_toolchain() {
check_add_cflags -Wall
check_add_cflags -Wdeclaration-after-statement
check_add_cflags -Wdisabled-optimization
check_add_cflags -Wfloat-conversion
check_add_cflags -Wpointer-arith
check_add_cflags -Wtype-limits
check_add_cflags -Wcast-qual
check_add_cflags -Wvla
check_add_cflags -Wimplicit-function-declaration
check_add_cflags -Wuninitialized
check_add_cflags -Wunused-variable
case ${CC} in
*clang*)
# libvpx and/or clang have issues with aliasing:
# https://code.google.com/p/webm/issues/detail?id=603
# work around them until they are fixed
check_add_cflags -fno-strict-aliasing
;;
*) check_add_cflags -Wunused-but-set-variable ;;
esac
check_add_cflags -Wunused
# check_add_cflags also adds to cxxflags. gtest does not do well with
# -Wundef so add it explicitly to CFLAGS only.
check_cflags -Wundef && add_cflags_only -Wundef
if enabled mips || [ -z "${INLINE}" ]; then
enabled extra_warnings || check_add_cflags -Wno-unused-function
else
check_add_cflags -Wunused-function
fi
fi
@ -652,17 +628,9 @@ process_toolchain() {
vs*) enable_feature msvs
enable_feature solution
vs_version=${tgt_cc##vs}
case $vs_version in
[789])
VCPROJ_SFX=vcproj
gen_vcproj_cmd=${source_path}/build/make/gen_msvs_proj.sh
;;
10|11|12|14)
VCPROJ_SFX=vcxproj
gen_vcproj_cmd=${source_path}/build/make/gen_msvs_vcxproj.sh
enabled werror && gen_vcproj_cmd="${gen_vcproj_cmd} --enable-werror"
;;
esac
VCPROJ_SFX=vcxproj
gen_vcproj_cmd=${source_path}/build/make/gen_msvs_vcxproj.sh
enabled werror && gen_vcproj_cmd="${gen_vcproj_cmd} --enable-werror"
all_targets="${all_targets} solution"
INLINE="__forceinline"
;;

View File

@ -36,21 +36,30 @@ LIBYUV_SRCS += third_party/libyuv/include/libyuv/basic_types.h \
third_party/libyuv/source/scale_neon64.cc \
third_party/libyuv/source/scale_win.cc \
LIBWEBM_COMMON_SRCS += third_party/libwebm/webmids.hpp
LIBWEBM_COMMON_SRCS += third_party/libwebm/common/hdr_util.cc \
third_party/libwebm/common/hdr_util.h \
third_party/libwebm/common/webmids.h
LIBWEBM_MUXER_SRCS += third_party/libwebm/mkvmuxer.cpp \
third_party/libwebm/mkvmuxerutil.cpp \
third_party/libwebm/mkvwriter.cpp \
third_party/libwebm/mkvmuxer.hpp \
third_party/libwebm/mkvmuxertypes.hpp \
third_party/libwebm/mkvmuxerutil.hpp \
third_party/libwebm/mkvparser.hpp \
third_party/libwebm/mkvwriter.hpp
LIBWEBM_MUXER_SRCS += third_party/libwebm/mkvmuxer/mkvmuxer.cc \
third_party/libwebm/mkvmuxer/mkvmuxerutil.cc \
third_party/libwebm/mkvmuxer/mkvwriter.cc \
third_party/libwebm/mkvmuxer/mkvmuxer.h \
third_party/libwebm/mkvmuxer/mkvmuxertypes.h \
third_party/libwebm/mkvmuxer/mkvmuxerutil.h \
third_party/libwebm/mkvparser/mkvparser.h \
third_party/libwebm/mkvmuxer/mkvwriter.h
LIBWEBM_PARSER_SRCS = third_party/libwebm/mkvparser/mkvparser.cc \
third_party/libwebm/mkvparser/mkvreader.cc \
third_party/libwebm/mkvparser/mkvparser.h \
third_party/libwebm/mkvparser/mkvreader.h
# Add compile flags and include path for libwebm sources.
ifeq ($(CONFIG_WEBM_IO),yes)
CXXFLAGS += -D__STDC_CONSTANT_MACROS -D__STDC_LIMIT_MACROS
INC_PATH-yes += $(SRC_PATH_BARE)/third_party/libwebm
endif
LIBWEBM_PARSER_SRCS = third_party/libwebm/mkvparser.cpp \
third_party/libwebm/mkvreader.cpp \
third_party/libwebm/mkvparser.hpp \
third_party/libwebm/mkvreader.hpp
# List of examples to build. UTILS are tools meant for distribution
# while EXAMPLES demonstrate specific portions of the API.
@ -70,6 +79,7 @@ ifeq ($(CONFIG_LIBYUV),yes)
endif
ifeq ($(CONFIG_WEBM_IO),yes)
vpxdec.SRCS += $(LIBWEBM_COMMON_SRCS)
vpxdec.SRCS += $(LIBWEBM_MUXER_SRCS)
vpxdec.SRCS += $(LIBWEBM_PARSER_SRCS)
vpxdec.SRCS += webmdec.cc webmdec.h
endif
@ -93,6 +103,7 @@ endif
ifeq ($(CONFIG_WEBM_IO),yes)
vpxenc.SRCS += $(LIBWEBM_COMMON_SRCS)
vpxenc.SRCS += $(LIBWEBM_MUXER_SRCS)
vpxenc.SRCS += $(LIBWEBM_PARSER_SRCS)
vpxenc.SRCS += webmenc.cc webmenc.h
endif
vpxenc.GUID = 548DEC74-7A15-4B2B-AFC3-AA102E7C25C1
@ -204,6 +215,17 @@ vp8cx_set_ref.SRCS += vpx_ports/msvc.h
vp8cx_set_ref.GUID = C5E31F7F-96F6-48BD-BD3E-10EBF6E8057A
vp8cx_set_ref.DESCRIPTION = VP8 set encoder reference frame
ifeq ($(CONFIG_VP9_ENCODER),yes)
ifeq ($(CONFIG_DECODERS),yes)
EXAMPLES-yes += vp9cx_set_ref.c
vp9cx_set_ref.SRCS += ivfenc.h ivfenc.c
vp9cx_set_ref.SRCS += tools_common.h tools_common.c
vp9cx_set_ref.SRCS += video_common.h
vp9cx_set_ref.SRCS += video_writer.h video_writer.c
vp9cx_set_ref.GUID = 65D7F14A-2EE6-4293-B958-AB5107A03B55
vp9cx_set_ref.DESCRIPTION = VP9 set encoder reference frame
endif
endif
ifeq ($(CONFIG_MULTI_RES_ENCODING),yes)
ifeq ($(CONFIG_LIBYUV),yes)

View File

@ -65,8 +65,7 @@ static void get_image_md5(const vpx_image_t *img, unsigned char digest[16]) {
static void print_md5(FILE *stream, unsigned char digest[16]) {
int i;
for (i = 0; i < 16; ++i)
fprintf(stream, "%02x", digest[i]);
for (i = 0; i < 16; ++i) fprintf(stream, "%02x", digest[i]);
}
static const char *exec_name;
@ -86,12 +85,10 @@ int main(int argc, char **argv) {
exec_name = argv[0];
if (argc != 3)
die("Invalid number of arguments.");
if (argc != 3) die("Invalid number of arguments.");
reader = vpx_video_reader_open(argv[1]);
if (!reader)
die("Failed to open %s for reading.", argv[1]);
if (!reader) die("Failed to open %s for reading.", argv[1]);
if (!(outfile = fopen(argv[2], "wb")))
die("Failed to open %s for writing.", argv[2]);
@ -99,8 +96,7 @@ int main(int argc, char **argv) {
info = vpx_video_reader_get_info(reader);
decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
if (!decoder)
die("Unknown input codec.");
if (!decoder) die("Unknown input codec.");
printf("Using %s\n", vpx_codec_iface_name(decoder->codec_interface()));
@ -111,8 +107,8 @@ int main(int argc, char **argv) {
vpx_codec_iter_t iter = NULL;
vpx_image_t *img = NULL;
size_t frame_size = 0;
const unsigned char *frame = vpx_video_reader_get_frame(reader,
&frame_size);
const unsigned char *frame =
vpx_video_reader_get_frame(reader, &frame_size);
if (vpx_codec_decode(&codec, frame, (unsigned int)frame_size, NULL, 0))
die_codec(&codec, "Failed to decode frame");
@ -121,14 +117,13 @@ int main(int argc, char **argv) {
get_image_md5(img, digest);
print_md5(outfile, digest);
fprintf(outfile, " img-%dx%d-%04d.i420\n",
img->d_w, img->d_h, ++frame_cnt);
fprintf(outfile, " img-%dx%d-%04d.i420\n", img->d_w, img->d_h,
++frame_cnt);
}
}
printf("Processed %d frames.\n", frame_cnt);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
vpx_video_reader_close(reader);

View File

@ -84,18 +84,16 @@ int main(int argc, char **argv) {
exec_name = argv[0];
if (argc != 4)
die("Invalid number of arguments.");
if (argc != 4) die("Invalid number of arguments.");
reader = vpx_video_reader_open(argv[1]);
if (!reader)
die("Failed to open %s for reading.", argv[1]);
if (!reader) die("Failed to open %s for reading.", argv[1]);
if (!(outfile = fopen(argv[2], "wb")))
die("Failed to open %s for writing.", argv[2]);
n = strtol(argv[3], &nptr, 0);
m = strtol(nptr + 1, NULL, 0);
n = (int)strtol(argv[3], &nptr, 0);
m = (int)strtol(nptr + 1, NULL, 0);
is_range = (*nptr == '-');
if (!n || !m || (*nptr != '-' && *nptr != '/'))
die("Couldn't parse pattern %s.\n", argv[3]);
@ -103,8 +101,7 @@ int main(int argc, char **argv) {
info = vpx_video_reader_get_info(reader);
decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
if (!decoder)
die("Unknown input codec.");
if (!decoder) die("Unknown input codec.");
printf("Using %s\n", vpx_codec_iface_name(decoder->codec_interface()));
@ -116,8 +113,8 @@ int main(int argc, char **argv) {
vpx_image_t *img = NULL;
size_t frame_size = 0;
int skip;
const unsigned char *frame = vpx_video_reader_get_frame(reader,
&frame_size);
const unsigned char *frame =
vpx_video_reader_get_frame(reader, &frame_size);
if (vpx_codec_decode(&codec, frame, (unsigned int)frame_size, NULL, 0))
die_codec(&codec, "Failed to decode frame.");
@ -139,8 +136,7 @@ int main(int argc, char **argv) {
}
printf("Processed %d frames.\n", frame_cnt);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
printf("Play: ffplay -f rawvideo -pix_fmt yuv420p -s %dx%d %s\n",
info->frame_width, info->frame_height, argv[2]);

View File

@ -68,12 +68,10 @@ int main(int argc, char **argv) {
exec_name = argv[0];
if (argc != 3)
die("Invalid number of arguments.");
if (argc != 3) die("Invalid number of arguments.");
reader = vpx_video_reader_open(argv[1]);
if (!reader)
die("Failed to open %s for reading.", argv[1]);
if (!reader) die("Failed to open %s for reading.", argv[1]);
if (!(outfile = fopen(argv[2], "wb")))
die("Failed to open %s for writing", argv[2]);
@ -81,8 +79,7 @@ int main(int argc, char **argv) {
info = vpx_video_reader_get_info(reader);
decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
if (!decoder)
die("Unknown input codec.");
if (!decoder) die("Unknown input codec.");
printf("Using %s\n", vpx_codec_iface_name(decoder->codec_interface()));
@ -91,26 +88,25 @@ int main(int argc, char **argv) {
if (res == VPX_CODEC_INCAPABLE)
die_codec(&codec, "Postproc not supported by this decoder.");
if (res)
die_codec(&codec, "Failed to initialize decoder.");
if (res) die_codec(&codec, "Failed to initialize decoder.");
while (vpx_video_reader_read_frame(reader)) {
vpx_codec_iter_t iter = NULL;
vpx_image_t *img = NULL;
size_t frame_size = 0;
const unsigned char *frame = vpx_video_reader_get_frame(reader,
&frame_size);
const unsigned char *frame =
vpx_video_reader_get_frame(reader, &frame_size);
++frame_cnt;
if (frame_cnt % 30 == 1) {
vp8_postproc_cfg_t pp = {0, 0, 0};
vp8_postproc_cfg_t pp = { 0, 0, 0 };
if (vpx_codec_control(&codec, VP8_SET_POSTPROC, &pp))
die_codec(&codec, "Failed to turn off postproc.");
if (vpx_codec_control(&codec, VP8_SET_POSTPROC, &pp))
die_codec(&codec, "Failed to turn off postproc.");
} else if (frame_cnt % 30 == 16) {
vp8_postproc_cfg_t pp = {VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE,
4, 0};
vp8_postproc_cfg_t pp = { VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE, 4,
0 };
if (vpx_codec_control(&codec, VP8_SET_POSTPROC, &pp))
die_codec(&codec, "Failed to turn on postproc.");
};
@ -125,8 +121,7 @@ int main(int argc, char **argv) {
}
printf("Processed %d frames.\n", frame_cnt);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");
printf("Play: ffplay -f rawvideo -pix_fmt yuv420p -s %dx%d %s\n",
info->frame_width, info->frame_height, argv[2]);

View File

@ -34,10 +34,8 @@ void usage_exit(void) {
static int parse_dim(char *v, int *width, int *height) {
char *x = strchr(v, 'x');
if (x == NULL)
x = strchr(v, 'X');
if (x == NULL)
return 0;
if (x == NULL) x = strchr(v, 'X');
if (x == NULL) return 0;
*width = atoi(v);
*height = atoi(&x[1]);
if (*width <= 0 || *height <= 0)
@ -93,30 +91,25 @@ int main(int argc, char *argv[]) {
else
frames = INT_MAX;
printf("Input size: %dx%d\n",
width, height);
printf("Target size: %dx%d, Frames: ",
target_width, target_height);
printf("Input size: %dx%d\n", width, height);
printf("Target size: %dx%d, Frames: ", target_width, target_height);
if (frames == INT_MAX)
printf("All\n");
else
printf("%d\n", frames);
inbuf = (uint8_t*)malloc(width * height * 3 / 2);
outbuf = (uint8_t*)malloc(target_width * target_height * 3 / 2);
inbuf = (uint8_t *)malloc(width * height * 3 / 2);
outbuf = (uint8_t *)malloc(target_width * target_height * 3 / 2);
inbuf_u = inbuf + width * height;
inbuf_v = inbuf_u + width * height / 4;
outbuf_u = outbuf + target_width * target_height;
outbuf_v = outbuf_u + target_width * target_height / 4;
f = 0;
while (f < frames) {
if (fread(inbuf, width * height * 3 / 2, 1, fpin) != 1)
break;
vp9_resize_frame420(inbuf, width, inbuf_u, inbuf_v, width / 2,
height, width,
outbuf, target_width, outbuf_u, outbuf_v,
target_width / 2,
target_height, target_width);
if (fread(inbuf, width * height * 3 / 2, 1, fpin) != 1) break;
vp9_resize_frame420(inbuf, width, inbuf_u, inbuf_v, width / 2, height,
width, outbuf, target_width, outbuf_u, outbuf_v,
target_width / 2, target_height, target_width);
fwrite(outbuf, target_width * target_height * 3 / 2, 1, fpout);
f++;
}

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
// VP8 Set Active and ROI Maps
// ===========================
//
@ -86,8 +85,7 @@ static void set_roi_map(const vpx_codec_enc_cfg_t *cfg,
roi.static_threshold[3] = 0;
roi.roi_map = (uint8_t *)malloc(roi.rows * roi.cols);
for (i = 0; i < roi.rows * roi.cols; ++i)
roi.roi_map[i] = i % 4;
for (i = 0; i < roi.rows * roi.cols; ++i) roi.roi_map[i] = i % 4;
if (vpx_codec_control(codec, VP8E_SET_ROI_MAP, &roi))
die_codec(codec, "Failed to set ROI map");
@ -98,14 +96,13 @@ static void set_roi_map(const vpx_codec_enc_cfg_t *cfg,
static void set_active_map(const vpx_codec_enc_cfg_t *cfg,
vpx_codec_ctx_t *codec) {
unsigned int i;
vpx_active_map_t map = {0, 0, 0};
vpx_active_map_t map = { 0, 0, 0 };
map.rows = (cfg->g_h + 15) / 16;
map.cols = (cfg->g_w + 15) / 16;
map.active_map = (uint8_t *)malloc(map.rows * map.cols);
for (i = 0; i < map.rows * map.cols; ++i)
map.active_map[i] = i % 2;
for (i = 0; i < map.rows * map.cols; ++i) map.active_map[i] = i % 2;
if (vpx_codec_control(codec, VP8E_SET_ACTIVEMAP, &map))
die_codec(codec, "Failed to set active map");
@ -115,7 +112,7 @@ static void set_active_map(const vpx_codec_enc_cfg_t *cfg,
static void unset_active_map(const vpx_codec_enc_cfg_t *cfg,
vpx_codec_ctx_t *codec) {
vpx_active_map_t map = {0, 0, 0};
vpx_active_map_t map = { 0, 0, 0 };
map.rows = (cfg->g_h + 15) / 16;
map.cols = (cfg->g_w + 15) / 16;
@ -125,25 +122,21 @@ static void unset_active_map(const vpx_codec_enc_cfg_t *cfg,
die_codec(codec, "Failed to set active map");
}
static int encode_frame(vpx_codec_ctx_t *codec,
vpx_image_t *img,
int frame_index,
VpxVideoWriter *writer) {
static int encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img,
int frame_index, VpxVideoWriter *writer) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
const vpx_codec_err_t res = vpx_codec_encode(codec, img, frame_index, 1, 0,
VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK)
die_codec(codec, "Failed to encode frame");
const vpx_codec_err_t res =
vpx_codec_encode(codec, img, frame_index, 1, 0, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK) die_codec(codec, "Failed to encode frame");
while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
got_pkts = 1;
if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
if (!vpx_video_writer_write_frame(writer,
pkt->data.frame.buf,
if (!vpx_video_writer_write_frame(writer, pkt->data.frame.buf,
pkt->data.frame.sz,
pkt->data.frame.pts)) {
die_codec(codec, "Failed to write compressed frame");
@ -167,12 +160,11 @@ int main(int argc, char **argv) {
VpxVideoInfo info;
VpxVideoWriter *writer = NULL;
const VpxInterface *encoder = NULL;
const int fps = 2; // TODO(dkovalev) add command line argument
const int fps = 2; // TODO(dkovalev) add command line argument
const double bits_per_pixel_per_frame = 0.067;
exec_name = argv[0];
if (argc != 6)
die("Invalid number of arguments");
if (argc != 6) die("Invalid number of arguments");
memset(&info, 0, sizeof(info));
@ -182,40 +174,36 @@ int main(int argc, char **argv) {
}
assert(encoder != NULL);
info.codec_fourcc = encoder->fourcc;
info.frame_width = strtol(argv[2], NULL, 0);
info.frame_height = strtol(argv[3], NULL, 0);
info.frame_width = (int)strtol(argv[2], NULL, 0);
info.frame_height = (int)strtol(argv[3], NULL, 0);
info.time_base.numerator = 1;
info.time_base.denominator = fps;
if (info.frame_width <= 0 ||
info.frame_height <= 0 ||
(info.frame_width % 2) != 0 ||
(info.frame_height % 2) != 0) {
if (info.frame_width <= 0 || info.frame_height <= 0 ||
(info.frame_width % 2) != 0 || (info.frame_height % 2) != 0) {
die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
}
if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
info.frame_height, 1)) {
info.frame_height, 1)) {
die("Failed to allocate image.");
}
printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
if (res)
die_codec(&codec, "Failed to get default codec config.");
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
cfg.g_h = info.frame_height;
cfg.g_timebase.num = info.time_base.numerator;
cfg.g_timebase.den = info.time_base.denominator;
cfg.rc_target_bitrate = (unsigned int)(bits_per_pixel_per_frame * cfg.g_w *
cfg.g_h * fps / 1000);
cfg.rc_target_bitrate =
(unsigned int)(bits_per_pixel_per_frame * cfg.g_w * cfg.g_h * fps / 1000);
cfg.g_lag_in_frames = 0;
writer = vpx_video_writer_open(argv[5], kContainerIVF, &info);
if (!writer)
die("Failed to open %s for writing.", argv[5]);
if (!writer) die("Failed to open %s for writing.", argv[5]);
if (!(infile = fopen(argv[4], "rb")))
die("Failed to open %s for reading.", argv[4]);
@ -239,15 +227,15 @@ int main(int argc, char **argv) {
}
// Flush encoder.
while (encode_frame(&codec, NULL, -1, writer)) {}
while (encode_frame(&codec, NULL, -1, writer)) {
}
printf("\n");
fclose(infile);
printf("Processed %d frames.\n", frame_count);
vpx_img_free(&raw);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
vpx_video_writer_close(writer);

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
// Simple Decoder
// ==============
//
@ -103,12 +102,10 @@ int main(int argc, char **argv) {
exec_name = argv[0];
if (argc != 3)
die("Invalid number of arguments.");
if (argc != 3) die("Invalid number of arguments.");
reader = vpx_video_reader_open(argv[1]);
if (!reader)
die("Failed to open %s for reading.", argv[1]);
if (!reader) die("Failed to open %s for reading.", argv[1]);
if (!(outfile = fopen(argv[2], "wb")))
die("Failed to open %s for writing.", argv[2]);
@ -116,8 +113,7 @@ int main(int argc, char **argv) {
info = vpx_video_reader_get_info(reader);
decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
if (!decoder)
die("Unknown input codec.");
if (!decoder) die("Unknown input codec.");
printf("Using %s\n", vpx_codec_iface_name(decoder->codec_interface()));
@ -128,8 +124,8 @@ int main(int argc, char **argv) {
vpx_codec_iter_t iter = NULL;
vpx_image_t *img = NULL;
size_t frame_size = 0;
const unsigned char *frame = vpx_video_reader_get_frame(reader,
&frame_size);
const unsigned char *frame =
vpx_video_reader_get_frame(reader, &frame_size);
if (vpx_codec_decode(&codec, frame, (unsigned int)frame_size, NULL, 0))
die_codec(&codec, "Failed to decode frame.");
@ -140,8 +136,7 @@ int main(int argc, char **argv) {
}
printf("Processed %d frames.\n", frame_cnt);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");
printf("Play: ffplay -f rawvideo -pix_fmt yuv420p -s %dx%d %s\n",
info->frame_width, info->frame_height, argv[2]);

View File

@ -109,32 +109,27 @@ static const char *exec_name;
void usage_exit(void) {
fprintf(stderr,
"Usage: %s <codec> <width> <height> <infile> <outfile> "
"<keyframe-interval> [<error-resilient>]\nSee comments in "
"simple_encoder.c for more information.\n",
"<keyframe-interval> <error-resilient> <frames to encode>\n"
"See comments in simple_encoder.c for more information.\n",
exec_name);
exit(EXIT_FAILURE);
}
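With the extra <error-resilient> and <frames to encode> arguments now required, an invocation looks like the following (binary name as produced by the examples build; the input file is a placeholder):

#!/bin/sh
# codec width height infile outfile keyframe-interval error-resilient frames
./simple_encoder vp9 352 288 input_352x288.yuv output.ivf 25 0 150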
static int encode_frame(vpx_codec_ctx_t *codec,
vpx_image_t *img,
int frame_index,
int flags,
VpxVideoWriter *writer) {
static int encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img,
int frame_index, int flags, VpxVideoWriter *writer) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
const vpx_codec_err_t res = vpx_codec_encode(codec, img, frame_index, 1,
flags, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK)
die_codec(codec, "Failed to encode frame");
const vpx_codec_err_t res =
vpx_codec_encode(codec, img, frame_index, 1, flags, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK) die_codec(codec, "Failed to encode frame");
while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
got_pkts = 1;
if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
if (!vpx_video_writer_write_frame(writer,
pkt->data.frame.buf,
if (!vpx_video_writer_write_frame(writer, pkt->data.frame.buf,
pkt->data.frame.sz,
pkt->data.frame.pts)) {
die_codec(codec, "Failed to write compressed frame");
@ -147,6 +142,7 @@ static int encode_frame(vpx_codec_ctx_t *codec,
return got_pkts;
}
// TODO(tomfinegan): Improve command line parsing and add args for bitrate/fps.
int main(int argc, char **argv) {
FILE *infile = NULL;
vpx_codec_ctx_t codec;
@ -154,15 +150,14 @@ int main(int argc, char **argv) {
int frame_count = 0;
vpx_image_t raw;
vpx_codec_err_t res;
VpxVideoInfo info = {0};
VpxVideoInfo info = { 0, 0, 0, { 0, 0 } };
VpxVideoWriter *writer = NULL;
const VpxInterface *encoder = NULL;
const int fps = 30; // TODO(dkovalev) add command line argument
const int bitrate = 200; // kbit/s TODO(dkovalev) add command line argument
const int fps = 30;
const int bitrate = 200;
int keyframe_interval = 0;
// TODO(dkovalev): Add some simple command line parsing code to make the
// command line more flexible.
int max_frames = 0;
int frames_encoded = 0;
const char *codec_arg = NULL;
const char *width_arg = NULL;
const char *height_arg = NULL;
@ -172,8 +167,7 @@ int main(int argc, char **argv) {
exec_name = argv[0];
if (argc < 7)
die("Invalid number of arguments");
if (argc != 9) die("Invalid number of arguments");
codec_arg = argv[1];
width_arg = argv[2];
@ -181,49 +175,44 @@ int main(int argc, char **argv) {
infile_arg = argv[4];
outfile_arg = argv[5];
keyframe_interval_arg = argv[6];
max_frames = (int)strtol(argv[8], NULL, 0);
encoder = get_vpx_encoder_by_name(codec_arg);
if (!encoder)
die("Unsupported codec.");
if (!encoder) die("Unsupported codec.");
info.codec_fourcc = encoder->fourcc;
info.frame_width = strtol(width_arg, NULL, 0);
info.frame_height = strtol(height_arg, NULL, 0);
info.frame_width = (int)strtol(width_arg, NULL, 0);
info.frame_height = (int)strtol(height_arg, NULL, 0);
info.time_base.numerator = 1;
info.time_base.denominator = fps;
if (info.frame_width <= 0 ||
info.frame_height <= 0 ||
(info.frame_width % 2) != 0 ||
(info.frame_height % 2) != 0) {
if (info.frame_width <= 0 || info.frame_height <= 0 ||
(info.frame_width % 2) != 0 || (info.frame_height % 2) != 0) {
die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
}
if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
info.frame_height, 1)) {
info.frame_height, 1)) {
die("Failed to allocate image.");
}
keyframe_interval = strtol(keyframe_interval_arg, NULL, 0);
if (keyframe_interval < 0)
die("Invalid keyframe interval value.");
keyframe_interval = (int)strtol(keyframe_interval_arg, NULL, 0);
if (keyframe_interval < 0) die("Invalid keyframe interval value.");
printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
if (res)
die_codec(&codec, "Failed to get default codec config.");
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
cfg.g_h = info.frame_height;
cfg.g_timebase.num = info.time_base.numerator;
cfg.g_timebase.den = info.time_base.denominator;
cfg.rc_target_bitrate = bitrate;
cfg.g_error_resilient = argc > 7 ? strtol(argv[7], NULL, 0) : 0;
cfg.g_error_resilient = (vpx_codec_er_flags_t)strtoul(argv[7], NULL, 0);
writer = vpx_video_writer_open(outfile_arg, kContainerIVF, &info);
if (!writer)
die("Failed to open %s for writing.", outfile_arg);
if (!writer) die("Failed to open %s for writing.", outfile_arg);
if (!(infile = fopen(infile_arg, "rb")))
die("Failed to open %s for reading.", infile_arg);
@ -237,18 +226,20 @@ int main(int argc, char **argv) {
if (keyframe_interval > 0 && frame_count % keyframe_interval == 0)
flags |= VPX_EFLAG_FORCE_KF;
encode_frame(&codec, &raw, frame_count++, flags, writer);
frames_encoded++;
if (max_frames > 0 && frames_encoded >= max_frames) break;
}
// Flush encoder.
while (encode_frame(&codec, NULL, -1, 0, writer)) {};
while (encode_frame(&codec, NULL, -1, 0, writer)) {
}
printf("\n");
fclose(infile);
printf("Processed %d frames.\n", frame_count);
vpx_img_free(&raw);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
vpx_video_writer_close(writer);

View File

@ -59,25 +59,23 @@
static const char *exec_name;
void usage_exit(void) {
fprintf(stderr, "Usage: %s <codec> <width> <height> <infile> <outfile>\n",
fprintf(stderr,
"Usage: %s <codec> <width> <height> <infile> <outfile> "
"<frame limit>\n",
exec_name);
exit(EXIT_FAILURE);
}
static int get_frame_stats(vpx_codec_ctx_t *ctx,
const vpx_image_t *img,
vpx_codec_pts_t pts,
unsigned int duration,
vpx_enc_frame_flags_t flags,
unsigned int deadline,
static int get_frame_stats(vpx_codec_ctx_t *ctx, const vpx_image_t *img,
vpx_codec_pts_t pts, unsigned int duration,
vpx_enc_frame_flags_t flags, unsigned int deadline,
vpx_fixed_buf_t *stats) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
const vpx_codec_err_t res = vpx_codec_encode(ctx, img, pts, duration, flags,
deadline);
if (res != VPX_CODEC_OK)
die_codec(ctx, "Failed to get frame stats.");
const vpx_codec_err_t res =
vpx_codec_encode(ctx, img, pts, duration, flags, deadline);
if (res != VPX_CODEC_OK) die_codec(ctx, "Failed to get frame stats.");
while ((pkt = vpx_codec_get_cx_data(ctx, &iter)) != NULL) {
got_pkts = 1;
@ -94,20 +92,16 @@ static int get_frame_stats(vpx_codec_ctx_t *ctx,
return got_pkts;
}
static int encode_frame(vpx_codec_ctx_t *ctx,
const vpx_image_t *img,
vpx_codec_pts_t pts,
unsigned int duration,
vpx_enc_frame_flags_t flags,
unsigned int deadline,
static int encode_frame(vpx_codec_ctx_t *ctx, const vpx_image_t *img,
vpx_codec_pts_t pts, unsigned int duration,
vpx_enc_frame_flags_t flags, unsigned int deadline,
VpxVideoWriter *writer) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
const vpx_codec_err_t res = vpx_codec_encode(ctx, img, pts, duration, flags,
deadline);
if (res != VPX_CODEC_OK)
die_codec(ctx, "Failed to encode frame.");
const vpx_codec_err_t res =
vpx_codec_encode(ctx, img, pts, duration, flags, deadline);
if (res != VPX_CODEC_OK) die_codec(ctx, "Failed to encode frame.");
while ((pkt = vpx_codec_get_cx_data(ctx, &iter)) != NULL) {
got_pkts = 1;
@ -115,8 +109,8 @@ static int encode_frame(vpx_codec_ctx_t *ctx,
const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
if (!vpx_video_writer_write_frame(writer, pkt->data.frame.buf,
pkt->data.frame.sz,
pkt->data.frame.pts))
pkt->data.frame.sz,
pkt->data.frame.pts))
die_codec(ctx, "Failed to write compressed frame.");
printf(keyframe ? "K" : ".");
fflush(stdout);
@ -126,13 +120,12 @@ static int encode_frame(vpx_codec_ctx_t *ctx,
return got_pkts;
}
static vpx_fixed_buf_t pass0(vpx_image_t *raw,
FILE *infile,
static vpx_fixed_buf_t pass0(vpx_image_t *raw, FILE *infile,
const VpxInterface *encoder,
const vpx_codec_enc_cfg_t *cfg) {
const vpx_codec_enc_cfg_t *cfg, int max_frames) {
vpx_codec_ctx_t codec;
int frame_count = 0;
vpx_fixed_buf_t stats = {NULL, 0};
vpx_fixed_buf_t stats = { NULL, 0 };
if (vpx_codec_enc_init(&codec, encoder->codec_interface(), cfg, 0))
die_codec(&codec, "Failed to initialize encoder");
@ -142,37 +135,33 @@ static vpx_fixed_buf_t pass0(vpx_image_t *raw,
++frame_count;
get_frame_stats(&codec, raw, frame_count, 1, 0, VPX_DL_GOOD_QUALITY,
&stats);
if (max_frames > 0 && frame_count >= max_frames) break;
}
// Flush encoder.
while (get_frame_stats(&codec, NULL, frame_count, 1, 0,
VPX_DL_GOOD_QUALITY, &stats)) {}
while (get_frame_stats(&codec, NULL, frame_count, 1, 0, VPX_DL_GOOD_QUALITY,
&stats)) {
}
printf("Pass 0 complete. Processed %d frames.\n", frame_count);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
return stats;
}
static void pass1(vpx_image_t *raw,
FILE *infile,
const char *outfile_name,
const VpxInterface *encoder,
const vpx_codec_enc_cfg_t *cfg) {
VpxVideoInfo info = {
encoder->fourcc,
cfg->g_w,
cfg->g_h,
{cfg->g_timebase.num, cfg->g_timebase.den}
};
static void pass1(vpx_image_t *raw, FILE *infile, const char *outfile_name,
const VpxInterface *encoder, const vpx_codec_enc_cfg_t *cfg,
int max_frames) {
VpxVideoInfo info = { encoder->fourcc,
cfg->g_w,
cfg->g_h,
{ cfg->g_timebase.num, cfg->g_timebase.den } };
VpxVideoWriter *writer = NULL;
vpx_codec_ctx_t codec;
int frame_count = 0;
writer = vpx_video_writer_open(outfile_name, kContainerIVF, &info);
if (!writer)
die("Failed to open %s for writing", outfile_name);
if (!writer) die("Failed to open %s for writing", outfile_name);
if (vpx_codec_enc_init(&codec, encoder->codec_interface(), cfg, 0))
die_codec(&codec, "Failed to initialize encoder");
@ -181,15 +170,17 @@ static void pass1(vpx_image_t *raw,
while (vpx_img_read(raw, infile)) {
++frame_count;
encode_frame(&codec, raw, frame_count, 1, 0, VPX_DL_GOOD_QUALITY, writer);
if (max_frames > 0 && frame_count >= max_frames) break;
}
// Flush encoder.
while (encode_frame(&codec, NULL, -1, 1, 0, VPX_DL_GOOD_QUALITY, writer)) {}
while (encode_frame(&codec, NULL, -1, 1, 0, VPX_DL_GOOD_QUALITY, writer)) {
}
printf("\n");
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
vpx_video_writer_close(writer);
@ -206,26 +197,27 @@ int main(int argc, char **argv) {
vpx_fixed_buf_t stats;
const VpxInterface *encoder = NULL;
const int fps = 30; // TODO(dkovalev) add command line argument
const int bitrate = 200; // kbit/s TODO(dkovalev) add command line argument
const int fps = 30; // TODO(dkovalev) add command line argument
const int bitrate = 200; // kbit/s TODO(dkovalev) add command line argument
const char *const codec_arg = argv[1];
const char *const width_arg = argv[2];
const char *const height_arg = argv[3];
const char *const infile_arg = argv[4];
const char *const outfile_arg = argv[5];
int max_frames = 0;
exec_name = argv[0];
if (argc != 6)
die("Invalid number of arguments.");
if (argc != 7) die("Invalid number of arguments.");
max_frames = (int)strtol(argv[6], NULL, 0);
encoder = get_vpx_encoder_by_name(codec_arg);
if (!encoder)
die("Unsupported codec.");
if (!encoder) die("Unsupported codec.");
w = strtol(width_arg, NULL, 0);
h = strtol(height_arg, NULL, 0);
w = (int)strtol(width_arg, NULL, 0);
h = (int)strtol(height_arg, NULL, 0);
if (w <= 0 || h <= 0 || (w % 2) != 0 || (h % 2) != 0)
if (w <= 0 || h <= 0 || (w % 2) != 0 || (h % 2) != 0)
die("Invalid frame size: %dx%d", w, h);
if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, w, h, 1))
@ -235,8 +227,7 @@ int main(int argc, char **argv) {
// Configuration
res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
if (res)
die_codec(&codec, "Failed to get default codec config.");
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = w;
cfg.g_h = h;
@ -249,13 +240,13 @@ int main(int argc, char **argv) {
// Pass 0
cfg.g_pass = VPX_RC_FIRST_PASS;
stats = pass0(&raw, infile, encoder, &cfg);
stats = pass0(&raw, infile, encoder, &cfg, max_frames);
// Pass 1
rewind(infile);
cfg.g_pass = VPX_RC_LAST_PASS;
cfg.rc_twopass_stats_in = stats;
pass1(&raw, infile, outfile_arg, encoder, &cfg);
pass1(&raw, infile, outfile_arg, encoder, &cfg, max_frames);
free(stats.buf);
vpx_img_free(&raw);

File diff suppressed because it is too large

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
// VP8 Set Reference Frame
// =======================
//
@ -52,6 +51,7 @@
#include "vpx/vp8cx.h"
#include "vpx/vpx_encoder.h"
#include "vp8/common/common.h"
#include "../tools_common.h"
#include "../video_writer.h"
@ -64,25 +64,21 @@ void usage_exit(void) {
exit(EXIT_FAILURE);
}
static int encode_frame(vpx_codec_ctx_t *codec,
vpx_image_t *img,
int frame_index,
VpxVideoWriter *writer) {
static int encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img,
int frame_index, VpxVideoWriter *writer) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
const vpx_codec_err_t res = vpx_codec_encode(codec, img, frame_index, 1, 0,
VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK)
die_codec(codec, "Failed to encode frame");
const vpx_codec_err_t res =
vpx_codec_encode(codec, img, frame_index, 1, 0, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK) die_codec(codec, "Failed to encode frame");
while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
got_pkts = 1;
if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
if (!vpx_video_writer_write_frame(writer,
pkt->data.frame.buf,
if (!vpx_video_writer_write_frame(writer, pkt->data.frame.buf,
pkt->data.frame.sz,
pkt->data.frame.pts)) {
die_codec(codec, "Failed to write compressed frame");
@ -98,55 +94,53 @@ static int encode_frame(vpx_codec_ctx_t *codec,
int main(int argc, char **argv) {
FILE *infile = NULL;
vpx_codec_ctx_t codec = {0};
vpx_codec_enc_cfg_t cfg = {0};
vpx_codec_ctx_t codec;
vpx_codec_enc_cfg_t cfg;
int frame_count = 0;
vpx_image_t raw;
vpx_codec_err_t res;
VpxVideoInfo info = {0};
VpxVideoInfo info;
VpxVideoWriter *writer = NULL;
const VpxInterface *encoder = NULL;
int update_frame_num = 0;
const int fps = 30; // TODO(dkovalev) add command line argument
const int bitrate = 200; // kbit/s TODO(dkovalev) add command line argument
const int fps = 30; // TODO(dkovalev) add command line argument
const int bitrate = 200; // kbit/s TODO(dkovalev) add command line argument
vp8_zero(codec);
vp8_zero(cfg);
vp8_zero(info);
exec_name = argv[0];
if (argc != 6)
die("Invalid number of arguments");
if (argc != 6) die("Invalid number of arguments");
// TODO(dkovalev): add vp9 support and rename the file accordingly
encoder = get_vpx_encoder_by_name("vp8");
if (!encoder)
die("Unsupported codec.");
if (!encoder) die("Unsupported codec.");
update_frame_num = atoi(argv[5]);
if (!update_frame_num)
die("Couldn't parse frame number '%s'\n", argv[5]);
if (!update_frame_num) die("Couldn't parse frame number '%s'\n", argv[5]);
info.codec_fourcc = encoder->fourcc;
info.frame_width = strtol(argv[1], NULL, 0);
info.frame_height = strtol(argv[2], NULL, 0);
info.frame_width = (int)strtol(argv[1], NULL, 0);
info.frame_height = (int)strtol(argv[2], NULL, 0);
info.time_base.numerator = 1;
info.time_base.denominator = fps;
if (info.frame_width <= 0 ||
info.frame_height <= 0 ||
(info.frame_width % 2) != 0 ||
(info.frame_height % 2) != 0) {
if (info.frame_width <= 0 || info.frame_height <= 0 ||
(info.frame_width % 2) != 0 || (info.frame_height % 2) != 0) {
die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
}
if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
info.frame_height, 1)) {
info.frame_height, 1)) {
die("Failed to allocate image.");
}
printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
if (res)
die_codec(&codec, "Failed to get default codec config.");
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
cfg.g_h = info.frame_height;
@ -155,8 +149,7 @@ int main(int argc, char **argv) {
cfg.rc_target_bitrate = bitrate;
writer = vpx_video_writer_open(argv[4], kContainerIVF, &info);
if (!writer)
die("Failed to open %s for writing.", argv[4]);
if (!writer) die("Failed to open %s for writing.", argv[4]);
if (!(infile = fopen(argv[3], "rb")))
die("Failed to open %s for reading.", argv[3]);
@ -178,15 +171,15 @@ int main(int argc, char **argv) {
}
// Flush encoder.
while (encode_frame(&codec, NULL, -1, writer)) {}
while (encode_frame(&codec, NULL, -1, writer)) {
}
printf("\n");
fclose(infile);
printf("Processed %d frames.\n", frame_count);
vpx_img_free(&raw);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
vpx_video_writer_close(writer);

View File

@ -14,6 +14,7 @@
#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
#include "vp9/common/vp9_common.h"
#include "../tools_common.h"
#include "../video_writer.h"
@ -21,32 +22,28 @@
static const char *exec_name;
void usage_exit(void) {
fprintf(stderr, "vp9_lossless_encoder: Example demonstrating VP9 lossless "
"encoding feature. Supports raw input only.\n");
fprintf(stderr,
"vp9_lossless_encoder: Example demonstrating VP9 lossless "
"encoding feature. Supports raw input only.\n");
fprintf(stderr, "Usage: %s <width> <height> <infile> <outfile>\n", exec_name);
exit(EXIT_FAILURE);
}
static int encode_frame(vpx_codec_ctx_t *codec,
vpx_image_t *img,
int frame_index,
int flags,
VpxVideoWriter *writer) {
static int encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img,
int frame_index, int flags, VpxVideoWriter *writer) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
const vpx_codec_err_t res = vpx_codec_encode(codec, img, frame_index, 1,
flags, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK)
die_codec(codec, "Failed to encode frame");
const vpx_codec_err_t res =
vpx_codec_encode(codec, img, frame_index, 1, flags, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK) die_codec(codec, "Failed to encode frame");
while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
got_pkts = 1;
if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
if (!vpx_video_writer_write_frame(writer,
pkt->data.frame.buf,
if (!vpx_video_writer_write_frame(writer, pkt->data.frame.buf,
pkt->data.frame.sz,
pkt->data.frame.pts)) {
die_codec(codec, "Failed to write compressed frame");
@ -66,43 +63,40 @@ int main(int argc, char **argv) {
int frame_count = 0;
vpx_image_t raw;
vpx_codec_err_t res;
VpxVideoInfo info = {0};
VpxVideoInfo info;
VpxVideoWriter *writer = NULL;
const VpxInterface *encoder = NULL;
const int fps = 30;
vp9_zero(info);
exec_name = argv[0];
if (argc < 5)
die("Invalid number of arguments");
if (argc < 5) die("Invalid number of arguments");
encoder = get_vpx_encoder_by_name("vp9");
if (!encoder)
die("Unsupported codec.");
if (!encoder) die("Unsupported codec.");
info.codec_fourcc = encoder->fourcc;
info.frame_width = strtol(argv[1], NULL, 0);
info.frame_height = strtol(argv[2], NULL, 0);
info.frame_width = (int)strtol(argv[1], NULL, 0);
info.frame_height = (int)strtol(argv[2], NULL, 0);
info.time_base.numerator = 1;
info.time_base.denominator = fps;
if (info.frame_width <= 0 ||
info.frame_height <= 0 ||
(info.frame_width % 2) != 0 ||
(info.frame_height % 2) != 0) {
if (info.frame_width <= 0 || info.frame_height <= 0 ||
(info.frame_width % 2) != 0 || (info.frame_height % 2) != 0) {
die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
}
if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
info.frame_height, 1)) {
info.frame_height, 1)) {
die("Failed to allocate image.");
}
printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
if (res)
die_codec(&codec, "Failed to get default codec config.");
if (res) die_codec(&codec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
cfg.g_h = info.frame_height;
@ -110,8 +104,7 @@ int main(int argc, char **argv) {
cfg.g_timebase.den = info.time_base.denominator;
writer = vpx_video_writer_open(argv[4], kContainerIVF, &info);
if (!writer)
die("Failed to open %s for writing.", argv[4]);
if (!writer) die("Failed to open %s for writing.", argv[4]);
if (!(infile = fopen(argv[3], "rb")))
die("Failed to open %s for reading.", argv[3]);
@ -128,15 +121,15 @@ int main(int argc, char **argv) {
}
// Flush encoder.
while (encode_frame(&codec, NULL, -1, 0, writer)) {}
while (encode_frame(&codec, NULL, -1, 0, writer)) {
}
printf("\n");
fclose(infile);
printf("Processed %d frames.\n", frame_count);
vpx_img_free(&raw);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec.");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");
vpx_video_writer_close(writer);

View File

@ -20,7 +20,6 @@
#include <string.h>
#include <time.h>
#include "../args.h"
#include "../tools_common.h"
#include "../video_writer.h"
@ -54,8 +53,9 @@ static const arg_def_t spatial_layers_arg =
static const arg_def_t temporal_layers_arg =
ARG_DEF("tl", "temporal-layers", 1, "number of temporal SVC layers");
static const arg_def_t temporal_layering_mode_arg =
ARG_DEF("tlm", "temporal-layering-mode", 1, "temporal layering scheme."
"VP9E_TEMPORAL_LAYERING_MODE");
ARG_DEF("tlm", "temporal-layering-mode", 1,
"temporal layering scheme."
"VP9E_TEMPORAL_LAYERING_MODE");
static const arg_def_t kf_dist_arg =
ARG_DEF("k", "kf-dist", 1, "number of frames between keyframes");
static const arg_def_t scale_factors_arg =
@ -75,8 +75,9 @@ static const arg_def_t min_bitrate_arg =
static const arg_def_t max_bitrate_arg =
ARG_DEF(NULL, "max-bitrate", 1, "Maximum bitrate");
static const arg_def_t lag_in_frame_arg =
ARG_DEF(NULL, "lag-in-frames", 1, "Number of frame to input before "
"generating any outputs");
ARG_DEF(NULL, "lag-in-frames", 1,
"Number of frame to input before "
"generating any outputs");
static const arg_def_t rc_end_usage_arg =
ARG_DEF(NULL, "rc-end-usage", 1, "0 - 3: VBR, CBR, CQ, Q");
static const arg_def_t speed_arg =
@ -86,35 +87,44 @@ static const arg_def_t aqmode_arg =
#if CONFIG_VP9_HIGHBITDEPTH
static const struct arg_enum_list bitdepth_enum[] = {
{"8", VPX_BITS_8},
{"10", VPX_BITS_10},
{"12", VPX_BITS_12},
{NULL, 0}
{ "8", VPX_BITS_8 }, { "10", VPX_BITS_10 }, { "12", VPX_BITS_12 }, { NULL, 0 }
};
static const arg_def_t bitdepth_arg =
ARG_DEF_ENUM("d", "bit-depth", 1, "Bit depth for codec 8, 10 or 12. ",
bitdepth_enum);
static const arg_def_t bitdepth_arg = ARG_DEF_ENUM(
"d", "bit-depth", 1, "Bit depth for codec 8, 10 or 12. ", bitdepth_enum);
#endif // CONFIG_VP9_HIGHBITDEPTH
static const arg_def_t *svc_args[] = {
&frames_arg, &width_arg, &height_arg,
&timebase_arg, &bitrate_arg, &skip_frames_arg, &spatial_layers_arg,
&kf_dist_arg, &scale_factors_arg, &passes_arg, &pass_arg,
&fpf_name_arg, &min_q_arg, &max_q_arg, &min_bitrate_arg,
&max_bitrate_arg, &temporal_layers_arg, &temporal_layering_mode_arg,
&lag_in_frame_arg, &threads_arg, &aqmode_arg,
static const arg_def_t *svc_args[] = { &frames_arg,
&width_arg,
&height_arg,
&timebase_arg,
&bitrate_arg,
&skip_frames_arg,
&spatial_layers_arg,
&kf_dist_arg,
&scale_factors_arg,
&passes_arg,
&pass_arg,
&fpf_name_arg,
&min_q_arg,
&max_q_arg,
&min_bitrate_arg,
&max_bitrate_arg,
&temporal_layers_arg,
&temporal_layering_mode_arg,
&lag_in_frame_arg,
&threads_arg,
&aqmode_arg,
#if OUTPUT_RC_STATS
&output_rc_stats_arg,
&output_rc_stats_arg,
#endif
#if CONFIG_VP9_HIGHBITDEPTH
&bitdepth_arg,
&bitdepth_arg,
#endif
&speed_arg,
&rc_end_usage_arg, NULL
};
&speed_arg,
&rc_end_usage_arg,
NULL };
static const uint32_t default_frames_to_skip = 0;
static const uint32_t default_frames_to_code = 60 * 60;
@ -128,7 +138,7 @@ static const uint32_t default_temporal_layers = 1;
static const uint32_t default_kf_dist = 100;
static const uint32_t default_temporal_layering_mode = 0;
static const uint32_t default_output_rc_stats = 0;
static const int32_t default_speed = -1; // -1 means use library default.
static const int32_t default_speed = -1; // -1 means use library default.
static const uint32_t default_threads = 0; // zero means use library default.
typedef struct {
@ -155,7 +165,7 @@ void usage_exit(void) {
static void parse_command_line(int argc, const char **argv_,
AppInput *app_input, SvcContext *svc_ctx,
vpx_codec_enc_cfg_t *enc_cfg) {
struct arg arg = {0};
struct arg arg = { 0 };
char **argv = NULL;
char **argi = NULL;
char **argj = NULL;
@ -165,7 +175,7 @@ static void parse_command_line(int argc, const char **argv_,
const char *fpf_file_name = NULL;
unsigned int min_bitrate = 0;
unsigned int max_bitrate = 0;
char string_options[1024] = {0};
char string_options[1024] = { 0 };
// initialize SvcContext with parameters that will be passed to vpx_svc_init
svc_ctx->log_level = SVC_LOG_DEBUG;
@ -229,8 +239,8 @@ static void parse_command_line(int argc, const char **argv_,
} else if (arg_match(&arg, &threads_arg, argi)) {
svc_ctx->threads = arg_parse_uint(&arg);
} else if (arg_match(&arg, &temporal_layering_mode_arg, argi)) {
svc_ctx->temporal_layering_mode =
enc_cfg->temporal_layering_mode = arg_parse_int(&arg);
svc_ctx->temporal_layering_mode = enc_cfg->temporal_layering_mode =
arg_parse_int(&arg);
if (svc_ctx->temporal_layering_mode) {
enc_cfg->g_error_resilient = 1;
}
@ -278,7 +288,7 @@ static void parse_command_line(int argc, const char **argv_,
enc_cfg->g_input_bit_depth = 10;
enc_cfg->g_profile = 2;
break;
case VPX_BITS_12:
case VPX_BITS_12:
enc_cfg->g_input_bit_depth = 12;
enc_cfg->g_profile = 2;
break;
@ -360,9 +370,8 @@ static void parse_command_line(int argc, const char **argv_,
"num: %d, den: %d, bitrate: %d,\n"
"gop size: %d\n",
vpx_codec_iface_name(vpx_codec_vp9_cx()), app_input->frames_to_code,
app_input->frames_to_skip,
svc_ctx->spatial_layers, enc_cfg->g_w, enc_cfg->g_h,
enc_cfg->g_timebase.num, enc_cfg->g_timebase.den,
app_input->frames_to_skip, svc_ctx->spatial_layers, enc_cfg->g_w,
enc_cfg->g_h, enc_cfg->g_timebase.num, enc_cfg->g_timebase.den,
enc_cfg->rc_target_bitrate, enc_cfg->kf_max_dist);
}
@ -399,7 +408,7 @@ struct RateControlStats {
// Note: these rate control stats assume only 1 key frame in the
// sequence (i.e., first frame only).
static void set_rate_control_stats(struct RateControlStats *rc,
vpx_codec_enc_cfg_t *cfg) {
vpx_codec_enc_cfg_t *cfg) {
unsigned int sl, tl;
// Set the layer (cumulative) framerate and the target layer (non-cumulative)
// per-frame-bandwidth, for the rate control encoding stats below.
@ -412,18 +421,15 @@ static void set_rate_control_stats(struct RateControlStats *rc,
if (cfg->ts_number_layers == 1)
rc->layer_framerate[layer] = framerate;
else
rc->layer_framerate[layer] =
framerate / cfg->ts_rate_decimator[tl];
rc->layer_framerate[layer] = framerate / cfg->ts_rate_decimator[tl];
if (tl > 0) {
rc->layer_pfb[layer] = 1000.0 *
(cfg->layer_target_bitrate[layer] -
cfg->layer_target_bitrate[layer - 1]) /
(rc->layer_framerate[layer] -
rc->layer_framerate[layer - 1]);
rc->layer_pfb[layer] =
1000.0 * (cfg->layer_target_bitrate[layer] -
cfg->layer_target_bitrate[layer - 1]) /
(rc->layer_framerate[layer] - rc->layer_framerate[layer - 1]);
} else {
rc->layer_pfb[tlayer0] = 1000.0 *
cfg->layer_target_bitrate[tlayer0] /
rc->layer_framerate[tlayer0];
rc->layer_pfb[tlayer0] = 1000.0 * cfg->layer_target_bitrate[tlayer0] /
rc->layer_framerate[tlayer0];
}
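      // Worked example with illustrative numbers (not taken from this file):
      // with cumulative layer target bitrates of 150 and 450 kbps and
      // cumulative layer framerates of 15 and 30 fps, the base layer
      // per-frame bandwidth is 1000.0 * 150 / 15 = 10000 bits, while the
      // temporal enhancement layer gets
      // 1000.0 * (450 - 150) / (30 - 15) = 20000 bits per frame.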
rc->layer_input_frames[layer] = 0;
rc->layer_enc_frames[layer] = 0;
@ -447,31 +453,33 @@ static void printout_rate_control_summary(struct RateControlStats *rc,
double perc_fluctuation = 0.0;
printf("Total number of processed frames: %d\n\n", frame_cnt - 1);
printf("Rate control layer stats for sl%d tl%d layer(s):\n\n",
cfg->ss_number_layers, cfg->ts_number_layers);
cfg->ss_number_layers, cfg->ts_number_layers);
for (sl = 0; sl < cfg->ss_number_layers; ++sl) {
for (tl = 0; tl < cfg->ts_number_layers; ++tl) {
const int layer = sl * cfg->ts_number_layers + tl;
const int num_dropped = (tl > 0) ?
(rc->layer_input_frames[layer] - rc->layer_enc_frames[layer]) :
(rc->layer_input_frames[layer] - rc->layer_enc_frames[layer] - 1);
if (!sl)
tot_num_frames += rc->layer_input_frames[layer];
const int num_dropped =
(tl > 0)
? (rc->layer_input_frames[layer] - rc->layer_enc_frames[layer])
: (rc->layer_input_frames[layer] - rc->layer_enc_frames[layer] -
1);
if (!sl) tot_num_frames += rc->layer_input_frames[layer];
rc->layer_encoding_bitrate[layer] = 0.001 * rc->layer_framerate[layer] *
rc->layer_encoding_bitrate[layer] / tot_num_frames;
rc->layer_avg_frame_size[layer] = rc->layer_avg_frame_size[layer] /
rc->layer_enc_frames[layer];
rc->layer_avg_rate_mismatch[layer] =
100.0 * rc->layer_avg_rate_mismatch[layer] /
rc->layer_enc_frames[layer];
rc->layer_encoding_bitrate[layer] /
tot_num_frames;
rc->layer_avg_frame_size[layer] =
rc->layer_avg_frame_size[layer] / rc->layer_enc_frames[layer];
rc->layer_avg_rate_mismatch[layer] = 100.0 *
rc->layer_avg_rate_mismatch[layer] /
rc->layer_enc_frames[layer];
printf("For layer#: sl%d tl%d \n", sl, tl);
printf("Bitrate (target vs actual): %d %f.0 kbps\n",
cfg->layer_target_bitrate[layer],
rc->layer_encoding_bitrate[layer]);
printf("Average frame size (target vs actual): %f %f bits\n",
rc->layer_pfb[layer], rc->layer_avg_frame_size[layer]);
printf("Average rate_mismatch: %f\n",
rc->layer_avg_rate_mismatch[layer]);
printf("Number of input frames, encoded (non-key) frames, "
printf("Average rate_mismatch: %f\n", rc->layer_avg_rate_mismatch[layer]);
printf(
"Number of input frames, encoded (non-key) frames, "
"and percent dropped frames: %d %d %f.0 \n",
rc->layer_input_frames[layer], rc->layer_enc_frames[layer],
100.0 * num_dropped / rc->layer_input_frames[layer]);
@ -483,19 +491,18 @@ static void printout_rate_control_summary(struct RateControlStats *rc,
rc->variance_st_encoding_bitrate / rc->window_count -
(rc->avg_st_encoding_bitrate * rc->avg_st_encoding_bitrate);
perc_fluctuation = 100.0 * sqrt(rc->variance_st_encoding_bitrate) /
rc->avg_st_encoding_bitrate;
rc->avg_st_encoding_bitrate;
printf("Short-time stats, for window of %d frames: \n", rc->window_size);
printf("Average, rms-variance, and percent-fluct: %f %f %f \n",
rc->avg_st_encoding_bitrate,
sqrt(rc->variance_st_encoding_bitrate),
rc->avg_st_encoding_bitrate, sqrt(rc->variance_st_encoding_bitrate),
perc_fluctuation);
if (frame_cnt != tot_num_frames)
die("Error: Number of input frames not equal to output encoded frames != "
"%d tot_num_frames = %d\n", frame_cnt, tot_num_frames);
"%d tot_num_frames = %d\n",
frame_cnt, tot_num_frames);
}
vpx_codec_err_t parse_superframe_index(const uint8_t *data,
size_t data_sz,
vpx_codec_err_t parse_superframe_index(const uint8_t *data, size_t data_sz,
uint32_t sizes[8], int *count) {
// A chunk ending with a byte matching 0xc0 is an invalid chunk unless
// it is a super frame index. If the last byte of real video compression
@ -508,7 +515,6 @@ vpx_codec_err_t parse_superframe_index(const uint8_t *data,
marker = *(data + data_sz - 1);
*count = 0;
if ((marker & 0xe0) == 0xc0) {
const uint32_t frames = (marker & 0x7) + 1;
const uint32_t mag = ((marker >> 3) & 0x3) + 1;
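    // For example (illustrative, not from a specific stream): a marker byte
    // of 0xc9 gives frames = (0x9 & 0x7) + 1 = 2 and
    // mag = ((0xc9 >> 3) & 0x3) + 1 = 2, so the superframe index occupies
    // index_sz = 2 + mag * frames = 6 bytes at the end of the chunk.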
@ -516,8 +522,7 @@ vpx_codec_err_t parse_superframe_index(const uint8_t *data,
// This chunk is marked as having a superframe index but doesn't have
// enough data for it, thus it's an invalid superframe index.
if (data_sz < index_sz)
return VPX_CODEC_CORRUPT_FRAME;
if (data_sz < index_sz) return VPX_CODEC_CORRUPT_FRAME;
{
const uint8_t marker2 = *(data + data_sz - index_sz);
@ -525,8 +530,7 @@ vpx_codec_err_t parse_superframe_index(const uint8_t *data,
// This chunk is marked as having a superframe index but doesn't have
// the matching marker byte at the front of the index therefore it's an
// invalid chunk.
if (marker != marker2)
return VPX_CODEC_CORRUPT_FRAME;
if (marker != marker2) return VPX_CODEC_CORRUPT_FRAME;
}
{
@ -537,8 +541,7 @@ vpx_codec_err_t parse_superframe_index(const uint8_t *data,
for (i = 0; i < frames; ++i) {
uint32_t this_sz = 0;
for (j = 0; j < mag; ++j)
this_sz |= (*x++) << (j * 8);
for (j = 0; j < mag; ++j) this_sz |= (*x++) << (j * 8);
sizes[i] = this_sz;
}
*count = frames;
@ -558,32 +561,27 @@ void set_frame_flags_bypass_mode(int sl, int tl, int num_spatial_layers,
for (sl = 0; sl < num_spatial_layers; ++sl) {
if (!tl) {
if (!sl) {
ref_frame_config->frame_flags[sl] = VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
ref_frame_config->frame_flags[sl] =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
} else {
if (is_key_frame) {
ref_frame_config->frame_flags[sl] = VP8_EFLAG_NO_REF_LAST |
VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
ref_frame_config->frame_flags[sl] =
VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
} else {
ref_frame_config->frame_flags[sl] = VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
ref_frame_config->frame_flags[sl] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
}
}
} else if (tl == 1) {
if (!sl) {
ref_frame_config->frame_flags[sl] = VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF;
ref_frame_config->frame_flags[sl] =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF;
} else {
ref_frame_config->frame_flags[sl] = VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF;
ref_frame_config->frame_flags[sl] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
}
}
if (tl == 0) {
@ -602,9 +600,9 @@ void set_frame_flags_bypass_mode(int sl, int tl, int num_spatial_layers,
}
int main(int argc, const char **argv) {
AppInput app_input = {0};
AppInput app_input = { 0 };
VpxVideoWriter *writer = NULL;
VpxVideoInfo info = {0};
VpxVideoInfo info = { 0 };
vpx_codec_ctx_t codec;
vpx_codec_enc_cfg_t enc_cfg;
SvcContext svc_ctx;
@ -618,14 +616,14 @@ int main(int argc, const char **argv) {
int end_of_stream = 0;
int frames_received = 0;
#if OUTPUT_RC_STATS
VpxVideoWriter *outfile[VPX_TS_MAX_LAYERS] = {NULL};
VpxVideoWriter *outfile[VPX_TS_MAX_LAYERS] = { NULL };
struct RateControlStats rc;
vpx_svc_layer_id_t layer_id;
vpx_svc_ref_frame_config_t ref_frame_config;
int sl, tl;
double sum_bitrate = 0.0;
double sum_bitrate2 = 0.0;
double framerate = 30.0;
double framerate = 30.0;
#endif
struct vpx_usec_timer timer;
int64_t cx_time = 0;
@ -634,10 +632,10 @@ int main(int argc, const char **argv) {
exec_name = argv[0];
parse_command_line(argc, argv, &app_input, &svc_ctx, &enc_cfg);
// Allocate image buffer
// Allocate image buffer
#if CONFIG_VP9_HIGHBITDEPTH
if (!vpx_img_alloc(&raw, enc_cfg.g_input_bit_depth == 8 ?
VPX_IMG_FMT_I420 : VPX_IMG_FMT_I42016,
if (!vpx_img_alloc(&raw, enc_cfg.g_input_bit_depth == 8 ? VPX_IMG_FMT_I420
: VPX_IMG_FMT_I42016,
enc_cfg.g_w, enc_cfg.g_h, 32)) {
die("Failed to allocate image %dx%d\n", enc_cfg.g_w, enc_cfg.g_h);
}
@ -668,8 +666,8 @@ int main(int argc, const char **argv) {
if (!(app_input.passes == 2 && app_input.pass == 1)) {
// We don't save the bitstream for the 1st pass on two pass rate control
writer = vpx_video_writer_open(app_input.output_filename, kContainerIVF,
&info);
writer =
vpx_video_writer_open(app_input.output_filename, kContainerIVF, &info);
if (!writer)
die("Failed to open %s for writing\n", app_input.output_filename);
}
@ -683,15 +681,13 @@ int main(int argc, const char **argv) {
snprintf(file_name, sizeof(file_name), "%s_t%d.ivf",
app_input.output_filename, tl);
outfile[tl] = vpx_video_writer_open(file_name, kContainerIVF, &info);
if (!outfile[tl])
die("Failed to open %s for writing", file_name);
if (!outfile[tl]) die("Failed to open %s for writing", file_name);
}
}
#endif
// skip initial frames
for (i = 0; i < app_input.frames_to_skip; ++i)
vpx_img_read(&raw, infile);
for (i = 0; i < app_input.frames_to_skip; ++i) vpx_img_read(&raw, infile);
if (svc_ctx.speed != -1)
vpx_codec_control(&codec, VP8E_SET_CPUUSED, svc_ctx.speed);
@ -700,7 +696,6 @@ int main(int argc, const char **argv) {
if (svc_ctx.speed >= 5 && svc_ctx.aqmode == 1)
vpx_codec_control(&codec, VP9E_SET_AQ_MODE, 3);
// Encode frames
while (!end_of_stream) {
vpx_codec_iter_t iter = NULL;
@ -729,8 +724,7 @@ int main(int argc, const char **argv) {
// over all the spatial layers for the current superframe.
vpx_codec_control(&codec, VP9E_SET_SVC_LAYER_ID, &layer_id);
set_frame_flags_bypass_mode(sl, layer_id.temporal_layer_id,
svc_ctx.spatial_layers,
frame_cnt == 0,
svc_ctx.spatial_layers, frame_cnt == 0,
&ref_frame_config);
vpx_codec_control(&codec, VP9E_SET_SVC_REF_FRAME_CONFIG,
&ref_frame_config);
@ -743,9 +737,9 @@ int main(int argc, const char **argv) {
}
vpx_usec_timer_start(&timer);
res = vpx_svc_encode(&svc_ctx, &codec, (end_of_stream ? NULL : &raw),
pts, frame_duration, svc_ctx.speed >= 5 ?
VPX_DL_REALTIME : VPX_DL_GOOD_QUALITY);
res = vpx_svc_encode(
&svc_ctx, &codec, (end_of_stream ? NULL : &raw), pts, frame_duration,
svc_ctx.speed >= 5 ? VPX_DL_REALTIME : VPX_DL_GOOD_QUALITY);
vpx_usec_timer_mark(&timer);
cx_time += vpx_usec_timer_elapsed(&timer);
@ -764,8 +758,7 @@ int main(int argc, const char **argv) {
uint32_t sizes[8];
int count = 0;
#endif
vpx_video_writer_write_frame(writer,
cx_pkt->data.frame.buf,
vpx_video_writer_write_frame(writer, cx_pkt->data.frame.buf,
cx_pkt->data.frame.sz,
cx_pkt->data.frame.pts);
#if OUTPUT_RC_STATS
@ -782,20 +775,19 @@ int main(int argc, const char **argv) {
VP9E_TEMPORAL_LAYERING_MODE_BYPASS) {
for (sl = 0; sl < enc_cfg.ss_number_layers; ++sl) {
++rc.layer_input_frames[sl * enc_cfg.ts_number_layers +
layer_id.temporal_layer_id];
layer_id.temporal_layer_id];
}
}
for (tl = layer_id.temporal_layer_id;
tl < enc_cfg.ts_number_layers; ++tl) {
vpx_video_writer_write_frame(outfile[tl],
cx_pkt->data.frame.buf,
cx_pkt->data.frame.sz,
cx_pkt->data.frame.pts);
tl < enc_cfg.ts_number_layers; ++tl) {
vpx_video_writer_write_frame(
outfile[tl], cx_pkt->data.frame.buf, cx_pkt->data.frame.sz,
cx_pkt->data.frame.pts);
}
for (sl = 0; sl < enc_cfg.ss_number_layers; ++sl) {
for (tl = layer_id.temporal_layer_id;
tl < enc_cfg.ts_number_layers; ++tl) {
tl < enc_cfg.ts_number_layers; ++tl) {
const int layer = sl * enc_cfg.ts_number_layers + tl;
++rc.layer_tot_enc_frames[layer];
rc.layer_encoding_bitrate[layer] += 8.0 * sizes[sl];
@ -832,20 +824,20 @@ int main(int argc, const char **argv) {
// Second shifted window.
if (frame_cnt > rc.window_size + rc.window_size / 2) {
tl = layer_id.temporal_layer_id;
for (sl = 0; sl < enc_cfg.ss_number_layers; ++sl) {
sum_bitrate2 += 0.001 * 8.0 * sizes[sl] * framerate;
}
tl = layer_id.temporal_layer_id;
for (sl = 0; sl < enc_cfg.ss_number_layers; ++sl) {
sum_bitrate2 += 0.001 * 8.0 * sizes[sl] * framerate;
}
if (frame_cnt > 2 * rc.window_size &&
frame_cnt % rc.window_size == 0) {
rc.window_count += 1;
rc.avg_st_encoding_bitrate += sum_bitrate2 / rc.window_size;
rc.variance_st_encoding_bitrate +=
(sum_bitrate2 / rc.window_size) *
(sum_bitrate2 / rc.window_size);
sum_bitrate2 = 0.0;
}
if (frame_cnt > 2 * rc.window_size &&
frame_cnt % rc.window_size == 0) {
rc.window_count += 1;
rc.avg_st_encoding_bitrate += sum_bitrate2 / rc.window_size;
rc.variance_st_encoding_bitrate +=
(sum_bitrate2 / rc.window_size) *
(sum_bitrate2 / rc.window_size);
sum_bitrate2 = 0.0;
}
}
}
#endif
@ -860,14 +852,11 @@ int main(int argc, const char **argv) {
break;
}
case VPX_CODEC_STATS_PKT: {
stats_write(&app_input.rc_stats,
cx_pkt->data.twopass_stats.buf,
stats_write(&app_input.rc_stats, cx_pkt->data.twopass_stats.buf,
cx_pkt->data.twopass_stats.sz);
break;
}
default: {
break;
}
default: { break; }
}
}
@ -880,8 +869,8 @@ int main(int argc, const char **argv) {
// Compensate for the extra frame count for the bypass mode.
if (svc_ctx.temporal_layering_mode == VP9E_TEMPORAL_LAYERING_MODE_BYPASS) {
for (sl = 0; sl < enc_cfg.ss_number_layers; ++sl) {
const int layer = sl * enc_cfg.ts_number_layers +
layer_id.temporal_layer_id;
const int layer =
sl * enc_cfg.ts_number_layers + layer_id.temporal_layer_id;
--rc.layer_input_frames[layer];
}
}
@ -895,8 +884,7 @@ int main(int argc, const char **argv) {
}
#endif
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");
if (app_input.passes == 2)
stats_close(&app_input.rc_stats, 1);
if (app_input.passes == 2) stats_close(&app_input.rc_stats, 1);
if (writer) {
vpx_video_writer_close(writer);
}
@ -908,8 +896,7 @@ int main(int argc, const char **argv) {
}
#endif
printf("Frame cnt and encoding time/FPS stats for encoding: %d %f %f \n",
frame_cnt,
1000 * (float)cx_time / (double)(frame_cnt * 1000000),
frame_cnt, 1000 * (float)cx_time / (double)(frame_cnt * 1000000),
1000000 * (double)frame_cnt / (double)cx_time);
vpx_img_free(&raw);
// display average size, psnr

View File

@ -0,0 +1,438 @@
/*
* Copyright (c) 2016 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// VP9 Set Reference Frame
// ============================
//
// This is an example demonstrating how to overwrite the VP9 encoder's
// internal reference frame. In the sample we set the last frame to the
// current frame. This technique could be used to bounce between two cameras.
//
// The decoder would also have to set the reference frame to the same value
// on the same frame, or the video will become corrupt. The 'test_decode'
// variable is set to 1 in this example so that it checks whether the encoder
// and decoder results match.
//
// Usage
// -----
// This example encodes a raw video. The last argument passed in specifies
// the frame number on which to update the reference frame. For example, run
// examples/vp9cx_set_ref 352 288 in.yuv out.ivf 4 30
// The parameters are parsed as follows: <width> <height> give the raw frame
// dimensions (352 288 above), <infile> and <outfile> are the raw input and
// the IVF output, <frame> is the frame number on which to set the reference
// (4 above), and the optional <limit> caps the number of frames encoded
// (30 above).
//
// Extra Variables
// ---------------
// This example maintains the frame number passed on the command line
// in the `update_frame_num` variable.
//
//
// Configuration
// -------------
//
// The reference frame is updated on the frame specified on the command
// line.
//
// Observing The Effects
// ---------------------
// The encoder and decoder results should match when the same reference-frame
// setting operation is done in both the encoder and the decoder. Otherwise,
// an encoder/decoder mismatch will be seen.
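//
// As a quick orientation (a minimal sketch using the same names as the code
// below, not additional functionality): on the chosen frame, both the encoder
// and the decoder are handed the same raw image as their LAST reference
// through vpx_codec_control(), roughly:
//
//   vpx_ref_frame_t ref;
//   ref.frame_type = VP8_LAST_FRAME;  // overwrite the LAST reference buffer
//   ref.img = raw;                    // install the current raw frame
//   vpx_codec_control(&ecodec, VP8_SET_REFERENCE, &ref);    // encoder side
//   if (test_decode)
//     vpx_codec_control(&dcodec, VP8_SET_REFERENCE, &ref);  // decoder side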
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "vpx/vp8cx.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vpx_encoder.h"
#include "vp9/common/vp9_common.h"
#include "./tools_common.h"
#include "./video_writer.h"
static const char *exec_name;
void usage_exit() {
fprintf(stderr,
"Usage: %s <width> <height> <infile> <outfile> "
"<frame> <limit(optional)>\n",
exec_name);
exit(EXIT_FAILURE);
}
static int compare_img(const vpx_image_t *const img1,
const vpx_image_t *const img2) {
uint32_t l_w = img1->d_w;
uint32_t c_w = (img1->d_w + img1->x_chroma_shift) >> img1->x_chroma_shift;
const uint32_t c_h =
(img1->d_h + img1->y_chroma_shift) >> img1->y_chroma_shift;
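  // For instance, for the 352x288 I420 input used in the usage example above
  // (chroma shifts of 1), l_w = 352, c_w = (352 + 1) >> 1 = 176 and
  // c_h = (288 + 1) >> 1 = 144.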
uint32_t i;
int match = 1;
match &= (img1->fmt == img2->fmt);
match &= (img1->d_w == img2->d_w);
match &= (img1->d_h == img2->d_h);
for (i = 0; i < img1->d_h; ++i)
match &= (memcmp(img1->planes[VPX_PLANE_Y] + i * img1->stride[VPX_PLANE_Y],
img2->planes[VPX_PLANE_Y] + i * img2->stride[VPX_PLANE_Y],
l_w) == 0);
for (i = 0; i < c_h; ++i)
match &= (memcmp(img1->planes[VPX_PLANE_U] + i * img1->stride[VPX_PLANE_U],
img2->planes[VPX_PLANE_U] + i * img2->stride[VPX_PLANE_U],
c_w) == 0);
for (i = 0; i < c_h; ++i)
match &= (memcmp(img1->planes[VPX_PLANE_V] + i * img1->stride[VPX_PLANE_V],
img2->planes[VPX_PLANE_V] + i * img2->stride[VPX_PLANE_V],
c_w) == 0);
return match;
}
#define mmin(a, b) ((a) < (b) ? (a) : (b))
static void find_mismatch(const vpx_image_t *const img1,
const vpx_image_t *const img2, int yloc[4],
int uloc[4], int vloc[4]) {
const uint32_t bsize = 64;
const uint32_t bsizey = bsize >> img1->y_chroma_shift;
const uint32_t bsizex = bsize >> img1->x_chroma_shift;
const uint32_t c_w =
(img1->d_w + img1->x_chroma_shift) >> img1->x_chroma_shift;
const uint32_t c_h =
(img1->d_h + img1->y_chroma_shift) >> img1->y_chroma_shift;
int match = 1;
uint32_t i, j;
yloc[0] = yloc[1] = yloc[2] = yloc[3] = -1;
for (i = 0, match = 1; match && i < img1->d_h; i += bsize) {
for (j = 0; match && j < img1->d_w; j += bsize) {
int k, l;
const int si = mmin(i + bsize, img1->d_h) - i;
const int sj = mmin(j + bsize, img1->d_w) - j;
for (k = 0; match && k < si; ++k) {
for (l = 0; match && l < sj; ++l) {
if (*(img1->planes[VPX_PLANE_Y] +
(i + k) * img1->stride[VPX_PLANE_Y] + j + l) !=
*(img2->planes[VPX_PLANE_Y] +
(i + k) * img2->stride[VPX_PLANE_Y] + j + l)) {
yloc[0] = i + k;
yloc[1] = j + l;
yloc[2] = *(img1->planes[VPX_PLANE_Y] +
(i + k) * img1->stride[VPX_PLANE_Y] + j + l);
yloc[3] = *(img2->planes[VPX_PLANE_Y] +
(i + k) * img2->stride[VPX_PLANE_Y] + j + l);
match = 0;
break;
}
}
}
}
}
uloc[0] = uloc[1] = uloc[2] = uloc[3] = -1;
for (i = 0, match = 1; match && i < c_h; i += bsizey) {
for (j = 0; match && j < c_w; j += bsizex) {
int k, l;
const int si = mmin(i + bsizey, c_h - i);
const int sj = mmin(j + bsizex, c_w - j);
for (k = 0; match && k < si; ++k) {
for (l = 0; match && l < sj; ++l) {
if (*(img1->planes[VPX_PLANE_U] +
(i + k) * img1->stride[VPX_PLANE_U] + j + l) !=
*(img2->planes[VPX_PLANE_U] +
(i + k) * img2->stride[VPX_PLANE_U] + j + l)) {
uloc[0] = i + k;
uloc[1] = j + l;
uloc[2] = *(img1->planes[VPX_PLANE_U] +
(i + k) * img1->stride[VPX_PLANE_U] + j + l);
uloc[3] = *(img2->planes[VPX_PLANE_U] +
(i + k) * img2->stride[VPX_PLANE_U] + j + l);
match = 0;
break;
}
}
}
}
}
vloc[0] = vloc[1] = vloc[2] = vloc[3] = -1;
for (i = 0, match = 1; match && i < c_h; i += bsizey) {
for (j = 0; match && j < c_w; j += bsizex) {
int k, l;
const int si = mmin(i + bsizey, c_h - i);
const int sj = mmin(j + bsizex, c_w - j);
for (k = 0; match && k < si; ++k) {
for (l = 0; match && l < sj; ++l) {
if (*(img1->planes[VPX_PLANE_V] +
(i + k) * img1->stride[VPX_PLANE_V] + j + l) !=
*(img2->planes[VPX_PLANE_V] +
(i + k) * img2->stride[VPX_PLANE_V] + j + l)) {
vloc[0] = i + k;
vloc[1] = j + l;
vloc[2] = *(img1->planes[VPX_PLANE_V] +
(i + k) * img1->stride[VPX_PLANE_V] + j + l);
vloc[3] = *(img2->planes[VPX_PLANE_V] +
(i + k) * img2->stride[VPX_PLANE_V] + j + l);
match = 0;
break;
}
}
}
}
}
}
static void testing_decode(vpx_codec_ctx_t *encoder, vpx_codec_ctx_t *decoder,
unsigned int frame_out, int *mismatch_seen) {
vpx_image_t enc_img, dec_img;
struct vp9_ref_frame ref_enc, ref_dec;
if (*mismatch_seen) return;
ref_enc.idx = 0;
ref_dec.idx = 0;
if (vpx_codec_control(encoder, VP9_GET_REFERENCE, &ref_enc))
die_codec(encoder, "Failed to get encoder reference frame");
enc_img = ref_enc.img;
if (vpx_codec_control(decoder, VP9_GET_REFERENCE, &ref_dec))
die_codec(decoder, "Failed to get decoder reference frame");
dec_img = ref_dec.img;
if (!compare_img(&enc_img, &dec_img)) {
int y[4], u[4], v[4];
*mismatch_seen = 1;
find_mismatch(&enc_img, &dec_img, y, u, v);
printf(
"Encode/decode mismatch on frame %d at"
" Y[%d, %d] {%d/%d},"
" U[%d, %d] {%d/%d},"
" V[%d, %d] {%d/%d}",
frame_out, y[0], y[1], y[2], y[3], u[0], u[1], u[2], u[3], v[0], v[1],
v[2], v[3]);
}
vpx_img_free(&enc_img);
vpx_img_free(&dec_img);
}
static int encode_frame(vpx_codec_ctx_t *ecodec, vpx_image_t *img,
unsigned int frame_in, VpxVideoWriter *writer,
int test_decode, vpx_codec_ctx_t *dcodec,
unsigned int *frame_out, int *mismatch_seen) {
int got_pkts = 0;
vpx_codec_iter_t iter = NULL;
const vpx_codec_cx_pkt_t *pkt = NULL;
int got_data;
const vpx_codec_err_t res =
vpx_codec_encode(ecodec, img, frame_in, 1, 0, VPX_DL_GOOD_QUALITY);
if (res != VPX_CODEC_OK) die_codec(ecodec, "Failed to encode frame");
got_data = 0;
while ((pkt = vpx_codec_get_cx_data(ecodec, &iter)) != NULL) {
got_pkts = 1;
if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
*frame_out += 1;
}
if (!vpx_video_writer_write_frame(writer, pkt->data.frame.buf,
pkt->data.frame.sz,
pkt->data.frame.pts)) {
die_codec(ecodec, "Failed to write compressed frame");
}
printf(keyframe ? "K" : ".");
fflush(stdout);
got_data = 1;
// Decode 1 frame.
if (test_decode) {
if (vpx_codec_decode(dcodec, pkt->data.frame.buf,
(unsigned int)pkt->data.frame.sz, NULL, 0))
die_codec(dcodec, "Failed to decode frame.");
}
}
}
// Mismatch checking
if (got_data && test_decode) {
testing_decode(ecodec, dcodec, *frame_out, mismatch_seen);
}
return got_pkts;
}
int main(int argc, char **argv) {
FILE *infile = NULL;
// Encoder
vpx_codec_ctx_t ecodec;
vpx_codec_enc_cfg_t cfg;
unsigned int frame_in = 0;
vpx_image_t raw;
vpx_codec_err_t res;
VpxVideoInfo info;
VpxVideoWriter *writer = NULL;
const VpxInterface *encoder = NULL;
// Test encoder/decoder mismatch.
int test_decode = 1;
// Decoder
vpx_codec_ctx_t dcodec;
unsigned int frame_out = 0;
// The frame number to set reference frame on
unsigned int update_frame_num = 0;
int mismatch_seen = 0;
const int fps = 30;
const int bitrate = 500;
const char *width_arg = NULL;
const char *height_arg = NULL;
const char *infile_arg = NULL;
const char *outfile_arg = NULL;
unsigned int limit = 0;
vp9_zero(ecodec);
vp9_zero(cfg);
vp9_zero(info);
exec_name = argv[0];
if (argc < 6) die("Invalid number of arguments");
width_arg = argv[1];
height_arg = argv[2];
infile_arg = argv[3];
outfile_arg = argv[4];
encoder = get_vpx_encoder_by_name("vp9");
if (!encoder) die("Unsupported codec.");
update_frame_num = atoi(argv[5]);
// In VP9, the reference buffers (cm->buffer_pool->frame_bufs[i].buf) are
// allocated while calling vpx_codec_encode(), thus, setting reference for
// 1st frame isn't supported.
if (update_frame_num <= 1) die("Couldn't parse frame number '%s'\n", argv[5]);
if (argc > 6) {
limit = atoi(argv[6]);
if (update_frame_num > limit)
die("Update frame number couldn't larger than limit\n");
}
info.codec_fourcc = encoder->fourcc;
info.frame_width = (int)strtol(width_arg, NULL, 0);
info.frame_height = (int)strtol(height_arg, NULL, 0);
info.time_base.numerator = 1;
info.time_base.denominator = fps;
if (info.frame_width <= 0 || info.frame_height <= 0 ||
(info.frame_width % 2) != 0 || (info.frame_height % 2) != 0) {
die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
}
if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
info.frame_height, 1)) {
die("Failed to allocate image.");
}
printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
if (res) die_codec(&ecodec, "Failed to get default codec config.");
cfg.g_w = info.frame_width;
cfg.g_h = info.frame_height;
cfg.g_timebase.num = info.time_base.numerator;
cfg.g_timebase.den = info.time_base.denominator;
cfg.rc_target_bitrate = bitrate;
cfg.g_lag_in_frames = 3;
writer = vpx_video_writer_open(outfile_arg, kContainerIVF, &info);
if (!writer) die("Failed to open %s for writing.", outfile_arg);
if (!(infile = fopen(infile_arg, "rb")))
die("Failed to open %s for reading.", infile_arg);
if (vpx_codec_enc_init(&ecodec, encoder->codec_interface(), &cfg, 0))
die_codec(&ecodec, "Failed to initialize encoder");
// Disable alt_ref.
if (vpx_codec_control(&ecodec, VP8E_SET_ENABLEAUTOALTREF, 0))
die_codec(&ecodec, "Failed to set enable auto alt ref");
if (test_decode) {
const VpxInterface *decoder = get_vpx_decoder_by_name("vp9");
if (vpx_codec_dec_init(&dcodec, decoder->codec_interface(), NULL, 0))
die_codec(&dcodec, "Failed to initialize decoder.");
}
// Encode frames.
while (vpx_img_read(&raw, infile)) {
if (limit && frame_in >= limit) break;
if (update_frame_num > 1 && frame_out + 1 == update_frame_num) {
vpx_ref_frame_t ref;
ref.frame_type = VP8_LAST_FRAME;
ref.img = raw;
// Set reference frame in encoder.
if (vpx_codec_control(&ecodec, VP8_SET_REFERENCE, &ref))
die_codec(&ecodec, "Failed to set reference frame");
printf(" <SET_REF>");
// If set_reference in decoder is commented out, the enc/dec mismatch
// would be seen.
if (test_decode) {
if (vpx_codec_control(&dcodec, VP8_SET_REFERENCE, &ref))
die_codec(&dcodec, "Failed to set reference frame");
}
}
encode_frame(&ecodec, &raw, frame_in, writer, test_decode, &dcodec,
&frame_out, &mismatch_seen);
frame_in++;
if (mismatch_seen) break;
}
// Flush encoder.
if (!mismatch_seen)
while (encode_frame(&ecodec, NULL, frame_in, writer, test_decode, &dcodec,
&frame_out, &mismatch_seen)) {
}
printf("\n");
fclose(infile);
printf("Processed %d frames.\n", frame_out);
if (test_decode) {
if (!mismatch_seen)
printf("Encoder/decoder results are matching.\n");
else
printf("Encoder/decoder results are NOT matching.\n");
}
if (test_decode)
if (vpx_codec_destroy(&dcodec))
die_codec(&dcodec, "Failed to destroy decoder");
vpx_img_free(&raw);
if (vpx_codec_destroy(&ecodec))
die_codec(&ecodec, "Failed to destroy encoder.");
vpx_video_writer_close(writer);
return EXIT_SUCCESS;
}

View File

@ -28,9 +28,7 @@
static const char *exec_name;
void usage_exit(void) {
exit(EXIT_FAILURE);
}
void usage_exit(void) { exit(EXIT_FAILURE); }
// Denoiser states, for temporal denoising.
enum denoiserState {
@ -41,7 +39,7 @@ enum denoiserState {
kDenoiserOnAdaptive
};
static int mode_to_num_layers[13] = {1, 2, 2, 3, 3, 3, 3, 5, 2, 3, 3, 3, 3};
static int mode_to_num_layers[13] = { 1, 2, 2, 3, 3, 3, 3, 5, 2, 3, 3, 3, 3 };
// For rate control encoding stats.
struct RateControlMetrics {
@ -86,14 +84,14 @@ static void set_rate_control_metrics(struct RateControlMetrics *rc,
// per-frame-bandwidth, for the rate control encoding stats below.
const double framerate = cfg->g_timebase.den / cfg->g_timebase.num;
rc->layer_framerate[0] = framerate / cfg->ts_rate_decimator[0];
rc->layer_pfb[0] = 1000.0 * rc->layer_target_bitrate[0] /
rc->layer_framerate[0];
rc->layer_pfb[0] =
1000.0 * rc->layer_target_bitrate[0] / rc->layer_framerate[0];
for (i = 0; i < cfg->ts_number_layers; ++i) {
if (i > 0) {
rc->layer_framerate[i] = framerate / cfg->ts_rate_decimator[i];
rc->layer_pfb[i] = 1000.0 *
(rc->layer_target_bitrate[i] - rc->layer_target_bitrate[i - 1]) /
(rc->layer_framerate[i] - rc->layer_framerate[i - 1]);
rc->layer_pfb[i] = 1000.0 * (rc->layer_target_bitrate[i] -
rc->layer_target_bitrate[i - 1]) /
(rc->layer_framerate[i] - rc->layer_framerate[i - 1]);
}
rc->layer_input_frames[i] = 0;
rc->layer_enc_frames[i] = 0;
@ -114,29 +112,31 @@ static void printout_rate_control_summary(struct RateControlMetrics *rc,
unsigned int i = 0;
int tot_num_frames = 0;
double perc_fluctuation = 0.0;
printf("Total number of processed frames: %d\n\n", frame_cnt -1);
printf("Total number of processed frames: %d\n\n", frame_cnt - 1);
printf("Rate control layer stats for %d layer(s):\n\n",
cfg->ts_number_layers);
cfg->ts_number_layers);
for (i = 0; i < cfg->ts_number_layers; ++i) {
const int num_dropped = (i > 0) ?
(rc->layer_input_frames[i] - rc->layer_enc_frames[i]) :
(rc->layer_input_frames[i] - rc->layer_enc_frames[i] - 1);
const int num_dropped =
(i > 0) ? (rc->layer_input_frames[i] - rc->layer_enc_frames[i])
: (rc->layer_input_frames[i] - rc->layer_enc_frames[i] - 1);
tot_num_frames += rc->layer_input_frames[i];
rc->layer_encoding_bitrate[i] = 0.001 * rc->layer_framerate[i] *
rc->layer_encoding_bitrate[i] / tot_num_frames;
rc->layer_avg_frame_size[i] = rc->layer_avg_frame_size[i] /
rc->layer_enc_frames[i];
rc->layer_avg_rate_mismatch[i] = 100.0 * rc->layer_avg_rate_mismatch[i] /
rc->layer_enc_frames[i];
rc->layer_encoding_bitrate[i] /
tot_num_frames;
rc->layer_avg_frame_size[i] =
rc->layer_avg_frame_size[i] / rc->layer_enc_frames[i];
rc->layer_avg_rate_mismatch[i] =
100.0 * rc->layer_avg_rate_mismatch[i] / rc->layer_enc_frames[i];
printf("For layer#: %d \n", i);
printf("Bitrate (target vs actual): %d %f \n", rc->layer_target_bitrate[i],
rc->layer_encoding_bitrate[i]);
printf("Average frame size (target vs actual): %f %f \n", rc->layer_pfb[i],
rc->layer_avg_frame_size[i]);
printf("Average rate_mismatch: %f \n", rc->layer_avg_rate_mismatch[i]);
printf("Number of input frames, encoded (non-key) frames, "
"and perc dropped frames: %d %d %f \n", rc->layer_input_frames[i],
rc->layer_enc_frames[i],
printf(
"Number of input frames, encoded (non-key) frames, "
"and perc dropped frames: %d %d %f \n",
rc->layer_input_frames[i], rc->layer_enc_frames[i],
100.0 * num_dropped / rc->layer_input_frames[i]);
printf("\n");
}
@ -145,11 +145,10 @@ static void printout_rate_control_summary(struct RateControlMetrics *rc,
rc->variance_st_encoding_bitrate / rc->window_count -
(rc->avg_st_encoding_bitrate * rc->avg_st_encoding_bitrate);
perc_fluctuation = 100.0 * sqrt(rc->variance_st_encoding_bitrate) /
rc->avg_st_encoding_bitrate;
printf("Short-time stats, for window of %d frames: \n",rc->window_size);
rc->avg_st_encoding_bitrate;
printf("Short-time stats, for window of %d frames: \n", rc->window_size);
printf("Average, rms-variance, and percent-fluct: %f %f %f \n",
rc->avg_st_encoding_bitrate,
sqrt(rc->variance_st_encoding_bitrate),
rc->avg_st_encoding_bitrate, sqrt(rc->variance_st_encoding_bitrate),
perc_fluctuation);
if ((frame_cnt - 1) != tot_num_frames)
die("Error: Number of input frames not equal to output! \n");
@ -167,20 +166,20 @@ static void set_temporal_layer_pattern(int layering_mode,
switch (layering_mode) {
case 0: {
// 1-layer.
int ids[1] = {0};
int ids[1] = { 0 };
cfg->ts_periodicity = 1;
*flag_periodicity = 1;
cfg->ts_number_layers = 1;
cfg->ts_rate_decimator[0] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// Update L only.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[0] =
VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
break;
}
case 1: {
// 2-layers, 2-frame period.
int ids[2] = {0, 1};
int ids[2] = { 0, 1 };
cfg->ts_periodicity = 2;
*flag_periodicity = 2;
cfg->ts_number_layers = 2;
@ -189,22 +188,24 @@ static void set_temporal_layer_pattern(int layering_mode,
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
#if 1
// 0=L, 1=GF, Intra-layer prediction enabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF;
layer_flags[1] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_REF_ARF;
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF;
layer_flags[1] =
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_REF_ARF;
#else
// 0=L, 1=GF, Intra-layer prediction disabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF;
// 0=L, 1=GF, Intra-layer prediction disabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF;
layer_flags[1] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_LAST;
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_LAST;
#endif
break;
}
case 2: {
// 2-layers, 3-frame period.
int ids[3] = {0, 1, 1};
int ids[3] = { 0, 1, 1 };
cfg->ts_periodicity = 3;
*flag_periodicity = 3;
cfg->ts_number_layers = 2;
@ -212,16 +213,17 @@ static void set_temporal_layer_pattern(int layering_mode,
cfg->ts_rate_decimator[1] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, Intra-layer prediction enabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] =
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] = layer_flags[2] =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST;
break;
}
case 3: {
// 3-layers, 6-frame period.
int ids[6] = {0, 2, 2, 1, 2, 2};
int ids[6] = { 0, 2, 2, 1, 2, 2 };
cfg->ts_periodicity = 6;
*flag_periodicity = 6;
cfg->ts_number_layers = 3;
@ -230,19 +232,18 @@ static void set_temporal_layer_pattern(int layering_mode,
cfg->ts_rate_decimator[2] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF, Intra-layer prediction enabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[3] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST;
layer_flags[1] =
layer_flags[2] =
layer_flags[4] =
layer_flags[5] = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[3] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[1] = layer_flags[2] = layer_flags[4] = layer_flags[5] =
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST;
break;
}
case 4: {
// 3-layers, 4-frame period.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 4;
cfg->ts_number_layers = 3;
@ -251,39 +252,41 @@ static void set_temporal_layer_pattern(int layering_mode,
cfg->ts_rate_decimator[2] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF, Intra-layer prediction disabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[1] =
layer_flags[3] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[1] = layer_flags[3] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
break;
}
case 5: {
// 3-layers, 4-frame period.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 4;
cfg->ts_number_layers = 3;
cfg->ts_number_layers = 3;
cfg->ts_rate_decimator[0] = 4;
cfg->ts_rate_decimator[1] = 2;
cfg->ts_rate_decimator[2] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF, Intra-layer prediction enabled in layer 1, disabled
// in layer 2.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST |
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] = layer_flags[3] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] =
layer_flags[3] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
break;
}
case 6: {
// 3-layers, 4-frame period.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 4;
cfg->ts_number_layers = 3;
@ -292,18 +295,19 @@ static void set_temporal_layer_pattern(int layering_mode,
cfg->ts_rate_decimator[2] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF, Intra-layer prediction enabled.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] =
layer_flags[3] = VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] = layer_flags[3] =
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
break;
}
case 7: {
// NOTE: Probably of academic interest only.
// 5-layers, 16-frame period.
int ids[16] = {0, 4, 3, 4, 2, 4, 3, 4, 1, 4, 3, 4, 2, 4, 3, 4};
int ids[16] = { 0, 4, 3, 4, 2, 4, 3, 4, 1, 4, 3, 4, 2, 4, 3, 4 };
cfg->ts_periodicity = 16;
*flag_periodicity = 16;
cfg->ts_number_layers = 5;
@ -313,28 +317,21 @@ static void set_temporal_layer_pattern(int layering_mode,
cfg->ts_rate_decimator[3] = 2;
cfg->ts_rate_decimator[4] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
layer_flags[0] = VPX_EFLAG_FORCE_KF;
layer_flags[1] =
layer_flags[3] =
layer_flags[5] =
layer_flags[7] =
layer_flags[9] =
layer_flags[11] =
layer_flags[13] =
layer_flags[15] = VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] =
layer_flags[6] =
layer_flags[10] =
layer_flags[14] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_GF;
layer_flags[4] =
layer_flags[12] = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[8] = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF;
layer_flags[0] = VPX_EFLAG_FORCE_KF;
layer_flags[1] = layer_flags[3] = layer_flags[5] = layer_flags[7] =
layer_flags[9] = layer_flags[11] = layer_flags[13] = layer_flags[15] =
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] = layer_flags[6] = layer_flags[10] = layer_flags[14] =
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_GF;
layer_flags[4] = layer_flags[12] =
VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[8] = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF;
break;
}
case 8: {
// 2-layers, with sync point at first frame of layer 1.
int ids[2] = {0, 1};
int ids[2] = { 0, 1 };
cfg->ts_periodicity = 2;
*flag_periodicity = 8;
cfg->ts_number_layers = 2;
@ -346,17 +343,17 @@ static void set_temporal_layer_pattern(int layering_mode,
// key frame. Sync point every 8 frames.
// Layer 0: predict from L and ARF, update L and G.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[0] =
VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF;
// Layer 1: sync point: predict from L and ARF, and update G.
layer_flags[1] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF;
// Layer 0, predict from L and ARF, update L.
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
// Layer 1: predict from L, G and ARF, and update G.
layer_flags[3] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ENTROPY;
VP8_EFLAG_NO_UPD_ENTROPY;
// Layer 0.
layer_flags[4] = layer_flags[2];
// Layer 1.
@ -365,11 +362,11 @@ static void set_temporal_layer_pattern(int layering_mode,
layer_flags[6] = layer_flags[4];
// Layer 1.
layer_flags[7] = layer_flags[5];
break;
break;
}
case 9: {
// 3-layers: Sync points for layer 1 and 2 every 8 frames.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 8;
cfg->ts_number_layers = 3;
@ -378,20 +375,21 @@ static void set_temporal_layer_pattern(int layering_mode,
cfg->ts_rate_decimator[2] = 1;
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
layer_flags[1] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[3] = layer_flags[5] =
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[3] =
layer_flags[5] = VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
layer_flags[4] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[6] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[6] =
VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF;
layer_flags[7] = VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_ENTROPY;
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_ENTROPY;
break;
}
case 10: {
@ -399,7 +397,7 @@ static void set_temporal_layer_pattern(int layering_mode,
// and is only updated on key frame.
// Sync points for layer 1 and 2 every 8 frames.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 8;
cfg->ts_number_layers = 3;
@ -409,21 +407,21 @@ static void set_temporal_layer_pattern(int layering_mode,
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF.
// Layer 0: predict from L and ARF; update L and G.
layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_REF_GF;
layer_flags[0] =
VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF;
// Layer 2: sync point: predict from L and ARF; update none.
layer_flags[1] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ENTROPY;
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ENTROPY;
// Layer 1: sync point: predict from L and ARF; update G.
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST;
layer_flags[2] =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
// Layer 2: predict from L, G, ARF; update none.
layer_flags[3] = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY;
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY;
// Layer 0: predict from L and ARF; update L.
layer_flags[4] = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_REF_GF;
layer_flags[4] =
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF;
// Layer 2: predict from L, G, ARF; update none.
layer_flags[5] = layer_flags[3];
// Layer 1: predict from L, G, ARF; update G.
@ -438,7 +436,7 @@ static void set_temporal_layer_pattern(int layering_mode,
// This was added to compare with vp9_spatial_svc_encoder.
// 3-layers, 4-frame period.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 4;
cfg->ts_number_layers = 3;
@ -448,20 +446,20 @@ static void set_temporal_layer_pattern(int layering_mode,
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF, Intra-layer prediction disabled.
layer_flags[0] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[1] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
layer_flags[3] = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF;
break;
}
case 12:
default: {
// 3-layers structure as in case 10, but no sync/refresh points for
// layer 1 and 2.
int ids[4] = {0, 2, 1, 2};
int ids[4] = { 0, 2, 1, 2 };
cfg->ts_periodicity = 4;
*flag_periodicity = 8;
cfg->ts_number_layers = 3;
@ -471,15 +469,15 @@ static void set_temporal_layer_pattern(int layering_mode,
memcpy(cfg->ts_layer_id, ids, sizeof(ids));
// 0=L, 1=GF, 2=ARF.
// Layer 0: predict from L and ARF; update L.
layer_flags[0] = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_REF_GF;
layer_flags[0] =
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF;
layer_flags[4] = layer_flags[0];
// Layer 1: predict from L, G, ARF; update G.
layer_flags[2] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
layer_flags[6] = layer_flags[2];
// Layer 2: predict from L, G, ARF; update none.
layer_flags[1] = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY;
VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ENTROPY;
layer_flags[3] = layer_flags[1];
layer_flags[5] = layer_flags[1];
layer_flags[7] = layer_flags[1];
@ -489,7 +487,7 @@ static void set_temporal_layer_pattern(int layering_mode,
}
int main(int argc, char **argv) {
VpxVideoWriter *outfile[VPX_TS_MAX_LAYERS] = {NULL};
VpxVideoWriter *outfile[VPX_TS_MAX_LAYERS] = { NULL };
vpx_codec_ctx_t codec;
vpx_codec_enc_cfg_t cfg;
int frame_cnt = 0;
@ -502,15 +500,15 @@ int main(int argc, char **argv) {
int got_data;
int flags = 0;
unsigned int i;
int pts = 0; // PTS starts at 0.
int pts = 0; // PTS starts at 0.
int frame_duration = 1; // 1 timebase tick per frame.
int layering_mode = 0;
int layer_flags[VPX_TS_MAX_PERIODICITY] = {0};
int layer_flags[VPX_TS_MAX_PERIODICITY] = { 0 };
int flag_periodicity = 1;
#if VPX_ENCODER_ABI_VERSION > (4 + VPX_CODEC_ABI_VERSION)
vpx_svc_layer_id_t layer_id = {0, 0};
vpx_svc_layer_id_t layer_id = { 0, 0 };
#else
vpx_svc_layer_id_t layer_id = {0};
vpx_svc_layer_id_t layer_id = { 0 };
#endif
const VpxInterface *encoder = NULL;
FILE *infile = NULL;
@ -526,7 +524,7 @@ int main(int argc, char **argv) {
#endif // CONFIG_VP9_HIGHBITDEPTH
double sum_bitrate = 0.0;
double sum_bitrate2 = 0.0;
double framerate = 30.0;
double framerate = 30.0;
exec_name = argv[0];
// Check usage and arguments.
@ -534,27 +532,28 @@ int main(int argc, char **argv) {
#if CONFIG_VP9_HIGHBITDEPTH
die("Usage: %s <infile> <outfile> <codec_type(vp8/vp9)> <width> <height> "
"<rate_num> <rate_den> <speed> <frame_drop_threshold> <mode> "
"<Rate_0> ... <Rate_nlayers-1> <bit-depth> \n", argv[0]);
"<Rate_0> ... <Rate_nlayers-1> <bit-depth> \n",
argv[0]);
#else
die("Usage: %s <infile> <outfile> <codec_type(vp8/vp9)> <width> <height> "
"<rate_num> <rate_den> <speed> <frame_drop_threshold> <mode> "
"<Rate_0> ... <Rate_nlayers-1> \n", argv[0]);
"<Rate_0> ... <Rate_nlayers-1> \n",
argv[0]);
#endif // CONFIG_VP9_HIGHBITDEPTH
}
encoder = get_vpx_encoder_by_name(argv[3]);
if (!encoder)
die("Unsupported codec.");
if (!encoder) die("Unsupported codec.");
printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));
width = strtol(argv[4], NULL, 0);
height = strtol(argv[5], NULL, 0);
width = (unsigned int)strtoul(argv[4], NULL, 0);
height = (unsigned int)strtoul(argv[5], NULL, 0);
if (width < 16 || width % 2 || height < 16 || height % 2) {
die("Invalid resolution: %d x %d", width, height);
}
layering_mode = strtol(argv[10], NULL, 0);
layering_mode = (int)strtol(argv[10], NULL, 0);
if (layering_mode < 0 || layering_mode > 13) {
die("Invalid layering mode (0..12) %s", argv[10]);
}
@ -564,7 +563,7 @@ int main(int argc, char **argv) {
}
#if CONFIG_VP9_HIGHBITDEPTH
switch (strtol(argv[argc-1], NULL, 0)) {
switch (strtol(argv[argc - 1], NULL, 0)) {
case 8:
bit_depth = VPX_BITS_8;
input_bit_depth = 8;
@ -577,13 +576,11 @@ int main(int argc, char **argv) {
bit_depth = VPX_BITS_12;
input_bit_depth = 12;
break;
default:
die("Invalid bit depth (8, 10, 12) %s", argv[argc-1]);
default: die("Invalid bit depth (8, 10, 12) %s", argv[argc - 1]);
}
if (!vpx_img_alloc(&raw,
bit_depth == VPX_BITS_8 ? VPX_IMG_FMT_I420 :
VPX_IMG_FMT_I42016,
width, height, 32)) {
if (!vpx_img_alloc(
&raw, bit_depth == VPX_BITS_8 ? VPX_IMG_FMT_I420 : VPX_IMG_FMT_I42016,
width, height, 32)) {
die("Failed to allocate image", width, height);
}
#else
@ -612,18 +609,17 @@ int main(int argc, char **argv) {
#endif // CONFIG_VP9_HIGHBITDEPTH
// Timebase format e.g. 30fps: numerator=1, denominator = 30.
cfg.g_timebase.num = strtol(argv[6], NULL, 0);
cfg.g_timebase.den = strtol(argv[7], NULL, 0);
cfg.g_timebase.num = (int)strtol(argv[6], NULL, 0);
cfg.g_timebase.den = (int)strtol(argv[7], NULL, 0);
speed = strtol(argv[8], NULL, 0);
speed = (int)strtol(argv[8], NULL, 0);
if (speed < 0) {
die("Invalid speed setting: must be positive");
}
for (i = min_args_base;
(int)i < min_args_base + mode_to_num_layers[layering_mode];
++i) {
rc.layer_target_bitrate[i - 11] = strtol(argv[i], NULL, 0);
(int)i < min_args_base + mode_to_num_layers[layering_mode]; ++i) {
rc.layer_target_bitrate[i - 11] = (int)strtol(argv[i], NULL, 0);
if (strncmp(encoder->name, "vp8", 3) == 0)
cfg.ts_target_bitrate[i - 11] = rc.layer_target_bitrate[i - 11];
else if (strncmp(encoder->name, "vp9", 3) == 0)
@ -631,12 +627,11 @@ int main(int argc, char **argv) {
}
// Real time parameters.
cfg.rc_dropframe_thresh = strtol(argv[9], NULL, 0);
cfg.rc_dropframe_thresh = (unsigned int)strtoul(argv[9], NULL, 0);
cfg.rc_end_usage = VPX_CBR;
cfg.rc_min_quantizer = 2;
cfg.rc_max_quantizer = 56;
if (strncmp(encoder->name, "vp9", 3) == 0)
cfg.rc_max_quantizer = 52;
if (strncmp(encoder->name, "vp9", 3) == 0) cfg.rc_max_quantizer = 52;
cfg.rc_undershoot_pct = 50;
cfg.rc_overshoot_pct = 50;
cfg.rc_buf_initial_sz = 500;
@ -651,7 +646,7 @@ int main(int argc, char **argv) {
// Enable error resilient mode.
cfg.g_error_resilient = 1;
cfg.g_lag_in_frames = 0;
cfg.g_lag_in_frames = 0;
cfg.kf_mode = VPX_KF_AUTO;
// Disable automatic keyframe placement.
@ -659,9 +654,7 @@ int main(int argc, char **argv) {
cfg.temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS;
set_temporal_layer_pattern(layering_mode,
&cfg,
layer_flags,
set_temporal_layer_pattern(layering_mode, &cfg, layer_flags,
&flag_periodicity);
set_rate_control_metrics(&rc, &cfg);
@ -688,15 +681,14 @@ int main(int argc, char **argv) {
snprintf(file_name, sizeof(file_name), "%s_%d.ivf", argv[2], i);
outfile[i] = vpx_video_writer_open(file_name, kContainerIVF, &info);
if (!outfile[i])
die("Failed to open %s for writing", file_name);
if (!outfile[i]) die("Failed to open %s for writing", file_name);
assert(outfile[i] != NULL);
}
// No spatial layers in this encoder.
cfg.ss_number_layers = 1;
// Initialize codec.
// Initialize codec.
#if CONFIG_VP9_HIGHBITDEPTH
if (vpx_codec_enc_init(
&codec, encoder->codec_interface(), &cfg,
@ -712,14 +704,15 @@ int main(int argc, char **argv) {
vpx_codec_control(&codec, VP8E_SET_STATIC_THRESHOLD, 1);
} else if (strncmp(encoder->name, "vp9", 3) == 0) {
vpx_svc_extra_cfg_t svc_params;
memset(&svc_params, 0, sizeof(svc_params));
vpx_codec_control(&codec, VP8E_SET_CPUUSED, speed);
vpx_codec_control(&codec, VP9E_SET_AQ_MODE, 3);
vpx_codec_control(&codec, VP9E_SET_FRAME_PERIODIC_BOOST, 0);
vpx_codec_control(&codec, VP9E_SET_NOISE_SENSITIVITY, 0);
vpx_codec_control(&codec, VP9E_SET_NOISE_SENSITIVITY, kDenoiserOff);
vpx_codec_control(&codec, VP8E_SET_STATIC_THRESHOLD, 1);
vpx_codec_control(&codec, VP9E_SET_TUNE_CONTENT, 0);
vpx_codec_control(&codec, VP9E_SET_TILE_COLUMNS, (cfg.g_threads >> 1));
if (vpx_codec_control(&codec, VP9E_SET_SVC, layering_mode > 0 ? 1: 0))
if (vpx_codec_control(&codec, VP9E_SET_SVC, layering_mode > 0 ? 1 : 0))
die_codec(&codec, "Failed to set SVC");
for (i = 0; i < cfg.ts_number_layers; ++i) {
svc_params.max_quantizers[i] = cfg.rc_max_quantizer;
@ -760,14 +753,12 @@ int main(int argc, char **argv) {
layer_id.temporal_layer_id);
}
flags = layer_flags[frame_cnt % flag_periodicity];
if (layering_mode == 0)
flags = 0;
if (layering_mode == 0) flags = 0;
frame_avail = vpx_img_read(&raw, infile);
if (frame_avail)
++rc.layer_input_frames[layer_id.temporal_layer_id];
if (frame_avail) ++rc.layer_input_frames[layer_id.temporal_layer_id];
vpx_usec_timer_start(&timer);
if (vpx_codec_encode(&codec, frame_avail? &raw : NULL, pts, 1, flags,
VPX_DL_REALTIME)) {
if (vpx_codec_encode(&codec, frame_avail ? &raw : NULL, pts, 1, flags,
VPX_DL_REALTIME)) {
die_codec(&codec, "Failed to encode frame");
}
vpx_usec_timer_mark(&timer);
@ -777,12 +768,12 @@ int main(int argc, char **argv) {
layer_flags[0] &= ~VPX_EFLAG_FORCE_KF;
}
got_data = 0;
while ( (pkt = vpx_codec_get_cx_data(&codec, &iter)) ) {
while ((pkt = vpx_codec_get_cx_data(&codec, &iter))) {
got_data = 1;
switch (pkt->kind) {
case VPX_CODEC_CX_FRAME_PKT:
for (i = cfg.ts_layer_id[frame_cnt % cfg.ts_periodicity];
i < cfg.ts_number_layers; ++i) {
i < cfg.ts_number_layers; ++i) {
vpx_video_writer_write_frame(outfile[i], pkt->data.frame.buf,
pkt->data.frame.sz, pts);
++rc.layer_tot_enc_frames[i];
@ -825,8 +816,7 @@ int main(int argc, char **argv) {
}
}
break;
default:
break;
default: break;
}
}
++frame_cnt;
@ -836,16 +826,13 @@ int main(int argc, char **argv) {
printout_rate_control_summary(&rc, &cfg, frame_cnt);
printf("\n");
printf("Frame cnt and encoding time/FPS stats for encoding: %d %f %f \n",
frame_cnt,
1000 * (float)cx_time / (double)(frame_cnt * 1000000),
1000000 * (double)frame_cnt / (double)cx_time);
frame_cnt, 1000 * (float)cx_time / (double)(frame_cnt * 1000000),
1000000 * (double)frame_cnt / (double)cx_time);
if (vpx_codec_destroy(&codec))
die_codec(&codec, "Failed to destroy codec");
if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");
// Try to rewrite the output file headers with the actual frame count.
for (i = 0; i < cfg.ts_number_layers; ++i)
vpx_video_writer_close(outfile[i]);
for (i = 0; i < cfg.ts_number_layers; ++i) vpx_video_writer_close(outfile[i]);
vpx_img_free(&raw);
return EXIT_SUCCESS;
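For reference, a minimal standalone sketch of how this example maps a frame index to a temporal layer and to an encode-flag set, using the mode-1 pattern (2 layers, 2-frame period) from case 1 above. The two flag values are illustrative stand-ins, not the real VP8_EFLAG_* combinations.

/* Illustration only: frame -> (layer, flags) indexing as used by the example
 * above; layer_flags[] values stand in for the VP8_EFLAG_* masks. */
#include <stdio.h>

int main(void) {
  const int ts_layer_id[2] = { 0, 1 }; /* ids[] for layering mode 1 */
  const int ts_periodicity = 2;
  const int flag_periodicity = 2;
  const int layer_flags[2] = { 0x1, 0x2 }; /* stand-ins for the base and
                                              enhancement flag sets */
  int frame_cnt;

  for (frame_cnt = 0; frame_cnt < 8; ++frame_cnt) {
    const int layer = ts_layer_id[frame_cnt % ts_periodicity];
    const int flags = layer_flags[frame_cnt % flag_periodicity];
    printf("frame %d -> temporal layer %d, flags 0x%x\n", frame_cnt, layer,
           flags);
  }
  return 0;
}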

View File

@ -23,7 +23,7 @@ static void fix_framerate(int *num, int *den) {
// we can guess the framerate using only the timebase in this
// case. Other files would require reading ahead to guess the
// timebase, like we do for webm.
if (*num < 1000) {
if (*den > 0 && *den < 1000000000 && *num > 0 && *num < 1000) {
// Correct for the factor of 2 applied to the timebase in the encoder.
if (*num & 1)
*den *= 2;
@ -46,7 +46,8 @@ int file_is_ivf(struct VpxInputContext *input_ctx) {
is_ivf = 1;
if (mem_get_le16(raw_hdr + 4) != 0) {
fprintf(stderr, "Error: Unrecognized IVF version! This file may not"
fprintf(stderr,
"Error: Unrecognized IVF version! This file may not"
" decode properly.");
}
@ -69,14 +70,13 @@ int file_is_ivf(struct VpxInputContext *input_ctx) {
return is_ivf;
}
int ivf_read_frame(FILE *infile, uint8_t **buffer,
size_t *bytes_read, size_t *buffer_size) {
char raw_header[IVF_FRAME_HDR_SZ] = {0};
int ivf_read_frame(FILE *infile, uint8_t **buffer, size_t *bytes_read,
size_t *buffer_size) {
char raw_header[IVF_FRAME_HDR_SZ] = { 0 };
size_t frame_size = 0;
if (fread(raw_header, IVF_FRAME_HDR_SZ, 1, infile) != 1) {
if (!feof(infile))
warn("Failed to read frame size\n");
if (!feof(infile)) warn("Failed to read frame size\n");
} else {
frame_size = mem_get_le32(raw_header);

View File

@ -18,11 +18,11 @@ extern "C" {
int file_is_ivf(struct VpxInputContext *input);
int ivf_read_frame(FILE *infile, uint8_t **buffer,
size_t *bytes_read, size_t *buffer_size);
int ivf_read_frame(FILE *infile, uint8_t **buffer, size_t *bytes_read,
size_t *buffer_size);
#ifdef __cplusplus
} /* extern "C" */
} /* extern "C" */
#endif
#endif // IVFDEC_H_
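A minimal read-loop sketch for the prototype above, assuming the usual libvpx convention that ivf_read_frame() returns 0 on success (the callee grows *buffer as needed); the 32-byte header skip and the process_frame() consumer are placeholders for illustration.

/* Sketch only: drive ivf_read_frame() as declared above. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int ivf_read_frame(FILE *infile, uint8_t **buffer, size_t *bytes_read,
                   size_t *buffer_size); /* prototype copied from above */

static void process_frame(const uint8_t *buf, size_t sz) {
  (void)buf;
  printf("frame of %u bytes\n", (unsigned int)sz);
}

int read_all_frames(const char *path) {
  FILE *infile = fopen(path, "rb");
  uint8_t *buffer = NULL;
  size_t bytes_read = 0;
  size_t buffer_size = 0;
  if (!infile) return -1;
  fseek(infile, 32, SEEK_SET); /* assume a plain 32-byte IVF file header */
  while (ivf_read_frame(infile, &buffer, &bytes_read, &buffer_size) == 0)
    process_frame(buffer, bytes_read);
  free(buffer);
  fclose(infile);
  return 0;
}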

View File

@ -13,10 +13,8 @@
#include "vpx/vpx_encoder.h"
#include "vpx_ports/mem_ops.h"
void ivf_write_file_header(FILE *outfile,
const struct vpx_codec_enc_cfg *cfg,
unsigned int fourcc,
int frame_cnt) {
void ivf_write_file_header(FILE *outfile, const struct vpx_codec_enc_cfg *cfg,
unsigned int fourcc, int frame_cnt) {
char header[32];
header[0] = 'D';

View File

@ -19,17 +19,15 @@ struct vpx_codec_cx_pkt;
extern "C" {
#endif
void ivf_write_file_header(FILE *outfile,
const struct vpx_codec_enc_cfg *cfg,
uint32_t fourcc,
int frame_cnt);
void ivf_write_file_header(FILE *outfile, const struct vpx_codec_enc_cfg *cfg,
uint32_t fourcc, int frame_cnt);
void ivf_write_frame_header(FILE *outfile, int64_t pts, size_t frame_size);
void ivf_write_frame_size(FILE *outfile, size_t frame_size);
#ifdef __cplusplus
} /* extern "C" */
} /* extern "C" */
#endif
#endif // IVFENC_H_
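The prototypes above imply the usual IVF writer pattern, also visible in the encoder example earlier in this patch: write the file header with a provisional frame count, emit frames, then rewind and rewrite the header with the real count. A minimal single-frame sketch (the argument values are placeholders for illustration):

/* Sketch only: provisional header, one frame, then header rewrite. */
#include <stdint.h>
#include <stdio.h>
#include "ivfenc.h"
#include "vpx/vpx_encoder.h"

void write_one_frame_ivf(FILE *out, const vpx_codec_enc_cfg_t *cfg,
                         uint32_t fourcc, const uint8_t *frame,
                         size_t frame_sz, int64_t pts) {
  ivf_write_file_header(out, cfg, fourcc, 0); /* provisional count */
  ivf_write_frame_header(out, pts, frame_sz);
  fwrite(frame, 1, frame_sz, out);
  rewind(out);
  ivf_write_file_header(out, cfg, fourcc, 1); /* actual frame count */
}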

View File

@ -109,40 +109,6 @@ endif
VP9_PREFIX=vp9/
$(BUILD_PFX)$(VP9_PREFIX)%.c.o:
# VP10 make file
ifeq ($(CONFIG_VP10),yes)
VP10_PREFIX=vp10/
include $(SRC_PATH_BARE)/$(VP10_PREFIX)vp10_common.mk
endif
ifeq ($(CONFIG_VP10_ENCODER),yes)
VP10_PREFIX=vp10/
include $(SRC_PATH_BARE)/$(VP10_PREFIX)vp10cx.mk
CODEC_SRCS-yes += $(addprefix $(VP10_PREFIX),$(call enabled,VP10_CX_SRCS))
CODEC_EXPORTS-yes += $(addprefix $(VP10_PREFIX),$(VP10_CX_EXPORTS))
CODEC_SRCS-yes += $(VP10_PREFIX)vp10cx.mk vpx/vp8.h vpx/vp8cx.h
INSTALL-LIBS-yes += include/vpx/vp8.h include/vpx/vp8cx.h
INSTALL-LIBS-$(CONFIG_SPATIAL_SVC) += include/vpx/svc_context.h
INSTALL_MAPS += include/vpx/% $(SRC_PATH_BARE)/$(VP10_PREFIX)/%
CODEC_DOC_SRCS += vpx/vp8.h vpx/vp8cx.h
CODEC_DOC_SECTIONS += vp9 vp9_encoder
endif
ifeq ($(CONFIG_VP10_DECODER),yes)
VP10_PREFIX=vp10/
include $(SRC_PATH_BARE)/$(VP10_PREFIX)vp10dx.mk
CODEC_SRCS-yes += $(addprefix $(VP10_PREFIX),$(call enabled,VP10_DX_SRCS))
CODEC_EXPORTS-yes += $(addprefix $(VP10_PREFIX),$(VP10_DX_EXPORTS))
CODEC_SRCS-yes += $(VP10_PREFIX)vp10dx.mk vpx/vp8.h vpx/vp8dx.h
INSTALL-LIBS-yes += include/vpx/vp8.h include/vpx/vp8dx.h
INSTALL_MAPS += include/vpx/% $(SRC_PATH_BARE)/$(VP10_PREFIX)/%
CODEC_DOC_SRCS += vpx/vp8.h vpx/vp8dx.h
CODEC_DOC_SECTIONS += vp9 vp9_decoder
endif
VP10_PREFIX=vp10/
$(BUILD_PFX)$(VP10_PREFIX)%.c.o:
ifeq ($(CONFIG_ENCODERS),yes)
CODEC_DOC_SECTIONS += encoder
endif
@ -183,6 +149,9 @@ INSTALL-SRCS-$(CONFIG_CODEC_SRCS) += third_party/x86inc/x86inc.asm
endif
CODEC_EXPORTS-yes += vpx/exports_com
CODEC_EXPORTS-$(CONFIG_ENCODERS) += vpx/exports_enc
ifeq ($(CONFIG_SPATIAL_SVC),yes)
CODEC_EXPORTS-$(CONFIG_ENCODERS) += vpx/exports_spatial_svc
endif
CODEC_EXPORTS-$(CONFIG_DECODERS) += vpx/exports_dec
INSTALL-LIBS-yes += include/vpx/vpx_codec.h
@ -260,7 +229,7 @@ OBJS-yes += $(LIBVPX_OBJS)
LIBS-$(if yes,$(CONFIG_STATIC)) += $(BUILD_PFX)libvpx.a $(BUILD_PFX)libvpx_g.a
$(BUILD_PFX)libvpx_g.a: $(LIBVPX_OBJS)
SO_VERSION_MAJOR := 3
SO_VERSION_MAJOR := 4
SO_VERSION_MINOR := 0
SO_VERSION_PATCH := 0
ifeq ($(filter darwin%,$(TGT_OS)),$(TGT_OS))
@ -270,6 +239,12 @@ EXPORT_FILE := libvpx.syms
LIBVPX_SO_SYMLINKS := $(addprefix $(LIBSUBDIR)/, \
libvpx.dylib )
else
ifeq ($(filter iphonesimulator%,$(TGT_OS)),$(TGT_OS))
LIBVPX_SO := libvpx.$(SO_VERSION_MAJOR).dylib
SHARED_LIB_SUF := .dylib
EXPORT_FILE := libvpx.syms
LIBVPX_SO_SYMLINKS := $(addprefix $(LIBSUBDIR)/, libvpx.dylib)
else
ifeq ($(filter os2%,$(TGT_OS)),$(TGT_OS))
LIBVPX_SO := libvpx$(SO_VERSION_MAJOR).dll
SHARED_LIB_SUF := _dll.a
@ -285,6 +260,7 @@ LIBVPX_SO_SYMLINKS := $(addprefix $(LIBSUBDIR)/, \
libvpx.so.$(SO_VERSION_MAJOR).$(SO_VERSION_MINOR))
endif
endif
endif
LIBS-$(CONFIG_SHARED) += $(BUILD_PFX)$(LIBVPX_SO)\
$(notdir $(LIBVPX_SO_SYMLINKS)) \
@ -394,6 +370,12 @@ $(filter %$(ASM).o,$(OBJS-yes)): $(BUILD_PFX)vpx_config.asm
$(shell $(SRC_PATH_BARE)/build/make/version.sh "$(SRC_PATH_BARE)" $(BUILD_PFX)vpx_version.h)
CLEAN-OBJS += $(BUILD_PFX)vpx_version.h
#
# Add include path for libwebm sources.
#
ifeq ($(CONFIG_WEBM_IO),yes)
CXXFLAGS += -I$(SRC_PATH_BARE)/third_party/libwebm
endif
##
## libvpx test directives
@ -469,6 +451,7 @@ test_libvpx.$(VCPROJ_SFX): $(LIBVPX_TEST_SRCS) vpx.$(VCPROJ_SFX) gtest.$(VCPROJ_
$(if $(CONFIG_STATIC_MSVCRT),--static-crt) \
--out=$@ $(INTERNAL_CFLAGS) $(CFLAGS) \
-I. -I"$(SRC_PATH_BARE)/third_party/googletest/src/include" \
$(if $(CONFIG_WEBM_IO),-I"$(SRC_PATH_BARE)/third_party/libwebm") \
-L. -l$(CODEC_LIB) -l$(GTEST_LIB) $^
PROJECTS-$(CONFIG_MSVS) += test_libvpx.$(VCPROJ_SFX)

View File

@ -20,19 +20,17 @@
* Still in the public domain.
*/
#include <string.h> /* for memcpy() */
#include <string.h> /* for memcpy() */
#include "md5_utils.h"
static void
byteSwap(UWORD32 *buf, unsigned words) {
static void byteSwap(UWORD32 *buf, unsigned words) {
md5byte *p;
/* Only swap bytes for big endian machines */
int i = 1;
if (*(char *)&i == 1)
return;
if (*(char *)&i == 1) return;
p = (md5byte *)buf;
@ -47,8 +45,7 @@ byteSwap(UWORD32 *buf, unsigned words) {
* Start MD5 accumulation. Set bit count to 0 and buffer to mysterious
* initialization constants.
*/
void
MD5Init(struct MD5Context *ctx) {
void MD5Init(struct MD5Context *ctx) {
ctx->buf[0] = 0x67452301;
ctx->buf[1] = 0xefcdab89;
ctx->buf[2] = 0x98badcfe;
@ -62,8 +59,7 @@ MD5Init(struct MD5Context *ctx) {
* Update context to reflect the concatenation of another buffer full
* of bytes.
*/
void
MD5Update(struct MD5Context *ctx, md5byte const *buf, unsigned len) {
void MD5Update(struct MD5Context *ctx, md5byte const *buf, unsigned len) {
UWORD32 t;
/* Update byte count */
@ -71,9 +67,9 @@ MD5Update(struct MD5Context *ctx, md5byte const *buf, unsigned len) {
t = ctx->bytes[0];
if ((ctx->bytes[0] = t + len) < t)
ctx->bytes[1]++; /* Carry from low to high */
ctx->bytes[1]++; /* Carry from low to high */
t = 64 - (t & 0x3f); /* Space available in ctx->in (at least 1) */
t = 64 - (t & 0x3f); /* Space available in ctx->in (at least 1) */
if (t > len) {
memcpy((md5byte *)ctx->in + 64 - t, buf, len);
@ -104,8 +100,7 @@ MD5Update(struct MD5Context *ctx, md5byte const *buf, unsigned len) {
* Final wrapup - pad to 64-byte boundary with the bit pattern
* 1 0* (64-bit count of bits processed, MSB-first)
*/
void
MD5Final(md5byte digest[16], struct MD5Context *ctx) {
void MD5Final(md5byte digest[16], struct MD5Context *ctx) {
int count = ctx->bytes[0] & 0x3f; /* Number of bytes in ctx->in */
md5byte *p = (md5byte *)ctx->in + count;
@ -115,7 +110,7 @@ MD5Final(md5byte digest[16], struct MD5Context *ctx) {
/* Bytes of padding needed to make 56 bytes (-8..55) */
count = 56 - 1 - count;
if (count < 0) { /* Padding forces an extra block */
if (count < 0) { /* Padding forces an extra block */
memset(p, 0, count + 8);
byteSwap(ctx->in, 16);
MD5Transform(ctx->buf, ctx->in);
@ -147,16 +142,27 @@ MD5Final(md5byte digest[16], struct MD5Context *ctx) {
#define F4(x, y, z) (y ^ (x | ~z))
/* This is the central step in the MD5 algorithm. */
#define MD5STEP(f,w,x,y,z,in,s) \
(w += f(x,y,z) + in, w = (w<<s | w>>(32-s)) + x)
#define MD5STEP(f, w, x, y, z, in, s) \
(w += f(x, y, z) + in, w = (w << s | w >> (32 - s)) + x)
#if defined(__clang__) && defined(__has_attribute)
#if __has_attribute(no_sanitize)
#define VPX_NO_UNSIGNED_OVERFLOW_CHECK \
__attribute__((no_sanitize("unsigned-integer-overflow")))
#endif
#endif
#ifndef VPX_NO_UNSIGNED_OVERFLOW_CHECK
#define VPX_NO_UNSIGNED_OVERFLOW_CHECK
#endif
/*
* The core of the MD5 algorithm, this alters an existing MD5 hash to
* reflect the addition of 16 longwords of new data. MD5Update blocks
* the data and converts bytes into longwords for this routine.
*/
void
MD5Transform(UWORD32 buf[4], UWORD32 const in[16]) {
VPX_NO_UNSIGNED_OVERFLOW_CHECK void MD5Transform(UWORD32 buf[4],
UWORD32 const in[16]) {
register UWORD32 a, b, c, d;
a = buf[0];
@ -238,4 +244,6 @@ MD5Transform(UWORD32 buf[4], UWORD32 const in[16]) {
buf[3] += d;
}
#undef VPX_NO_UNSIGNED_OVERFLOW_CHECK
#endif
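The Init/Update/Final trio reformatted above follows the usual streaming-hash pattern; a minimal sketch of hashing one buffer, assuming md5_utils.h declares these functions and the md5byte/MD5Context types used in the hunks:

/* Sketch only: hash a single buffer with the helpers reformatted above. */
#include <stdio.h>
#include "md5_utils.h"

void print_md5(const md5byte *data, unsigned int len) {
  struct MD5Context ctx;
  md5byte digest[16];
  int i;
  MD5Init(&ctx);
  MD5Update(&ctx, data, len);
  MD5Final(digest, &ctx);
  for (i = 0; i < 16; ++i) printf("%02x", digest[i]);
  printf("\n");
}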

View File

@ -45,8 +45,7 @@ struct rate_hist *init_rate_histogram(const vpx_codec_enc_cfg_t *cfg,
hist->samples = cfg->rc_buf_sz * 5 / 4 * fps->num / fps->den / 1000;
// prevent division by zero
if (hist->samples == 0)
hist->samples = 1;
if (hist->samples == 0) hist->samples = 1;
hist->frames = 0;
hist->total = 0;
@ -78,18 +77,16 @@ void update_rate_histogram(struct rate_hist *hist,
int64_t avg_bitrate = 0;
int64_t sum_sz = 0;
const int64_t now = pkt->data.frame.pts * 1000 *
(uint64_t)cfg->g_timebase.num /
(uint64_t)cfg->g_timebase.den;
(uint64_t)cfg->g_timebase.num /
(uint64_t)cfg->g_timebase.den;
int idx = hist->frames++ % hist->samples;
hist->pts[idx] = now;
hist->sz[idx] = (int)pkt->data.frame.sz;
if (now < cfg->rc_buf_initial_sz)
return;
if (now < cfg->rc_buf_initial_sz) return;
if (!cfg->rc_target_bitrate)
return;
if (!cfg->rc_target_bitrate) return;
then = now;
@ -98,20 +95,16 @@ void update_rate_histogram(struct rate_hist *hist,
const int i_idx = (i - 1) % hist->samples;
then = hist->pts[i_idx];
if (now - then > cfg->rc_buf_sz)
break;
if (now - then > cfg->rc_buf_sz) break;
sum_sz += hist->sz[i_idx];
}
if (now == then)
return;
if (now == then) return;
avg_bitrate = sum_sz * 8 * 1000 / (now - then);
idx = (int)(avg_bitrate * (RATE_BINS / 2) / (cfg->rc_target_bitrate * 1000));
if (idx < 0)
idx = 0;
if (idx > RATE_BINS - 1)
idx = RATE_BINS - 1;
if (idx < 0) idx = 0;
if (idx > RATE_BINS - 1) idx = RATE_BINS - 1;
if (hist->bucket[idx].low > avg_bitrate)
hist->bucket[idx].low = (int)avg_bitrate;
if (hist->bucket[idx].high < avg_bitrate)
@ -120,8 +113,8 @@ void update_rate_histogram(struct rate_hist *hist,
hist->total++;
}
static int merge_hist_buckets(struct hist_bucket *bucket,
int max_buckets, int *num_buckets) {
static int merge_hist_buckets(struct hist_bucket *bucket, int max_buckets,
int *num_buckets) {
int small_bucket = 0, merge_bucket = INT_MAX, big_bucket = 0;
int buckets = *num_buckets;
int i;
@ -129,10 +122,8 @@ static int merge_hist_buckets(struct hist_bucket *bucket,
/* Find the extrema for this list of buckets */
big_bucket = small_bucket = 0;
for (i = 0; i < buckets; i++) {
if (bucket[i].count < bucket[small_bucket].count)
small_bucket = i;
if (bucket[i].count > bucket[big_bucket].count)
big_bucket = i;
if (bucket[i].count < bucket[small_bucket].count) small_bucket = i;
if (bucket[i].count > bucket[big_bucket].count) big_bucket = i;
}
/* If we have too many buckets, merge the smallest with an adjacent
@ -174,13 +165,10 @@ static int merge_hist_buckets(struct hist_bucket *bucket,
*/
big_bucket = small_bucket = 0;
for (i = 0; i < buckets; i++) {
if (i > merge_bucket)
bucket[i] = bucket[i + 1];
if (i > merge_bucket) bucket[i] = bucket[i + 1];
if (bucket[i].count < bucket[small_bucket].count)
small_bucket = i;
if (bucket[i].count > bucket[big_bucket].count)
big_bucket = i;
if (bucket[i].count < bucket[small_bucket].count) small_bucket = i;
if (bucket[i].count > bucket[big_bucket].count) big_bucket = i;
}
}
@ -188,8 +176,8 @@ static int merge_hist_buckets(struct hist_bucket *bucket,
return bucket[big_bucket].count;
}
static void show_histogram(const struct hist_bucket *bucket,
int buckets, int total, int scale) {
static void show_histogram(const struct hist_bucket *bucket, int buckets,
int total, int scale) {
const char *pat1, *pat2;
int i;
@ -232,8 +220,7 @@ static void show_histogram(const struct hist_bucket *bucket,
pct = (float)(100.0 * bucket[i].count / total);
len = HIST_BAR_MAX * bucket[i].count / scale;
if (len < 1)
len = 1;
if (len < 1) len = 1;
assert(len <= HIST_BAR_MAX);
if (bucket[i].low == bucket[i].high)
@ -241,8 +228,7 @@ static void show_histogram(const struct hist_bucket *bucket,
else
fprintf(stderr, pat2, bucket[i].low, bucket[i].high);
for (j = 0; j < HIST_BAR_MAX; j++)
fprintf(stderr, j < len ? "=" : " ");
for (j = 0; j < HIST_BAR_MAX; j++) fprintf(stderr, j < len ? "=" : " ");
fprintf(stderr, "\t%5d (%6.2f%%)\n", bucket[i].count, pct);
}
}
@ -268,14 +254,13 @@ void show_q_histogram(const int counts[64], int max_buckets) {
show_histogram(bucket, buckets, total, scale);
}
void show_rate_histogram(struct rate_hist *hist,
const vpx_codec_enc_cfg_t *cfg, int max_buckets) {
void show_rate_histogram(struct rate_hist *hist, const vpx_codec_enc_cfg_t *cfg,
int max_buckets) {
int i, scale;
int buckets = 0;
for (i = 0; i < RATE_BINS; i++) {
if (hist->bucket[i].low == INT_MAX)
continue;
if (hist->bucket[i].low == INT_MAX) continue;
hist->bucket[buckets++] = hist->bucket[i];
}
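The histogram helpers above are meant to be driven once per encoded packet; a minimal sketch of the call order, assuming the full prototypes from rate_hist.h (init/update/show/destroy), which this diff only shows in part:

/* Sketch only: intended call sequence for the rate histogram helpers.
 * Prototypes are assumed from rate_hist.h; the diff shows them partially. */
#include "vpx/vpx_encoder.h"
#include "rate_hist.h"

void report_rates(const vpx_codec_enc_cfg_t *cfg, const vpx_rational_t *fps,
                  const vpx_codec_cx_pkt_t *pkts, int num_pkts) {
  struct rate_hist *hist = init_rate_histogram(cfg, fps);
  int i;
  for (i = 0; i < num_pkts; ++i)
    update_rate_histogram(hist, cfg, &pkts[i]); /* one call per frame packet */
  show_rate_histogram(hist, cfg, 10); /* merge down to at most 10 buckets */
  destroy_rate_histogram(hist);
}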

View File

@ -23,15 +23,19 @@ class ACMRandom {
explicit ACMRandom(int seed) : random_(seed) {}
void Reset(int seed) {
random_.Reseed(seed);
}
void Reset(int seed) { random_.Reseed(seed); }
uint16_t Rand16(void) {
const uint32_t value =
random_.Generate(testing::internal::Random::kMaxRange);
return (value >> 15) & 0xffff;
}
int16_t Rand9Signed(void) {
// Use 9 bits: values between 255 (0x0FF) and -256 (0x100).
const uint32_t value = random_.Generate(512);
return static_cast<int16_t>(value) - 256;
}
uint8_t Rand8(void) {
const uint32_t value =
random_.Generate(testing::internal::Random::kMaxRange);
@ -46,17 +50,11 @@ class ACMRandom {
return r < 128 ? r << 4 : r >> 4;
}
int PseudoUniform(int range) {
return random_.Generate(range);
}
int PseudoUniform(int range) { return random_.Generate(range); }
int operator()(int n) {
return PseudoUniform(n);
}
int operator()(int n) { return PseudoUniform(n); }
static int DeterministicSeed(void) {
return 0xbaba;
}
static int DeterministicSeed(void) { return 0xbaba; }
private:
testing::internal::Random random_;

View File

@ -17,8 +17,8 @@
namespace {
// Check if any pixel in a 16x16 macroblock varies between frames.
int CheckMb(const vpx_image_t &current, const vpx_image_t &previous,
int mb_r, int mb_c) {
int CheckMb(const vpx_image_t &current, const vpx_image_t &previous, int mb_r,
int mb_c) {
for (int plane = 0; plane < 3; plane++) {
int r = 16 * mb_r;
int c0 = 16 * mb_c;
@ -37,8 +37,9 @@ int CheckMb(const vpx_image_t &current, const vpx_image_t &previous,
for (; r < r_top; ++r) {
for (int c = c0; c < c_top; ++c) {
if (current.planes[plane][current.stride[plane] * r + c] !=
previous.planes[plane][previous.stride[plane] * r + c])
previous.planes[plane][previous.stride[plane] * r + c]) {
return 1;
}
}
}
}

View File

@ -39,6 +39,7 @@ class ActiveMapTest
encoder->Control(VP8E_SET_CPUUSED, cpu_used_);
} else if (video->frame() == 3) {
vpx_active_map_t map = vpx_active_map_t();
/* clang-format off */
uint8_t active_map[9 * 13] = {
1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
@ -50,6 +51,7 @@ class ActiveMapTest
0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0,
};
/* clang-format on */
map.cols = (kWidth + 15) / 16;
map.rows = (kHeight + 15) / 16;
ASSERT_EQ(map.cols, 13u);
@ -77,13 +79,13 @@ TEST_P(ActiveMapTest, Test) {
cfg_.rc_end_usage = VPX_CBR;
cfg_.kf_max_dist = 90000;
::libvpx_test::I420VideoSource video("hantro_odd.yuv", kWidth, kHeight, 30,
1, 0, 20);
::libvpx_test::I420VideoSource video("hantro_odd.yuv", kWidth, kHeight, 30, 1,
0, 20);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
VP9_INSTANTIATE_TEST_CASE(ActiveMapTest,
::testing::Values(::libvpx_test::kRealTime),
::testing::Range(0, 6));
::testing::Range(0, 9));
} // namespace

View File

@ -0,0 +1,136 @@
/*
* Copyright (c) 2016 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <math.h>
#include "test/clear_system_state.h"
#include "test/register_state_check.h"
#include "third_party/googletest/src/include/gtest/gtest.h"
#include "./vpx_dsp_rtcd.h"
#include "vpx/vpx_integer.h"
#include "vpx_dsp/postproc.h"
#include "vpx_mem/vpx_mem.h"
namespace {
static const int kNoiseSize = 3072;
// TODO(jimbankoski): make width and height integers not unsigned.
typedef void (*AddNoiseFunc)(uint8_t *start, const int8_t *noise,
int blackclamp, int whiteclamp, int width,
int height, int pitch);
class AddNoiseTest : public ::testing::TestWithParam<AddNoiseFunc> {
public:
virtual void TearDown() { libvpx_test::ClearSystemState(); }
virtual ~AddNoiseTest() {}
};
double stddev6(char a, char b, char c, char d, char e, char f) {
const double n = (a + b + c + d + e + f) / 6.0;
const double v = ((a - n) * (a - n) + (b - n) * (b - n) + (c - n) * (c - n) +
(d - n) * (d - n) + (e - n) * (e - n) + (f - n) * (f - n)) /
6.0;
return sqrt(v);
}
TEST_P(AddNoiseTest, CheckNoiseAdded) {
const int width = 64;
const int height = 64;
const int image_size = width * height;
int8_t noise[kNoiseSize];
const int clamp = vpx_setup_noise(4.4, noise, kNoiseSize);
uint8_t *const s =
reinterpret_cast<uint8_t *>(vpx_calloc(image_size, sizeof(*s)));
ASSERT_TRUE(s != NULL);
memset(s, 99, image_size * sizeof(*s));
ASM_REGISTER_STATE_CHECK(
GetParam()(s, noise, clamp, clamp, width, height, width));
// Check to make sure we don't end up having either the same or no added
// noise either vertically or horizontally.
for (int i = 0; i < image_size - 6 * width - 6; ++i) {
const double hd = stddev6(s[i] - 99, s[i + 1] - 99, s[i + 2] - 99,
s[i + 3] - 99, s[i + 4] - 99, s[i + 5] - 99);
const double vd = stddev6(s[i] - 99, s[i + width] - 99,
s[i + 2 * width] - 99, s[i + 3 * width] - 99,
s[i + 4 * width] - 99, s[i + 5 * width] - 99);
EXPECT_NE(hd, 0);
EXPECT_NE(vd, 0);
}
// Initialize pixels in the image to 255 and check for roll over.
memset(s, 255, image_size);
ASM_REGISTER_STATE_CHECK(
GetParam()(s, noise, clamp, clamp, width, height, width));
// Check to make sure we don't roll over.
for (int i = 0; i < image_size; ++i) {
EXPECT_GT(static_cast<int>(s[i]), clamp) << "i = " << i;
}
// Initialize pixels in the image to 0 and check for roll under.
memset(s, 0, image_size);
ASM_REGISTER_STATE_CHECK(
GetParam()(s, noise, clamp, clamp, width, height, width));
// Check to make sure we don't roll under.
for (int i = 0; i < image_size; ++i) {
EXPECT_LT(static_cast<int>(s[i]), 255 - clamp) << "i = " << i;
}
vpx_free(s);
}
TEST_P(AddNoiseTest, CheckCvsAssembly) {
const int width = 64;
const int height = 64;
const int image_size = width * height;
int8_t noise[kNoiseSize];
const int clamp = vpx_setup_noise(4.4, noise, kNoiseSize);
uint8_t *const s = reinterpret_cast<uint8_t *>(vpx_calloc(image_size, 1));
uint8_t *const d = reinterpret_cast<uint8_t *>(vpx_calloc(image_size, 1));
ASSERT_TRUE(s != NULL);
ASSERT_TRUE(d != NULL);
memset(s, 99, image_size);
memset(d, 99, image_size);
srand(0);
ASM_REGISTER_STATE_CHECK(
GetParam()(s, noise, clamp, clamp, width, height, width));
srand(0);
ASM_REGISTER_STATE_CHECK(
vpx_plane_add_noise_c(d, noise, clamp, clamp, width, height, width));
for (int i = 0; i < image_size; ++i) {
EXPECT_EQ(static_cast<int>(s[i]), static_cast<int>(d[i])) << "i = " << i;
}
vpx_free(d);
vpx_free(s);
}
INSTANTIATE_TEST_CASE_P(C, AddNoiseTest,
::testing::Values(vpx_plane_add_noise_c));
#if HAVE_SSE2
INSTANTIATE_TEST_CASE_P(SSE2, AddNoiseTest,
::testing::Values(vpx_plane_add_noise_sse2));
#endif
#if HAVE_MSA
INSTANTIATE_TEST_CASE_P(MSA, AddNoiseTest,
::testing::Values(vpx_plane_add_noise_msa));
#endif
} // namespace

View File

@ -0,0 +1,157 @@
/*
* Copyright (c) 2012 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "third_party/googletest/src/include/gtest/gtest.h"
#include "test/codec_factory.h"
#include "test/encode_test_driver.h"
#include "test/i420_video_source.h"
#include "test/util.h"
namespace {
class AltRefAqSegmentTest
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
protected:
AltRefAqSegmentTest() : EncoderTest(GET_PARAM(0)) {}
virtual ~AltRefAqSegmentTest() {}
virtual void SetUp() {
InitializeConfig();
SetMode(GET_PARAM(1));
set_cpu_used_ = GET_PARAM(2);
aq_mode_ = 0;
alt_ref_aq_mode_ = 0;
}
virtual void PreEncodeFrameHook(::libvpx_test::VideoSource *video,
::libvpx_test::Encoder *encoder) {
if (video->frame() == 1) {
encoder->Control(VP8E_SET_CPUUSED, set_cpu_used_);
encoder->Control(VP9E_SET_ALT_REF_AQ, alt_ref_aq_mode_);
encoder->Control(VP9E_SET_AQ_MODE, aq_mode_);
encoder->Control(VP8E_SET_MAX_INTRA_BITRATE_PCT, 100);
}
}
int set_cpu_used_;
int aq_mode_;
int alt_ref_aq_mode_;
};
// Validate that this ALT_REF_AQ/AQ segmentation mode
// (ALT_REF_AQ=0, AQ=0/no_aq)
// encodes and decodes without a mismatch.
TEST_P(AltRefAqSegmentTest, TestNoMisMatchAltRefAQ0) {
cfg_.rc_min_quantizer = 8;
cfg_.rc_max_quantizer = 56;
cfg_.rc_end_usage = VPX_VBR;
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_target_bitrate = 300;
aq_mode_ = 0;
alt_ref_aq_mode_ = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
// Validate that this ALT_REF_AQ/AQ segmentation mode
// (ALT_REF_AQ=0, AQ=1/variance_aq)
// encodes and decodes without a mismatch.
TEST_P(AltRefAqSegmentTest, TestNoMisMatchAltRefAQ1) {
cfg_.rc_min_quantizer = 8;
cfg_.rc_max_quantizer = 56;
cfg_.rc_end_usage = VPX_VBR;
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_target_bitrate = 300;
aq_mode_ = 1;
alt_ref_aq_mode_ = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
// Validate that this ALT_REF_AQ/AQ segmentation mode
// (ALT_REF_AQ=0, AQ=2/complexity_aq)
// encodes and decodes without a mismatch.
TEST_P(AltRefAqSegmentTest, TestNoMisMatchAltRefAQ2) {
cfg_.rc_min_quantizer = 8;
cfg_.rc_max_quantizer = 56;
cfg_.rc_end_usage = VPX_VBR;
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_target_bitrate = 300;
aq_mode_ = 2;
alt_ref_aq_mode_ = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
// Validate that this ALT_REF_AQ/AQ segmentation mode
// (ALT_REF_AQ=0, AQ=3/cyclicrefresh_aq)
// encodes and decodes without a mismatch.
TEST_P(AltRefAqSegmentTest, TestNoMisMatchAltRefAQ3) {
cfg_.rc_min_quantizer = 8;
cfg_.rc_max_quantizer = 56;
cfg_.rc_end_usage = VPX_VBR;
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_target_bitrate = 300;
aq_mode_ = 3;
alt_ref_aq_mode_ = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
// Validate that this ALT_REF_AQ/AQ segmentation mode
// (ALT_REF_AQ=0, AQ=4/equator360_aq)
// encodes and decodes without a mismatch.
TEST_P(AltRefAqSegmentTest, TestNoMisMatchAltRefAQ4) {
cfg_.rc_min_quantizer = 8;
cfg_.rc_max_quantizer = 56;
cfg_.rc_end_usage = VPX_VBR;
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_target_bitrate = 300;
aq_mode_ = 4;
alt_ref_aq_mode_ = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
VP9_INSTANTIATE_TEST_CASE(AltRefAqSegmentTest,
::testing::Values(::libvpx_test::kOnePassGood,
::libvpx_test::kTwoPassGood),
::testing::Range(2, 5));
} // namespace

View File

@ -14,12 +14,14 @@
#include "test/util.h"
namespace {
#if CONFIG_VP8_ENCODER
// lookahead range: [kLookAheadMin, kLookAheadMax).
const int kLookAheadMin = 5;
const int kLookAheadMax = 26;
class AltRefTest : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<int> {
public ::libvpx_test::CodecTestWithParam<int> {
protected:
AltRefTest() : EncoderTest(GET_PARAM(0)), altref_count_(0) {}
virtual ~AltRefTest() {}
@ -29,9 +31,7 @@ class AltRefTest : public ::libvpx_test::EncoderTest,
SetMode(libvpx_test::kTwoPassGood);
}
virtual void BeginPassHook(unsigned int pass) {
altref_count_ = 0;
}
virtual void BeginPassHook(unsigned int /*pass*/) { altref_count_ = 0; }
virtual void PreEncodeFrameHook(libvpx_test::VideoSource *video,
libvpx_test::Encoder *encoder) {
@ -63,7 +63,90 @@ TEST_P(AltRefTest, MonotonicTimestamps) {
EXPECT_GE(altref_count(), 1);
}
VP8_INSTANTIATE_TEST_CASE(AltRefTest,
::testing::Range(kLookAheadMin, kLookAheadMax));
#endif // CONFIG_VP8_ENCODER
class AltRefForcedKeyTestLarge
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
protected:
AltRefForcedKeyTestLarge()
: EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)),
cpu_used_(GET_PARAM(2)), forced_kf_frame_num_(1), frame_num_(0) {}
virtual ~AltRefForcedKeyTestLarge() {}
virtual void SetUp() {
InitializeConfig();
SetMode(encoding_mode_);
cfg_.rc_end_usage = VPX_VBR;
cfg_.g_threads = 0;
}
virtual void PreEncodeFrameHook(::libvpx_test::VideoSource *video,
::libvpx_test::Encoder *encoder) {
if (video->frame() == 0) {
encoder->Control(VP8E_SET_CPUUSED, cpu_used_);
encoder->Control(VP8E_SET_ENABLEAUTOALTREF, 1);
#if CONFIG_VP9_ENCODER
// override test default for tile columns if necessary.
if (GET_PARAM(0) == &libvpx_test::kVP9) {
encoder->Control(VP9E_SET_TILE_COLUMNS, 6);
}
#endif
}
frame_flags_ =
(video->frame() == forced_kf_frame_num_) ? VPX_EFLAG_FORCE_KF : 0;
}
virtual void FramePktHook(const vpx_codec_cx_pkt_t *pkt) {
if (frame_num_ == forced_kf_frame_num_) {
ASSERT_TRUE(!!(pkt->data.frame.flags & VPX_FRAME_IS_KEY))
<< "Frame #" << frame_num_ << " isn't a keyframe!";
}
++frame_num_;
}
::libvpx_test::TestMode encoding_mode_;
int cpu_used_;
unsigned int forced_kf_frame_num_;
unsigned int frame_num_;
};
TEST_P(AltRefForcedKeyTestLarge, Frame1IsKey) {
const vpx_rational timebase = { 1, 30 };
const int lag_values[] = { 3, 15, 25, -1 };
forced_kf_frame_num_ = 1;
for (int i = 0; lag_values[i] != -1; ++i) {
frame_num_ = 0;
cfg_.g_lag_in_frames = lag_values[i];
libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
timebase.den, timebase.num, 0, 30);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
}
TEST_P(AltRefForcedKeyTestLarge, ForcedFrameIsKey) {
const vpx_rational timebase = { 1, 30 };
const int lag_values[] = { 3, 15, 25, -1 };
for (int i = 0; lag_values[i] != -1; ++i) {
frame_num_ = 0;
forced_kf_frame_num_ = lag_values[i] - 1;
cfg_.g_lag_in_frames = lag_values[i];
libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
timebase.den, timebase.num, 0, 30);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
}
VP8_INSTANTIATE_TEST_CASE(AltRefForcedKeyTestLarge,
::testing::Values(::libvpx_test::kOnePassGood),
::testing::Range(0, 9));
VP9_INSTANTIATE_TEST_CASE(AltRefForcedKeyTestLarge,
::testing::Values(::libvpx_test::kOnePassGood),
::testing::Range(0, 9));
} // namespace

View File

@ -57,7 +57,7 @@ TEST_P(AqSegmentTest, TestNoMisMatchAQ1) {
aq_mode_ = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
@ -77,7 +77,7 @@ TEST_P(AqSegmentTest, TestNoMisMatchAQ2) {
aq_mode_ = 2;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
@ -97,7 +97,7 @@ TEST_P(AqSegmentTest, TestNoMisMatchAQ3) {
aq_mode_ = 3;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 100);
30, 1, 0, 100);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}

View File

@ -31,7 +31,7 @@ class AverageTestBase : public ::testing::Test {
AverageTestBase(int width, int height) : width_(width), height_(height) {}
static void SetUpTestCase() {
source_data_ = reinterpret_cast<uint8_t*>(
source_data_ = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBlockSize));
}
@ -40,9 +40,7 @@ class AverageTestBase : public ::testing::Test {
source_data_ = NULL;
}
virtual void TearDown() {
libvpx_test::ClearSystemState();
}
virtual void TearDown() { libvpx_test::ClearSystemState(); }
protected:
// Handle blocks up to 4 blocks 64x64 with stride up to 128
@ -55,47 +53,46 @@ class AverageTestBase : public ::testing::Test {
}
// Sum Pixels
unsigned int ReferenceAverage8x8(const uint8_t* source, int pitch) {
unsigned int ReferenceAverage8x8(const uint8_t *source, int pitch) {
unsigned int average = 0;
for (int h = 0; h < 8; ++h)
for (int w = 0; w < 8; ++w)
average += source[h * pitch + w];
for (int h = 0; h < 8; ++h) {
for (int w = 0; w < 8; ++w) average += source[h * pitch + w];
}
return ((average + 32) >> 6);
}
unsigned int ReferenceAverage4x4(const uint8_t* source, int pitch) {
unsigned int ReferenceAverage4x4(const uint8_t *source, int pitch) {
unsigned int average = 0;
for (int h = 0; h < 4; ++h)
for (int w = 0; w < 4; ++w)
average += source[h * pitch + w];
for (int h = 0; h < 4; ++h) {
for (int w = 0; w < 4; ++w) average += source[h * pitch + w];
}
return ((average + 8) >> 4);
}
void FillConstant(uint8_t fill_constant) {
for (int i = 0; i < width_ * height_; ++i) {
source_data_[i] = fill_constant;
source_data_[i] = fill_constant;
}
}
void FillRandom() {
for (int i = 0; i < width_ * height_; ++i) {
source_data_[i] = rnd_.Rand8();
source_data_[i] = rnd_.Rand8();
}
}
int width_, height_;
static uint8_t* source_data_;
static uint8_t *source_data_;
int source_stride_;
ACMRandom rnd_;
};
typedef unsigned int (*AverageFunction)(const uint8_t* s, int pitch);
typedef unsigned int (*AverageFunction)(const uint8_t *s, int pitch);
typedef std::tr1::tuple<int, int, int, int, AverageFunction> AvgFunc;
class AverageTest
: public AverageTestBase,
public ::testing::WithParamInterface<AvgFunc>{
class AverageTest : public AverageTestBase,
public ::testing::WithParamInterface<AvgFunc> {
public:
AverageTest() : AverageTestBase(GET_PARAM(0), GET_PARAM(1)) {}
@ -103,17 +100,17 @@ class AverageTest
void CheckAverages() {
unsigned int expected = 0;
if (GET_PARAM(3) == 8) {
expected = ReferenceAverage8x8(source_data_+ GET_PARAM(2),
source_stride_);
} else if (GET_PARAM(3) == 4) {
expected = ReferenceAverage4x4(source_data_+ GET_PARAM(2),
source_stride_);
expected =
ReferenceAverage8x8(source_data_ + GET_PARAM(2), source_stride_);
} else if (GET_PARAM(3) == 4) {
expected =
ReferenceAverage4x4(source_data_ + GET_PARAM(2), source_stride_);
}
ASM_REGISTER_STATE_CHECK(GET_PARAM(4)(source_data_+ GET_PARAM(2),
source_stride_));
unsigned int actual = GET_PARAM(4)(source_data_+ GET_PARAM(2),
source_stride_);
ASM_REGISTER_STATE_CHECK(
GET_PARAM(4)(source_data_ + GET_PARAM(2), source_stride_));
unsigned int actual =
GET_PARAM(4)(source_data_ + GET_PARAM(2), source_stride_);
EXPECT_EQ(expected, actual);
}
@ -124,23 +121,20 @@ typedef void (*IntProRowFunc)(int16_t hbuf[16], uint8_t const *ref,
typedef std::tr1::tuple<int, IntProRowFunc, IntProRowFunc> IntProRowParam;
class IntProRowTest
: public AverageTestBase,
public ::testing::WithParamInterface<IntProRowParam> {
class IntProRowTest : public AverageTestBase,
public ::testing::WithParamInterface<IntProRowParam> {
public:
IntProRowTest()
: AverageTestBase(16, GET_PARAM(0)),
hbuf_asm_(NULL),
hbuf_c_(NULL) {
: AverageTestBase(16, GET_PARAM(0)), hbuf_asm_(NULL), hbuf_c_(NULL) {
asm_func_ = GET_PARAM(1);
c_func_ = GET_PARAM(2);
}
protected:
virtual void SetUp() {
hbuf_asm_ = reinterpret_cast<int16_t*>(
hbuf_asm_ = reinterpret_cast<int16_t *>(
vpx_memalign(kDataAlignment, sizeof(*hbuf_asm_) * 16));
hbuf_c_ = reinterpret_cast<int16_t*>(
hbuf_c_ = reinterpret_cast<int16_t *>(
vpx_memalign(kDataAlignment, sizeof(*hbuf_c_) * 16));
}
@ -169,9 +163,8 @@ typedef int16_t (*IntProColFunc)(uint8_t const *ref, const int width);
typedef std::tr1::tuple<int, IntProColFunc, IntProColFunc> IntProColParam;
class IntProColTest
: public AverageTestBase,
public ::testing::WithParamInterface<IntProColParam> {
class IntProColTest : public AverageTestBase,
public ::testing::WithParamInterface<IntProColParam> {
public:
IntProColTest() : AverageTestBase(GET_PARAM(0), 1), sum_asm_(0), sum_c_(0) {
asm_func_ = GET_PARAM(1);
@ -195,15 +188,14 @@ class IntProColTest
typedef int (*SatdFunc)(const int16_t *coeffs, int length);
typedef std::tr1::tuple<int, SatdFunc> SatdTestParam;
class SatdTest
: public ::testing::Test,
public ::testing::WithParamInterface<SatdTestParam> {
class SatdTest : public ::testing::Test,
public ::testing::WithParamInterface<SatdTestParam> {
protected:
virtual void SetUp() {
satd_size_ = GET_PARAM(0);
satd_func_ = GET_PARAM(1);
rnd_.Reset(ACMRandom::DeterministicSeed());
src_ = reinterpret_cast<int16_t*>(
src_ = reinterpret_cast<int16_t *>(
vpx_memalign(16, sizeof(*src_) * satd_size_));
ASSERT_TRUE(src_ != NULL);
}
@ -235,7 +227,7 @@ class SatdTest
ACMRandom rnd_;
};
uint8_t* AverageTestBase::source_data_ = NULL;
uint8_t *AverageTestBase::source_data_ = NULL;
TEST_P(AverageTest, MinValue) {
FillConstant(0);
@ -286,7 +278,6 @@ TEST_P(IntProColTest, Random) {
RunComparison();
}
TEST_P(SatdTest, MinValue) {
const int kMin = -32640;
const int expected = -kMin * satd_size_;
@ -320,92 +311,86 @@ using std::tr1::make_tuple;
INSTANTIATE_TEST_CASE_P(
C, AverageTest,
::testing::Values(
make_tuple(16, 16, 1, 8, &vpx_avg_8x8_c),
make_tuple(16, 16, 1, 4, &vpx_avg_4x4_c)));
::testing::Values(make_tuple(16, 16, 1, 8, &vpx_avg_8x8_c),
make_tuple(16, 16, 1, 4, &vpx_avg_4x4_c)));
INSTANTIATE_TEST_CASE_P(
C, SatdTest,
::testing::Values(
make_tuple(16, &vpx_satd_c),
make_tuple(64, &vpx_satd_c),
make_tuple(256, &vpx_satd_c),
make_tuple(1024, &vpx_satd_c)));
INSTANTIATE_TEST_CASE_P(C, SatdTest,
::testing::Values(make_tuple(16, &vpx_satd_c),
make_tuple(64, &vpx_satd_c),
make_tuple(256, &vpx_satd_c),
make_tuple(1024, &vpx_satd_c)));
#if HAVE_SSE2
INSTANTIATE_TEST_CASE_P(
SSE2, AverageTest,
::testing::Values(
make_tuple(16, 16, 0, 8, &vpx_avg_8x8_sse2),
make_tuple(16, 16, 5, 8, &vpx_avg_8x8_sse2),
make_tuple(32, 32, 15, 8, &vpx_avg_8x8_sse2),
make_tuple(16, 16, 0, 4, &vpx_avg_4x4_sse2),
make_tuple(16, 16, 5, 4, &vpx_avg_4x4_sse2),
make_tuple(32, 32, 15, 4, &vpx_avg_4x4_sse2)));
::testing::Values(make_tuple(16, 16, 0, 8, &vpx_avg_8x8_sse2),
make_tuple(16, 16, 5, 8, &vpx_avg_8x8_sse2),
make_tuple(32, 32, 15, 8, &vpx_avg_8x8_sse2),
make_tuple(16, 16, 0, 4, &vpx_avg_4x4_sse2),
make_tuple(16, 16, 5, 4, &vpx_avg_4x4_sse2),
make_tuple(32, 32, 15, 4, &vpx_avg_4x4_sse2)));
INSTANTIATE_TEST_CASE_P(
SSE2, IntProRowTest, ::testing::Values(
make_tuple(16, &vpx_int_pro_row_sse2, &vpx_int_pro_row_c),
make_tuple(32, &vpx_int_pro_row_sse2, &vpx_int_pro_row_c),
make_tuple(64, &vpx_int_pro_row_sse2, &vpx_int_pro_row_c)));
SSE2, IntProRowTest,
::testing::Values(make_tuple(16, &vpx_int_pro_row_sse2, &vpx_int_pro_row_c),
make_tuple(32, &vpx_int_pro_row_sse2, &vpx_int_pro_row_c),
make_tuple(64, &vpx_int_pro_row_sse2,
&vpx_int_pro_row_c)));
INSTANTIATE_TEST_CASE_P(
SSE2, IntProColTest, ::testing::Values(
make_tuple(16, &vpx_int_pro_col_sse2, &vpx_int_pro_col_c),
make_tuple(32, &vpx_int_pro_col_sse2, &vpx_int_pro_col_c),
make_tuple(64, &vpx_int_pro_col_sse2, &vpx_int_pro_col_c)));
SSE2, IntProColTest,
::testing::Values(make_tuple(16, &vpx_int_pro_col_sse2, &vpx_int_pro_col_c),
make_tuple(32, &vpx_int_pro_col_sse2, &vpx_int_pro_col_c),
make_tuple(64, &vpx_int_pro_col_sse2,
&vpx_int_pro_col_c)));
INSTANTIATE_TEST_CASE_P(
SSE2, SatdTest,
::testing::Values(
make_tuple(16, &vpx_satd_sse2),
make_tuple(64, &vpx_satd_sse2),
make_tuple(256, &vpx_satd_sse2),
make_tuple(1024, &vpx_satd_sse2)));
INSTANTIATE_TEST_CASE_P(SSE2, SatdTest,
::testing::Values(make_tuple(16, &vpx_satd_sse2),
make_tuple(64, &vpx_satd_sse2),
make_tuple(256, &vpx_satd_sse2),
make_tuple(1024, &vpx_satd_sse2)));
#endif
#if HAVE_NEON
INSTANTIATE_TEST_CASE_P(
NEON, AverageTest,
::testing::Values(
make_tuple(16, 16, 0, 8, &vpx_avg_8x8_neon),
make_tuple(16, 16, 5, 8, &vpx_avg_8x8_neon),
make_tuple(32, 32, 15, 8, &vpx_avg_8x8_neon),
make_tuple(16, 16, 0, 4, &vpx_avg_4x4_neon),
make_tuple(16, 16, 5, 4, &vpx_avg_4x4_neon),
make_tuple(32, 32, 15, 4, &vpx_avg_4x4_neon)));
::testing::Values(make_tuple(16, 16, 0, 8, &vpx_avg_8x8_neon),
make_tuple(16, 16, 5, 8, &vpx_avg_8x8_neon),
make_tuple(32, 32, 15, 8, &vpx_avg_8x8_neon),
make_tuple(16, 16, 0, 4, &vpx_avg_4x4_neon),
make_tuple(16, 16, 5, 4, &vpx_avg_4x4_neon),
make_tuple(32, 32, 15, 4, &vpx_avg_4x4_neon)));
INSTANTIATE_TEST_CASE_P(
NEON, IntProRowTest, ::testing::Values(
make_tuple(16, &vpx_int_pro_row_neon, &vpx_int_pro_row_c),
make_tuple(32, &vpx_int_pro_row_neon, &vpx_int_pro_row_c),
make_tuple(64, &vpx_int_pro_row_neon, &vpx_int_pro_row_c)));
NEON, IntProRowTest,
::testing::Values(make_tuple(16, &vpx_int_pro_row_neon, &vpx_int_pro_row_c),
make_tuple(32, &vpx_int_pro_row_neon, &vpx_int_pro_row_c),
make_tuple(64, &vpx_int_pro_row_neon,
&vpx_int_pro_row_c)));
INSTANTIATE_TEST_CASE_P(
NEON, IntProColTest, ::testing::Values(
make_tuple(16, &vpx_int_pro_col_neon, &vpx_int_pro_col_c),
make_tuple(32, &vpx_int_pro_col_neon, &vpx_int_pro_col_c),
make_tuple(64, &vpx_int_pro_col_neon, &vpx_int_pro_col_c)));
NEON, IntProColTest,
::testing::Values(make_tuple(16, &vpx_int_pro_col_neon, &vpx_int_pro_col_c),
make_tuple(32, &vpx_int_pro_col_neon, &vpx_int_pro_col_c),
make_tuple(64, &vpx_int_pro_col_neon,
&vpx_int_pro_col_c)));
INSTANTIATE_TEST_CASE_P(
NEON, SatdTest,
::testing::Values(
make_tuple(16, &vpx_satd_neon),
make_tuple(64, &vpx_satd_neon),
make_tuple(256, &vpx_satd_neon),
make_tuple(1024, &vpx_satd_neon)));
INSTANTIATE_TEST_CASE_P(NEON, SatdTest,
::testing::Values(make_tuple(16, &vpx_satd_neon),
make_tuple(64, &vpx_satd_neon),
make_tuple(256, &vpx_satd_neon),
make_tuple(1024, &vpx_satd_neon)));
#endif
#if HAVE_MSA
INSTANTIATE_TEST_CASE_P(
MSA, AverageTest,
::testing::Values(
make_tuple(16, 16, 0, 8, &vpx_avg_8x8_msa),
make_tuple(16, 16, 5, 8, &vpx_avg_8x8_msa),
make_tuple(32, 32, 15, 8, &vpx_avg_8x8_msa),
make_tuple(16, 16, 0, 4, &vpx_avg_4x4_msa),
make_tuple(16, 16, 5, 4, &vpx_avg_4x4_msa),
make_tuple(32, 32, 15, 4, &vpx_avg_4x4_msa)));
::testing::Values(make_tuple(16, 16, 0, 8, &vpx_avg_8x8_msa),
make_tuple(16, 16, 5, 8, &vpx_avg_8x8_msa),
make_tuple(32, 32, 15, 8, &vpx_avg_8x8_msa),
make_tuple(16, 16, 0, 4, &vpx_avg_4x4_msa),
make_tuple(16, 16, 5, 4, &vpx_avg_4x4_msa),
make_tuple(32, 32, 15, 4, &vpx_avg_4x4_msa)));
#endif
} // namespace

View File

@ -26,11 +26,9 @@
#include "vpx_mem/vpx_mem.h"
extern "C"
double vp9_get_blockiness(const unsigned char *img1, int img1_pitch,
const unsigned char *img2, int img2_pitch,
int width, int height);
extern "C" double vp9_get_blockiness(const unsigned char *img1, int img1_pitch,
const unsigned char *img2, int img2_pitch,
int width, int height);
using libvpx_test::ACMRandom;
@ -40,9 +38,9 @@ class BlockinessTestBase : public ::testing::Test {
BlockinessTestBase(int width, int height) : width_(width), height_(height) {}
static void SetUpTestCase() {
source_data_ = reinterpret_cast<uint8_t*>(
source_data_ = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBufferSize));
reference_data_ = reinterpret_cast<uint8_t*>(
reference_data_ = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBufferSize));
}
@ -53,14 +51,12 @@ class BlockinessTestBase : public ::testing::Test {
reference_data_ = NULL;
}
virtual void TearDown() {
libvpx_test::ClearSystemState();
}
virtual void TearDown() { libvpx_test::ClearSystemState(); }
protected:
// Handle frames up to 640x480
static const int kDataAlignment = 16;
static const int kDataBufferSize = 640*480;
static const int kDataBufferSize = 640 * 480;
virtual void SetUp() {
source_stride_ = (width_ + 31) & ~31;
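The stride setup above rounds each row out to the next multiple of 32, which is why the non-multiple-of-32 frame sizes used later (318x242, 318x238) still get aligned rows. A minimal stand-alone sketch of that arithmetic, included as an editor's illustration rather than as part of this commit:

#include <cassert>

// Round a width up to the next multiple of 32: adding 31 and clearing the
// low five bits gives the smallest multiple of 32 that is >= width.
static int RoundUpTo32(int width) { return (width + 31) & ~31; }

int main() {
  assert(RoundUpTo32(318) == 320);  // 318 rounds up to 320
  assert(RoundUpTo32(320) == 320);  // already a multiple of 32
  assert(RoundUpTo32(321) == 352);  // next multiple after 320
  return 0;
}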
@ -68,8 +64,8 @@ class BlockinessTestBase : public ::testing::Test {
rnd_.Reset(ACMRandom::DeterministicSeed());
}
void FillConstant(uint8_t *data, int stride, uint8_t fill_constant,
int width, int height) {
void FillConstant(uint8_t *data, int stride, uint8_t fill_constant, int width,
int height) {
for (int h = 0; h < height; ++h) {
for (int w = 0; w < width; ++w) {
data[h * stride + w] = fill_constant;
@ -104,10 +100,11 @@ class BlockinessTestBase : public ::testing::Test {
void FillCheckerboard(uint8_t *data, int stride) {
for (int h = 0; h < height_; h += 4) {
for (int w = 0; w < width_; w += 4) {
if (((h/4) ^ (w/4)) & 1)
if (((h / 4) ^ (w / 4)) & 1) {
FillConstant(data + h * stride + w, stride, 255, 4, 4);
else
} else {
FillConstant(data + h * stride + w, stride, 0, 4, 4);
}
}
}
}
@ -135,9 +132,9 @@ class BlockinessTestBase : public ::testing::Test {
}
}
int width_, height_;
static uint8_t* source_data_;
static uint8_t *source_data_;
int source_stride_;
static uint8_t* reference_data_;
static uint8_t *reference_data_;
int reference_stride_;
ACMRandom rnd_;
@ -152,32 +149,32 @@ class BlockinessVP9Test
BlockinessVP9Test() : BlockinessTestBase(GET_PARAM(0), GET_PARAM(1)) {}
protected:
int CheckBlockiness() {
return vp9_get_blockiness(source_data_, source_stride_,
reference_data_, reference_stride_,
width_, height_);
double GetBlockiness() const {
return vp9_get_blockiness(source_data_, source_stride_, reference_data_,
reference_stride_, width_, height_);
}
};
#endif // CONFIG_VP9_ENCODER
uint8_t* BlockinessTestBase::source_data_ = NULL;
uint8_t* BlockinessTestBase::reference_data_ = NULL;
uint8_t *BlockinessTestBase::source_data_ = NULL;
uint8_t *BlockinessTestBase::reference_data_ = NULL;
#if CONFIG_VP9_ENCODER
TEST_P(BlockinessVP9Test, SourceBlockierThanReference) {
// Source is blockier than reference.
FillRandomBlocky(source_data_, source_stride_);
FillConstant(reference_data_, reference_stride_, 128);
int super_blocky = CheckBlockiness();
const double super_blocky = GetBlockiness();
EXPECT_EQ(0, super_blocky) << "Blocky source should produce 0 blockiness.";
EXPECT_DOUBLE_EQ(0.0, super_blocky)
<< "Blocky source should produce 0 blockiness.";
}
TEST_P(BlockinessVP9Test, ReferenceBlockierThanSource) {
// Reference is blockier than source.
FillConstant(source_data_, source_stride_, 128);
FillRandomBlocky(reference_data_, reference_stride_);
int super_blocky = CheckBlockiness();
const double super_blocky = GetBlockiness();
EXPECT_GT(super_blocky, 0.0)
<< "Blocky reference should score high for blockiness.";
@ -187,10 +184,10 @@ TEST_P(BlockinessVP9Test, BlurringDecreasesBlockiness) {
// Reference is blockier than source.
FillConstant(source_data_, source_stride_, 128);
FillRandomBlocky(reference_data_, reference_stride_);
int super_blocky = CheckBlockiness();
const double super_blocky = GetBlockiness();
Blur(reference_data_, reference_stride_, 4);
int less_blocky = CheckBlockiness();
const double less_blocky = GetBlockiness();
EXPECT_GT(super_blocky, less_blocky)
<< "A straight blur should decrease blockiness.";
@ -201,17 +198,16 @@ TEST_P(BlockinessVP9Test, WorstCaseBlockiness) {
FillConstant(source_data_, source_stride_, 128);
FillCheckerboard(reference_data_, reference_stride_);
int super_blocky = CheckBlockiness();
const double super_blocky = GetBlockiness();
Blur(reference_data_, reference_stride_, 4);
int less_blocky = CheckBlockiness();
const double less_blocky = GetBlockiness();
EXPECT_GT(super_blocky, less_blocky)
<< "A straight blur should decrease blockiness.";
}
#endif // CONFIG_VP9_ENCODER
using std::tr1::make_tuple;
//------------------------------------------------------------------------------
@ -219,9 +215,7 @@ using std::tr1::make_tuple;
#if CONFIG_VP9_ENCODER
const BlockinessParam c_vp9_tests[] = {
make_tuple(320, 240),
make_tuple(318, 242),
make_tuple(318, 238),
make_tuple(320, 240), make_tuple(318, 242), make_tuple(318, 238),
};
INSTANTIATE_TEST_CASE_P(C, BlockinessVP9Test, ::testing::ValuesIn(c_vp9_tests));
#endif

View File

@ -17,8 +17,9 @@
namespace {
class BordersTest : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
class BordersTest
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
protected:
BordersTest() : EncoderTest(GET_PARAM(0)) {}
virtual ~BordersTest() {}
@ -52,7 +53,7 @@ TEST_P(BordersTest, TestEncodeHighBitrate) {
// extend into the border and test the border condition.
cfg_.g_lag_in_frames = 25;
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_minsection_pct = 2000;
cfg_.rc_2pass_vbr_maxsection_pct = 2000;
cfg_.rc_target_bitrate = 2000;
cfg_.rc_max_quantizer = 10;
@ -78,9 +79,6 @@ TEST_P(BordersTest, TestLowBitrate) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
VP9_INSTANTIATE_TEST_CASE(BordersTest, ::testing::Values(
::libvpx_test::kTwoPassGood));
VP10_INSTANTIATE_TEST_CASE(BordersTest, ::testing::Values(
::libvpx_test::kTwoPassGood));
VP9_INSTANTIATE_TEST_CASE(BordersTest,
::testing::Values(::libvpx_test::kTwoPassGood));
} // namespace

View File

@ -36,29 +36,26 @@ struct ByteAlignmentTestParam {
};
const ByteAlignmentTestParam kBaTestParams[] = {
{kLegacyByteAlignment, VPX_CODEC_OK, true},
{32, VPX_CODEC_OK, true},
{64, VPX_CODEC_OK, true},
{128, VPX_CODEC_OK, true},
{256, VPX_CODEC_OK, true},
{512, VPX_CODEC_OK, true},
{1024, VPX_CODEC_OK, true},
{1, VPX_CODEC_INVALID_PARAM, false},
{-2, VPX_CODEC_INVALID_PARAM, false},
{4, VPX_CODEC_INVALID_PARAM, false},
{16, VPX_CODEC_INVALID_PARAM, false},
{255, VPX_CODEC_INVALID_PARAM, false},
{2048, VPX_CODEC_INVALID_PARAM, false},
{ kLegacyByteAlignment, VPX_CODEC_OK, true },
{ 32, VPX_CODEC_OK, true },
{ 64, VPX_CODEC_OK, true },
{ 128, VPX_CODEC_OK, true },
{ 256, VPX_CODEC_OK, true },
{ 512, VPX_CODEC_OK, true },
{ 1024, VPX_CODEC_OK, true },
{ 1, VPX_CODEC_INVALID_PARAM, false },
{ -2, VPX_CODEC_INVALID_PARAM, false },
{ 4, VPX_CODEC_INVALID_PARAM, false },
{ 16, VPX_CODEC_INVALID_PARAM, false },
{ 255, VPX_CODEC_INVALID_PARAM, false },
{ 2048, VPX_CODEC_INVALID_PARAM, false },
};
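Taken together, the table above implies the validation rule the test expects from the decoder: the legacy sentinel is accepted, explicit alignments must be powers of two between 32 and 1024, and everything else (1, -2, 4, 16, 255, 2048) is rejected with VPX_CODEC_INVALID_PARAM. The helper below is a hypothetical sketch of that inferred rule; the function name and the assumption that the legacy sentinel is 0 are the editor's, not the library's.

#include <cassert>

// Inferred from kBaTestParams: the legacy sentinel passes through, otherwise
// the alignment must be a power of two in [32, 1024].
static bool ByteAlignmentLooksValid(int alignment, int legacy_sentinel) {
  if (alignment == legacy_sentinel) return true;
  if (alignment < 32 || alignment > 1024) return false;
  return (alignment & (alignment - 1)) == 0;  // power-of-two check
}

int main() {
  const int kLegacy = 0;  // assumption, consistent with the 0 entries used below
  assert(ByteAlignmentLooksValid(kLegacy, kLegacy));
  assert(ByteAlignmentLooksValid(64, kLegacy));
  assert(!ByteAlignmentLooksValid(16, kLegacy));    // below the 32 minimum
  assert(!ByteAlignmentLooksValid(255, kLegacy));   // not a power of two
  assert(!ByteAlignmentLooksValid(2048, kLegacy));  // above the 1024 maximum
  return 0;
}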
// Class for testing byte alignment of reference buffers.
class ByteAlignmentTest
: public ::testing::TestWithParam<ByteAlignmentTestParam> {
protected:
ByteAlignmentTest()
: video_(NULL),
decoder_(NULL),
md5_file_(NULL) {}
ByteAlignmentTest() : video_(NULL), decoder_(NULL), md5_file_(NULL) {}
virtual void SetUp() {
video_ = new libvpx_test::WebMVideoSource(kVP9TestFile);
@ -74,8 +71,7 @@ class ByteAlignmentTest
}
virtual void TearDown() {
if (md5_file_ != NULL)
fclose(md5_file_);
if (md5_file_ != NULL) fclose(md5_file_);
delete decoder_;
delete video_;
@ -89,8 +85,7 @@ class ByteAlignmentTest
const vpx_codec_err_t res =
decoder_->DecodeFrame(video_->cxdata(), video_->frame_size());
CheckDecodedFrames(byte_alignment_to_check);
if (res == VPX_CODEC_OK)
video_->Next();
if (res == VPX_CODEC_OK) video_->Next();
return res;
}
@ -98,8 +93,7 @@ class ByteAlignmentTest
for (; video_->cxdata() != NULL; video_->Next()) {
const vpx_codec_err_t res =
decoder_->DecodeFrame(video_->cxdata(), video_->frame_size());
if (res != VPX_CODEC_OK)
return res;
if (res != VPX_CODEC_OK) return res;
CheckDecodedFrames(byte_alignment_to_check);
}
return VPX_CODEC_OK;
@ -135,7 +129,7 @@ class ByteAlignmentTest
void OpenMd5File(const std::string &md5_file_name_) {
md5_file_ = libvpx_test::OpenTestDataFile(md5_file_name_);
ASSERT_TRUE(md5_file_ != NULL) << "MD5 file open failed. Filename: "
<< md5_file_name_;
<< md5_file_name_;
}
void CheckMd5(const vpx_image_t &img) {
@ -163,8 +157,8 @@ class ByteAlignmentTest
TEST_F(ByteAlignmentTest, SwitchByteAlignment) {
const int num_elements = 14;
const int byte_alignments[] = { 0, 32, 64, 128, 256, 512, 1024,
0, 1024, 32, 512, 64, 256, 128 };
const int byte_alignments[] = { 0, 32, 64, 128, 256, 512, 1024,
0, 1024, 32, 512, 64, 256, 128 };
for (int i = 0; i < num_elements; ++i) {
SetByteAlignment(byte_alignments[i], VPX_CODEC_OK);

View File

@ -12,7 +12,7 @@
#include "./vpx_config.h"
#if ARCH_X86 || ARCH_X86_64
# include "vpx_ports/x86.h"
#include "vpx_ports/x86.h"
#endif
namespace libvpx_test {

View File

@ -13,10 +13,10 @@
#include "./vpx_config.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vpx_encoder.h"
#if CONFIG_VP8_ENCODER || CONFIG_VP9_ENCODER || CONFIG_VP10_ENCODER
#if CONFIG_VP8_ENCODER || CONFIG_VP9_ENCODER
#include "vpx/vp8cx.h"
#endif
#if CONFIG_VP8_DECODER || CONFIG_VP9_DECODER || CONFIG_VP10_DECODER
#if CONFIG_VP8_DECODER || CONFIG_VP9_DECODER
#include "vpx/vp8dx.h"
#endif
@ -32,15 +32,12 @@ class CodecFactory {
virtual ~CodecFactory() {}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
unsigned long deadline) const = 0;
virtual Decoder *CreateDecoder(vpx_codec_dec_cfg_t cfg) const = 0;
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags,
unsigned long deadline) // NOLINT(runtime/int)
const = 0;
virtual Decoder *CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags) const = 0;
virtual Encoder* CreateEncoder(vpx_codec_enc_cfg_t cfg,
virtual Encoder *CreateEncoder(vpx_codec_enc_cfg_t cfg,
unsigned long deadline,
const unsigned long init_flags,
TwopassStatsStore *stats) const = 0;
@ -53,20 +50,20 @@ class CodecFactory {
* to avoid having to include a pointer to the CodecFactory in every test
* definition.
*/
template<class T1>
class CodecTestWithParam : public ::testing::TestWithParam<
std::tr1::tuple< const libvpx_test::CodecFactory*, T1 > > {
};
template <class T1>
class CodecTestWithParam
: public ::testing::TestWithParam<
std::tr1::tuple<const libvpx_test::CodecFactory *, T1> > {};
template<class T1, class T2>
class CodecTestWith2Params : public ::testing::TestWithParam<
std::tr1::tuple< const libvpx_test::CodecFactory*, T1, T2 > > {
};
template <class T1, class T2>
class CodecTestWith2Params
: public ::testing::TestWithParam<
std::tr1::tuple<const libvpx_test::CodecFactory *, T1, T2> > {};
template<class T1, class T2, class T3>
class CodecTestWith3Params : public ::testing::TestWithParam<
std::tr1::tuple< const libvpx_test::CodecFactory*, T1, T2, T3 > > {
};
template <class T1, class T2, class T3>
class CodecTestWith3Params
: public ::testing::TestWithParam<
std::tr1::tuple<const libvpx_test::CodecFactory *, T1, T2, T3> > {};
/*
* VP8 Codec Definitions
@ -74,15 +71,13 @@ class CodecTestWith3Params : public ::testing::TestWithParam<
#if CONFIG_VP8
class VP8Decoder : public Decoder {
public:
VP8Decoder(vpx_codec_dec_cfg_t cfg, unsigned long deadline)
: Decoder(cfg, deadline) {}
explicit VP8Decoder(vpx_codec_dec_cfg_t cfg) : Decoder(cfg) {}
VP8Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag,
unsigned long deadline) // NOLINT
: Decoder(cfg, flag, deadline) {}
VP8Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag)
: Decoder(cfg, flag) {}
protected:
virtual vpx_codec_iface_t* CodecInterface() const {
virtual vpx_codec_iface_t *CodecInterface() const {
#if CONFIG_VP8_DECODER
return &vpx_codec_vp8_dx_algo;
#else
@ -98,7 +93,7 @@ class VP8Encoder : public Encoder {
: Encoder(cfg, deadline, init_flags, stats) {}
protected:
virtual vpx_codec_iface_t* CodecInterface() const {
virtual vpx_codec_iface_t *CodecInterface() const {
#if CONFIG_VP8_ENCODER
return &vpx_codec_vp8_cx_algo;
#else
@ -111,22 +106,20 @@ class VP8CodecFactory : public CodecFactory {
public:
VP8CodecFactory() : CodecFactory() {}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
unsigned long deadline) const {
return CreateDecoder(cfg, 0, deadline);
virtual Decoder *CreateDecoder(vpx_codec_dec_cfg_t cfg) const {
return CreateDecoder(cfg, 0);
}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags,
unsigned long deadline) const { // NOLINT
virtual Decoder *CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags) const {
#if CONFIG_VP8_DECODER
return new VP8Decoder(cfg, flags, deadline);
return new VP8Decoder(cfg, flags);
#else
return NULL;
#endif
}
virtual Encoder* CreateEncoder(vpx_codec_enc_cfg_t cfg,
virtual Encoder *CreateEncoder(vpx_codec_enc_cfg_t cfg,
unsigned long deadline,
const unsigned long init_flags,
TwopassStatsStore *stats) const {
@ -149,32 +142,30 @@ class VP8CodecFactory : public CodecFactory {
const libvpx_test::VP8CodecFactory kVP8;
#define VP8_INSTANTIATE_TEST_CASE(test, ...)\
INSTANTIATE_TEST_CASE_P(VP8, test, \
::testing::Combine( \
::testing::Values(static_cast<const libvpx_test::CodecFactory*>( \
&libvpx_test::kVP8)), \
#define VP8_INSTANTIATE_TEST_CASE(test, ...) \
INSTANTIATE_TEST_CASE_P( \
VP8, test, \
::testing::Combine( \
::testing::Values(static_cast<const libvpx_test::CodecFactory *>( \
&libvpx_test::kVP8)), \
__VA_ARGS__))
#else
#define VP8_INSTANTIATE_TEST_CASE(test, ...)
#endif // CONFIG_VP8
/*
* VP9 Codec Definitions
*/
#if CONFIG_VP9
class VP9Decoder : public Decoder {
public:
VP9Decoder(vpx_codec_dec_cfg_t cfg, unsigned long deadline)
: Decoder(cfg, deadline) {}
explicit VP9Decoder(vpx_codec_dec_cfg_t cfg) : Decoder(cfg) {}
VP9Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag,
unsigned long deadline) // NOLINT
: Decoder(cfg, flag, deadline) {}
VP9Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag)
: Decoder(cfg, flag) {}
protected:
virtual vpx_codec_iface_t* CodecInterface() const {
virtual vpx_codec_iface_t *CodecInterface() const {
#if CONFIG_VP9_DECODER
return &vpx_codec_vp9_dx_algo;
#else
@ -190,7 +181,7 @@ class VP9Encoder : public Encoder {
: Encoder(cfg, deadline, init_flags, stats) {}
protected:
virtual vpx_codec_iface_t* CodecInterface() const {
virtual vpx_codec_iface_t *CodecInterface() const {
#if CONFIG_VP9_ENCODER
return &vpx_codec_vp9_cx_algo;
#else
@ -203,22 +194,20 @@ class VP9CodecFactory : public CodecFactory {
public:
VP9CodecFactory() : CodecFactory() {}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
unsigned long deadline) const {
return CreateDecoder(cfg, 0, deadline);
virtual Decoder *CreateDecoder(vpx_codec_dec_cfg_t cfg) const {
return CreateDecoder(cfg, 0);
}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags,
unsigned long deadline) const { // NOLINT
virtual Decoder *CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags) const {
#if CONFIG_VP9_DECODER
return new VP9Decoder(cfg, flags, deadline);
return new VP9Decoder(cfg, flags);
#else
return NULL;
#endif
}
virtual Encoder* CreateEncoder(vpx_codec_enc_cfg_t cfg,
virtual Encoder *CreateEncoder(vpx_codec_enc_cfg_t cfg,
unsigned long deadline,
const unsigned long init_flags,
TwopassStatsStore *stats) const {
@ -233,8 +222,6 @@ class VP9CodecFactory : public CodecFactory {
int usage) const {
#if CONFIG_VP9_ENCODER
return vpx_codec_enc_config_default(&vpx_codec_vp9_cx_algo, cfg, usage);
#elif CONFIG_VP10_ENCODER
return vpx_codec_enc_config_default(&vpx_codec_vp10_cx_algo, cfg, usage);
#else
return VPX_CODEC_INCAPABLE;
#endif
@ -243,106 +230,16 @@ class VP9CodecFactory : public CodecFactory {
const libvpx_test::VP9CodecFactory kVP9;
#define VP9_INSTANTIATE_TEST_CASE(test, ...)\
INSTANTIATE_TEST_CASE_P(VP9, test, \
::testing::Combine( \
::testing::Values(static_cast<const libvpx_test::CodecFactory*>( \
&libvpx_test::kVP9)), \
#define VP9_INSTANTIATE_TEST_CASE(test, ...) \
INSTANTIATE_TEST_CASE_P( \
VP9, test, \
::testing::Combine( \
::testing::Values(static_cast<const libvpx_test::CodecFactory *>( \
&libvpx_test::kVP9)), \
__VA_ARGS__))
#else
#define VP9_INSTANTIATE_TEST_CASE(test, ...)
#endif // CONFIG_VP9
/*
* VP10 Codec Definitions
*/
#if CONFIG_VP10
class VP10Decoder : public Decoder {
public:
VP10Decoder(vpx_codec_dec_cfg_t cfg, unsigned long deadline)
: Decoder(cfg, deadline) {}
VP10Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag,
unsigned long deadline) // NOLINT
: Decoder(cfg, flag, deadline) {}
protected:
virtual vpx_codec_iface_t* CodecInterface() const {
#if CONFIG_VP10_DECODER
return &vpx_codec_vp10_dx_algo;
#else
return NULL;
#endif
}
};
class VP10Encoder : public Encoder {
public:
VP10Encoder(vpx_codec_enc_cfg_t cfg, unsigned long deadline,
const unsigned long init_flags, TwopassStatsStore *stats)
: Encoder(cfg, deadline, init_flags, stats) {}
protected:
virtual vpx_codec_iface_t* CodecInterface() const {
#if CONFIG_VP10_ENCODER
return &vpx_codec_vp10_cx_algo;
#else
return NULL;
#endif
}
};
class VP10CodecFactory : public CodecFactory {
public:
VP10CodecFactory() : CodecFactory() {}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
unsigned long deadline) const {
return CreateDecoder(cfg, 0, deadline);
}
virtual Decoder* CreateDecoder(vpx_codec_dec_cfg_t cfg,
const vpx_codec_flags_t flags,
unsigned long deadline) const { // NOLINT
#if CONFIG_VP10_DECODER
return new VP10Decoder(cfg, flags, deadline);
#else
return NULL;
#endif
}
virtual Encoder* CreateEncoder(vpx_codec_enc_cfg_t cfg,
unsigned long deadline,
const unsigned long init_flags,
TwopassStatsStore *stats) const {
#if CONFIG_VP10_ENCODER
return new VP10Encoder(cfg, deadline, init_flags, stats);
#else
return NULL;
#endif
}
virtual vpx_codec_err_t DefaultEncoderConfig(vpx_codec_enc_cfg_t *cfg,
int usage) const {
#if CONFIG_VP10_ENCODER
return vpx_codec_enc_config_default(&vpx_codec_vp10_cx_algo, cfg, usage);
#else
return VPX_CODEC_INCAPABLE;
#endif
}
};
const libvpx_test::VP10CodecFactory kVP10;
#define VP10_INSTANTIATE_TEST_CASE(test, ...)\
INSTANTIATE_TEST_CASE_P(VP10, test, \
::testing::Combine( \
::testing::Values(static_cast<const libvpx_test::CodecFactory*>( \
&libvpx_test::kVP10)), \
__VA_ARGS__))
#else
#define VP10_INSTANTIATE_TEST_CASE(test, ...)
#endif // CONFIG_VP10
} // namespace libvpx_test
#endif // TEST_CODEC_FACTORY_H_

View File

@ -15,11 +15,13 @@
namespace {
class ConfigTest : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
class ConfigTest
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
protected:
ConfigTest() : EncoderTest(GET_PARAM(0)),
frame_count_in_(0), frame_count_out_(0), frame_count_max_(0) {}
ConfigTest()
: EncoderTest(GET_PARAM(0)), frame_count_in_(0), frame_count_out_(0),
frame_count_max_(0) {}
virtual ~ConfigTest() {}
virtual void SetUp() {
@ -32,12 +34,12 @@ class ConfigTest : public ::libvpx_test::EncoderTest,
frame_count_out_ = 0;
}
virtual void PreEncodeFrameHook(libvpx_test::VideoSource* /*video*/) {
virtual void PreEncodeFrameHook(libvpx_test::VideoSource * /*video*/) {
++frame_count_in_;
abort_ |= (frame_count_in_ >= frame_count_max_);
}
virtual void FramePktHook(const vpx_codec_cx_pkt_t* /*pkt*/) {
virtual void FramePktHook(const vpx_codec_cx_pkt_t * /*pkt*/) {
++frame_count_out_;
}

View File

@ -26,12 +26,10 @@
#include "vpx_dsp/ssim.h"
#include "vpx_mem/vpx_mem.h"
extern "C"
double vpx_get_ssim_metrics(uint8_t *img1, int img1_pitch,
uint8_t *img2, int img2_pitch,
int width, int height,
Ssimv *sv2, Metrics *m,
int do_inconsistency);
extern "C" double vpx_get_ssim_metrics(uint8_t *img1, int img1_pitch,
uint8_t *img2, int img2_pitch, int width,
int height, Ssimv *sv2, Metrics *m,
int do_inconsistency);
using libvpx_test::ACMRandom;
@ -41,20 +39,18 @@ class ConsistencyTestBase : public ::testing::Test {
ConsistencyTestBase(int width, int height) : width_(width), height_(height) {}
static void SetUpTestCase() {
source_data_[0] = reinterpret_cast<uint8_t*>(
source_data_[0] = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBufferSize));
reference_data_[0] = reinterpret_cast<uint8_t*>(
reference_data_[0] = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBufferSize));
source_data_[1] = reinterpret_cast<uint8_t*>(
source_data_[1] = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBufferSize));
reference_data_[1] = reinterpret_cast<uint8_t*>(
reference_data_[1] = reinterpret_cast<uint8_t *>(
vpx_memalign(kDataAlignment, kDataBufferSize));
ssim_array_ = new Ssimv[kDataBufferSize / 16];
}
static void ClearSsim() {
memset(ssim_array_, 0, kDataBufferSize / 16);
}
static void ClearSsim() { memset(ssim_array_, 0, kDataBufferSize / 16); }
static void TearDownTestCase() {
vpx_free(source_data_[0]);
source_data_[0] = NULL;
@ -68,14 +64,12 @@ class ConsistencyTestBase : public ::testing::Test {
delete[] ssim_array_;
}
virtual void TearDown() {
libvpx_test::ClearSystemState();
}
virtual void TearDown() { libvpx_test::ClearSystemState(); }
protected:
// Handle frames up to 640x480
static const int kDataAlignment = 16;
static const int kDataBufferSize = 640*480;
static const int kDataBufferSize = 640 * 480;
virtual void SetUp() {
source_stride_ = (width_ + 31) & ~31;
@ -122,9 +116,9 @@ class ConsistencyTestBase : public ::testing::Test {
}
}
int width_, height_;
static uint8_t* source_data_[2];
static uint8_t *source_data_[2];
int source_stride_;
static uint8_t* reference_data_[2];
static uint8_t *reference_data_[2];
int reference_stride_;
static Ssimv *ssim_array_;
Metrics metrics_;
@ -142,18 +136,17 @@ class ConsistencyVP9Test
protected:
double CheckConsistency(int frame) {
EXPECT_LT(frame, 2)<< "Frame to check has to be less than 2.";
return
vpx_get_ssim_metrics(source_data_[frame], source_stride_,
reference_data_[frame], reference_stride_,
width_, height_, ssim_array_, &metrics_, 1);
EXPECT_LT(frame, 2) << "Frame to check has to be less than 2.";
return vpx_get_ssim_metrics(source_data_[frame], source_stride_,
reference_data_[frame], reference_stride_,
width_, height_, ssim_array_, &metrics_, 1);
}
};
#endif // CONFIG_VP9_ENCODER
uint8_t* ConsistencyTestBase::source_data_[2] = {NULL, NULL};
uint8_t* ConsistencyTestBase::reference_data_[2] = {NULL, NULL};
Ssimv* ConsistencyTestBase::ssim_array_ = NULL;
uint8_t *ConsistencyTestBase::source_data_[2] = { NULL, NULL };
uint8_t *ConsistencyTestBase::reference_data_[2] = { NULL, NULL };
Ssimv *ConsistencyTestBase::ssim_array_ = NULL;
#if CONFIG_VP9_ENCODER
TEST_P(ConsistencyVP9Test, ConsistencyIsZero) {
@ -205,7 +198,6 @@ TEST_P(ConsistencyVP9Test, ConsistencyIsZero) {
}
#endif // CONFIG_VP9_ENCODER
using std::tr1::make_tuple;
//------------------------------------------------------------------------------
@ -213,9 +205,7 @@ using std::tr1::make_tuple;
#if CONFIG_VP9_ENCODER
const ConsistencyParam c_vp9_tests[] = {
make_tuple(320, 240),
make_tuple(318, 242),
make_tuple(318, 238),
make_tuple(320, 240), make_tuple(318, 242), make_tuple(318, 238),
};
INSTANTIATE_TEST_CASE_P(C, ConsistencyVP9Test,
::testing::ValuesIn(c_vp9_tests));

File diff suppressed because it is too large

View File

@ -23,10 +23,9 @@ class CpuSpeedTest
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
protected:
CpuSpeedTest()
: EncoderTest(GET_PARAM(0)),
encoding_mode_(GET_PARAM(1)),
set_cpu_used_(GET_PARAM(2)),
min_psnr_(kMaxPSNR) {}
: EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)),
set_cpu_used_(GET_PARAM(2)), min_psnr_(kMaxPSNR),
tune_content_(VP9E_CONTENT_DEFAULT) {}
virtual ~CpuSpeedTest() {}
virtual void SetUp() {
@ -41,14 +40,13 @@ class CpuSpeedTest
}
}
virtual void BeginPassHook(unsigned int /*pass*/) {
min_psnr_ = kMaxPSNR;
}
virtual void BeginPassHook(unsigned int /*pass*/) { min_psnr_ = kMaxPSNR; }
virtual void PreEncodeFrameHook(::libvpx_test::VideoSource *video,
::libvpx_test::Encoder *encoder) {
if (video->frame() == 1) {
encoder->Control(VP8E_SET_CPUUSED, set_cpu_used_);
encoder->Control(VP9E_SET_TUNE_CONTENT, tune_content_);
if (encoding_mode_ != ::libvpx_test::kRealTime) {
encoder->Control(VP8E_SET_ENABLEAUTOALTREF, 1);
encoder->Control(VP8E_SET_ARNR_MAXFRAMES, 7);
@ -59,13 +57,13 @@ class CpuSpeedTest
}
virtual void PSNRPktHook(const vpx_codec_cx_pkt_t *pkt) {
if (pkt->data.psnr.psnr[0] < min_psnr_)
min_psnr_ = pkt->data.psnr.psnr[0];
if (pkt->data.psnr.psnr[0] < min_psnr_) min_psnr_ = pkt->data.psnr.psnr[0];
}
::libvpx_test::TestMode encoding_mode_;
int set_cpu_used_;
double min_psnr_;
int tune_content_;
};
TEST_P(CpuSpeedTest, TestQ0) {
@ -74,7 +72,7 @@ TEST_P(CpuSpeedTest, TestQ0) {
// the encoder into producing lots of big partitions, which will likely
// extend into the border and test the border condition.
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_minsection_pct = 2000;
cfg_.rc_2pass_vbr_maxsection_pct = 2000;
cfg_.rc_target_bitrate = 400;
cfg_.rc_max_quantizer = 0;
cfg_.rc_min_quantizer = 0;
@ -92,7 +90,7 @@ TEST_P(CpuSpeedTest, TestScreencastQ0) {
::libvpx_test::Y4mVideoSource video("screendata.y4m", 0, 25);
cfg_.g_timebase = video.timebase();
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_minsection_pct = 2000;
cfg_.rc_2pass_vbr_maxsection_pct = 2000;
cfg_.rc_target_bitrate = 400;
cfg_.rc_max_quantizer = 0;
cfg_.rc_min_quantizer = 0;
@ -103,13 +101,28 @@ TEST_P(CpuSpeedTest, TestScreencastQ0) {
EXPECT_GE(min_psnr_, kMaxPSNR);
}
TEST_P(CpuSpeedTest, TestTuneScreen) {
::libvpx_test::Y4mVideoSource video("screendata.y4m", 0, 25);
cfg_.g_timebase = video.timebase();
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_minsection_pct = 2000;
cfg_.rc_target_bitrate = 2000;
cfg_.rc_max_quantizer = 63;
cfg_.rc_min_quantizer = 0;
tune_content_ = VP9E_CONTENT_SCREEN;
init_flags_ = VPX_CODEC_USE_PSNR;
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
TEST_P(CpuSpeedTest, TestEncodeHighBitrate) {
// Validate that this clip, whose width is not a multiple of 64, encodes and
// decodes without a mismatch when passing in a very low max q. This pushes
// the encoder into producing lots of big partitions, which will likely
// extend into the border and test the border condition.
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_minsection_pct = 2000;
cfg_.rc_2pass_vbr_maxsection_pct = 2000;
cfg_.rc_target_bitrate = 12000;
cfg_.rc_max_quantizer = 10;
cfg_.rc_min_quantizer = 0;
@ -125,7 +138,7 @@ TEST_P(CpuSpeedTest, TestLowBitrate) {
// when passing in a very high min q. This pushes the encoder into producing
// lots of small partitions, which might well test the other condition.
cfg_.rc_2pass_vbr_minsection_pct = 5;
cfg_.rc_2pass_vbr_minsection_pct = 2000;
cfg_.rc_2pass_vbr_maxsection_pct = 2000;
cfg_.rc_target_bitrate = 200;
cfg_.rc_min_quantizer = 40;
@ -135,14 +148,9 @@ TEST_P(CpuSpeedTest, TestLowBitrate) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
}
VP9_INSTANTIATE_TEST_CASE(
CpuSpeedTest,
::testing::Values(::libvpx_test::kTwoPassGood, ::libvpx_test::kOnePassGood,
::libvpx_test::kRealTime),
::testing::Range(0, 9));
VP10_INSTANTIATE_TEST_CASE(
CpuSpeedTest,
::testing::Values(::libvpx_test::kTwoPassGood, ::libvpx_test::kOnePassGood),
::testing::Range(0, 3));
VP9_INSTANTIATE_TEST_CASE(CpuSpeedTest,
::testing::Values(::libvpx_test::kTwoPassGood,
::libvpx_test::kOnePassGood,
::libvpx_test::kRealTime),
::testing::Range(0, 9));
} // namespace

View File

@ -24,14 +24,12 @@ const int kCQLevelStep = 8;
const unsigned int kCQTargetBitrate = 2000;
class CQTest : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<int> {
public ::libvpx_test::CodecTestWithParam<int> {
public:
// maps the cqlevel to the bitrate produced.
typedef std::map<int, uint32_t> BitrateMap;
static void SetUpTestCase() {
bitrates_.clear();
}
static void SetUpTestCase() { bitrates_.clear(); }
static void TearDownTestCase() {
ASSERT_TRUE(!HasFailure())
@ -128,7 +126,6 @@ TEST_P(CQTest, LinearPSNRIsHigherForCQLevel) {
EXPECT_GE(cq_psnr_lin, vbr_psnr_lin);
}
VP8_INSTANTIATE_TEST_CASE(CQTest,
::testing::Range(kCQLevelMin, kCQLevelMax,
kCQLevelStep));
VP8_INSTANTIATE_TEST_CASE(CQTest, ::testing::Range(kCQLevelMin, kCQLevelMax,
kCQLevelStep));
} // namespace

View File

@ -1,6 +1,6 @@
#!/bin/sh
##
## Copyright (c) 2014 The WebM project authors. All Rights Reserved.
## Copyright (c) 2016 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
@ -8,30 +8,27 @@
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
## This file tests the libvpx vp8cx_set_ref example. To add new tests to this
## This file tests the libvpx cx_set_ref example. To add new tests to this
## file, do the following:
## 1. Write a shell function (this is your test).
## 2. Add the function to vp8cx_set_ref_tests (on a new line).
## 2. Add the function to cx_set_ref_tests (on a new line).
##
. $(dirname $0)/tools_common.sh
# Environment check: $YUV_RAW_INPUT is required.
vp8cx_set_ref_verify_environment() {
cx_set_ref_verify_environment() {
if [ ! -e "${YUV_RAW_INPUT}" ]; then
echo "Libvpx test data must exist in LIBVPX_TEST_DATA_PATH."
return 1
fi
}
# Runs vp8cx_set_ref and updates the reference frame before encoding frame 90.
# $1 is the codec name, which vp8cx_set_ref does not support at present: It's
# currently used only to name the output file.
# TODO(tomfinegan): Pass the codec param once the example is updated to support
# VP9.
# Runs cx_set_ref and updates the reference frame before encoding frame 90.
# $1 is the codec name.
vpx_set_ref() {
local encoder="${LIBVPX_BIN_PATH}/vp8cx_set_ref${VPX_TEST_EXE_SUFFIX}"
local codec="$1"
local output_file="${VPX_TEST_OUTPUT_DIR}/vp8cx_set_ref_${codec}.ivf"
local encoder="${LIBVPX_BIN_PATH}/${codec}cx_set_ref${VPX_TEST_EXE_SUFFIX}"
local output_file="${VPX_TEST_OUTPUT_DIR}/${codec}cx_set_ref_${codec}.ivf"
local ref_frame_num=90
if [ ! -x "${encoder}" ]; then
@ -46,12 +43,18 @@ vpx_set_ref() {
[ -e "${output_file}" ] || return 1
}
vp8cx_set_ref_vp8() {
cx_set_ref_vp8() {
if [ "$(vp8_encode_available)" = "yes" ]; then
vpx_set_ref vp8 || return 1
fi
}
vp8cx_set_ref_tests="vp8cx_set_ref_vp8"
cx_set_ref_vp9() {
if [ "$(vp9_encode_available)" = "yes" ]; then
vpx_set_ref vp9 || return 1
fi
}
run_tests vp8cx_set_ref_verify_environment "${vp8cx_set_ref_tests}"
cx_set_ref_tests="cx_set_ref_vp8 cx_set_ref_vp9"
run_tests cx_set_ref_verify_environment "${cx_set_ref_tests}"

View File

@ -18,8 +18,9 @@
namespace {
class DatarateTestLarge : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
class DatarateTestLarge
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
public:
DatarateTestLarge() : EncoderTest(GET_PARAM(0)) {}
@ -45,8 +46,9 @@ class DatarateTestLarge : public ::libvpx_test::EncoderTest,
virtual void PreEncodeFrameHook(::libvpx_test::VideoSource *video,
::libvpx_test::Encoder *encoder) {
if (video->frame() == 0)
if (video->frame() == 0) {
encoder->Control(VP8E_SET_NOISE_SENSITIVITY, denoiser_on_);
}
if (denoiser_offon_test_) {
ASSERT_GT(denoiser_offon_period_, 0)
@ -71,8 +73,7 @@ class DatarateTestLarge : public ::libvpx_test::EncoderTest,
// http://code.google.com/p/webm/issues/detail?id=496 is fixed.
// For now the codec assumes buffer starts at starting buffer rate
// plus one frame's time.
if (last_pts_ == 0)
duration = 1;
if (last_pts_ == 0) duration = 1;
// Add to the buffer the bits we'd expect from a constant bitrate server.
bits_in_buffer_model_ += static_cast<int64_t>(
@ -83,14 +84,14 @@ class DatarateTestLarge : public ::libvpx_test::EncoderTest,
* empty - and then stop showing frames until we've got enough bits to
* show one. As noted in comment below (issue 495), this does not currently
* apply to key frames. For now exclude key frames in condition below. */
const bool key_frame = (pkt->data.frame.flags & VPX_FRAME_IS_KEY)
? true: false;
const bool key_frame =
(pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? true : false;
if (!key_frame) {
ASSERT_GE(bits_in_buffer_model_, 0) << "Buffer Underrun at frame "
<< pkt->data.frame.pts;
<< pkt->data.frame.pts;
}
const size_t frame_size_in_bits = pkt->data.frame.sz * 8;
const int64_t frame_size_in_bits = pkt->data.frame.sz * 8;
// Subtract from the buffer the bits associated with a played back frame.
bits_in_buffer_model_ -= frame_size_in_bits;
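The hunks above implement a simple leaky-bucket rate model: each frame interval credits duration * timebase_ * rc_target_bitrate * 1000 bits (what a constant-bitrate server would deliver), each encoded frame debits frame.sz * 8 bits, and a non-key frame that drives the model negative is reported as a buffer underrun. The class below is a condensed, hypothetical restatement of that arithmetic for illustration; it is not the test's own code.

#include <cstddef>
#include <cstdint>

// Hypothetical stand-alone version of the buffer model used by the datarate
// tests: timebase is seconds per timestamp tick, the target rate is in kbps.
class LeakyBucket {
 public:
  LeakyBucket(int target_kbps, double timebase)
      : target_kbps_(target_kbps), timebase_(timebase), bits_(0) {}

  // Credit the constant-bitrate delivery for `duration_ticks`, debit the
  // actual frame size, and report whether the model stayed non-negative.
  bool OnFrame(int64_t duration_ticks, size_t frame_size_bytes) {
    bits_ += static_cast<int64_t>(duration_ticks * timebase_ *
                                  target_kbps_ * 1000);
    bits_ -= static_cast<int64_t>(frame_size_bytes) * 8;
    return bits_ >= 0;
  }

 private:
  int target_kbps_;
  double timebase_;
  int64_t bits_;
};

For example, at 200 kbps with a 1/30 s timebase a one-tick interval credits about 6667 bits, so a 1000-byte (8000-bit) frame only keeps the model non-negative if earlier frames left a surplus.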
@ -99,8 +100,7 @@ class DatarateTestLarge : public ::libvpx_test::EncoderTest,
bits_total_ += frame_size_in_bits;
// If first drop not set and we have a drop set it to this time.
if (!first_drop_ && duration > 1)
first_drop_ = last_pts_ + 1;
if (!first_drop_ && duration > 1) first_drop_ = last_pts_ + 1;
// Update the most recent pts.
last_pts_ = pkt->data.frame.pts;
@ -119,8 +119,8 @@ class DatarateTestLarge : public ::libvpx_test::EncoderTest,
duration_ = (last_pts_ + 1) * timebase_;
// Effective file datarate includes the time spent prebuffering.
effective_datarate_ = (bits_total_ - bits_in_last_frame_) / 1000.0
/ (cfg_.rc_buf_initial_sz / 1000.0 + duration_);
effective_datarate_ = (bits_total_ - bits_in_last_frame_) / 1000.0 /
(cfg_.rc_buf_initial_sz / 1000.0 + duration_);
file_datarate_ = file_size_in_kb / duration_;
}
@ -135,7 +135,7 @@ class DatarateTestLarge : public ::libvpx_test::EncoderTest,
double duration_;
double file_datarate_;
double effective_datarate_;
size_t bits_in_last_frame_;
int64_t bits_in_last_frame_;
int denoiser_on_;
int denoiser_offon_test_;
int denoiser_offon_period_;
@ -256,8 +256,39 @@ TEST_P(DatarateTestLarge, ChangingDropFrameThresh) {
}
}
class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
// Disabled for tsan, see:
// https://bugs.chromium.org/p/webm/issues/detail?id=1049
#if defined(__has_feature)
#if __has_feature(thread_sanitizer)
#define BUILDING_WITH_TSAN
#endif
#endif
#ifndef BUILDING_WITH_TSAN
TEST_P(DatarateTestLarge, DropFramesMultiThreads) {
denoiser_on_ = 0;
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_dropframe_thresh = 30;
cfg_.rc_max_quantizer = 56;
cfg_.rc_end_usage = VPX_CBR;
// Encode using multiple threads.
cfg_.g_threads = 2;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 140);
cfg_.rc_target_bitrate = 200;
ResetModel();
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, effective_datarate_ * 0.95)
<< " The datarate for the file exceeds the target!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.3)
<< " The datarate for the file missed the target!";
}
#endif
class DatarateTestVP9Large
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
public:
DatarateTestVP9Large() : EncoderTest(GET_PARAM(0)) {}
@ -307,8 +338,8 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
if (num_temp_layers == 2) {
if (frame_num % 2 == 0) {
// Layer 0: predict from L and ARF, update L.
frame_flags = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
frame_flags =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
} else {
// Layer 1: predict from L, G and ARF, and update G.
frame_flags = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
@ -317,15 +348,15 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
} else if (num_temp_layers == 3) {
if (frame_num % 4 == 0) {
// Layer 0: predict from L and ARF; update L.
frame_flags = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_REF_GF;
frame_flags =
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF;
} else if ((frame_num - 2) % 4 == 0) {
// Layer 1: predict from L, G, ARF; update G.
frame_flags = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
} else if ((frame_num - 1) % 2 == 0) {
} else if ((frame_num - 1) % 2 == 0) {
// Layer 2: predict from L, G, ARF; update none.
frame_flags = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST;
frame_flags =
VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
}
}
return frame_flags;
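The flag logic above follows the usual temporal-layer cadence: with two layers the pattern alternates 0-1, and with three layers it repeats every four frames as 0-2-1-2. The test's own SetLayerId is called further down but its body is not shown in this diff, so the mapping below is an editor's sketch of the cadence the flags imply, not the actual implementation.

// Hypothetical frame-number to temporal-layer mapping implied by the
// frame-flag branches above.
static int GuessTemporalLayerId(int frame_num, int num_temp_layers) {
  if (num_temp_layers == 2) return (frame_num % 2 == 0) ? 0 : 1;
  if (num_temp_layers == 3) {
    if (frame_num % 4 == 0) return 0;        // base layer
    if ((frame_num - 2) % 4 == 0) return 1;  // middle layer
    return 2;                                // top layer, all odd frames
  }
  return 0;  // single layer
}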
@ -353,8 +384,7 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
virtual void PreEncodeFrameHook(::libvpx_test::VideoSource *video,
::libvpx_test::Encoder *encoder) {
if (video->frame() == 0)
encoder->Control(VP8E_SET_CPUUSED, set_cpu_used_);
if (video->frame() == 0) encoder->Control(VP8E_SET_CPUUSED, set_cpu_used_);
if (denoiser_offon_test_) {
ASSERT_GT(denoiser_offon_period_, 0)
@ -374,8 +404,8 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
vpx_svc_layer_id_t layer_id;
layer_id.spatial_layer_id = 0;
frame_flags_ = SetFrameFlags(video->frame(), cfg_.ts_number_layers);
layer_id.temporal_layer_id = SetLayerId(video->frame(),
cfg_.ts_number_layers);
layer_id.temporal_layer_id =
SetLayerId(video->frame(), cfg_.ts_number_layers);
encoder->Control(VP9E_SET_SVC_LAYER_ID, &layer_id);
}
const vpx_rational_t tb = video->timebase();
@ -383,15 +413,13 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
duration_ = 0;
}
virtual void FramePktHook(const vpx_codec_cx_pkt_t *pkt) {
// Time since last timestamp = duration.
vpx_codec_pts_t duration = pkt->data.frame.pts - last_pts_;
if (duration > 1) {
// If first drop not set and we have a drop set it to this time.
if (!first_drop_)
first_drop_ = last_pts_ + 1;
if (!first_drop_) first_drop_ = last_pts_ + 1;
// Update the number of frame drops.
num_drops_ += static_cast<int>(duration - 1);
// Update counter for total number of frames (#frames input to encoder).
@ -407,7 +435,7 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
// Buffer should not go negative.
ASSERT_GE(bits_in_buffer_model_, 0) << "Buffer Underrun at frame "
<< pkt->data.frame.pts;
<< pkt->data.frame.pts;
const size_t frame_size_in_bits = pkt->data.frame.sz * 8;
@ -425,7 +453,7 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
virtual void EndPassHook(void) {
for (int layer = 0; layer < static_cast<int>(cfg_.ts_number_layers);
++layer) {
++layer) {
duration_ = (last_pts_ + 1) * timebase_;
if (bits_total_[layer]) {
// Effective file datarate:
@ -450,7 +478,55 @@ class DatarateTestVP9Large : public ::libvpx_test::EncoderTest,
int denoiser_offon_period_;
};
// Check basic rate targeting,
// Check basic rate targeting for VBR mode with 0 lag.
TEST_P(DatarateTestVP9Large, BasicRateTargetingVBRLagZero) {
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.g_error_resilient = 0;
cfg_.rc_end_usage = VPX_VBR;
cfg_.g_lag_in_frames = 0;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 300);
for (int i = 400; i <= 800; i += 400) {
cfg_.rc_target_bitrate = i;
ResetModel();
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(effective_datarate_[0], cfg_.rc_target_bitrate * 0.75)
<< " The datarate for the file is lower than target by too much!";
ASSERT_LE(effective_datarate_[0], cfg_.rc_target_bitrate * 1.25)
<< " The datarate for the file is greater than target by too much!";
}
}
// Check basic rate targeting for VBR mode with non-zero lag.
TEST_P(DatarateTestVP9Large, BasicRateTargetingVBRLagNonZero) {
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.g_error_resilient = 0;
cfg_.rc_end_usage = VPX_VBR;
// For non-zero lag, rate control will work (be within bounds) for
// real-time mode.
if (deadline_ == VPX_DL_REALTIME) {
cfg_.g_lag_in_frames = 15;
} else {
cfg_.g_lag_in_frames = 0;
}
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 300);
for (int i = 400; i <= 800; i += 400) {
cfg_.rc_target_bitrate = i;
ResetModel();
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(effective_datarate_[0], cfg_.rc_target_bitrate * 0.75)
<< " The datarate for the file is lower than target by too much!";
ASSERT_LE(effective_datarate_[0], cfg_.rc_target_bitrate * 1.25)
<< " The datarate for the file is greater than target by too much!";
}
}
// Check basic rate targeting for CBR mode.
TEST_P(DatarateTestVP9Large, BasicRateTargeting) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
@ -474,7 +550,31 @@ TEST_P(DatarateTestVP9Large, BasicRateTargeting) {
}
}
// Check basic rate targeting,
// Check basic rate targeting for CBR mode, with 2 threads and dropped frames.
TEST_P(DatarateTestVP9Large, BasicRateTargetingDropFramesMultiThreads) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_dropframe_thresh = 30;
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.rc_end_usage = VPX_CBR;
cfg_.g_lag_in_frames = 0;
// Encode using multiple threads.
cfg_.g_threads = 2;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 140);
cfg_.rc_target_bitrate = 200;
ResetModel();
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(effective_datarate_[0], cfg_.rc_target_bitrate * 0.85)
<< " The datarate for the file is lower than target by too much!";
ASSERT_LE(effective_datarate_[0], cfg_.rc_target_bitrate * 1.15)
<< " The datarate for the file is greater than target by too much!";
}
// Check basic rate targeting for CBR.
TEST_P(DatarateTestVP9Large, BasicRateTargeting444) {
::libvpx_test::Y4mVideoSource video("rush_hour_444.y4m", 0, 140);
@ -499,7 +599,7 @@ TEST_P(DatarateTestVP9Large, BasicRateTargeting444) {
ASSERT_LE(static_cast<double>(cfg_.rc_target_bitrate),
effective_datarate_[0] * 1.15)
<< " The datarate for the file missed the target!"
<< cfg_.rc_target_bitrate << " "<< effective_datarate_;
<< cfg_.rc_target_bitrate << " " << effective_datarate_;
}
}
@ -519,6 +619,9 @@ TEST_P(DatarateTestVP9Large, ChangingDropFrameThresh) {
cfg_.rc_end_usage = VPX_CBR;
cfg_.rc_target_bitrate = 200;
cfg_.g_lag_in_frames = 0;
// TODO(marpan): Investigate datarate target failures with a smaller keyframe
// interval (128).
cfg_.kf_max_dist = 9999;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 140);
@ -566,8 +669,7 @@ TEST_P(DatarateTestVP9Large, BasicRateTargeting2TemporalLayers) {
cfg_.temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS;
if (deadline_ == VPX_DL_REALTIME)
cfg_.g_error_resilient = 1;
if (deadline_ == VPX_DL_REALTIME) cfg_.g_error_resilient = 1;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 200);
@ -581,10 +683,12 @@ TEST_P(DatarateTestVP9Large, BasicRateTargeting2TemporalLayers) {
for (int j = 0; j < static_cast<int>(cfg_.ts_number_layers); ++j) {
ASSERT_GE(effective_datarate_[j], cfg_.layer_target_bitrate[j] * 0.85)
<< " The datarate for the file is lower than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
ASSERT_LE(effective_datarate_[j], cfg_.layer_target_bitrate[j] * 1.15)
<< " The datarate for the file is greater than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
}
}
}
@ -624,12 +728,14 @@ TEST_P(DatarateTestVP9Large, BasicRateTargeting3TemporalLayers) {
// Adjust the thresholds to be tighter than .75.
ASSERT_GE(effective_datarate_[j], cfg_.layer_target_bitrate[j] * 0.75)
<< " The datarate for the file is lower than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
// TODO(yaowu): Work out more stable rc control strategy and
// Adjust the thresholds to be tighter than 1.25.
ASSERT_LE(effective_datarate_[j], cfg_.layer_target_bitrate[j] * 1.25)
<< " The datarate for the file is greater than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
}
}
}
@ -669,10 +775,12 @@ TEST_P(DatarateTestVP9Large, BasicRateTargeting3TemporalLayersFrameDropping) {
for (int j = 0; j < static_cast<int>(cfg_.ts_number_layers); ++j) {
ASSERT_GE(effective_datarate_[j], cfg_.layer_target_bitrate[j] * 0.85)
<< " The datarate for the file is lower than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
ASSERT_LE(effective_datarate_[j], cfg_.layer_target_bitrate[j] * 1.15)
<< " The datarate for the file is greater than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
// Expect some frame drops in this test: for this 200 frames test,
// expect at least 10% and not more than 60% drops.
ASSERT_GE(num_drops_, 20);
@ -742,11 +850,15 @@ TEST_P(DatarateTestVP9Large, DenoiserOffOn) {
}
#endif // CONFIG_VP9_TEMPORAL_DENOISING
class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
class DatarateOnePassCbrSvc
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, int> {
public:
DatarateOnePassCbrSvc() : EncoderTest(GET_PARAM(0)) {}
DatarateOnePassCbrSvc() : EncoderTest(GET_PARAM(0)) {
memset(&svc_params_, 0, sizeof(svc_params_));
}
virtual ~DatarateOnePassCbrSvc() {}
protected:
virtual void SetUp() {
InitializeConfig();
@ -764,8 +876,7 @@ class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
mismatch_psnr_ = 0.0;
mismatch_nframes_ = 0;
}
virtual void BeginPassHook(unsigned int /*pass*/) {
}
virtual void BeginPassHook(unsigned int /*pass*/) {}
virtual void PreEncodeFrameHook(::libvpx_test::VideoSource *video,
::libvpx_test::Encoder *encoder) {
if (video->frame() == 0) {
@ -774,10 +885,10 @@ class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
svc_params_.max_quantizers[i] = 63;
svc_params_.min_quantizers[i] = 0;
}
svc_params_.scaling_factor_num[0] = 144;
svc_params_.scaling_factor_den[0] = 288;
svc_params_.scaling_factor_num[1] = 288;
svc_params_.scaling_factor_den[1] = 288;
svc_params_.speed_per_layer[0] = 5;
for (i = 1; i < VPX_SS_MAX_LAYERS; ++i) {
svc_params_.speed_per_layer[i] = 7;
}
encoder->Control(VP9E_SET_SVC, 1);
encoder->Control(VP9E_SET_SVC_PARAMETERS, &svc_params_);
encoder->Control(VP8E_SET_CPUUSED, speed_setting_);
@ -791,21 +902,19 @@ class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
}
virtual void FramePktHook(const vpx_codec_cx_pkt_t *pkt) {
vpx_codec_pts_t duration = pkt->data.frame.pts - last_pts_;
if (last_pts_ == 0)
duration = 1;
if (last_pts_ == 0) duration = 1;
bits_in_buffer_model_ += static_cast<int64_t>(
duration * timebase_ * cfg_.rc_target_bitrate * 1000);
const bool key_frame = (pkt->data.frame.flags & VPX_FRAME_IS_KEY)
? true: false;
const bool key_frame =
(pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? true : false;
if (!key_frame) {
ASSERT_GE(bits_in_buffer_model_, 0) << "Buffer Underrun at frame "
<< pkt->data.frame.pts;
<< pkt->data.frame.pts;
}
const size_t frame_size_in_bits = pkt->data.frame.sz * 8;
bits_in_buffer_model_ -= frame_size_in_bits;
bits_total_ += frame_size_in_bits;
if (!first_drop_ && duration > 1)
first_drop_ = last_pts_ + 1;
if (!first_drop_ && duration > 1) first_drop_ = last_pts_ + 1;
last_pts_ = pkt->data.frame.pts;
bits_in_last_frame_ = frame_size_in_bits;
++frame_number_;
@ -814,22 +923,17 @@ class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
if (bits_total_) {
const double file_size_in_kb = bits_total_ / 1000.; // bits per kilobit
duration_ = (last_pts_ + 1) * timebase_;
effective_datarate_ = (bits_total_ - bits_in_last_frame_) / 1000.0
/ (cfg_.rc_buf_initial_sz / 1000.0 + duration_);
file_datarate_ = file_size_in_kb / duration_;
}
}
virtual void MismatchHook(const vpx_image_t *img1,
const vpx_image_t *img2) {
virtual void MismatchHook(const vpx_image_t *img1, const vpx_image_t *img2) {
double mismatch_psnr = compute_psnr(img1, img2);
mismatch_psnr_ += mismatch_psnr;
++mismatch_nframes_;
}
unsigned int GetMismatchFrames() {
return mismatch_nframes_;
}
unsigned int GetMismatchFrames() { return mismatch_nframes_; }
vpx_codec_pts_t last_pts_;
int64_t bits_in_buffer_model_;
@ -839,7 +943,6 @@ class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
int64_t bits_total_;
double duration_;
double file_datarate_;
double effective_datarate_;
size_t bits_in_last_frame_;
vpx_svc_extra_cfg_t svc_params_;
int speed_setting_;
@ -847,44 +950,80 @@ class DatarateOnePassCbrSvc : public ::libvpx_test::EncoderTest,
int mismatch_nframes_;
};
static void assign_layer_bitrates(vpx_codec_enc_cfg_t *const enc_cfg,
const vpx_svc_extra_cfg_t *svc_params,
int spatial_layers,
int temporal_layers,
int temporal_layering_mode) {
const vpx_svc_extra_cfg_t *svc_params,
int spatial_layers, int temporal_layers,
int temporal_layering_mode) {
int sl, spatial_layer_target;
float total = 0;
float alloc_ratio[VPX_MAX_LAYERS] = {0};
float alloc_ratio[VPX_MAX_LAYERS] = { 0 };
for (sl = 0; sl < spatial_layers; ++sl) {
if (svc_params->scaling_factor_den[sl] > 0) {
alloc_ratio[sl] = (float)(svc_params->scaling_factor_num[sl] *
1.0 / svc_params->scaling_factor_den[sl]);
alloc_ratio[sl] = (float)(svc_params->scaling_factor_num[sl] * 1.0 /
svc_params->scaling_factor_den[sl]);
total += alloc_ratio[sl];
}
}
for (sl = 0; sl < spatial_layers; ++sl) {
enc_cfg->ss_target_bitrate[sl] = spatial_layer_target =
(unsigned int)(enc_cfg->rc_target_bitrate *
alloc_ratio[sl] / total);
(unsigned int)(enc_cfg->rc_target_bitrate * alloc_ratio[sl] / total);
const int index = sl * temporal_layers;
if (temporal_layering_mode == 3) {
enc_cfg->layer_target_bitrate[index] =
spatial_layer_target >> 1;
enc_cfg->layer_target_bitrate[index] = spatial_layer_target >> 1;
enc_cfg->layer_target_bitrate[index + 1] =
(spatial_layer_target >> 1) + (spatial_layer_target >> 2);
enc_cfg->layer_target_bitrate[index + 2] =
spatial_layer_target;
enc_cfg->layer_target_bitrate[index + 2] = spatial_layer_target;
} else if (temporal_layering_mode == 2) {
enc_cfg->layer_target_bitrate[index] =
spatial_layer_target * 2 / 3;
enc_cfg->layer_target_bitrate[index + 1] =
spatial_layer_target;
enc_cfg->layer_target_bitrate[index] = spatial_layer_target * 2 / 3;
enc_cfg->layer_target_bitrate[index + 1] = spatial_layer_target;
}
}
}
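As a worked example of the allocation above, using the 800 kbps two-spatial-layer configuration from the surrounding tests (an editor's illustration): scaling factors 144/288 and 288/288 give allocation ratios 0.5 and 1.0, so the spatial targets are 800 * 0.5 / 1.5 ≈ 266 kbps and 800 * 1.0 / 1.5 ≈ 533 kbps. With temporal_layering_mode == 3 each spatial target is then split cumulatively as target/2, target/2 + target/4 and target, i.e. roughly 266, 399 and 533 kbps for the upper spatial layer; with mode 2 the split is 2/3 of the target and the full target.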
// Check basic rate targeting for 1 pass CBR SVC: 2 spatial layers and
// 3 temporal layers. Run CIF clip with 1 thread.
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc) {
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc2SpatialLayers) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.rc_end_usage = VPX_CBR;
cfg_.g_lag_in_frames = 0;
cfg_.ss_number_layers = 2;
cfg_.ts_number_layers = 3;
cfg_.ts_rate_decimator[0] = 4;
cfg_.ts_rate_decimator[1] = 2;
cfg_.ts_rate_decimator[2] = 1;
cfg_.g_error_resilient = 1;
cfg_.g_threads = 1;
cfg_.temporal_layering_mode = 3;
svc_params_.scaling_factor_num[0] = 144;
svc_params_.scaling_factor_den[0] = 288;
svc_params_.scaling_factor_num[1] = 288;
svc_params_.scaling_factor_den[1] = 288;
cfg_.rc_dropframe_thresh = 10;
cfg_.kf_max_dist = 9999;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 200);
// TODO(wonkap/marpan): Check that effective_datarate for each layer hits the
// layer target_bitrate.
for (int i = 200; i <= 800; i += 200) {
cfg_.rc_target_bitrate = i;
ResetModel();
assign_layer_bitrates(&cfg_, &svc_params_, cfg_.ss_number_layers,
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, file_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.15)
<< " The datarate for the file is lower than the target by too much!";
EXPECT_EQ(static_cast<unsigned int>(0), GetMismatchFrames());
}
}
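The two ASSERTs above bound the measured file datarate against the target; rearranging target >= 0.85 * file and target <= 1.15 * file gives the accepted window

\[
\frac{\text{target}}{1.15} \;\le\; \text{file\_datarate\_} \;\le\; \frac{\text{target}}{0.85},
\]

roughly -13% to +18% around the target, so at 800 kbps the file datarate may fall between about 696 and 941 kbps.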
// Check basic rate targeting for 1 pass CBR SVC: 2 spatial layers and 3
// temporal layers. Run CIF clip with 1 thread, and a few short key frame periods.
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc2SpatialLayersSmallKf) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
@ -907,25 +1046,26 @@ TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc) {
cfg_.rc_dropframe_thresh = 10;
::libvpx_test::I420VideoSource video("hantro_collage_w352h288.yuv", 352, 288,
30, 1, 0, 200);
// TODO(wonkap/marpan): Check that effective_datarate for each layer hits the
// layer target_bitrate. Also check if test can pass at lower bitrate (~200k).
for (int i = 400; i <= 800; i += 200) {
cfg_.rc_target_bitrate = i;
cfg_.rc_target_bitrate = 400;
// For this 3 temporal layer case, the pattern repeats every 4 frames, so
// choose 4 neighboring key frame periods (so the key frame lands on
// temporal layers 0-2-1-2; see the sketch after this test).
for (int j = 64; j <= 67; j++) {
cfg_.kf_max_dist = j;
ResetModel();
assign_layer_bitrates(&cfg_, &svc_params_, cfg_.ss_number_layers,
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, effective_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_GE(cfg_.rc_target_bitrate, file_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.15)
<< " The datarate for the file is lower than the target by too much!";
EXPECT_EQ(GetMismatchFrames(), (unsigned int) 0);
EXPECT_EQ(static_cast<unsigned int>(0), GetMismatchFrames());
}
}
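A minimal sketch, assuming mode-3 layering assigns frames to temporal layers in the repeating pattern 0, 2, 1, 2 by frame index modulo 4, of why kf_max_dist values 64 through 67 land key frames on layers 0-2-1-2 as the comment above says:

#include <cstdio>

int main() {
  const int pattern[4] = { 0, 2, 1, 2 };  // assumed temporal layer by frame % 4
  for (int kf = 64; kf <= 67; ++kf)
    printf("kf_max_dist %d -> key frame at frame %d -> TL%d\n", kf, kf,
           pattern[kf % 4]);
  return 0;  // TL0, TL2, TL1, TL2 for 64, 65, 66, 67
}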
// Check basic rate targeting for 1 pass CBR SVC: 2 spatial layers and
// 3 temporal layers. Run HD clip with 4 threads.
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc4threads) {
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc2SpatialLayers4threads) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
@ -946,26 +1086,151 @@ TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc4threads) {
svc_params_.scaling_factor_num[1] = 288;
svc_params_.scaling_factor_den[1] = 288;
cfg_.rc_dropframe_thresh = 10;
::libvpx_test::I420VideoSource video("niklas_1280_720_30.y4m", 1280, 720,
30, 1, 0, 300);
cfg_.kf_max_dist = 9999;
::libvpx_test::I420VideoSource video("niklas_1280_720_30.y4m", 1280, 720, 30,
1, 0, 300);
cfg_.rc_target_bitrate = 800;
ResetModel();
assign_layer_bitrates(&cfg_, &svc_params_, cfg_.ss_number_layers,
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, effective_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_GE(cfg_.rc_target_bitrate, file_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.15)
<< " The datarate for the file is lower than the target by too much!";
EXPECT_EQ(GetMismatchFrames(), (unsigned int) 0);
EXPECT_EQ(static_cast<unsigned int>(0), GetMismatchFrames());
}
// Check basic rate targeting for 1 pass CBR SVC: 3 spatial layers and
// 3 temporal layers. Run HD clip with 1 thread.
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc3SpatialLayers) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.rc_end_usage = VPX_CBR;
cfg_.g_lag_in_frames = 0;
cfg_.ss_number_layers = 3;
cfg_.ts_number_layers = 3;
cfg_.ts_rate_decimator[0] = 4;
cfg_.ts_rate_decimator[1] = 2;
cfg_.ts_rate_decimator[2] = 1;
cfg_.g_error_resilient = 1;
cfg_.g_threads = 1;
cfg_.temporal_layering_mode = 3;
svc_params_.scaling_factor_num[0] = 72;
svc_params_.scaling_factor_den[0] = 288;
svc_params_.scaling_factor_num[1] = 144;
svc_params_.scaling_factor_den[1] = 288;
svc_params_.scaling_factor_num[2] = 288;
svc_params_.scaling_factor_den[2] = 288;
cfg_.rc_dropframe_thresh = 10;
cfg_.kf_max_dist = 9999;
::libvpx_test::I420VideoSource video("niklas_1280_720_30.y4m", 1280, 720, 30,
1, 0, 300);
cfg_.rc_target_bitrate = 800;
ResetModel();
assign_layer_bitrates(&cfg_, &svc_params_, cfg_.ss_number_layers,
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, file_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.22)
<< " The datarate for the file is lower than the target by too much!";
EXPECT_EQ(static_cast<unsigned int>(0), GetMismatchFrames());
}
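Assuming the usual interpretation of the SVC scaling factors as per-layer resolution ratios, the 72/288, 144/288, 288/288 settings above put the three spatial layers at quarter, half, and full resolution of the 1280x720 source; a quick check:

#include <cstdio>

int main() {
  const int num[3] = { 72, 144, 288 }, den = 288;
  for (int sl = 0; sl < 3; ++sl)
    printf("SL%d: %dx%d\n", sl, 1280 * num[sl] / den, 720 * num[sl] / den);
  return 0;  // SL0: 320x180, SL1: 640x360, SL2: 1280x720
}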
// Check basic rate targeting for 1 pass CBR SVC: 3 spatial layers and 3
// temporal layers. Run HD clip with 1 thread, and a few short key frame periods.
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc3SpatialLayersSmallKf) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.rc_end_usage = VPX_CBR;
cfg_.g_lag_in_frames = 0;
cfg_.ss_number_layers = 3;
cfg_.ts_number_layers = 3;
cfg_.ts_rate_decimator[0] = 4;
cfg_.ts_rate_decimator[1] = 2;
cfg_.ts_rate_decimator[2] = 1;
cfg_.g_error_resilient = 1;
cfg_.g_threads = 1;
cfg_.temporal_layering_mode = 3;
svc_params_.scaling_factor_num[0] = 72;
svc_params_.scaling_factor_den[0] = 288;
svc_params_.scaling_factor_num[1] = 144;
svc_params_.scaling_factor_den[1] = 288;
svc_params_.scaling_factor_num[2] = 288;
svc_params_.scaling_factor_den[2] = 288;
cfg_.rc_dropframe_thresh = 10;
::libvpx_test::I420VideoSource video("niklas_1280_720_30.y4m", 1280, 720, 30,
1, 0, 300);
cfg_.rc_target_bitrate = 800;
// For this 3 temporal layer case, the pattern repeats every 4 frames, so
// choose 4 neighboring key frame periods (so the key frame lands on
// temporal layers 0-2-1-2).
for (int j = 32; j <= 35; j++) {
cfg_.kf_max_dist = j;
ResetModel();
assign_layer_bitrates(&cfg_, &svc_params_, cfg_.ss_number_layers,
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, file_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.30)
<< " The datarate for the file is lower than the target by too much!";
EXPECT_EQ(static_cast<unsigned int>(0), GetMismatchFrames());
}
}
// Check basic rate targeting for 1 pass CBR SVC: 3 spatial layers and
// 3 temporal layers. Run HD clip with 4 threads.
TEST_P(DatarateOnePassCbrSvc, OnePassCbrSvc3SpatialLayers4threads) {
cfg_.rc_buf_initial_sz = 500;
cfg_.rc_buf_optimal_sz = 500;
cfg_.rc_buf_sz = 1000;
cfg_.rc_min_quantizer = 0;
cfg_.rc_max_quantizer = 63;
cfg_.rc_end_usage = VPX_CBR;
cfg_.g_lag_in_frames = 0;
cfg_.ss_number_layers = 3;
cfg_.ts_number_layers = 3;
cfg_.ts_rate_decimator[0] = 4;
cfg_.ts_rate_decimator[1] = 2;
cfg_.ts_rate_decimator[2] = 1;
cfg_.g_error_resilient = 1;
cfg_.g_threads = 4;
cfg_.temporal_layering_mode = 3;
svc_params_.scaling_factor_num[0] = 72;
svc_params_.scaling_factor_den[0] = 288;
svc_params_.scaling_factor_num[1] = 144;
svc_params_.scaling_factor_den[1] = 288;
svc_params_.scaling_factor_num[2] = 288;
svc_params_.scaling_factor_den[2] = 288;
cfg_.rc_dropframe_thresh = 10;
cfg_.kf_max_dist = 9999;
::libvpx_test::I420VideoSource video("niklas_1280_720_30.y4m", 1280, 720, 30,
1, 0, 300);
cfg_.rc_target_bitrate = 800;
ResetModel();
assign_layer_bitrates(&cfg_, &svc_params_, cfg_.ss_number_layers,
cfg_.ts_number_layers, cfg_.temporal_layering_mode);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
ASSERT_GE(cfg_.rc_target_bitrate, file_datarate_ * 0.85)
<< " The datarate for the file exceeds the target by too much!";
ASSERT_LE(cfg_.rc_target_bitrate, file_datarate_ * 1.22)
<< " The datarate for the file is lower than the target by too much!";
EXPECT_EQ(static_cast<unsigned int>(0), GetMismatchFrames());
}
VP8_INSTANTIATE_TEST_CASE(DatarateTestLarge, ALL_TEST_MODES);
VP9_INSTANTIATE_TEST_CASE(DatarateTestVP9Large,
::testing::Values(::libvpx_test::kOnePassGood,
::libvpx_test::kRealTime),
::testing::Range(2, 7));
::testing::Range(2, 9));
VP9_INSTANTIATE_TEST_CASE(DatarateOnePassCbrSvc,
::testing::Values(::libvpx_test::kRealTime),
::testing::Range(5, 8));
::testing::Range(5, 9));
} // namespace

View File

@ -25,20 +25,12 @@
#include "vpx/vpx_codec.h"
#include "vpx/vpx_integer.h"
#include "vpx_ports/mem.h"
#include "vpx_ports/msvc.h" // for round()
using libvpx_test::ACMRandom;
namespace {
#ifdef _MSC_VER
static int round(double x) {
if (x < 0)
return static_cast<int>(ceil(x - 0.5));
else
return static_cast<int>(floor(x + 0.5));
}
#endif
const int kNumCoeffs = 256;
const double C1 = 0.995184726672197;
const double C2 = 0.98078528040323;
@ -62,16 +54,16 @@ void butterfly_16x16_dct_1d(double input[16], double output[16]) {
double temp1, temp2;
// step 1
step[ 0] = input[0] + input[15];
step[ 1] = input[1] + input[14];
step[ 2] = input[2] + input[13];
step[ 3] = input[3] + input[12];
step[ 4] = input[4] + input[11];
step[ 5] = input[5] + input[10];
step[ 6] = input[6] + input[ 9];
step[ 7] = input[7] + input[ 8];
step[ 8] = input[7] - input[ 8];
step[ 9] = input[6] - input[ 9];
step[0] = input[0] + input[15];
step[1] = input[1] + input[14];
step[2] = input[2] + input[13];
step[3] = input[3] + input[12];
step[4] = input[4] + input[11];
step[5] = input[5] + input[10];
step[6] = input[6] + input[9];
step[7] = input[7] + input[8];
step[8] = input[7] - input[8];
step[9] = input[6] - input[9];
step[10] = input[5] - input[10];
step[11] = input[4] - input[11];
step[12] = input[3] - input[12];
@ -89,13 +81,13 @@ void butterfly_16x16_dct_1d(double input[16], double output[16]) {
output[6] = step[1] - step[6];
output[7] = step[0] - step[7];
temp1 = step[ 8] * C7;
temp1 = step[8] * C7;
temp2 = step[15] * C9;
output[ 8] = temp1 + temp2;
output[8] = temp1 + temp2;
temp1 = step[ 9] * C11;
temp1 = step[9] * C11;
temp2 = step[14] * C5;
output[ 9] = temp1 - temp2;
output[9] = temp1 - temp2;
temp1 = step[10] * C3;
temp2 = step[13] * C13;
@ -113,40 +105,40 @@ void butterfly_16x16_dct_1d(double input[16], double output[16]) {
temp2 = step[13] * C3;
output[13] = temp2 - temp1;
temp1 = step[ 9] * C5;
temp1 = step[9] * C5;
temp2 = step[14] * C11;
output[14] = temp2 + temp1;
temp1 = step[ 8] * C9;
temp1 = step[8] * C9;
temp2 = step[15] * C7;
output[15] = temp2 - temp1;
// step 3
step[ 0] = output[0] + output[3];
step[ 1] = output[1] + output[2];
step[ 2] = output[1] - output[2];
step[ 3] = output[0] - output[3];
step[0] = output[0] + output[3];
step[1] = output[1] + output[2];
step[2] = output[1] - output[2];
step[3] = output[0] - output[3];
temp1 = output[4] * C14;
temp2 = output[7] * C2;
step[ 4] = temp1 + temp2;
step[4] = temp1 + temp2;
temp1 = output[5] * C10;
temp2 = output[6] * C6;
step[ 5] = temp1 + temp2;
step[5] = temp1 + temp2;
temp1 = output[5] * C6;
temp2 = output[6] * C10;
step[ 6] = temp2 - temp1;
step[6] = temp2 - temp1;
temp1 = output[4] * C2;
temp2 = output[7] * C14;
step[ 7] = temp2 - temp1;
step[7] = temp2 - temp1;
step[ 8] = output[ 8] + output[11];
step[ 9] = output[ 9] + output[10];
step[10] = output[ 9] - output[10];
step[11] = output[ 8] - output[11];
step[8] = output[8] + output[11];
step[9] = output[9] + output[10];
step[10] = output[9] - output[10];
step[11] = output[8] - output[11];
step[12] = output[12] + output[15];
step[13] = output[13] + output[14];
@ -154,25 +146,25 @@ void butterfly_16x16_dct_1d(double input[16], double output[16]) {
step[15] = output[12] - output[15];
// step 4
output[ 0] = (step[ 0] + step[ 1]);
output[ 8] = (step[ 0] - step[ 1]);
output[0] = (step[0] + step[1]);
output[8] = (step[0] - step[1]);
temp1 = step[2] * C12;
temp2 = step[3] * C4;
temp1 = temp1 + temp2;
output[ 4] = 2*(temp1 * C8);
output[4] = 2 * (temp1 * C8);
temp1 = step[2] * C4;
temp2 = step[3] * C12;
temp1 = temp2 - temp1;
output[12] = 2 * (temp1 * C8);
output[ 2] = 2 * ((step[4] + step[ 5]) * C8);
output[14] = 2 * ((step[7] - step[ 6]) * C8);
output[2] = 2 * ((step[4] + step[5]) * C8);
output[14] = 2 * ((step[7] - step[6]) * C8);
temp1 = step[4] - step[5];
temp2 = step[6] + step[7];
output[ 6] = (temp1 + temp2);
output[6] = (temp1 + temp2);
output[10] = (temp1 - temp2);
intermediate[8] = step[8] + step[14];
@ -188,18 +180,18 @@ void butterfly_16x16_dct_1d(double input[16], double output[16]) {
temp1 = temp2 + temp1;
output[13] = 2 * (temp1 * C8);
output[ 9] = 2 * ((step[10] + step[11]) * C8);
output[9] = 2 * ((step[10] + step[11]) * C8);
intermediate[11] = step[10] - step[11];
intermediate[12] = step[12] + step[13];
intermediate[13] = step[12] - step[13];
intermediate[14] = step[ 8] - step[14];
intermediate[15] = step[ 9] - step[15];
intermediate[14] = step[8] - step[14];
intermediate[15] = step[9] - step[15];
output[15] = (intermediate[11] + intermediate[12]);
output[ 1] = -(intermediate[11] - intermediate[12]);
output[1] = -(intermediate[11] - intermediate[12]);
output[ 7] = 2 * (intermediate[13] * C8);
output[7] = 2 * (intermediate[13] * C8);
temp1 = intermediate[14] * C12;
temp2 = intermediate[15] * C4;
@ -209,28 +201,24 @@ void butterfly_16x16_dct_1d(double input[16], double output[16]) {
temp1 = intermediate[14] * C4;
temp2 = intermediate[15] * C12;
temp1 = temp2 + temp1;
output[ 5] = 2 * (temp1 * C8);
output[5] = 2 * (temp1 * C8);
}
void reference_16x16_dct_2d(int16_t input[256], double output[256]) {
// First transform columns
for (int i = 0; i < 16; ++i) {
double temp_in[16], temp_out[16];
for (int j = 0; j < 16; ++j)
temp_in[j] = input[j * 16 + i];
for (int j = 0; j < 16; ++j) temp_in[j] = input[j * 16 + i];
butterfly_16x16_dct_1d(temp_in, temp_out);
for (int j = 0; j < 16; ++j)
output[j * 16 + i] = temp_out[j];
for (int j = 0; j < 16; ++j) output[j * 16 + i] = temp_out[j];
}
// Then transform rows
for (int i = 0; i < 16; ++i) {
double temp_in[16], temp_out[16];
for (int j = 0; j < 16; ++j)
temp_in[j] = output[j + i * 16];
for (int j = 0; j < 16; ++j) temp_in[j] = output[j + i * 16];
butterfly_16x16_dct_1d(temp_in, temp_out);
// Scale by some magic number
for (int j = 0; j < 16; ++j)
output[j + i * 16] = temp_out[j]/2;
for (int j = 0; j < 16; ++j) output[j + i * 16] = temp_out[j] / 2;
}
}
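Restated, with \(D\) the 16-point 1-D DCT implemented by butterfly_16x16_dct_1d and \(X\) the 16x16 input block, the reference above computes

\[
Y \;=\; \tfrac{1}{2}\, D\, X\, D^{\top},
\]

a column pass followed by a row pass, with the final division by two apparently chosen so that, after rounding, the coefficients land on the same scale as the fixed-point transforms under test.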
@ -256,8 +244,7 @@ void idct16x16_ref(const tran_low_t *in, uint8_t *dest, int stride,
vpx_idct16x16_256_add_c(in, dest, stride);
}
void fht16x16_ref(const int16_t *in, tran_low_t *out, int stride,
int tx_type) {
void fht16x16_ref(const int16_t *in, tran_low_t *out, int stride, int tx_type) {
vp9_fht16x16_c(in, out, stride, tx_type);
}
@ -359,11 +346,10 @@ class Trans16x16TestBase {
}
}
ASM_REGISTER_STATE_CHECK(RunFwdTxfm(test_input_block,
test_temp_block, pitch_));
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(test_input_block, test_temp_block, pitch_));
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(test_temp_block, dst, pitch_));
ASM_REGISTER_STATE_CHECK(RunInvTxfm(test_temp_block, dst, pitch_));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(
@ -373,19 +359,18 @@ class Trans16x16TestBase {
for (int j = 0; j < kNumCoeffs; ++j) {
#if CONFIG_VP9_HIGHBITDEPTH
const uint32_t diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - src[j] : dst16[j] - src16[j];
const int32_t diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - src[j] : dst16[j] - src16[j];
#else
const uint32_t diff = dst[j] - src[j];
const int32_t diff = dst[j] - src[j];
#endif
const uint32_t error = diff * diff;
if (max_error < error)
max_error = error;
if (max_error < error) max_error = error;
total_error += error;
}
}
EXPECT_GE(1u << 2 * (bit_depth_ - 8), max_error)
EXPECT_GE(1u << 2 * (bit_depth_ - 8), max_error)
<< "Error: 16x16 FHT/IHT has an individual round trip error > 1";
EXPECT_GE(count_test_block << 2 * (bit_depth_ - 8), total_error)
@ -401,8 +386,9 @@ class Trans16x16TestBase {
for (int i = 0; i < count_test_block; ++i) {
// Initialize a test block with input range [-mask_, mask_].
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
input_block[j] = (rnd.Rand16() & mask_) - (rnd.Rand16() & mask_);
}
fwd_txfm_ref(input_block, output_ref_block, pitch_, tx_type_);
ASM_REGISTER_STATE_CHECK(RunFwdTxfm(input_block, output_block, pitch_));
@ -426,16 +412,14 @@ class Trans16x16TestBase {
input_extreme_block[j] = rnd.Rand8() % 2 ? mask_ : -mask_;
}
if (i == 0) {
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = mask_;
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = mask_;
} else if (i == 1) {
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = -mask_;
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = -mask_;
}
fwd_txfm_ref(input_extreme_block, output_ref_block, pitch_, tx_type_);
ASM_REGISTER_STATE_CHECK(RunFwdTxfm(input_extreme_block,
output_block, pitch_));
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(input_extreme_block, output_block, pitch_));
// The minimum quant value is 4.
for (int j = 0; j < kNumCoeffs; ++j) {
@ -464,12 +448,12 @@ class Trans16x16TestBase {
for (int j = 0; j < kNumCoeffs; ++j) {
input_extreme_block[j] = rnd.Rand8() % 2 ? mask_ : -mask_;
}
if (i == 0)
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = mask_;
if (i == 1)
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = -mask_;
if (i == 0) {
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = mask_;
}
if (i == 1) {
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = -mask_;
}
fwd_txfm_ref(input_extreme_block, output_ref_block, pitch_, tx_type_);
@ -483,8 +467,9 @@ class Trans16x16TestBase {
// quantization with maximum allowed step sizes
output_ref_block[0] = (output_ref_block[0] / dc_thred) * dc_thred;
for (int j = 1; j < kNumCoeffs; ++j)
for (int j = 1; j < kNumCoeffs; ++j) {
output_ref_block[j] = (output_ref_block[j] / ac_thred) * ac_thred;
}
if (bit_depth_ == VPX_BITS_8) {
inv_txfm_ref(output_ref_block, ref, pitch_, tx_type_);
ASM_REGISTER_STATE_CHECK(RunInvTxfm(output_ref_block, dst, pitch_));
@ -492,17 +477,15 @@ class Trans16x16TestBase {
} else {
inv_txfm_ref(output_ref_block, CONVERT_TO_BYTEPTR(ref16), pitch_,
tx_type_);
ASM_REGISTER_STATE_CHECK(RunInvTxfm(output_ref_block,
CONVERT_TO_BYTEPTR(dst16), pitch_));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(output_ref_block, CONVERT_TO_BYTEPTR(dst16), pitch_));
#endif
}
if (bit_depth_ == VPX_BITS_8) {
for (int j = 0; j < kNumCoeffs; ++j)
EXPECT_EQ(ref[j], dst[j]);
for (int j = 0; j < kNumCoeffs; ++j) EXPECT_EQ(ref[j], dst[j]);
#if CONFIG_VP9_HIGHBITDEPTH
} else {
for (int j = 0; j < kNumCoeffs; ++j)
EXPECT_EQ(ref16[j], dst16[j]);
for (int j = 0; j < kNumCoeffs; ++j) EXPECT_EQ(ref16[j], dst16[j]);
#endif
}
}
@ -539,15 +522,16 @@ class Trans16x16TestBase {
}
reference_16x16_dct_2d(in, out_r);
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
coeff[j] = static_cast<tran_low_t>(round(out_r[j]));
}
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, dst, 16));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16),
16));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16), 16));
#endif // CONFIG_VP9_HIGHBITDEPTH
}
@ -559,9 +543,8 @@ class Trans16x16TestBase {
const uint32_t diff = dst[j] - src[j];
#endif // CONFIG_VP9_HIGHBITDEPTH
const uint32_t error = diff * diff;
EXPECT_GE(1u, error)
<< "Error: 16x16 IDCT has error " << error
<< " at index " << j;
EXPECT_GE(1u, error) << "Error: 16x16 IDCT has error " << error
<< " at index " << j;
}
}
}
@ -603,8 +586,8 @@ class Trans16x16TestBase {
} else {
#if CONFIG_VP9_HIGHBITDEPTH
ref_txfm(coeff, CONVERT_TO_BYTEPTR(ref16), pitch_);
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16),
pitch_));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16), pitch_));
#endif // CONFIG_VP9_HIGHBITDEPTH
}
@ -616,9 +599,8 @@ class Trans16x16TestBase {
const uint32_t diff = dst[j] - ref[j];
#endif // CONFIG_VP9_HIGHBITDEPTH
const uint32_t error = diff * diff;
EXPECT_EQ(0u, error)
<< "Error: 16x16 IDCT Comparison has error " << error
<< " at index " << j;
EXPECT_EQ(0u, error) << "Error: 16x16 IDCT Comparison has error "
<< error << " at index " << j;
}
}
}
@ -631,32 +613,25 @@ class Trans16x16TestBase {
IhtFunc inv_txfm_ref;
};
class Trans16x16DCT
: public Trans16x16TestBase,
public ::testing::TestWithParam<Dct16x16Param> {
class Trans16x16DCT : public Trans16x16TestBase,
public ::testing::TestWithParam<Dct16x16Param> {
public:
virtual ~Trans16x16DCT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
tx_type_ = GET_PARAM(2);
bit_depth_ = GET_PARAM(3);
pitch_ = 16;
pitch_ = 16;
fwd_txfm_ref = fdct16x16_ref;
inv_txfm_ref = idct16x16_ref;
mask_ = (1 << bit_depth_) - 1;
#if CONFIG_VP9_HIGHBITDEPTH
switch (bit_depth_) {
case VPX_BITS_10:
inv_txfm_ref = idct16x16_10_ref;
break;
case VPX_BITS_12:
inv_txfm_ref = idct16x16_12_ref;
break;
default:
inv_txfm_ref = idct16x16_ref;
break;
case VPX_BITS_10: inv_txfm_ref = idct16x16_10_ref; break;
case VPX_BITS_12: inv_txfm_ref = idct16x16_12_ref; break;
default: inv_txfm_ref = idct16x16_ref; break;
}
#else
inv_txfm_ref = idct16x16_ref;
@ -676,17 +651,11 @@ class Trans16x16DCT
IdctFunc inv_txfm_;
};
TEST_P(Trans16x16DCT, AccuracyCheck) {
RunAccuracyCheck();
}
TEST_P(Trans16x16DCT, AccuracyCheck) { RunAccuracyCheck(); }
TEST_P(Trans16x16DCT, CoeffCheck) {
RunCoeffCheck();
}
TEST_P(Trans16x16DCT, CoeffCheck) { RunCoeffCheck(); }
TEST_P(Trans16x16DCT, MemCheck) {
RunMemCheck();
}
TEST_P(Trans16x16DCT, MemCheck) { RunMemCheck(); }
TEST_P(Trans16x16DCT, QuantCheck) {
// Use maximally allowed quantization step sizes for DC and AC
@ -694,36 +663,27 @@ TEST_P(Trans16x16DCT, QuantCheck) {
RunQuantCheck(1336, 1828);
}
TEST_P(Trans16x16DCT, InvAccuracyCheck) {
RunInvAccuracyCheck();
}
TEST_P(Trans16x16DCT, InvAccuracyCheck) { RunInvAccuracyCheck(); }
class Trans16x16HT
: public Trans16x16TestBase,
public ::testing::TestWithParam<Ht16x16Param> {
class Trans16x16HT : public Trans16x16TestBase,
public ::testing::TestWithParam<Ht16x16Param> {
public:
virtual ~Trans16x16HT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
tx_type_ = GET_PARAM(2);
bit_depth_ = GET_PARAM(3);
pitch_ = 16;
pitch_ = 16;
fwd_txfm_ref = fht16x16_ref;
inv_txfm_ref = iht16x16_ref;
mask_ = (1 << bit_depth_) - 1;
#if CONFIG_VP9_HIGHBITDEPTH
switch (bit_depth_) {
case VPX_BITS_10:
inv_txfm_ref = iht16x16_10;
break;
case VPX_BITS_12:
inv_txfm_ref = iht16x16_12;
break;
default:
inv_txfm_ref = iht16x16_ref;
break;
case VPX_BITS_10: inv_txfm_ref = iht16x16_10; break;
case VPX_BITS_12: inv_txfm_ref = iht16x16_12; break;
default: inv_txfm_ref = iht16x16_ref; break;
}
#else
inv_txfm_ref = iht16x16_ref;
@ -743,17 +703,11 @@ class Trans16x16HT
IhtFunc inv_txfm_;
};
TEST_P(Trans16x16HT, AccuracyCheck) {
RunAccuracyCheck();
}
TEST_P(Trans16x16HT, AccuracyCheck) { RunAccuracyCheck(); }
TEST_P(Trans16x16HT, CoeffCheck) {
RunCoeffCheck();
}
TEST_P(Trans16x16HT, CoeffCheck) { RunCoeffCheck(); }
TEST_P(Trans16x16HT, MemCheck) {
RunMemCheck();
}
TEST_P(Trans16x16HT, MemCheck) { RunMemCheck(); }
TEST_P(Trans16x16HT, QuantCheck) {
// The encoder skips any non-DC intra prediction modes,
@ -761,9 +715,8 @@ TEST_P(Trans16x16HT, QuantCheck) {
RunQuantCheck(429, 729);
}
class InvTrans16x16DCT
: public Trans16x16TestBase,
public ::testing::TestWithParam<Idct16x16Param> {
class InvTrans16x16DCT : public Trans16x16TestBase,
public ::testing::TestWithParam<Idct16x16Param> {
public:
virtual ~InvTrans16x16DCT() {}
@ -774,7 +727,7 @@ class InvTrans16x16DCT
bit_depth_ = GET_PARAM(3);
pitch_ = 16;
mask_ = (1 << bit_depth_) - 1;
}
}
virtual void TearDown() { libvpx_test::ClearSystemState(); }
protected:
@ -792,6 +745,66 @@ TEST_P(InvTrans16x16DCT, CompareReference) {
CompareInvReference(ref_txfm_, thresh_);
}
class PartialTrans16x16Test : public ::testing::TestWithParam<
std::tr1::tuple<FdctFunc, vpx_bit_depth_t> > {
public:
virtual ~PartialTrans16x16Test() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
bit_depth_ = GET_PARAM(1);
}
virtual void TearDown() { libvpx_test::ClearSystemState(); }
protected:
vpx_bit_depth_t bit_depth_;
FdctFunc fwd_txfm_;
};
TEST_P(PartialTrans16x16Test, Extremes) {
#if CONFIG_VP9_HIGHBITDEPTH
const int16_t maxval =
static_cast<int16_t>(clip_pixel_highbd(1 << 30, bit_depth_));
#else
const int16_t maxval = 255;
#endif
const int minval = -maxval;
DECLARE_ALIGNED(16, int16_t, input[kNumCoeffs]);
DECLARE_ALIGNED(16, tran_low_t, output[kNumCoeffs]);
for (int i = 0; i < kNumCoeffs; ++i) input[i] = maxval;
output[0] = 0;
ASM_REGISTER_STATE_CHECK(fwd_txfm_(input, output, 16));
EXPECT_EQ((maxval * kNumCoeffs) >> 1, output[0]);
for (int i = 0; i < kNumCoeffs; ++i) input[i] = minval;
output[0] = 0;
ASM_REGISTER_STATE_CHECK(fwd_txfm_(input, output, 16));
EXPECT_EQ((minval * kNumCoeffs) >> 1, output[0]);
}
TEST_P(PartialTrans16x16Test, Random) {
#if CONFIG_VP9_HIGHBITDEPTH
const int16_t maxval =
static_cast<int16_t>(clip_pixel_highbd(1 << 30, bit_depth_));
#else
const int16_t maxval = 255;
#endif
DECLARE_ALIGNED(16, int16_t, input[kNumCoeffs]);
DECLARE_ALIGNED(16, tran_low_t, output[kNumCoeffs]);
ACMRandom rnd(ACMRandom::DeterministicSeed());
int sum = 0;
for (int i = 0; i < kNumCoeffs; ++i) {
const int val = (i & 1) ? -rnd(maxval + 1) : rnd(maxval + 1);
input[i] = val;
sum += val;
}
output[0] = 0;
ASM_REGISTER_STATE_CHECK(fwd_txfm_(input, output, 16));
EXPECT_EQ(sum >> 1, output[0]);
}
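The Extremes and Random checks above only inspect output[0] and are consistent with a DC-only forward transform that sums the 16x16 block and shifts right by one. A minimal sketch of that behaviour (the function name and output type are illustrative stand-ins, not the library implementation):

#include <stdint.h>

// output[0] = (sum of the 256 input samples) >> 1, using the same shift the
// EXPECT_EQ checks above use; the other coefficients are not examined.
static void partial_fdct16x16_sketch(const int16_t *input, int stride,
                                     int32_t *output /* stands in for tran_low_t */) {
  int sum = 0;
  for (int r = 0; r < 16; ++r)
    for (int c = 0; c < 16; ++c) sum += input[r * stride + c];
  output[0] = sum >> 1;
}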
using std::tr1::make_tuple;
#if CONFIG_VP9_HIGHBITDEPTH
@ -802,10 +815,10 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vpx_highbd_fdct16x16_c, &idct16x16_12, 0, VPX_BITS_12),
make_tuple(&vpx_fdct16x16_c, &vpx_idct16x16_256_add_c, 0, VPX_BITS_8)));
#else
INSTANTIATE_TEST_CASE_P(
C, Trans16x16DCT,
::testing::Values(
make_tuple(&vpx_fdct16x16_c, &vpx_idct16x16_256_add_c, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(C, Trans16x16DCT,
::testing::Values(make_tuple(&vpx_fdct16x16_c,
&vpx_idct16x16_256_add_c,
0, VPX_BITS_8)));
#endif // CONFIG_VP9_HIGHBITDEPTH
#if CONFIG_VP9_HIGHBITDEPTH
@ -824,6 +837,11 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vp9_fht16x16_c, &vp9_iht16x16_256_add_c, 1, VPX_BITS_8),
make_tuple(&vp9_fht16x16_c, &vp9_iht16x16_256_add_c, 2, VPX_BITS_8),
make_tuple(&vp9_fht16x16_c, &vp9_iht16x16_256_add_c, 3, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
C, PartialTrans16x16Test,
::testing::Values(make_tuple(&vpx_highbd_fdct16x16_1_c, VPX_BITS_8),
make_tuple(&vpx_highbd_fdct16x16_1_c, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct16x16_1_c, VPX_BITS_12)));
#else
INSTANTIATE_TEST_CASE_P(
C, Trans16x16HT,
@ -832,49 +850,50 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vp9_fht16x16_c, &vp9_iht16x16_256_add_c, 1, VPX_BITS_8),
make_tuple(&vp9_fht16x16_c, &vp9_iht16x16_256_add_c, 2, VPX_BITS_8),
make_tuple(&vp9_fht16x16_c, &vp9_iht16x16_256_add_c, 3, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(C, PartialTrans16x16Test,
::testing::Values(make_tuple(&vpx_fdct16x16_1_c,
VPX_BITS_8)));
#endif // CONFIG_VP9_HIGHBITDEPTH
#if HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
NEON, Trans16x16DCT,
::testing::Values(
make_tuple(&vpx_fdct16x16_c,
&vpx_idct16x16_256_add_neon, 0, VPX_BITS_8)));
::testing::Values(make_tuple(&vpx_fdct16x16_c, &vpx_idct16x16_256_add_neon,
0, VPX_BITS_8)));
#endif
#if HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, Trans16x16DCT,
::testing::Values(
make_tuple(&vpx_fdct16x16_sse2,
&vpx_idct16x16_256_add_sse2, 0, VPX_BITS_8)));
::testing::Values(make_tuple(&vpx_fdct16x16_sse2,
&vpx_idct16x16_256_add_sse2, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
SSE2, Trans16x16HT,
::testing::Values(
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2, 0,
VPX_BITS_8),
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2, 1,
VPX_BITS_8),
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2, 2,
VPX_BITS_8),
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2, 3,
VPX_BITS_8)));
::testing::Values(make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2,
0, VPX_BITS_8),
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2,
1, VPX_BITS_8),
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2,
2, VPX_BITS_8),
make_tuple(&vp9_fht16x16_sse2, &vp9_iht16x16_256_add_sse2,
3, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(SSE2, PartialTrans16x16Test,
::testing::Values(make_tuple(&vpx_fdct16x16_1_sse2,
VPX_BITS_8)));
#endif // HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, Trans16x16DCT,
::testing::Values(
make_tuple(&vpx_highbd_fdct16x16_sse2,
&idct16x16_10, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct16x16_c,
&idct16x16_256_add_10_sse2, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct16x16_sse2,
&idct16x16_12, 0, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct16x16_c,
&idct16x16_256_add_12_sse2, 0, VPX_BITS_12),
make_tuple(&vpx_fdct16x16_sse2,
&vpx_idct16x16_256_add_c, 0, VPX_BITS_8)));
make_tuple(&vpx_highbd_fdct16x16_sse2, &idct16x16_10, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct16x16_c, &idct16x16_256_add_10_sse2, 0,
VPX_BITS_10),
make_tuple(&vpx_highbd_fdct16x16_sse2, &idct16x16_12, 0, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct16x16_c, &idct16x16_256_add_12_sse2, 0,
VPX_BITS_12),
make_tuple(&vpx_fdct16x16_sse2, &vpx_idct16x16_256_add_c, 0,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
SSE2, Trans16x16HT,
::testing::Values(
@ -887,23 +906,24 @@ INSTANTIATE_TEST_CASE_P(
// that to test both branches.
INSTANTIATE_TEST_CASE_P(
SSE2, InvTrans16x16DCT,
::testing::Values(
make_tuple(&idct16x16_10_add_10_c,
&idct16x16_10_add_10_sse2, 3167, VPX_BITS_10),
make_tuple(&idct16x16_10,
&idct16x16_256_add_10_sse2, 3167, VPX_BITS_10),
make_tuple(&idct16x16_10_add_12_c,
&idct16x16_10_add_12_sse2, 3167, VPX_BITS_12),
make_tuple(&idct16x16_12,
&idct16x16_256_add_12_sse2, 3167, VPX_BITS_12)));
::testing::Values(make_tuple(&idct16x16_10_add_10_c,
&idct16x16_10_add_10_sse2, 3167, VPX_BITS_10),
make_tuple(&idct16x16_10, &idct16x16_256_add_10_sse2,
3167, VPX_BITS_10),
make_tuple(&idct16x16_10_add_12_c,
&idct16x16_10_add_12_sse2, 3167, VPX_BITS_12),
make_tuple(&idct16x16_12, &idct16x16_256_add_12_sse2,
3167, VPX_BITS_12)));
INSTANTIATE_TEST_CASE_P(SSE2, PartialTrans16x16Test,
::testing::Values(make_tuple(&vpx_fdct16x16_1_sse2,
VPX_BITS_8)));
#endif // HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_MSA && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
MSA, Trans16x16DCT,
::testing::Values(
make_tuple(&vpx_fdct16x16_msa,
&vpx_idct16x16_256_add_msa, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(MSA, Trans16x16DCT,
::testing::Values(make_tuple(&vpx_fdct16x16_msa,
&vpx_idct16x16_256_add_msa,
0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
MSA, Trans16x16HT,
::testing::Values(
@ -912,5 +932,8 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vp9_fht16x16_msa, &vp9_iht16x16_256_add_msa, 2, VPX_BITS_8),
make_tuple(&vp9_fht16x16_msa, &vp9_iht16x16_256_add_msa, 3,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(MSA, PartialTrans16x16Test,
::testing::Values(make_tuple(&vpx_fdct16x16_1_msa,
VPX_BITS_8)));
#endif // HAVE_MSA && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
} // namespace

View File

@ -25,18 +25,11 @@
#include "vpx/vpx_codec.h"
#include "vpx/vpx_integer.h"
#include "vpx_ports/mem.h"
#include "vpx_ports/msvc.h" // for round()
using libvpx_test::ACMRandom;
namespace {
#ifdef _MSC_VER
static int round(double x) {
if (x < 0)
return static_cast<int>(ceil(x - 0.5));
else
return static_cast<int>(floor(x + 0.5));
}
#endif
const int kNumCoeffs = 1024;
const double kPi = 3.141592653589793238462643383279502884;
@ -44,10 +37,10 @@ void reference_32x32_dct_1d(const double in[32], double out[32]) {
const double kInvSqrt2 = 0.707106781186547524400844362104;
for (int k = 0; k < 32; k++) {
out[k] = 0.0;
for (int n = 0; n < 32; n++)
for (int n = 0; n < 32; n++) {
out[k] += in[n] * cos(kPi * (2 * n + 1) * k / 64.0);
if (k == 0)
out[k] = out[k] * kInvSqrt2;
}
if (k == 0) out[k] = out[k] * kInvSqrt2;
}
}
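Written out, the loop above is the standard (unnormalised) 32-point DCT-II with only the DC term rescaled:

\[
\mathrm{out}[k] \;=\; c_k \sum_{n=0}^{31} \mathrm{in}[n]\,
\cos\!\left(\frac{\pi\,(2n+1)\,k}{64}\right),
\qquad c_0 = \tfrac{1}{\sqrt{2}},\; c_k = 1 \ \text{for } k > 0.
\]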
@ -56,21 +49,17 @@ void reference_32x32_dct_2d(const int16_t input[kNumCoeffs],
// First transform columns
for (int i = 0; i < 32; ++i) {
double temp_in[32], temp_out[32];
for (int j = 0; j < 32; ++j)
temp_in[j] = input[j*32 + i];
for (int j = 0; j < 32; ++j) temp_in[j] = input[j * 32 + i];
reference_32x32_dct_1d(temp_in, temp_out);
for (int j = 0; j < 32; ++j)
output[j * 32 + i] = temp_out[j];
for (int j = 0; j < 32; ++j) output[j * 32 + i] = temp_out[j];
}
// Then transform rows
for (int i = 0; i < 32; ++i) {
double temp_in[32], temp_out[32];
for (int j = 0; j < 32; ++j)
temp_in[j] = output[j + i*32];
for (int j = 0; j < 32; ++j) temp_in[j] = output[j + i * 32];
reference_32x32_dct_1d(temp_in, temp_out);
// Scale by some magic number
for (int j = 0; j < 32; ++j)
output[j + i * 32] = temp_out[j] / 4;
for (int j = 0; j < 32; ++j) output[j + i * 32] = temp_out[j] / 4;
}
}
@ -96,8 +85,8 @@ class Trans32x32Test : public ::testing::TestWithParam<Trans32x32Param> {
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
version_ = GET_PARAM(2); // 0: high precision forward transform
// 1: low precision version for rd loop
version_ = GET_PARAM(2); // 0: high precision forward transform
// 1: low precision version for rd loop
bit_depth_ = GET_PARAM(3);
mask_ = (1 << bit_depth_) - 1;
}
@ -147,21 +136,20 @@ TEST_P(Trans32x32Test, AccuracyCheck) {
ASM_REGISTER_STATE_CHECK(inv_txfm_(test_temp_block, dst, 32));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(inv_txfm_(test_temp_block,
CONVERT_TO_BYTEPTR(dst16), 32));
ASM_REGISTER_STATE_CHECK(
inv_txfm_(test_temp_block, CONVERT_TO_BYTEPTR(dst16), 32));
#endif
}
for (int j = 0; j < kNumCoeffs; ++j) {
#if CONFIG_VP9_HIGHBITDEPTH
const uint32_t diff =
const int32_t diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - src[j] : dst16[j] - src16[j];
#else
const uint32_t diff = dst[j] - src[j];
const int32_t diff = dst[j] - src[j];
#endif
const uint32_t error = diff * diff;
if (max_error < error)
max_error = error;
if (max_error < error) max_error = error;
total_error += error;
}
}
@ -187,8 +175,9 @@ TEST_P(Trans32x32Test, CoeffCheck) {
DECLARE_ALIGNED(16, tran_low_t, output_block[kNumCoeffs]);
for (int i = 0; i < count_test_block; ++i) {
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
input_block[j] = (rnd.Rand16() & mask_) - (rnd.Rand16() & mask_);
}
const int stride = 32;
vpx_fdct32x32_c(input_block, output_ref_block, stride);
@ -220,11 +209,9 @@ TEST_P(Trans32x32Test, MemCheck) {
input_extreme_block[j] = rnd.Rand8() & 1 ? mask_ : -mask_;
}
if (i == 0) {
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = mask_;
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = mask_;
} else if (i == 1) {
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = -mask_;
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = -mask_;
}
const int stride = 32;
@ -281,8 +268,9 @@ TEST_P(Trans32x32Test, InverseAccuracy) {
}
reference_32x32_dct_2d(in, out_r);
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
coeff[j] = static_cast<tran_low_t>(round(out_r[j]));
}
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(inv_txfm_(coeff, dst, 32));
#if CONFIG_VP9_HIGHBITDEPTH
@ -298,59 +286,122 @@ TEST_P(Trans32x32Test, InverseAccuracy) {
const int diff = dst[j] - src[j];
#endif
const int error = diff * diff;
EXPECT_GE(1, error)
<< "Error: 32x32 IDCT has error " << error
<< " at index " << j;
EXPECT_GE(1, error) << "Error: 32x32 IDCT has error " << error
<< " at index " << j;
}
}
}
class PartialTrans32x32Test
: public ::testing::TestWithParam<
std::tr1::tuple<FwdTxfmFunc, vpx_bit_depth_t> > {
public:
virtual ~PartialTrans32x32Test() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
bit_depth_ = GET_PARAM(1);
}
virtual void TearDown() { libvpx_test::ClearSystemState(); }
protected:
vpx_bit_depth_t bit_depth_;
FwdTxfmFunc fwd_txfm_;
};
TEST_P(PartialTrans32x32Test, Extremes) {
#if CONFIG_VP9_HIGHBITDEPTH
const int16_t maxval =
static_cast<int16_t>(clip_pixel_highbd(1 << 30, bit_depth_));
#else
const int16_t maxval = 255;
#endif
const int minval = -maxval;
DECLARE_ALIGNED(16, int16_t, input[kNumCoeffs]);
DECLARE_ALIGNED(16, tran_low_t, output[kNumCoeffs]);
for (int i = 0; i < kNumCoeffs; ++i) input[i] = maxval;
output[0] = 0;
ASM_REGISTER_STATE_CHECK(fwd_txfm_(input, output, 32));
EXPECT_EQ((maxval * kNumCoeffs) >> 3, output[0]);
for (int i = 0; i < kNumCoeffs; ++i) input[i] = minval;
output[0] = 0;
ASM_REGISTER_STATE_CHECK(fwd_txfm_(input, output, 32));
EXPECT_EQ((minval * kNumCoeffs) >> 3, output[0]);
}
TEST_P(PartialTrans32x32Test, Random) {
#if CONFIG_VP9_HIGHBITDEPTH
const int16_t maxval =
static_cast<int16_t>(clip_pixel_highbd(1 << 30, bit_depth_));
#else
const int16_t maxval = 255;
#endif
DECLARE_ALIGNED(16, int16_t, input[kNumCoeffs]);
DECLARE_ALIGNED(16, tran_low_t, output[kNumCoeffs]);
ACMRandom rnd(ACMRandom::DeterministicSeed());
int sum = 0;
for (int i = 0; i < kNumCoeffs; ++i) {
const int val = (i & 1) ? -rnd(maxval + 1) : rnd(maxval + 1);
input[i] = val;
sum += val;
}
output[0] = 0;
ASM_REGISTER_STATE_CHECK(fwd_txfm_(input, output, 32));
EXPECT_EQ(sum >> 3, output[0]);
}
using std::tr1::make_tuple;
#if CONFIG_VP9_HIGHBITDEPTH
INSTANTIATE_TEST_CASE_P(
C, Trans32x32Test,
::testing::Values(
make_tuple(&vpx_highbd_fdct32x32_c,
&idct32x32_10, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct32x32_rd_c,
&idct32x32_10, 1, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct32x32_c,
&idct32x32_12, 0, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct32x32_rd_c,
&idct32x32_12, 1, VPX_BITS_12),
make_tuple(&vpx_fdct32x32_c,
&vpx_idct32x32_1024_add_c, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_c,
&vpx_idct32x32_1024_add_c, 1, VPX_BITS_8)));
make_tuple(&vpx_highbd_fdct32x32_c, &idct32x32_10, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct32x32_rd_c, &idct32x32_10, 1, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct32x32_c, &idct32x32_12, 0, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct32x32_rd_c, &idct32x32_12, 1, VPX_BITS_12),
make_tuple(&vpx_fdct32x32_c, &vpx_idct32x32_1024_add_c, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_c, &vpx_idct32x32_1024_add_c, 1,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
C, PartialTrans32x32Test,
::testing::Values(make_tuple(&vpx_highbd_fdct32x32_1_c, VPX_BITS_8),
make_tuple(&vpx_highbd_fdct32x32_1_c, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct32x32_1_c, VPX_BITS_12)));
#else
INSTANTIATE_TEST_CASE_P(
C, Trans32x32Test,
::testing::Values(
make_tuple(&vpx_fdct32x32_c,
&vpx_idct32x32_1024_add_c, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_c,
&vpx_idct32x32_1024_add_c, 1, VPX_BITS_8)));
::testing::Values(make_tuple(&vpx_fdct32x32_c, &vpx_idct32x32_1024_add_c, 0,
VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_c, &vpx_idct32x32_1024_add_c,
1, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(C, PartialTrans32x32Test,
::testing::Values(make_tuple(&vpx_fdct32x32_1_c,
VPX_BITS_8)));
#endif // CONFIG_VP9_HIGHBITDEPTH
#if HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
NEON, Trans32x32Test,
::testing::Values(
make_tuple(&vpx_fdct32x32_c,
&vpx_idct32x32_1024_add_neon, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_c,
&vpx_idct32x32_1024_add_neon, 1, VPX_BITS_8)));
#endif // HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
::testing::Values(make_tuple(&vpx_fdct32x32_c, &vpx_idct32x32_1024_add_neon,
0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_c,
&vpx_idct32x32_1024_add_neon, 1, VPX_BITS_8)));
#endif // HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, Trans32x32Test,
::testing::Values(
make_tuple(&vpx_fdct32x32_sse2,
&vpx_idct32x32_1024_add_sse2, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_sse2,
&vpx_idct32x32_1024_add_sse2, 1, VPX_BITS_8)));
::testing::Values(make_tuple(&vpx_fdct32x32_sse2,
&vpx_idct32x32_1024_add_sse2, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_sse2,
&vpx_idct32x32_1024_add_sse2, 1, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(SSE2, PartialTrans32x32Test,
::testing::Values(make_tuple(&vpx_fdct32x32_1_sse2,
VPX_BITS_8)));
#endif // HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
@ -367,25 +418,29 @@ INSTANTIATE_TEST_CASE_P(
VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_sse2, &vpx_idct32x32_1024_add_c, 1,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(SSE2, PartialTrans32x32Test,
::testing::Values(make_tuple(&vpx_fdct32x32_1_sse2,
VPX_BITS_8)));
#endif // HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_AVX2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
AVX2, Trans32x32Test,
::testing::Values(
make_tuple(&vpx_fdct32x32_avx2,
&vpx_idct32x32_1024_add_sse2, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_avx2,
&vpx_idct32x32_1024_add_sse2, 1, VPX_BITS_8)));
::testing::Values(make_tuple(&vpx_fdct32x32_avx2,
&vpx_idct32x32_1024_add_sse2, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_avx2,
&vpx_idct32x32_1024_add_sse2, 1, VPX_BITS_8)));
#endif // HAVE_AVX2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_MSA && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
MSA, Trans32x32Test,
::testing::Values(
make_tuple(&vpx_fdct32x32_msa,
&vpx_idct32x32_1024_add_msa, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_msa,
&vpx_idct32x32_1024_add_msa, 1, VPX_BITS_8)));
::testing::Values(make_tuple(&vpx_fdct32x32_msa,
&vpx_idct32x32_1024_add_msa, 0, VPX_BITS_8),
make_tuple(&vpx_fdct32x32_rd_msa,
&vpx_idct32x32_1024_add_msa, 1, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(MSA, PartialTrans32x32Test,
::testing::Values(make_tuple(&vpx_fdct32x32_1_msa,
VPX_BITS_8)));
#endif // HAVE_MSA && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
} // namespace

View File

@ -26,12 +26,9 @@ TEST(DecodeAPI, InvalidParams) {
#endif
#if CONFIG_VP9_DECODER
&vpx_codec_vp9_dx_algo,
#endif
#if CONFIG_VP10_DECODER
&vpx_codec_vp10_dx_algo,
#endif
};
uint8_t buf[1] = {0};
uint8_t buf[1] = { 0 };
vpx_codec_ctx_t dec;
EXPECT_EQ(VPX_CODEC_INVALID_PARAM, vpx_codec_dec_init(NULL, NULL, NULL, 0));
@ -54,8 +51,7 @@ TEST(DecodeAPI, InvalidParams) {
vpx_codec_decode(&dec, buf, NELEMENTS(buf), NULL, 0));
EXPECT_EQ(VPX_CODEC_INVALID_PARAM,
vpx_codec_decode(&dec, NULL, NELEMENTS(buf), NULL, 0));
EXPECT_EQ(VPX_CODEC_INVALID_PARAM,
vpx_codec_decode(&dec, buf, 0, NULL, 0));
EXPECT_EQ(VPX_CODEC_INVALID_PARAM, vpx_codec_decode(&dec, buf, 0, NULL, 0));
EXPECT_EQ(VPX_CODEC_OK, vpx_codec_destroy(&dec));
}
@ -80,12 +76,9 @@ TEST(DecodeAPI, OptionalParams) {
// Test VP9 codec controls after a decode error to ensure the code doesn't
// misbehave.
void TestVp9Controls(vpx_codec_ctx_t *dec) {
static const int kControls[] = {
VP8D_GET_LAST_REF_UPDATES,
VP8D_GET_FRAME_CORRUPTED,
VP9D_GET_DISPLAY_SIZE,
VP9D_GET_FRAME_SIZE
};
static const int kControls[] = { VP8D_GET_LAST_REF_UPDATES,
VP8D_GET_FRAME_CORRUPTED,
VP9D_GET_DISPLAY_SIZE, VP9D_GET_FRAME_SIZE };
int val[2];
for (int i = 0; i < NELEMENTS(kControls); ++i) {
@ -94,9 +87,7 @@ void TestVp9Controls(vpx_codec_ctx_t *dec) {
case VP8D_GET_FRAME_CORRUPTED:
EXPECT_EQ(VPX_CODEC_ERROR, res) << kControls[i];
break;
default:
EXPECT_EQ(VPX_CODEC_OK, res) << kControls[i];
break;
default: EXPECT_EQ(VPX_CODEC_OK, res) << kControls[i]; break;
}
EXPECT_EQ(VPX_CODEC_INVALID_PARAM,
vpx_codec_control_(dec, kControls[i], NULL));
@ -146,6 +137,39 @@ TEST(DecodeAPI, Vp9InvalidDecode) {
TestVp9Controls(&dec);
EXPECT_EQ(VPX_CODEC_OK, vpx_codec_destroy(&dec));
}
TEST(DecodeAPI, Vp9PeekSI) {
const vpx_codec_iface_t *const codec = &vpx_codec_vp9_dx_algo;
// The first 9 bytes are valid and the rest of the bytes are made up. For
// sizes below 10 this should return VPX_CODEC_UNSUP_BITSTREAM, and at 10
// bytes or more it should return VPX_CODEC_CORRUPT_FRAME.
const uint8_t data[32] = {
0x85, 0xa4, 0xc1, 0xa1, 0x38, 0x81, 0xa3, 0x49, 0x83, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
};
for (uint32_t data_sz = 1; data_sz <= 32; ++data_sz) {
// Verify behavior of vpx_codec_decode. vpx_codec_decode doesn't even get
// to decoder_peek_si_internal on frames of size < 8.
if (data_sz >= 8) {
vpx_codec_ctx_t dec;
EXPECT_EQ(VPX_CODEC_OK, vpx_codec_dec_init(&dec, codec, NULL, 0));
EXPECT_EQ(
(data_sz < 10) ? VPX_CODEC_UNSUP_BITSTREAM : VPX_CODEC_CORRUPT_FRAME,
vpx_codec_decode(&dec, data, data_sz, NULL, 0));
vpx_codec_iter_t iter = NULL;
EXPECT_EQ(NULL, vpx_codec_get_frame(&dec, &iter));
EXPECT_EQ(VPX_CODEC_OK, vpx_codec_destroy(&dec));
}
// Verify behavior of vpx_codec_peek_stream_info.
vpx_codec_stream_info_t si;
si.sz = sizeof(si);
EXPECT_EQ((data_sz < 10) ? VPX_CODEC_UNSUP_BITSTREAM : VPX_CODEC_OK,
vpx_codec_peek_stream_info(codec, data, data_sz, &si));
}
}
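A minimal usage sketch of the peek API the loop above exercises, with buf and buf_sz as placeholder names:

#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"

static void peek_example(const uint8_t *buf, unsigned int buf_sz) {
  vpx_codec_stream_info_t si;
  si.sz = sizeof(si);
  if (vpx_codec_peek_stream_info(&vpx_codec_vp9_dx_algo, buf, buf_sz, &si) ==
      VPX_CODEC_OK) {
    // On success, si.w and si.h hold the frame size parsed from the header.
  }
}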
#endif // CONFIG_VP9_DECODER
} // namespace

View File

@ -28,7 +28,6 @@ namespace {
#define VIDEO_NAME 0
#define THREADS 1
const int kMaxPsnr = 100;
const double kUsecsInSec = 1000000.0;
const char kNewEncodeOutputFile[] = "new_encode.ivf";
@ -70,8 +69,7 @@ const DecodePerfParam kVP9DecodePerfVectors[] = {
power/temp/min max frame decode times/etc
*/
class DecodePerfTest : public ::testing::TestWithParam<DecodePerfParam> {
};
class DecodePerfTest : public ::testing::TestWithParam<DecodePerfParam> {};
TEST_P(DecodePerfTest, PerfTest) {
const char *const video_name = GET_PARAM(VIDEO_NAME);
@ -92,8 +90,7 @@ TEST_P(DecodePerfTest, PerfTest) {
}
vpx_usec_timer_mark(&t);
const double elapsed_secs = double(vpx_usec_timer_elapsed(&t))
/ kUsecsInSec;
const double elapsed_secs = double(vpx_usec_timer_elapsed(&t)) / kUsecsInSec;
const unsigned frames = video.frame_number();
const double fps = double(frames) / elapsed_secs;
@ -111,17 +108,13 @@ TEST_P(DecodePerfTest, PerfTest) {
INSTANTIATE_TEST_CASE_P(VP9, DecodePerfTest,
::testing::ValuesIn(kVP9DecodePerfVectors));
class VP9NewEncodeDecodePerfTest :
public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
class VP9NewEncodeDecodePerfTest
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
protected:
VP9NewEncodeDecodePerfTest()
: EncoderTest(GET_PARAM(0)),
encoding_mode_(GET_PARAM(1)),
speed_(0),
outfile_(0),
out_frames_(0) {
}
: EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)), speed_(0),
outfile_(0), out_frames_(0) {}
virtual ~VP9NewEncodeDecodePerfTest() {}
@ -160,8 +153,9 @@ class VP9NewEncodeDecodePerfTest :
virtual void EndPassHook() {
if (outfile_ != NULL) {
if (!fseek(outfile_, 0, SEEK_SET))
if (!fseek(outfile_, 0, SEEK_SET)) {
ivf_write_file_header(outfile_, &cfg_, VP9_FOURCC, out_frames_);
}
fclose(outfile_);
outfile_ = NULL;
}
@ -171,8 +165,9 @@ class VP9NewEncodeDecodePerfTest :
++out_frames_;
// Write initial file header if first frame.
if (pkt->data.frame.pts == 0)
if (pkt->data.frame.pts == 0) {
ivf_write_file_header(outfile_, &cfg_, VP9_FOURCC, out_frames_);
}
// Write frame header and data.
ivf_write_frame_header(outfile_, out_frames_, pkt->data.frame.sz);
@ -180,11 +175,9 @@ class VP9NewEncodeDecodePerfTest :
pkt->data.frame.sz);
}
virtual bool DoDecode() { return false; }
virtual bool DoDecode() const { return false; }
void set_speed(unsigned int speed) {
speed_ = speed;
}
void set_speed(unsigned int speed) { speed_ = speed; }
private:
libvpx_test::TestMode encoding_mode_;
@ -196,10 +189,7 @@ class VP9NewEncodeDecodePerfTest :
struct EncodePerfTestVideo {
EncodePerfTestVideo(const char *name_, uint32_t width_, uint32_t height_,
uint32_t bitrate_, int frames_)
: name(name_),
width(width_),
height(height_),
bitrate(bitrate_),
: name(name_), width(width_), height(height_), bitrate(bitrate_),
frames(frames_) {}
const char *name;
uint32_t width;
@ -225,10 +215,8 @@ TEST_P(VP9NewEncodeDecodePerfTest, PerfTest) {
const char *video_name = kVP9EncodePerfTestVectors[i].name;
libvpx_test::I420VideoSource video(
video_name,
kVP9EncodePerfTestVectors[i].width,
kVP9EncodePerfTestVectors[i].height,
timebase.den, timebase.num, 0,
video_name, kVP9EncodePerfTestVectors[i].width,
kVP9EncodePerfTestVectors[i].height, timebase.den, timebase.num, 0,
kVP9EncodePerfTestVectors[i].frames);
set_speed(2);
@ -268,6 +256,6 @@ TEST_P(VP9NewEncodeDecodePerfTest, PerfTest) {
printf("}\n");
}
VP9_INSTANTIATE_TEST_CASE(
VP9NewEncodeDecodePerfTest, ::testing::Values(::libvpx_test::kTwoPassGood));
VP9_INSTANTIATE_TEST_CASE(VP9NewEncodeDecodePerfTest,
::testing::Values(::libvpx_test::kTwoPassGood));
} // namespace

View File

@ -21,9 +21,8 @@ const char kVP8Name[] = "WebM Project VP8";
vpx_codec_err_t Decoder::PeekStream(const uint8_t *cxdata, size_t size,
vpx_codec_stream_info_t *stream_info) {
return vpx_codec_peek_stream_info(CodecInterface(),
cxdata, static_cast<unsigned int>(size),
stream_info);
return vpx_codec_peek_stream_info(
CodecInterface(), cxdata, static_cast<unsigned int>(size), stream_info);
}
vpx_codec_err_t Decoder::DecodeFrame(const uint8_t *cxdata, size_t size) {
@ -35,9 +34,8 @@ vpx_codec_err_t Decoder::DecodeFrame(const uint8_t *cxdata, size_t size,
vpx_codec_err_t res_dec;
InitOnce();
API_REGISTER_STATE_CHECK(
res_dec = vpx_codec_decode(&decoder_,
cxdata, static_cast<unsigned int>(size),
user_priv, 0));
res_dec = vpx_codec_decode(
&decoder_, cxdata, static_cast<unsigned int>(size), user_priv, 0));
return res_dec;
}
@ -67,7 +65,7 @@ void DecoderTest::HandlePeekResult(Decoder *const decoder,
void DecoderTest::RunLoop(CompressedVideoSource *video,
const vpx_codec_dec_cfg_t &dec_cfg) {
Decoder* const decoder = codec_->CreateDecoder(dec_cfg, flags_, 0);
Decoder *const decoder = codec_->CreateDecoder(dec_cfg, flags_);
ASSERT_TRUE(decoder != NULL);
bool end_of_file = false;
@ -80,16 +78,14 @@ void DecoderTest::RunLoop(CompressedVideoSource *video,
stream_info.sz = sizeof(stream_info);
if (video->cxdata() != NULL) {
const vpx_codec_err_t res_peek = decoder->PeekStream(video->cxdata(),
video->frame_size(),
&stream_info);
const vpx_codec_err_t res_peek = decoder->PeekStream(
video->cxdata(), video->frame_size(), &stream_info);
HandlePeekResult(decoder, video, res_peek);
ASSERT_FALSE(::testing::Test::HasFailure());
vpx_codec_err_t res_dec = decoder->DecodeFrame(video->cxdata(),
video->frame_size());
if (!HandleDecodeResult(res_dec, *video, decoder))
break;
vpx_codec_err_t res_dec =
decoder->DecodeFrame(video->cxdata(), video->frame_size());
if (!HandleDecodeResult(res_dec, *video, decoder)) break;
} else {
// Signal end of the file to the decoder.
const vpx_codec_err_t res_dec = decoder->DecodeFrame(NULL, 0);
@ -101,8 +97,9 @@ void DecoderTest::RunLoop(CompressedVideoSource *video,
const vpx_image_t *img = NULL;
// Get decompressed data
while ((img = dec_iter.Next()))
while ((img = dec_iter.Next())) {
DecompressedFrameHook(*img, video->frame_number());
}
}
delete decoder;
}
@ -116,8 +113,6 @@ void DecoderTest::set_cfg(const vpx_codec_dec_cfg_t &dec_cfg) {
memcpy(&cfg_, &dec_cfg, sizeof(cfg_));
}
void DecoderTest::set_flags(const vpx_codec_flags_t flags) {
flags_ = flags;
}
void DecoderTest::set_flags(const vpx_codec_flags_t flags) { flags_ = flags; }
} // namespace libvpx_test

View File

@ -26,13 +26,11 @@ class DxDataIterator {
explicit DxDataIterator(vpx_codec_ctx_t *decoder)
: decoder_(decoder), iter_(NULL) {}
const vpx_image_t *Next() {
return vpx_codec_get_frame(decoder_, &iter_);
}
const vpx_image_t *Next() { return vpx_codec_get_frame(decoder_, &iter_); }
private:
vpx_codec_ctx_t *decoder_;
vpx_codec_iter_t iter_;
vpx_codec_ctx_t *decoder_;
vpx_codec_iter_t iter_;
};
// Provides a simplified interface to manage one video decoding.
@ -40,20 +38,17 @@ class DxDataIterator {
// as more tests are added.
class Decoder {
public:
Decoder(vpx_codec_dec_cfg_t cfg, unsigned long deadline)
: cfg_(cfg), flags_(0), deadline_(deadline), init_done_(false) {
explicit Decoder(vpx_codec_dec_cfg_t cfg)
: cfg_(cfg), flags_(0), init_done_(false) {
memset(&decoder_, 0, sizeof(decoder_));
}
Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag,
unsigned long deadline) // NOLINT
: cfg_(cfg), flags_(flag), deadline_(deadline), init_done_(false) {
Decoder(vpx_codec_dec_cfg_t cfg, const vpx_codec_flags_t flag)
: cfg_(cfg), flags_(flag), init_done_(false) {
memset(&decoder_, 0, sizeof(decoder_));
}
virtual ~Decoder() {
vpx_codec_destroy(&decoder_);
}
virtual ~Decoder() { vpx_codec_destroy(&decoder_); }
vpx_codec_err_t PeekStream(const uint8_t *cxdata, size_t size,
vpx_codec_stream_info_t *stream_info);
@ -63,17 +58,9 @@ class Decoder {
vpx_codec_err_t DecodeFrame(const uint8_t *cxdata, size_t size,
void *user_priv);
DxDataIterator GetDxData() {
return DxDataIterator(&decoder_);
}
DxDataIterator GetDxData() { return DxDataIterator(&decoder_); }
void set_deadline(unsigned long deadline) {
deadline_ = deadline;
}
void Control(int ctrl_id, int arg) {
Control(ctrl_id, arg, VPX_CODEC_OK);
}
void Control(int ctrl_id, int arg) { Control(ctrl_id, arg, VPX_CODEC_OK); }
void Control(int ctrl_id, const void *arg) {
InitOnce();
@ -87,7 +74,7 @@ class Decoder {
ASSERT_EQ(expected_value, res) << DecodeError();
}
const char* DecodeError() {
const char *DecodeError() {
const char *detail = vpx_codec_error_detail(&decoder_);
return detail ? detail : vpx_codec_error(&decoder_);
}
@ -97,38 +84,34 @@ class Decoder {
vpx_get_frame_buffer_cb_fn_t cb_get,
vpx_release_frame_buffer_cb_fn_t cb_release, void *user_priv) {
InitOnce();
return vpx_codec_set_frame_buffer_functions(
&decoder_, cb_get, cb_release, user_priv);
return vpx_codec_set_frame_buffer_functions(&decoder_, cb_get, cb_release,
user_priv);
}
const char* GetDecoderName() const {
const char *GetDecoderName() const {
return vpx_codec_iface_name(CodecInterface());
}
bool IsVP8() const;
vpx_codec_ctx_t * GetDecoder() {
return &decoder_;
}
vpx_codec_ctx_t *GetDecoder() { return &decoder_; }
protected:
virtual vpx_codec_iface_t* CodecInterface() const = 0;
virtual vpx_codec_iface_t *CodecInterface() const = 0;
void InitOnce() {
if (!init_done_) {
const vpx_codec_err_t res = vpx_codec_dec_init(&decoder_,
CodecInterface(),
&cfg_, flags_);
const vpx_codec_err_t res =
vpx_codec_dec_init(&decoder_, CodecInterface(), &cfg_, flags_);
ASSERT_EQ(VPX_CODEC_OK, res) << DecodeError();
init_done_ = true;
}
}
vpx_codec_ctx_t decoder_;
vpx_codec_ctx_t decoder_;
vpx_codec_dec_cfg_t cfg_;
vpx_codec_flags_t flags_;
unsigned int deadline_;
bool init_done_;
vpx_codec_flags_t flags_;
bool init_done_;
};
// Common test functionality for all Decoder tests.
@ -143,37 +126,35 @@ class DecoderTest {
virtual void set_flags(const vpx_codec_flags_t flags);
// Hook to be called before decompressing every frame.
virtual void PreDecodeFrameHook(const CompressedVideoSource& /*video*/,
Decoder* /*decoder*/) {}
virtual void PreDecodeFrameHook(const CompressedVideoSource & /*video*/,
Decoder * /*decoder*/) {}
// Hook to be called to handle decode result. Return true to continue.
virtual bool HandleDecodeResult(const vpx_codec_err_t res_dec,
const CompressedVideoSource& /*video*/,
const CompressedVideoSource & /*video*/,
Decoder *decoder) {
EXPECT_EQ(VPX_CODEC_OK, res_dec) << decoder->DecodeError();
return VPX_CODEC_OK == res_dec;
}
// Hook to be called on every decompressed frame.
virtual void DecompressedFrameHook(const vpx_image_t& /*img*/,
virtual void DecompressedFrameHook(const vpx_image_t & /*img*/,
const unsigned int /*frame_number*/) {}
// Hook to be called on peek result
virtual void HandlePeekResult(Decoder* const decoder,
virtual void HandlePeekResult(Decoder *const decoder,
CompressedVideoSource *video,
const vpx_codec_err_t res_peek);
protected:
explicit DecoderTest(const CodecFactory *codec)
: codec_(codec),
cfg_(),
flags_(0) {}
: codec_(codec), cfg_(), flags_(0) {}
virtual ~DecoderTest() {}
const CodecFactory *codec_;
vpx_codec_dec_cfg_t cfg_;
vpx_codec_flags_t flags_;
vpx_codec_flags_t flags_;
};
} // namespace libvpx_test
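The Decoder wrapper above only initializes the underlying codec context the first time a Control or decode call needs it. Below is a minimal standalone sketch of that lazy-init pattern; FakeCtx, fake_init and fake_control are hypothetical stand-ins rather than libvpx API, so this illustrates the structure, not the real decoder.

#include <cassert>
#include <cstdio>

// Hypothetical C-style codec context and entry points (stand-ins, not libvpx).
struct FakeCtx { bool ready; };
static int fake_init(FakeCtx *ctx) { ctx->ready = true; return 0; }
static int fake_control(FakeCtx *ctx, int id, int arg) {
  (void)id; (void)arg;
  return ctx->ready ? 0 : -1;  // would fail if used before initialization
}

// Wrapper that defers codec initialization until the first call that needs it,
// mirroring the InitOnce()/Control() structure of the Decoder wrapper above.
class LazyDecoder {
 public:
  LazyDecoder() : ctx_(), init_done_(false) {}
  void Control(int id, int arg) {
    InitOnce();
    const int res = fake_control(&ctx_, id, arg);
    assert(res == 0);
    (void)res;
  }
 private:
  void InitOnce() {
    if (!init_done_) {
      const int res = fake_init(&ctx_);
      assert(res == 0);
      (void)res;
      init_done_ = true;
    }
  }
  FakeCtx ctx_;
  bool init_done_;
};

int main() {
  LazyDecoder dec;
  dec.Control(1, 0);  // first call performs the one-time init
  dec.Control(2, 1);  // later calls reuse the already-initialized context
  std::puts("ok");
  return 0;
}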
View File
@ -25,12 +25,9 @@ TEST(EncodeAPI, InvalidParams) {
#endif
#if CONFIG_VP9_ENCODER
&vpx_codec_vp9_cx_algo,
#endif
#if CONFIG_VP10_ENCODER
&vpx_codec_vp10_cx_algo,
#endif
};
uint8_t buf[1] = {0};
uint8_t buf[1] = { 0 };
vpx_image_t img;
vpx_codec_ctx_t enc;
vpx_codec_enc_cfg_t cfg;
View File
@ -26,10 +26,7 @@ const double kUsecsInSec = 1000000.0;
struct EncodePerfTestVideo {
EncodePerfTestVideo(const char *name_, uint32_t width_, uint32_t height_,
uint32_t bitrate_, int frames_)
: name(name_),
width(width_),
height(height_),
bitrate(bitrate_),
: name(name_), width(width_), height(height_), bitrate(bitrate_),
frames(frames_) {}
const char *name;
uint32_t width;
@ -45,8 +42,8 @@ const EncodePerfTestVideo kVP9EncodePerfTestVectors[] = {
EncodePerfTestVideo("macmarcostationary_640_480_30.yuv", 640, 480, 200, 718),
EncodePerfTestVideo("niklas_640_480_30.yuv", 640, 480, 200, 471),
EncodePerfTestVideo("tacomanarrows_640_480_30.yuv", 640, 480, 200, 300),
EncodePerfTestVideo("tacomasmallcameramovement_640_480_30.yuv",
640, 480, 200, 300),
EncodePerfTestVideo("tacomasmallcameramovement_640_480_30.yuv", 640, 480, 200,
300),
EncodePerfTestVideo("thaloundeskmtg_640_480_30.yuv", 640, 480, 200, 300),
EncodePerfTestVideo("niklas_1280_720_30.yuv", 1280, 720, 600, 470),
};
@ -61,12 +58,8 @@ class VP9EncodePerfTest
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
protected:
VP9EncodePerfTest()
: EncoderTest(GET_PARAM(0)),
min_psnr_(kMaxPsnr),
nframes_(0),
encoding_mode_(GET_PARAM(1)),
speed_(0),
threads_(1) {}
: EncoderTest(GET_PARAM(0)), min_psnr_(kMaxPsnr), nframes_(0),
encoding_mode_(GET_PARAM(1)), speed_(0), threads_(1) {}
virtual ~VP9EncodePerfTest() {}
@ -107,24 +100,18 @@ class VP9EncodePerfTest
virtual void PSNRPktHook(const vpx_codec_cx_pkt_t *pkt) {
if (pkt->data.psnr.psnr[0] < min_psnr_) {
min_psnr_= pkt->data.psnr.psnr[0];
min_psnr_ = pkt->data.psnr.psnr[0];
}
}
// for performance reasons don't decode
virtual bool DoDecode() { return 0; }
virtual bool DoDecode() const { return false; }
double min_psnr() const {
return min_psnr_;
}
double min_psnr() const { return min_psnr_; }
void set_speed(unsigned int speed) {
speed_ = speed;
}
void set_speed(unsigned int speed) { speed_ = speed; }
void set_threads(unsigned int threads) {
threads_ = threads;
}
void set_threads(unsigned int threads) { threads_ = threads; }
private:
double min_psnr_;
@ -139,11 +126,12 @@ TEST_P(VP9EncodePerfTest, PerfTest) {
for (size_t j = 0; j < NELEMENTS(kEncodePerfTestSpeeds); ++j) {
for (size_t k = 0; k < NELEMENTS(kEncodePerfTestThreads); ++k) {
if (kVP9EncodePerfTestVectors[i].width < 512 &&
kEncodePerfTestThreads[k] > 1)
kEncodePerfTestThreads[k] > 1) {
continue;
else if (kVP9EncodePerfTestVectors[i].width < 1024 &&
kEncodePerfTestThreads[k] > 2)
} else if (kVP9EncodePerfTestVectors[i].width < 1024 &&
kEncodePerfTestThreads[k] > 2) {
continue;
}
set_threads(kEncodePerfTestThreads[k]);
SetUp();
@ -157,10 +145,8 @@ TEST_P(VP9EncodePerfTest, PerfTest) {
const unsigned frames = kVP9EncodePerfTestVectors[i].frames;
const char *video_name = kVP9EncodePerfTestVectors[i].name;
libvpx_test::I420VideoSource video(
video_name,
kVP9EncodePerfTestVectors[i].width,
kVP9EncodePerfTestVectors[i].height,
timebase.den, timebase.num, 0,
video_name, kVP9EncodePerfTestVectors[i].width,
kVP9EncodePerfTestVectors[i].height, timebase.den, timebase.num, 0,
kVP9EncodePerfTestVectors[i].frames);
set_speed(kEncodePerfTestSpeeds[j]);
@ -197,6 +183,6 @@ TEST_P(VP9EncodePerfTest, PerfTest) {
}
}
VP9_INSTANTIATE_TEST_CASE(
VP9EncodePerfTest, ::testing::Values(::libvpx_test::kRealTime));
VP9_INSTANTIATE_TEST_CASE(VP9EncodePerfTest,
::testing::Values(::libvpx_test::kRealTime));
} // namespace
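The PSNRPktHook above keeps only the worst per-frame PSNR, starting from the kMaxPsnr sentinel. A tiny sketch of that bookkeeping with invented PSNR values (the numbers are not from the test):

#include <algorithm>
#include <cstdio>

int main() {
  const double kMaxPsnr = 100.0;                           // same sentinel the test starts from
  const double frame_psnr[] = { 41.2, 38.7, 39.9, 37.5 };  // invented per-frame PSNR values
  double min_psnr = kMaxPsnr;
  for (unsigned i = 0; i < sizeof(frame_psnr) / sizeof(frame_psnr[0]); ++i) {
    min_psnr = std::min(min_psnr, frame_psnr[i]);          // keep the worst frame seen so far
  }
  std::printf("min psnr: %.1f dB\n", min_psnr);
  return 0;
}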
View File
@ -30,8 +30,7 @@ void Encoder::InitEncoder(VideoSource *video) {
cfg_.g_timebase = video->timebase();
cfg_.rc_twopass_stats_in = stats_->buf();
res = vpx_codec_enc_init(&encoder_, CodecInterface(), &cfg_,
init_flags_);
res = vpx_codec_enc_init(&encoder_, CodecInterface(), &cfg_, init_flags_);
ASSERT_EQ(VPX_CODEC_OK, res) << EncoderError();
#if CONFIG_VP9_ENCODER
@ -42,15 +41,6 @@ void Encoder::InitEncoder(VideoSource *video) {
log2_tile_columns);
ASSERT_EQ(VPX_CODEC_OK, res) << EncoderError();
} else
#endif
#if CONFIG_VP10_ENCODER
if (CodecInterface() == &vpx_codec_vp10_cx_algo) {
// Default to 1 tile column for VP10.
const int log2_tile_columns = 0;
res = vpx_codec_control_(&encoder_, VP9E_SET_TILE_COLUMNS,
log2_tile_columns);
ASSERT_EQ(VPX_CODEC_OK, res) << EncoderError();
} else
#endif
{
#if CONFIG_VP8_ENCODER
@ -62,17 +52,17 @@ void Encoder::InitEncoder(VideoSource *video) {
}
void Encoder::EncodeFrame(VideoSource *video, const unsigned long frame_flags) {
if (video->img())
if (video->img()) {
EncodeFrameInternal(*video, frame_flags);
else
} else {
Flush();
}
// Handle twopass stats
CxDataIterator iter = GetCxData();
while (const vpx_codec_cx_pkt_t *pkt = iter.Next()) {
if (pkt->kind != VPX_CODEC_STATS_PKT)
continue;
if (pkt->kind != VPX_CODEC_STATS_PKT) continue;
stats_->Append(*pkt);
}
@ -92,15 +82,15 @@ void Encoder::EncodeFrameInternal(const VideoSource &video,
}
// Encode the frame
API_REGISTER_STATE_CHECK(
res = vpx_codec_encode(&encoder_, img, video.pts(), video.duration(),
frame_flags, deadline_));
API_REGISTER_STATE_CHECK(res = vpx_codec_encode(&encoder_, img, video.pts(),
video.duration(), frame_flags,
deadline_));
ASSERT_EQ(VPX_CODEC_OK, res) << EncoderError();
}
void Encoder::Flush() {
const vpx_codec_err_t res = vpx_codec_encode(&encoder_, NULL, 0, 0, 0,
deadline_);
const vpx_codec_err_t res =
vpx_codec_encode(&encoder_, NULL, 0, 0, 0, deadline_);
if (!encoder_.priv)
ASSERT_EQ(VPX_CODEC_ERROR, res) << EncoderError();
else
@ -115,60 +105,57 @@ void EncoderTest::InitializeConfig() {
void EncoderTest::SetMode(TestMode mode) {
switch (mode) {
case kRealTime:
deadline_ = VPX_DL_REALTIME;
break;
case kRealTime: deadline_ = VPX_DL_REALTIME; break;
case kOnePassGood:
case kTwoPassGood:
deadline_ = VPX_DL_GOOD_QUALITY;
break;
case kTwoPassGood: deadline_ = VPX_DL_GOOD_QUALITY; break;
case kOnePassBest:
case kTwoPassBest:
deadline_ = VPX_DL_BEST_QUALITY;
break;
case kTwoPassBest: deadline_ = VPX_DL_BEST_QUALITY; break;
default:
ASSERT_TRUE(false) << "Unexpected mode " << mode;
default: ASSERT_TRUE(false) << "Unexpected mode " << mode;
}
if (mode == kTwoPassGood || mode == kTwoPassBest)
if (mode == kTwoPassGood || mode == kTwoPassBest) {
passes_ = 2;
else
} else {
passes_ = 1;
}
}
// The function should return "true" most of the time, therefore no early
// break-out is implemented within the match checking process.
static bool compare_img(const vpx_image_t *img1,
const vpx_image_t *img2) {
bool match = (img1->fmt == img2->fmt) &&
(img1->cs == img2->cs) &&
(img1->d_w == img2->d_w) &&
(img1->d_h == img2->d_h);
static bool compare_img(const vpx_image_t *img1, const vpx_image_t *img2) {
bool match = (img1->fmt == img2->fmt) && (img1->cs == img2->cs) &&
(img1->d_w == img2->d_w) && (img1->d_h == img2->d_h);
const unsigned int width_y = img1->d_w;
const unsigned int width_y = img1->d_w;
const unsigned int height_y = img1->d_h;
unsigned int i;
for (i = 0; i < height_y; ++i)
for (i = 0; i < height_y; ++i) {
match = (memcmp(img1->planes[VPX_PLANE_Y] + i * img1->stride[VPX_PLANE_Y],
img2->planes[VPX_PLANE_Y] + i * img2->stride[VPX_PLANE_Y],
width_y) == 0) && match;
const unsigned int width_uv = (img1->d_w + 1) >> 1;
width_y) == 0) &&
match;
}
const unsigned int width_uv = (img1->d_w + 1) >> 1;
const unsigned int height_uv = (img1->d_h + 1) >> 1;
for (i = 0; i < height_uv; ++i)
for (i = 0; i < height_uv; ++i) {
match = (memcmp(img1->planes[VPX_PLANE_U] + i * img1->stride[VPX_PLANE_U],
img2->planes[VPX_PLANE_U] + i * img2->stride[VPX_PLANE_U],
width_uv) == 0) && match;
for (i = 0; i < height_uv; ++i)
width_uv) == 0) &&
match;
}
for (i = 0; i < height_uv; ++i) {
match = (memcmp(img1->planes[VPX_PLANE_V] + i * img1->stride[VPX_PLANE_V],
img2->planes[VPX_PLANE_V] + i * img2->stride[VPX_PLANE_V],
width_uv) == 0) && match;
width_uv) == 0) &&
match;
}
return match;
}
void EncoderTest::MismatchHook(const vpx_image_t* /*img1*/,
const vpx_image_t* /*img2*/) {
void EncoderTest::MismatchHook(const vpx_image_t * /*img1*/,
const vpx_image_t * /*img2*/) {
ASSERT_TRUE(0) << "Encode/Decode mismatch found";
}
@ -181,34 +168,37 @@ void EncoderTest::RunLoop(VideoSource *video) {
for (unsigned int pass = 0; pass < passes_; pass++) {
last_pts_ = 0;
if (passes_ == 1)
if (passes_ == 1) {
cfg_.g_pass = VPX_RC_ONE_PASS;
else if (pass == 0)
} else if (pass == 0) {
cfg_.g_pass = VPX_RC_FIRST_PASS;
else
} else {
cfg_.g_pass = VPX_RC_LAST_PASS;
}
BeginPassHook(pass);
Encoder* const encoder = codec_->CreateEncoder(cfg_, deadline_, init_flags_,
&stats_);
ASSERT_TRUE(encoder != NULL);
testing::internal::scoped_ptr<Encoder> encoder(
codec_->CreateEncoder(cfg_, deadline_, init_flags_, &stats_));
ASSERT_TRUE(encoder.get() != NULL);
video->Begin();
ASSERT_NO_FATAL_FAILURE(video->Begin());
encoder->InitEncoder(video);
ASSERT_FALSE(::testing::Test::HasFatalFailure());
unsigned long dec_init_flags = 0; // NOLINT
// Use fragment decoder if encoder outputs partitions.
// NOTE: fragment decoder and partition encoder are only supported by VP8.
if (init_flags_ & VPX_CODEC_USE_OUTPUT_PARTITION)
if (init_flags_ & VPX_CODEC_USE_OUTPUT_PARTITION) {
dec_init_flags |= VPX_CODEC_USE_INPUT_FRAGMENTS;
Decoder* const decoder = codec_->CreateDecoder(dec_cfg, dec_init_flags, 0);
}
testing::internal::scoped_ptr<Decoder> decoder(
codec_->CreateDecoder(dec_cfg, dec_init_flags));
bool again;
for (again = true; again; video->Next()) {
again = (video->img() != NULL);
PreEncodeFrameHook(video);
PreEncodeFrameHook(video, encoder);
PreEncodeFrameHook(video, encoder.get());
encoder->EncodeFrame(video, frame_flags_);
CxDataIterator iter = encoder->GetCxData();
@ -221,12 +211,11 @@ void EncoderTest::RunLoop(VideoSource *video) {
switch (pkt->kind) {
case VPX_CODEC_CX_FRAME_PKT:
has_cxdata = true;
if (decoder && DoDecode()) {
if (decoder.get() != NULL && DoDecode()) {
vpx_codec_err_t res_dec = decoder->DecodeFrame(
(const uint8_t*)pkt->data.frame.buf, pkt->data.frame.sz);
(const uint8_t *)pkt->data.frame.buf, pkt->data.frame.sz);
if (!HandleDecodeResult(res_dec, *video, decoder))
break;
if (!HandleDecodeResult(res_dec, *video, decoder.get())) break;
has_dxdata = true;
}
@ -235,20 +224,16 @@ void EncoderTest::RunLoop(VideoSource *video) {
FramePktHook(pkt);
break;
case VPX_CODEC_PSNR_PKT:
PSNRPktHook(pkt);
break;
case VPX_CODEC_PSNR_PKT: PSNRPktHook(pkt); break;
default:
break;
default: break;
}
}
// Flush the decoder when there are no more fragments.
if ((init_flags_ & VPX_CODEC_USE_OUTPUT_PARTITION) && has_dxdata) {
const vpx_codec_err_t res_dec = decoder->DecodeFrame(NULL, 0);
if (!HandleDecodeResult(res_dec, *video, decoder))
break;
if (!HandleDecodeResult(res_dec, *video, decoder.get())) break;
}
if (has_dxdata && has_cxdata) {
@ -261,21 +246,14 @@ void EncoderTest::RunLoop(VideoSource *video) {
MismatchHook(img_enc, img_dec);
}
}
if (img_dec)
DecompressedFrameHook(*img_dec, video->pts());
if (img_dec) DecompressedFrameHook(*img_dec, video->pts());
}
if (!Continue())
break;
if (!Continue()) break;
}
EndPassHook();
if (decoder)
delete decoder;
delete encoder;
if (!Continue())
break;
if (!Continue()) break;
}
}
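One change worth noting above is that RunLoop now holds the per-pass encoder and decoder in scoped pointers instead of calling delete by hand, so early break paths cannot leak them. A minimal sketch of that RAII idea, using std::unique_ptr as a stand-in for testing::internal::scoped_ptr:

#include <cstdio>
#include <memory>

// Stand-in for the per-pass encoder/decoder objects created in RunLoop().
struct Resource {
  Resource() { std::puts("acquire"); }
  ~Resource() { std::puts("release"); }
  bool Step(int i) { return i < 2; }  // pretend work; ask to stop after a few steps
};

int main() {
  for (int pass = 0; pass < 2; ++pass) {
    // Owned for exactly one pass; destroyed on every exit path, including break.
    std::unique_ptr<Resource> res(new Resource());
    for (int i = 0;; ++i) {
      if (!res->Step(i)) break;  // early exit no longer needs a manual delete
    }
  }
  return 0;
}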
View File
@ -16,7 +16,7 @@
#include "third_party/googletest/src/include/gtest/gtest.h"
#include "./vpx_config.h"
#if CONFIG_VP8_ENCODER || CONFIG_VP9_ENCODER || CONFIG_VP10_ENCODER
#if CONFIG_VP8_ENCODER || CONFIG_VP9_ENCODER
#include "vpx/vp8cx.h"
#endif
#include "vpx/vpx_encoder.h"
@ -33,19 +33,17 @@ enum TestMode {
kTwoPassGood,
kTwoPassBest
};
#define ALL_TEST_MODES ::testing::Values(::libvpx_test::kRealTime, \
::libvpx_test::kOnePassGood, \
::libvpx_test::kOnePassBest, \
::libvpx_test::kTwoPassGood, \
::libvpx_test::kTwoPassBest)
#define ALL_TEST_MODES \
::testing::Values(::libvpx_test::kRealTime, ::libvpx_test::kOnePassGood, \
::libvpx_test::kOnePassBest, ::libvpx_test::kTwoPassGood, \
::libvpx_test::kTwoPassBest)
#define ONE_PASS_TEST_MODES ::testing::Values(::libvpx_test::kRealTime, \
::libvpx_test::kOnePassGood, \
::libvpx_test::kOnePassBest)
#define TWO_PASS_TEST_MODES ::testing::Values(::libvpx_test::kTwoPassGood, \
::libvpx_test::kTwoPassBest)
#define ONE_PASS_TEST_MODES \
::testing::Values(::libvpx_test::kRealTime, ::libvpx_test::kOnePassGood, \
::libvpx_test::kOnePassBest)
#define TWO_PASS_TEST_MODES \
::testing::Values(::libvpx_test::kTwoPassGood, ::libvpx_test::kTwoPassBest)
// Provides an object to handle the libvpx get_cx_data() iteration pattern
class CxDataIterator {
@ -58,8 +56,8 @@ class CxDataIterator {
}
private:
vpx_codec_ctx_t *encoder_;
vpx_codec_iter_t iter_;
vpx_codec_ctx_t *encoder_;
vpx_codec_iter_t iter_;
};
// Implements an in-memory store for libvpx twopass statistics
@ -75,15 +73,12 @@ class TwopassStatsStore {
return buf;
}
void Reset() {
buffer_.clear();
}
void Reset() { buffer_.clear(); }
protected:
std::string buffer_;
std::string buffer_;
};
// Provides a simplified interface to manage one video encoding pass, given
// a configuration and video source.
//
@ -97,13 +92,9 @@ class Encoder {
memset(&encoder_, 0, sizeof(encoder_));
}
virtual ~Encoder() {
vpx_codec_destroy(&encoder_);
}
virtual ~Encoder() { vpx_codec_destroy(&encoder_); }
CxDataIterator GetCxData() {
return CxDataIterator(&encoder_);
}
CxDataIterator GetCxData() { return CxDataIterator(&encoder_); }
void InitEncoder(VideoSource *video);
@ -115,9 +106,7 @@ class Encoder {
void EncodeFrame(VideoSource *video, const unsigned long frame_flags);
// Convenience wrapper for EncodeFrame()
void EncodeFrame(VideoSource *video) {
EncodeFrame(video, 0);
}
void EncodeFrame(VideoSource *video) { EncodeFrame(video, 0); }
void Control(int ctrl_id, int arg) {
const vpx_codec_err_t res = vpx_codec_control_(&encoder_, ctrl_id, arg);
@ -143,7 +132,7 @@ class Encoder {
const vpx_codec_err_t res = vpx_codec_control_(&encoder_, ctrl_id, arg);
ASSERT_EQ(VPX_CODEC_OK, res) << EncoderError();
}
#if CONFIG_VP8_ENCODER || CONFIG_VP9_ENCODER || CONFIG_VP10_ENCODER
#if CONFIG_VP8_ENCODER || CONFIG_VP9_ENCODER
void Control(int ctrl_id, vpx_active_map_t *arg) {
const vpx_codec_err_t res = vpx_codec_control_(&encoder_, ctrl_id, arg);
ASSERT_EQ(VPX_CODEC_OK, res) << EncoderError();
@ -156,12 +145,10 @@ class Encoder {
cfg_ = *cfg;
}
void set_deadline(unsigned long deadline) {
deadline_ = deadline;
}
void set_deadline(unsigned long deadline) { deadline_ = deadline; }
protected:
virtual vpx_codec_iface_t* CodecInterface() const = 0;
virtual vpx_codec_iface_t *CodecInterface() const = 0;
const char *EncoderError() {
const char *detail = vpx_codec_error_detail(&encoder_);
@ -175,11 +162,11 @@ class Encoder {
// Flush the encoder on EOS
void Flush();
vpx_codec_ctx_t encoder_;
vpx_codec_enc_cfg_t cfg_;
unsigned long deadline_;
unsigned long init_flags_;
TwopassStatsStore *stats_;
vpx_codec_ctx_t encoder_;
vpx_codec_enc_cfg_t cfg_;
unsigned long deadline_;
unsigned long init_flags_;
TwopassStatsStore *stats_;
};
// Common test functionality for all Encoder tests.
@ -221,36 +208,35 @@ class EncoderTest {
virtual void EndPassHook() {}
// Hook to be called before encoding a frame.
virtual void PreEncodeFrameHook(VideoSource* /*video*/) {}
virtual void PreEncodeFrameHook(VideoSource* /*video*/,
Encoder* /*encoder*/) {}
virtual void PreEncodeFrameHook(VideoSource * /*video*/) {}
virtual void PreEncodeFrameHook(VideoSource * /*video*/,
Encoder * /*encoder*/) {}
// Hook to be called on every compressed data packet.
virtual void FramePktHook(const vpx_codec_cx_pkt_t* /*pkt*/) {}
virtual void FramePktHook(const vpx_codec_cx_pkt_t * /*pkt*/) {}
// Hook to be called on every PSNR packet.
virtual void PSNRPktHook(const vpx_codec_cx_pkt_t* /*pkt*/) {}
virtual void PSNRPktHook(const vpx_codec_cx_pkt_t * /*pkt*/) {}
// Hook to determine whether the encode loop should continue.
virtual bool Continue() const {
return !(::testing::Test::HasFatalFailure() || abort_);
}
const CodecFactory *codec_;
const CodecFactory *codec_;
// Hook to determine whether to decode frame after encoding
virtual bool DoDecode() const { return 1; }
// Hook to handle encode/decode mismatch
virtual void MismatchHook(const vpx_image_t *img1,
const vpx_image_t *img2);
virtual void MismatchHook(const vpx_image_t *img1, const vpx_image_t *img2);
// Hook to be called on every decompressed frame.
virtual void DecompressedFrameHook(const vpx_image_t& /*img*/,
virtual void DecompressedFrameHook(const vpx_image_t & /*img*/,
vpx_codec_pts_t /*pts*/) {}
// Hook to be called to handle decode result. Return true to continue.
virtual bool HandleDecodeResult(const vpx_codec_err_t res_dec,
const VideoSource& /*video*/,
const VideoSource & /*video*/,
Decoder *decoder) {
EXPECT_EQ(VPX_CODEC_OK, res_dec) << decoder->DecodeError();
return VPX_CODEC_OK == res_dec;
@ -262,15 +248,15 @@ class EncoderTest {
return pkt;
}
bool abort_;
vpx_codec_enc_cfg_t cfg_;
vpx_codec_dec_cfg_t dec_cfg_;
unsigned int passes_;
unsigned long deadline_;
TwopassStatsStore stats_;
unsigned long init_flags_;
unsigned long frame_flags_;
vpx_codec_pts_t last_pts_;
bool abort_;
vpx_codec_enc_cfg_t cfg_;
vpx_codec_dec_cfg_t dec_cfg_;
unsigned int passes_;
unsigned long deadline_;
TwopassStatsStore stats_;
unsigned long init_flags_;
unsigned long frame_flags_;
vpx_codec_pts_t last_pts_;
};
} // namespace libvpx_test
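EncoderTest above follows a hook-driven design: the base class owns the encode loop and calls virtual hooks, and each concrete test overrides only the hooks it needs. A small self-contained sketch of that shape, with illustrative names rather than the libvpx ones:

#include <cstdio>

// Base class owns the frame loop and exposes overridable hooks, in the spirit
// of EncoderTest::RunLoop() and its *Hook() methods above.
class LoopBase {
 public:
  virtual ~LoopBase() {}
  void Run(int frames) {
    for (int f = 0; f < frames && Continue(); ++f) {
      PreFrameHook(f);  // derived tests may adjust per-frame state here
      PacketHook(f);    // ...and inspect per-frame output here
    }
  }
 protected:
  virtual void PreFrameHook(int /*frame*/) {}  // default hooks do nothing
  virtual void PacketHook(int /*frame*/) {}
  virtual bool Continue() const { return true; }
};

// A concrete "test" only overrides the hooks it cares about.
class CountingTest : public LoopBase {
 public:
  CountingTest() : packets_(0) {}
  int packets() const { return packets_; }
 protected:
  virtual void PacketHook(int frame) {
    std::printf("frame %d\n", frame);
    ++packets_;
  }
 private:
  int packets_;
};

int main() {
  CountingTest test;
  test.Run(3);
  std::printf("packets seen: %d\n", test.packets());
  return 0;
}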
View File
@ -19,16 +19,13 @@ namespace {
const int kMaxErrorFrames = 12;
const int kMaxDroppableFrames = 12;
class ErrorResilienceTestLarge : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, bool> {
class ErrorResilienceTestLarge
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWith2Params<libvpx_test::TestMode, bool> {
protected:
ErrorResilienceTestLarge()
: EncoderTest(GET_PARAM(0)),
svc_support_(GET_PARAM(2)),
psnr_(0.0),
nframes_(0),
mismatch_psnr_(0.0),
mismatch_nframes_(0),
: EncoderTest(GET_PARAM(0)), svc_support_(GET_PARAM(2)), psnr_(0.0),
nframes_(0), mismatch_psnr_(0.0), mismatch_nframes_(0),
encoding_mode_(GET_PARAM(1)) {
Reset();
}
@ -66,81 +63,70 @@ class ErrorResilienceTestLarge : public ::libvpx_test::EncoderTest,
// LAST is updated on base/layer 0, GOLDEN updated on layer 1.
// Non-zero pattern_switch parameter means pattern will switch to
// not using LAST for frame_num >= pattern_switch.
int SetFrameFlags(int frame_num,
int num_temp_layers,
int pattern_switch) {
int SetFrameFlags(int frame_num, int num_temp_layers, int pattern_switch) {
int frame_flags = 0;
if (num_temp_layers == 2) {
if (frame_num % 2 == 0) {
if (frame_num < pattern_switch || pattern_switch == 0) {
// Layer 0: predict from LAST and ARF, update LAST.
frame_flags = VP8_EFLAG_NO_REF_GF |
VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
} else {
// Layer 0: predict from GF and ARF, update GF.
frame_flags = VP8_EFLAG_NO_REF_LAST |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
}
if (frame_num % 2 == 0) {
if (frame_num < pattern_switch || pattern_switch == 0) {
// Layer 0: predict from LAST and ARF, update LAST.
frame_flags =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
} else {
if (frame_num < pattern_switch || pattern_switch == 0) {
// Layer 1: predict from L, GF, and ARF, update GF.
frame_flags = VP8_EFLAG_NO_UPD_ARF |
VP8_EFLAG_NO_UPD_LAST;
} else {
// Layer 1: predict from GF and ARF, update GF.
frame_flags = VP8_EFLAG_NO_REF_LAST |
VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
}
// Layer 0: predict from GF and ARF, update GF.
frame_flags = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
}
} else {
if (frame_num < pattern_switch || pattern_switch == 0) {
// Layer 1: predict from L, GF, and ARF, update GF.
frame_flags = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST;
} else {
// Layer 1: predict from GF and ARF, update GF.
frame_flags = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_ARF;
}
}
}
return frame_flags;
}
virtual void PreEncodeFrameHook(libvpx_test::VideoSource *video,
::libvpx_test::Encoder * /*encoder*/) {
frame_flags_ &= ~(VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF);
frame_flags_ &=
~(VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF);
// For temporal layer case.
if (cfg_.ts_number_layers > 1) {
frame_flags_ = SetFrameFlags(video->frame(),
cfg_.ts_number_layers,
pattern_switch_);
frame_flags_ =
SetFrameFlags(video->frame(), cfg_.ts_number_layers, pattern_switch_);
for (unsigned int i = 0; i < droppable_nframes_; ++i) {
if (droppable_frames_[i] == video->frame()) {
std::cout << "Encoding droppable frame: "
<< droppable_frames_[i] << "\n";
std::cout << "Encoding droppable frame: " << droppable_frames_[i]
<< "\n";
}
}
} else {
if (droppable_nframes_ > 0 &&
(cfg_.g_pass == VPX_RC_LAST_PASS || cfg_.g_pass == VPX_RC_ONE_PASS)) {
for (unsigned int i = 0; i < droppable_nframes_; ++i) {
if (droppable_frames_[i] == video->frame()) {
std::cout << "Encoding droppable frame: "
<< droppable_frames_[i] << "\n";
frame_flags_ |= (VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF);
return;
}
}
}
if (droppable_nframes_ > 0 &&
(cfg_.g_pass == VPX_RC_LAST_PASS || cfg_.g_pass == VPX_RC_ONE_PASS)) {
for (unsigned int i = 0; i < droppable_nframes_; ++i) {
if (droppable_frames_[i] == video->frame()) {
std::cout << "Encoding droppable frame: " << droppable_frames_[i]
<< "\n";
frame_flags_ |= (VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF);
return;
}
}
}
}
}
double GetAveragePsnr() const {
if (nframes_)
return psnr_ / nframes_;
if (nframes_) return psnr_ / nframes_;
return 0.0;
}
double GetAverageMismatchPsnr() const {
if (mismatch_nframes_)
return mismatch_psnr_ / mismatch_nframes_;
if (mismatch_nframes_) return mismatch_psnr_ / mismatch_nframes_;
return 0.0;
}
@ -158,8 +144,7 @@ class ErrorResilienceTestLarge : public ::libvpx_test::EncoderTest,
return 1;
}
virtual void MismatchHook(const vpx_image_t *img1,
const vpx_image_t *img2) {
virtual void MismatchHook(const vpx_image_t *img1, const vpx_image_t *img2) {
double mismatch_psnr = compute_psnr(img1, img2);
mismatch_psnr_ += mismatch_psnr;
++mismatch_nframes_;
@ -167,32 +152,32 @@ class ErrorResilienceTestLarge : public ::libvpx_test::EncoderTest,
}
void SetErrorFrames(int num, unsigned int *list) {
if (num > kMaxErrorFrames)
if (num > kMaxErrorFrames) {
num = kMaxErrorFrames;
else if (num < 0)
} else if (num < 0) {
num = 0;
}
error_nframes_ = num;
for (unsigned int i = 0; i < error_nframes_; ++i)
for (unsigned int i = 0; i < error_nframes_; ++i) {
error_frames_[i] = list[i];
}
}
void SetDroppableFrames(int num, unsigned int *list) {
if (num > kMaxDroppableFrames)
if (num > kMaxDroppableFrames) {
num = kMaxDroppableFrames;
else if (num < 0)
} else if (num < 0) {
num = 0;
}
droppable_nframes_ = num;
for (unsigned int i = 0; i < droppable_nframes_; ++i)
for (unsigned int i = 0; i < droppable_nframes_; ++i) {
droppable_frames_[i] = list[i];
}
}
unsigned int GetMismatchFrames() {
return mismatch_nframes_;
}
unsigned int GetMismatchFrames() { return mismatch_nframes_; }
void SetPatternSwitch(int frame_switch) {
pattern_switch_ = frame_switch;
}
void SetPatternSwitch(int frame_switch) { pattern_switch_ = frame_switch; }
bool svc_support_;
@ -265,15 +250,14 @@ TEST_P(ErrorResilienceTestLarge, DropFramesWithoutRecovery) {
// In addition to isolated loss/drop, add a long consecutive series
// (of size 9) of dropped frames.
unsigned int num_droppable_frames = 11;
unsigned int droppable_frame_list[] = {5, 16, 22, 23, 24, 25, 26, 27, 28,
29, 30};
unsigned int droppable_frame_list[] = { 5, 16, 22, 23, 24, 25,
26, 27, 28, 29, 30 };
SetDroppableFrames(num_droppable_frames, droppable_frame_list);
SetErrorFrames(num_droppable_frames, droppable_frame_list);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
// Test that no mismatches have been found
std::cout << " Mismatch frames: "
<< GetMismatchFrames() << "\n";
EXPECT_EQ(GetMismatchFrames(), (unsigned int) 0);
std::cout << " Mismatch frames: " << GetMismatchFrames() << "\n";
EXPECT_EQ(GetMismatchFrames(), (unsigned int)0);
// Reset previously set of error/droppable frames.
Reset();
@ -306,8 +290,7 @@ TEST_P(ErrorResilienceTestLarge, DropFramesWithoutRecovery) {
// layer, so successful decoding is expected.
TEST_P(ErrorResilienceTestLarge, 2LayersDropEnhancement) {
// This test doesn't run if SVC is not supported.
if (!svc_support_)
return;
if (!svc_support_) return;
const vpx_rational timebase = { 33333333, 1000000000 };
cfg_.g_timebase = timebase;
@ -337,14 +320,13 @@ TEST_P(ErrorResilienceTestLarge, 2LayersDropEnhancement) {
// The odd frames are the enhancement layer for 2 layer pattern, so set
// those frames as droppable. Drop the last 7 frames.
unsigned int num_droppable_frames = 7;
unsigned int droppable_frame_list[] = {27, 29, 31, 33, 35, 37, 39};
unsigned int droppable_frame_list[] = { 27, 29, 31, 33, 35, 37, 39 };
SetDroppableFrames(num_droppable_frames, droppable_frame_list);
SetErrorFrames(num_droppable_frames, droppable_frame_list);
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
// Test that no mismatches have been found
std::cout << " Mismatch frames: "
<< GetMismatchFrames() << "\n";
EXPECT_EQ(GetMismatchFrames(), (unsigned int) 0);
std::cout << " Mismatch frames: " << GetMismatchFrames() << "\n";
EXPECT_EQ(GetMismatchFrames(), (unsigned int)0);
// Reset previously set of error/droppable frames.
Reset();
@ -355,8 +337,7 @@ TEST_P(ErrorResilienceTestLarge, 2LayersDropEnhancement) {
// sequence, the LAST ref is not used anymore.
TEST_P(ErrorResilienceTestLarge, 2LayersNoRefLast) {
// This test doesn't run if SVC is not supported.
if (!svc_support_)
return;
if (!svc_support_) return;
const vpx_rational timebase = { 33333333, 1000000000 };
cfg_.g_timebase = timebase;
@ -385,20 +366,19 @@ TEST_P(ErrorResilienceTestLarge, 2LayersNoRefLast) {
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
// Test that no mismatches have been found
std::cout << " Mismatch frames: "
<< GetMismatchFrames() << "\n";
EXPECT_EQ(GetMismatchFrames(), (unsigned int) 0);
std::cout << " Mismatch frames: " << GetMismatchFrames() << "\n";
EXPECT_EQ(GetMismatchFrames(), (unsigned int)0);
// Reset previously set of error/droppable frames.
Reset();
}
class ErrorResilienceTestLargeCodecControls : public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
class ErrorResilienceTestLargeCodecControls
: public ::libvpx_test::EncoderTest,
public ::libvpx_test::CodecTestWithParam<libvpx_test::TestMode> {
protected:
ErrorResilienceTestLargeCodecControls()
: EncoderTest(GET_PARAM(0)),
encoding_mode_(GET_PARAM(1)) {
: EncoderTest(GET_PARAM(0)), encoding_mode_(GET_PARAM(1)) {
Reset();
}
@ -437,8 +417,8 @@ class ErrorResilienceTestLargeCodecControls : public ::libvpx_test::EncoderTest,
if (num_temp_layers == 2) {
if (frame_num % 2 == 0) {
// Layer 0: predict from L and ARF, update L.
frame_flags = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF |
VP8_EFLAG_NO_UPD_ARF;
frame_flags =
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF;
} else {
// Layer 1: predict from L, G and ARF, and update G.
frame_flags = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
@ -451,9 +431,9 @@ class ErrorResilienceTestLargeCodecControls : public ::libvpx_test::EncoderTest,
VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF;
} else if ((frame_num - 2) % 4 == 0) {
// Layer 1: predict from L, G, update G.
frame_flags = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST |
VP8_EFLAG_NO_REF_ARF;
} else if ((frame_num - 1) % 2 == 0) {
frame_flags =
VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_REF_ARF;
} else if ((frame_num - 1) % 2 == 0) {
// Layer 2: predict from L, G, ARF; update ARG.
frame_flags = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST;
}
@ -467,7 +447,7 @@ class ErrorResilienceTestLargeCodecControls : public ::libvpx_test::EncoderTest,
if (frame_num % 2 == 0) {
layer_id = 0;
} else {
layer_id = 1;
layer_id = 1;
}
} else if (num_temp_layers == 3) {
if (frame_num % 4 == 0) {
@ -484,16 +464,16 @@ class ErrorResilienceTestLargeCodecControls : public ::libvpx_test::EncoderTest,
virtual void PreEncodeFrameHook(libvpx_test::VideoSource *video,
libvpx_test::Encoder *encoder) {
if (cfg_.ts_number_layers > 1) {
int layer_id = SetLayerId(video->frame(), cfg_.ts_number_layers);
int frame_flags = SetFrameFlags(video->frame(), cfg_.ts_number_layers);
if (video->frame() > 0) {
encoder->Control(VP8E_SET_TEMPORAL_LAYER_ID, layer_id);
encoder->Control(VP8E_SET_FRAME_FLAGS, frame_flags);
}
const vpx_rational_t tb = video->timebase();
timebase_ = static_cast<double>(tb.num) / tb.den;
duration_ = 0;
return;
int layer_id = SetLayerId(video->frame(), cfg_.ts_number_layers);
int frame_flags = SetFrameFlags(video->frame(), cfg_.ts_number_layers);
if (video->frame() > 0) {
encoder->Control(VP8E_SET_TEMPORAL_LAYER_ID, layer_id);
encoder->Control(VP8E_SET_FRAME_FLAGS, frame_flags);
}
const vpx_rational_t tb = video->timebase();
timebase_ = static_cast<double>(tb.num) / tb.den;
duration_ = 0;
return;
}
}
@ -519,26 +499,28 @@ class ErrorResilienceTestLargeCodecControls : public ::libvpx_test::EncoderTest,
virtual void EndPassHook(void) {
duration_ = (last_pts_ + 1) * timebase_;
if (cfg_.ts_number_layers > 1) {
if (cfg_.ts_number_layers > 1) {
for (int layer = 0; layer < static_cast<int>(cfg_.ts_number_layers);
++layer) {
++layer) {
if (bits_total_[layer]) {
// Effective file datarate:
effective_datarate_[layer] = (bits_total_[layer] / 1000.0) / duration_;
effective_datarate_[layer] =
(bits_total_[layer] / 1000.0) / duration_;
}
}
}
}
double effective_datarate_[3];
private:
libvpx_test::TestMode encoding_mode_;
vpx_codec_pts_t last_pts_;
double timebase_;
int64_t bits_total_[3];
double duration_;
int tot_frame_number_;
};
private:
libvpx_test::TestMode encoding_mode_;
vpx_codec_pts_t last_pts_;
double timebase_;
int64_t bits_total_[3];
double duration_;
int tot_frame_number_;
};
// Check two codec controls used for:
// (1) for setting temporal layer id, and (2) for settings encoder flags.
@ -582,10 +564,12 @@ TEST_P(ErrorResilienceTestLargeCodecControls, CodecControl3TemporalLayers) {
for (int j = 0; j < static_cast<int>(cfg_.ts_number_layers); ++j) {
ASSERT_GE(effective_datarate_[j], cfg_.ts_target_bitrate[j] * 0.75)
<< " The datarate for the file is lower than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
ASSERT_LE(effective_datarate_[j], cfg_.ts_target_bitrate[j] * 1.25)
<< " The datarate for the file is greater than target by too much, "
"for layer: " << j;
"for layer: "
<< j;
}
}
}
@ -596,7 +580,4 @@ VP8_INSTANTIATE_TEST_CASE(ErrorResilienceTestLargeCodecControls,
ONE_PASS_TEST_MODES);
VP9_INSTANTIATE_TEST_CASE(ErrorResilienceTestLarge, ONE_PASS_TEST_MODES,
::testing::Values(true));
// SVC-related tests don't run for VP10 since SVC is not supported.
VP10_INSTANTIATE_TEST_CASE(ErrorResilienceTestLarge, ONE_PASS_TEST_MODES,
::testing::Values(false));
} // namespace
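The codec-control test above checks, per temporal layer, that the effective datarate (accumulated bits over the clip duration) stays within 75% to 125% of the layer target. A small sketch of that check with made-up inputs:

#include <cstdio>

// Effective datarate in kbps for one layer: accumulated bits / 1000 / seconds,
// as computed in EndPassHook() above.
static double EffectiveKbps(long long bits_total, double duration_sec) {
  return (bits_total / 1000.0) / duration_sec;
}

// The test accepts anything within 75%..125% of the layer's target bitrate.
static bool WithinBand(double actual_kbps, double target_kbps) {
  return actual_kbps >= target_kbps * 0.75 && actual_kbps <= target_kbps * 1.25;
}

int main() {
  const long long bits_total = 1200000;  // invented: bits produced for one layer
  const double duration = 10.0;          // invented: clip length in seconds
  const double target_kbps = 100.0;      // invented: layer target bitrate
  const double actual = EffectiveKbps(bits_total, duration);
  std::printf("actual %.1f kbps vs target %.1f kbps -> %s\n", actual, target_kbps,
              WithinBand(actual, target_kbps) ? "within band" : "out of band");
  return 0;
}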
View File
@ -34,21 +34,18 @@ struct ExternalFrameBuffer {
// Class to manipulate a list of external frame buffers.
class ExternalFrameBufferList {
public:
ExternalFrameBufferList()
: num_buffers_(0),
ext_fb_list_(NULL) {}
ExternalFrameBufferList() : num_buffers_(0), ext_fb_list_(NULL) {}
virtual ~ExternalFrameBufferList() {
for (int i = 0; i < num_buffers_; ++i) {
delete [] ext_fb_list_[i].data;
delete[] ext_fb_list_[i].data;
}
delete [] ext_fb_list_;
delete[] ext_fb_list_;
}
// Creates the list to hold the external buffers. Returns true on success.
bool CreateBufferList(int num_buffers) {
if (num_buffers < 0)
return false;
if (num_buffers < 0) return false;
num_buffers_ = num_buffers;
ext_fb_list_ = new ExternalFrameBuffer[num_buffers_];
@ -64,11 +61,10 @@ class ExternalFrameBufferList {
int GetFreeFrameBuffer(size_t min_size, vpx_codec_frame_buffer_t *fb) {
EXPECT_TRUE(fb != NULL);
const int idx = FindFreeBufferIndex();
if (idx == num_buffers_)
return -1;
if (idx == num_buffers_) return -1;
if (ext_fb_list_[idx].size < min_size) {
delete [] ext_fb_list_[idx].data;
delete[] ext_fb_list_[idx].data;
ext_fb_list_[idx].data = new uint8_t[min_size];
memset(ext_fb_list_[idx].data, 0, min_size);
ext_fb_list_[idx].size = min_size;
@ -83,11 +79,10 @@ class ExternalFrameBufferList {
int GetZeroFrameBuffer(size_t min_size, vpx_codec_frame_buffer_t *fb) {
EXPECT_TRUE(fb != NULL);
const int idx = FindFreeBufferIndex();
if (idx == num_buffers_)
return -1;
if (idx == num_buffers_) return -1;
if (ext_fb_list_[idx].size < min_size) {
delete [] ext_fb_list_[idx].data;
delete[] ext_fb_list_[idx].data;
ext_fb_list_[idx].data = NULL;
ext_fb_list_[idx].size = min_size;
}
@ -104,7 +99,7 @@ class ExternalFrameBufferList {
return -1;
}
ExternalFrameBuffer *const ext_fb =
reinterpret_cast<ExternalFrameBuffer*>(fb->priv);
reinterpret_cast<ExternalFrameBuffer *>(fb->priv);
if (ext_fb == NULL) {
EXPECT_TRUE(ext_fb != NULL);
return -1;
@ -119,7 +114,7 @@ class ExternalFrameBufferList {
void CheckXImageFrameBuffer(const vpx_image_t *img) {
if (img->fb_priv != NULL) {
const struct ExternalFrameBuffer *const ext_fb =
reinterpret_cast<ExternalFrameBuffer*>(img->fb_priv);
reinterpret_cast<ExternalFrameBuffer *>(img->fb_priv);
ASSERT_TRUE(img->planes[0] >= ext_fb->data &&
img->planes[0] < (ext_fb->data + ext_fb->size));
@ -133,8 +128,7 @@ class ExternalFrameBufferList {
int i;
// Find a free frame buffer.
for (i = 0; i < num_buffers_; ++i) {
if (!ext_fb_list_[i].in_use)
break;
if (!ext_fb_list_[i].in_use) break;
}
return i;
}
@ -161,16 +155,15 @@ class ExternalFrameBufferList {
int get_vp9_frame_buffer(void *user_priv, size_t min_size,
vpx_codec_frame_buffer_t *fb) {
ExternalFrameBufferList *const fb_list =
reinterpret_cast<ExternalFrameBufferList*>(user_priv);
reinterpret_cast<ExternalFrameBufferList *>(user_priv);
return fb_list->GetFreeFrameBuffer(min_size, fb);
}
// Callback used by libvpx to tell the application that |fb| is not needed
// anymore.
int release_vp9_frame_buffer(void *user_priv,
vpx_codec_frame_buffer_t *fb) {
int release_vp9_frame_buffer(void *user_priv, vpx_codec_frame_buffer_t *fb) {
ExternalFrameBufferList *const fb_list =
reinterpret_cast<ExternalFrameBufferList*>(user_priv);
reinterpret_cast<ExternalFrameBufferList *>(user_priv);
return fb_list->ReturnFrameBuffer(fb);
}
@ -178,7 +171,7 @@ int release_vp9_frame_buffer(void *user_priv,
int get_vp9_zero_frame_buffer(void *user_priv, size_t min_size,
vpx_codec_frame_buffer_t *fb) {
ExternalFrameBufferList *const fb_list =
reinterpret_cast<ExternalFrameBufferList*>(user_priv);
reinterpret_cast<ExternalFrameBufferList *>(user_priv);
return fb_list->GetZeroFrameBuffer(min_size, fb);
}
@ -186,7 +179,7 @@ int get_vp9_zero_frame_buffer(void *user_priv, size_t min_size,
int get_vp9_one_less_byte_frame_buffer(void *user_priv, size_t min_size,
vpx_codec_frame_buffer_t *fb) {
ExternalFrameBufferList *const fb_list =
reinterpret_cast<ExternalFrameBufferList*>(user_priv);
reinterpret_cast<ExternalFrameBufferList *>(user_priv);
return fb_list->GetFreeFrameBuffer(min_size - 1, fb);
}
@ -203,16 +196,14 @@ int do_not_release_vp9_frame_buffer(void *user_priv,
// Class for testing passing in external frame buffers to libvpx.
class ExternalFrameBufferMD5Test
: public ::libvpx_test::DecoderTest,
public ::libvpx_test::CodecTestWithParam<const char*> {
public ::libvpx_test::CodecTestWithParam<const char *> {
protected:
ExternalFrameBufferMD5Test()
: DecoderTest(GET_PARAM(::libvpx_test::kCodecFactoryParam)),
md5_file_(NULL),
num_buffers_(0) {}
md5_file_(NULL), num_buffers_(0) {}
virtual ~ExternalFrameBufferMD5Test() {
if (md5_file_ != NULL)
fclose(md5_file_);
if (md5_file_ != NULL) fclose(md5_file_);
}
virtual void PreDecodeFrameHook(
@ -222,15 +213,15 @@ class ExternalFrameBufferMD5Test
// Have libvpx use frame buffers we create.
ASSERT_TRUE(fb_list_.CreateBufferList(num_buffers_));
ASSERT_EQ(VPX_CODEC_OK,
decoder->SetFrameBufferFunctions(
GetVP9FrameBuffer, ReleaseVP9FrameBuffer, this));
decoder->SetFrameBufferFunctions(GetVP9FrameBuffer,
ReleaseVP9FrameBuffer, this));
}
}
void OpenMD5File(const std::string &md5_file_name_) {
md5_file_ = libvpx_test::OpenTestDataFile(md5_file_name_);
ASSERT_TRUE(md5_file_ != NULL) << "Md5 file open failed. Filename: "
<< md5_file_name_;
<< md5_file_name_;
}
virtual void DecompressedFrameHook(const vpx_image_t &img,
@ -258,7 +249,7 @@ class ExternalFrameBufferMD5Test
static int GetVP9FrameBuffer(void *user_priv, size_t min_size,
vpx_codec_frame_buffer_t *fb) {
ExternalFrameBufferMD5Test *const md5Test =
reinterpret_cast<ExternalFrameBufferMD5Test*>(user_priv);
reinterpret_cast<ExternalFrameBufferMD5Test *>(user_priv);
return md5Test->fb_list_.GetFreeFrameBuffer(min_size, fb);
}
@ -267,7 +258,7 @@ class ExternalFrameBufferMD5Test
static int ReleaseVP9FrameBuffer(void *user_priv,
vpx_codec_frame_buffer_t *fb) {
ExternalFrameBufferMD5Test *const md5Test =
reinterpret_cast<ExternalFrameBufferMD5Test*>(user_priv);
reinterpret_cast<ExternalFrameBufferMD5Test *>(user_priv);
return md5Test->fb_list_.ReturnFrameBuffer(fb);
}
@ -286,10 +277,7 @@ const char kVP9TestFile[] = "vp90-2-02-size-lf-1920x1080.webm";
// Class for testing passing in external frame buffers to libvpx.
class ExternalFrameBufferTest : public ::testing::Test {
protected:
ExternalFrameBufferTest()
: video_(NULL),
decoder_(NULL),
num_buffers_(0) {}
ExternalFrameBufferTest() : video_(NULL), decoder_(NULL), num_buffers_(0) {}
virtual void SetUp() {
video_ = new libvpx_test::WebMVideoSource(kVP9TestFile);
@ -309,8 +297,7 @@ class ExternalFrameBufferTest : public ::testing::Test {
// Passes the external frame buffer information to libvpx.
vpx_codec_err_t SetFrameBufferFunctions(
int num_buffers,
vpx_get_frame_buffer_cb_fn_t cb_get,
int num_buffers, vpx_get_frame_buffer_cb_fn_t cb_get,
vpx_release_frame_buffer_cb_fn_t cb_release) {
if (num_buffers > 0) {
num_buffers_ = num_buffers;
@ -324,8 +311,7 @@ class ExternalFrameBufferTest : public ::testing::Test {
const vpx_codec_err_t res =
decoder_->DecodeFrame(video_->cxdata(), video_->frame_size());
CheckDecodedFrames();
if (res == VPX_CODEC_OK)
video_->Next();
if (res == VPX_CODEC_OK) video_->Next();
return res;
}
@ -333,8 +319,7 @@ class ExternalFrameBufferTest : public ::testing::Test {
for (; video_->cxdata() != NULL; video_->Next()) {
const vpx_codec_err_t res =
decoder_->DecodeFrame(video_->cxdata(), video_->frame_size());
if (res != VPX_CODEC_OK)
return res;
if (res != VPX_CODEC_OK) return res;
CheckDecodedFrames();
}
return VPX_CODEC_OK;
@ -365,7 +350,6 @@ class ExternalFrameBufferTest : public ::testing::Test {
// Otherwise, the test failed.
TEST_P(ExternalFrameBufferMD5Test, ExtFBMD5Match) {
const std::string filename = GET_PARAM(kVideoNameParam);
libvpx_test::CompressedVideoSource *video = NULL;
// Number of buffers equals #VP9_MAXIMUM_REF_BUFFERS +
// #VPX_MAXIMUM_WORK_BUFFERS + four jitter buffers.
@ -380,18 +364,19 @@ TEST_P(ExternalFrameBufferMD5Test, ExtFBMD5Match) {
#endif
// Open compressed video file.
testing::internal::scoped_ptr<libvpx_test::CompressedVideoSource> video;
if (filename.substr(filename.length() - 3, 3) == "ivf") {
video = new libvpx_test::IVFVideoSource(filename);
video.reset(new libvpx_test::IVFVideoSource(filename));
} else {
#if CONFIG_WEBM_IO
video = new libvpx_test::WebMVideoSource(filename);
video.reset(new libvpx_test::WebMVideoSource(filename));
#else
fprintf(stderr, "WebM IO is disabled, skipping test vector %s\n",
filename.c_str());
return;
#endif
}
ASSERT_TRUE(video != NULL);
ASSERT_TRUE(video.get() != NULL);
video->Init();
// Construct md5 file name.
@ -399,8 +384,7 @@ TEST_P(ExternalFrameBufferMD5Test, ExtFBMD5Match) {
OpenMD5File(md5_filename);
// Decode frame, and check the md5 matching.
ASSERT_NO_FATAL_FAILURE(RunLoop(video));
delete video;
ASSERT_NO_FATAL_FAILURE(RunLoop(video.get()));
}
#if CONFIG_WEBM_IO
@ -409,8 +393,8 @@ TEST_F(ExternalFrameBufferTest, MinFrameBuffers) {
// #VP9_MAXIMUM_REF_BUFFERS + #VPX_MAXIMUM_WORK_BUFFERS.
const int num_buffers = VP9_MAXIMUM_REF_BUFFERS + VPX_MAXIMUM_WORK_BUFFERS;
ASSERT_EQ(VPX_CODEC_OK,
SetFrameBufferFunctions(
num_buffers, get_vp9_frame_buffer, release_vp9_frame_buffer));
SetFrameBufferFunctions(num_buffers, get_vp9_frame_buffer,
release_vp9_frame_buffer));
ASSERT_EQ(VPX_CODEC_OK, DecodeRemainingFrames());
}
@ -421,8 +405,8 @@ TEST_F(ExternalFrameBufferTest, EightJitterBuffers) {
const int num_buffers =
VP9_MAXIMUM_REF_BUFFERS + VPX_MAXIMUM_WORK_BUFFERS + jitter_buffers;
ASSERT_EQ(VPX_CODEC_OK,
SetFrameBufferFunctions(
num_buffers, get_vp9_frame_buffer, release_vp9_frame_buffer));
SetFrameBufferFunctions(num_buffers, get_vp9_frame_buffer,
release_vp9_frame_buffer));
ASSERT_EQ(VPX_CODEC_OK, DecodeRemainingFrames());
}
@ -432,8 +416,8 @@ TEST_F(ExternalFrameBufferTest, NotEnoughBuffers) {
// only use 5 frame buffers at one time.
const int num_buffers = 2;
ASSERT_EQ(VPX_CODEC_OK,
SetFrameBufferFunctions(
num_buffers, get_vp9_frame_buffer, release_vp9_frame_buffer));
SetFrameBufferFunctions(num_buffers, get_vp9_frame_buffer,
release_vp9_frame_buffer));
ASSERT_EQ(VPX_CODEC_OK, DecodeOneFrame());
ASSERT_EQ(VPX_CODEC_MEM_ERROR, DecodeRemainingFrames());
}
@ -457,18 +441,17 @@ TEST_F(ExternalFrameBufferTest, NullRealloc) {
TEST_F(ExternalFrameBufferTest, ReallocOneLessByte) {
const int num_buffers = VP9_MAXIMUM_REF_BUFFERS + VPX_MAXIMUM_WORK_BUFFERS;
ASSERT_EQ(VPX_CODEC_OK,
SetFrameBufferFunctions(
num_buffers, get_vp9_one_less_byte_frame_buffer,
release_vp9_frame_buffer));
ASSERT_EQ(VPX_CODEC_OK, SetFrameBufferFunctions(
num_buffers, get_vp9_one_less_byte_frame_buffer,
release_vp9_frame_buffer));
ASSERT_EQ(VPX_CODEC_MEM_ERROR, DecodeOneFrame());
}
TEST_F(ExternalFrameBufferTest, NullGetFunction) {
const int num_buffers = VP9_MAXIMUM_REF_BUFFERS + VPX_MAXIMUM_WORK_BUFFERS;
ASSERT_EQ(VPX_CODEC_INVALID_PARAM,
SetFrameBufferFunctions(num_buffers, NULL,
release_vp9_frame_buffer));
ASSERT_EQ(
VPX_CODEC_INVALID_PARAM,
SetFrameBufferFunctions(num_buffers, NULL, release_vp9_frame_buffer));
}
TEST_F(ExternalFrameBufferTest, NullReleaseFunction) {
@ -481,13 +464,14 @@ TEST_F(ExternalFrameBufferTest, SetAfterDecode) {
const int num_buffers = VP9_MAXIMUM_REF_BUFFERS + VPX_MAXIMUM_WORK_BUFFERS;
ASSERT_EQ(VPX_CODEC_OK, DecodeOneFrame());
ASSERT_EQ(VPX_CODEC_ERROR,
SetFrameBufferFunctions(
num_buffers, get_vp9_frame_buffer, release_vp9_frame_buffer));
SetFrameBufferFunctions(num_buffers, get_vp9_frame_buffer,
release_vp9_frame_buffer));
}
#endif // CONFIG_WEBM_IO
VP9_INSTANTIATE_TEST_CASE(ExternalFrameBufferMD5Test,
::testing::ValuesIn(libvpx_test::kVP9TestVectors,
libvpx_test::kVP9TestVectors +
libvpx_test::kNumVP9TestVectors));
VP9_INSTANTIATE_TEST_CASE(
ExternalFrameBufferMD5Test,
::testing::ValuesIn(libvpx_test::kVP9TestVectors,
libvpx_test::kVP9TestVectors +
libvpx_test::kNumVP9TestVectors));
} // namespace
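The external frame-buffer tests above exercise a callback interface: the decoder asks the application for a buffer of at least min_size via a get callback (passing back user_priv) and later returns it through a release callback. A compact sketch of such a pool with simplified stand-in types (FrameBuf here is not vpx_codec_frame_buffer_t):

#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Simplified stand-ins for the frame buffer handle and the external pool.
struct FrameBuf { unsigned char *data; size_t size; void *priv; };
struct PoolSlot { unsigned char *data; size_t size; bool in_use; };
struct Pool { PoolSlot slot[4]; };

// "Get" callback: find a free slot, grow its allocation if needed, hand it out.
static int GetBuf(void *user_priv, size_t min_size, FrameBuf *fb) {
  Pool *pool = static_cast<Pool *>(user_priv);
  for (int i = 0; i < 4; ++i) {
    PoolSlot *s = &pool->slot[i];
    if (s->in_use) continue;
    if (s->size < min_size) {
      unsigned char *grown = static_cast<unsigned char *>(std::calloc(1, min_size));
      if (grown == NULL) return -1;
      std::free(s->data);
      s->data = grown;
      s->size = min_size;
    }
    s->in_use = true;
    fb->data = s->data;
    fb->size = s->size;
    fb->priv = s;  // lets the release callback find this slot again
    return 0;
  }
  return -1;  // pool exhausted
}

// "Release" callback: the frame is no longer referenced, mark the slot free.
static int ReleaseBuf(void *user_priv, FrameBuf *fb) {
  (void)user_priv;
  static_cast<PoolSlot *>(fb->priv)->in_use = false;
  return 0;
}

int main() {
  Pool pool = {};
  FrameBuf fb = {};
  if (GetBuf(&pool, 4096, &fb) == 0) {
    std::memset(fb.data, 0, fb.size);  // a decoder would write frame data here
    ReleaseBuf(&pool, &fb);
  }
  std::puts("done");
  return 0;
}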
View File
@ -128,35 +128,33 @@ class Trans4x4TestBase {
}
}
ASM_REGISTER_STATE_CHECK(RunFwdTxfm(test_input_block,
test_temp_block, pitch_));
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(test_input_block, test_temp_block, pitch_));
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(test_temp_block, dst, pitch_));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(test_temp_block,
CONVERT_TO_BYTEPTR(dst16), pitch_));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(test_temp_block, CONVERT_TO_BYTEPTR(dst16), pitch_));
#endif
}
for (int j = 0; j < kNumCoeffs; ++j) {
#if CONFIG_VP9_HIGHBITDEPTH
const uint32_t diff =
const int diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - src[j] : dst16[j] - src16[j];
#else
ASSERT_EQ(VPX_BITS_8, bit_depth_);
const uint32_t diff = dst[j] - src[j];
const int diff = dst[j] - src[j];
#endif
const uint32_t error = diff * diff;
if (max_error < error)
max_error = error;
if (max_error < error) max_error = error;
total_error += error;
}
}
EXPECT_GE(static_cast<uint32_t>(limit), max_error)
<< "Error: 4x4 FHT/IHT has an individual round trip error > "
<< limit;
<< "Error: 4x4 FHT/IHT has an individual round trip error > " << limit;
EXPECT_GE(count_test_block * limit, total_error)
<< "Error: 4x4 FHT/IHT has average round trip error > " << limit
@ -172,8 +170,9 @@ class Trans4x4TestBase {
for (int i = 0; i < count_test_block; ++i) {
// Initialize a test block with input range [-mask_, mask_].
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
input_block[j] = (rnd.Rand16() & mask_) - (rnd.Rand16() & mask_);
}
fwd_txfm_ref(input_block, output_ref_block, pitch_, tx_type_);
ASM_REGISTER_STATE_CHECK(RunFwdTxfm(input_block, output_block, pitch_));
@ -197,16 +196,14 @@ class Trans4x4TestBase {
input_extreme_block[j] = rnd.Rand8() % 2 ? mask_ : -mask_;
}
if (i == 0) {
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = mask_;
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = mask_;
} else if (i == 1) {
for (int j = 0; j < kNumCoeffs; ++j)
input_extreme_block[j] = -mask_;
for (int j = 0; j < kNumCoeffs; ++j) input_extreme_block[j] = -mask_;
}
fwd_txfm_ref(input_extreme_block, output_ref_block, pitch_, tx_type_);
ASM_REGISTER_STATE_CHECK(RunFwdTxfm(input_extreme_block,
output_block, pitch_));
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(input_extreme_block, output_block, pitch_));
// The minimum quant value is 4.
for (int j = 0; j < kNumCoeffs; ++j) {
@ -251,22 +248,21 @@ class Trans4x4TestBase {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, dst, pitch_));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16),
pitch_));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16), pitch_));
#endif
}
for (int j = 0; j < kNumCoeffs; ++j) {
#if CONFIG_VP9_HIGHBITDEPTH
const uint32_t diff =
const int diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - src[j] : dst16[j] - src16[j];
#else
const uint32_t diff = dst[j] - src[j];
const int diff = dst[j] - src[j];
#endif
const uint32_t error = diff * diff;
EXPECT_GE(static_cast<uint32_t>(limit), error)
<< "Error: 4x4 IDCT has error " << error
<< " at index " << j;
<< "Error: 4x4 IDCT has error " << error << " at index " << j;
}
}
}
@ -278,17 +274,16 @@ class Trans4x4TestBase {
int mask_;
};
class Trans4x4DCT
: public Trans4x4TestBase,
public ::testing::TestWithParam<Dct4x4Param> {
class Trans4x4DCT : public Trans4x4TestBase,
public ::testing::TestWithParam<Dct4x4Param> {
public:
virtual ~Trans4x4DCT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
pitch_ = 4;
tx_type_ = GET_PARAM(2);
pitch_ = 4;
fwd_txfm_ref = fdct4x4_ref;
bit_depth_ = GET_PARAM(3);
mask_ = (1 << bit_depth_) - 1;
@ -307,33 +302,24 @@ class Trans4x4DCT
IdctFunc inv_txfm_;
};
TEST_P(Trans4x4DCT, AccuracyCheck) {
RunAccuracyCheck(1);
}
TEST_P(Trans4x4DCT, AccuracyCheck) { RunAccuracyCheck(1); }
TEST_P(Trans4x4DCT, CoeffCheck) {
RunCoeffCheck();
}
TEST_P(Trans4x4DCT, CoeffCheck) { RunCoeffCheck(); }
TEST_P(Trans4x4DCT, MemCheck) {
RunMemCheck();
}
TEST_P(Trans4x4DCT, MemCheck) { RunMemCheck(); }
TEST_P(Trans4x4DCT, InvAccuracyCheck) {
RunInvAccuracyCheck(1);
}
TEST_P(Trans4x4DCT, InvAccuracyCheck) { RunInvAccuracyCheck(1); }
class Trans4x4HT
: public Trans4x4TestBase,
public ::testing::TestWithParam<Ht4x4Param> {
class Trans4x4HT : public Trans4x4TestBase,
public ::testing::TestWithParam<Ht4x4Param> {
public:
virtual ~Trans4x4HT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
pitch_ = 4;
tx_type_ = GET_PARAM(2);
pitch_ = 4;
fwd_txfm_ref = fht4x4_ref;
bit_depth_ = GET_PARAM(3);
mask_ = (1 << bit_depth_) - 1;
@ -353,33 +339,24 @@ class Trans4x4HT
IhtFunc inv_txfm_;
};
TEST_P(Trans4x4HT, AccuracyCheck) {
RunAccuracyCheck(1);
}
TEST_P(Trans4x4HT, AccuracyCheck) { RunAccuracyCheck(1); }
TEST_P(Trans4x4HT, CoeffCheck) {
RunCoeffCheck();
}
TEST_P(Trans4x4HT, CoeffCheck) { RunCoeffCheck(); }
TEST_P(Trans4x4HT, MemCheck) {
RunMemCheck();
}
TEST_P(Trans4x4HT, MemCheck) { RunMemCheck(); }
TEST_P(Trans4x4HT, InvAccuracyCheck) {
RunInvAccuracyCheck(1);
}
TEST_P(Trans4x4HT, InvAccuracyCheck) { RunInvAccuracyCheck(1); }
class Trans4x4WHT
: public Trans4x4TestBase,
public ::testing::TestWithParam<Dct4x4Param> {
class Trans4x4WHT : public Trans4x4TestBase,
public ::testing::TestWithParam<Dct4x4Param> {
public:
virtual ~Trans4x4WHT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
pitch_ = 4;
tx_type_ = GET_PARAM(2);
pitch_ = 4;
fwd_txfm_ref = fwht4x4_ref;
bit_depth_ = GET_PARAM(3);
mask_ = (1 << bit_depth_) - 1;
@ -398,21 +375,13 @@ class Trans4x4WHT
IdctFunc inv_txfm_;
};
TEST_P(Trans4x4WHT, AccuracyCheck) {
RunAccuracyCheck(0);
}
TEST_P(Trans4x4WHT, AccuracyCheck) { RunAccuracyCheck(0); }
TEST_P(Trans4x4WHT, CoeffCheck) {
RunCoeffCheck();
}
TEST_P(Trans4x4WHT, CoeffCheck) { RunCoeffCheck(); }
TEST_P(Trans4x4WHT, MemCheck) {
RunMemCheck();
}
TEST_P(Trans4x4WHT, MemCheck) { RunMemCheck(); }
TEST_P(Trans4x4WHT, InvAccuracyCheck) {
RunInvAccuracyCheck(0);
}
TEST_P(Trans4x4WHT, InvAccuracyCheck) { RunInvAccuracyCheck(0); }
using std::tr1::make_tuple;
#if CONFIG_VP9_HIGHBITDEPTH
@ -423,10 +392,10 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vpx_highbd_fdct4x4_c, &idct4x4_12, 0, VPX_BITS_12),
make_tuple(&vpx_fdct4x4_c, &vpx_idct4x4_16_add_c, 0, VPX_BITS_8)));
#else
INSTANTIATE_TEST_CASE_P(
C, Trans4x4DCT,
::testing::Values(
make_tuple(&vpx_fdct4x4_c, &vpx_idct4x4_16_add_c, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(C, Trans4x4DCT,
::testing::Values(make_tuple(&vpx_fdct4x4_c,
&vpx_idct4x4_16_add_c, 0,
VPX_BITS_8)));
#endif // CONFIG_VP9_HIGHBITDEPTH
#if CONFIG_VP9_HIGHBITDEPTH
@ -463,18 +432,17 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vp9_highbd_fwht4x4_c, &iwht4x4_12, 0, VPX_BITS_12),
make_tuple(&vp9_fwht4x4_c, &vpx_iwht4x4_16_add_c, 0, VPX_BITS_8)));
#else
INSTANTIATE_TEST_CASE_P(
C, Trans4x4WHT,
::testing::Values(
make_tuple(&vp9_fwht4x4_c, &vpx_iwht4x4_16_add_c, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(C, Trans4x4WHT,
::testing::Values(make_tuple(&vp9_fwht4x4_c,
&vpx_iwht4x4_16_add_c, 0,
VPX_BITS_8)));
#endif // CONFIG_VP9_HIGHBITDEPTH
#if HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
NEON, Trans4x4DCT,
::testing::Values(
make_tuple(&vpx_fdct4x4_c,
&vpx_idct4x4_16_add_neon, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(NEON, Trans4x4DCT,
::testing::Values(make_tuple(&vpx_fdct4x4_c,
&vpx_idct4x4_16_add_neon,
0, VPX_BITS_8)));
#endif // HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
@ -487,28 +455,19 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vp9_fht4x4_c, &vp9_iht4x4_16_add_neon, 3, VPX_BITS_8)));
#endif // HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if CONFIG_USE_X86INC && HAVE_MMX && !CONFIG_VP9_HIGHBITDEPTH && \
!CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
MMX, Trans4x4WHT,
::testing::Values(
make_tuple(&vp9_fwht4x4_mmx, &vpx_iwht4x4_16_add_c, 0, VPX_BITS_8)));
#endif
#if CONFIG_USE_X86INC && HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && \
!CONFIG_EMULATE_HARDWARE
#if HAVE_SSE2 && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, Trans4x4WHT,
::testing::Values(
make_tuple(&vp9_fwht4x4_sse2, &vpx_iwht4x4_16_add_c, 0, VPX_BITS_8),
make_tuple(&vp9_fwht4x4_c, &vpx_iwht4x4_16_add_sse2, 0, VPX_BITS_8)));
#endif
#if HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, Trans4x4DCT,
::testing::Values(
make_tuple(&vpx_fdct4x4_sse2,
&vpx_idct4x4_16_add_sse2, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(SSE2, Trans4x4DCT,
::testing::Values(make_tuple(&vpx_fdct4x4_sse2,
&vpx_idct4x4_16_add_sse2,
0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
SSE2, Trans4x4HT,
::testing::Values(
@ -522,12 +481,11 @@ INSTANTIATE_TEST_CASE_P(
INSTANTIATE_TEST_CASE_P(
SSE2, Trans4x4DCT,
::testing::Values(
make_tuple(&vpx_highbd_fdct4x4_c, &idct4x4_10_sse2, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct4x4_c, &idct4x4_10_sse2, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct4x4_sse2, &idct4x4_10_sse2, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct4x4_c, &idct4x4_12_sse2, 0, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct4x4_c, &idct4x4_12_sse2, 0, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct4x4_sse2, &idct4x4_12_sse2, 0, VPX_BITS_12),
make_tuple(&vpx_fdct4x4_sse2, &vpx_idct4x4_16_add_c, 0,
VPX_BITS_8)));
make_tuple(&vpx_fdct4x4_sse2, &vpx_idct4x4_16_add_c, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
SSE2, Trans4x4HT,
@ -539,10 +497,10 @@ INSTANTIATE_TEST_CASE_P(
#endif // HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_MSA && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
MSA, Trans4x4DCT,
::testing::Values(
make_tuple(&vpx_fdct4x4_msa, &vpx_idct4x4_16_add_msa, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(MSA, Trans4x4DCT,
::testing::Values(make_tuple(&vpx_fdct4x4_msa,
&vpx_idct4x4_16_add_msa, 0,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
MSA, Trans4x4HT,
::testing::Values(
View File
@ -51,10 +51,10 @@ void reference_8x8_dct_1d(const double in[8], double out[8]) {
const double kInvSqrt2 = 0.707106781186547524400844362104;
for (int k = 0; k < 8; k++) {
out[k] = 0.0;
for (int n = 0; n < 8; n++)
for (int n = 0; n < 8; n++) {
out[k] += in[n] * cos(kPi * (2 * n + 1) * k / 16.0);
if (k == 0)
out[k] = out[k] * kInvSqrt2;
}
if (k == 0) out[k] = out[k] * kInvSqrt2;
}
}
@ -63,25 +63,20 @@ void reference_8x8_dct_2d(const int16_t input[kNumCoeffs],
// First transform columns
for (int i = 0; i < 8; ++i) {
double temp_in[8], temp_out[8];
for (int j = 0; j < 8; ++j)
temp_in[j] = input[j*8 + i];
for (int j = 0; j < 8; ++j) temp_in[j] = input[j * 8 + i];
reference_8x8_dct_1d(temp_in, temp_out);
for (int j = 0; j < 8; ++j)
output[j * 8 + i] = temp_out[j];
for (int j = 0; j < 8; ++j) output[j * 8 + i] = temp_out[j];
}
// Then transform rows
for (int i = 0; i < 8; ++i) {
double temp_in[8], temp_out[8];
for (int j = 0; j < 8; ++j)
temp_in[j] = output[j + i*8];
for (int j = 0; j < 8; ++j) temp_in[j] = output[j + i * 8];
reference_8x8_dct_1d(temp_in, temp_out);
// Scale by some magic number
for (int j = 0; j < 8; ++j)
output[j + i * 8] = temp_out[j] * 2;
for (int j = 0; j < 8; ++j) output[j + i * 8] = temp_out[j] * 2;
}
}
void fdct8x8_ref(const int16_t *in, tran_low_t *out, int stride,
int /*tx_type*/) {
vpx_fdct8x8_c(in, out, stride);
@ -155,17 +150,19 @@ class FwdTrans8x8TestBase {
for (int i = 0; i < count_test_block; ++i) {
// Initialize a test block with input range [-255, 255].
for (int j = 0; j < 64; ++j)
for (int j = 0; j < 64; ++j) {
test_input_block[j] = ((rnd.Rand16() >> (16 - bit_depth_)) & mask_) -
((rnd.Rand16() >> (16 - bit_depth_)) & mask_);
}
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(test_input_block, test_output_block, pitch_));
for (int j = 0; j < 64; ++j) {
if (test_output_block[j] < 0)
if (test_output_block[j] < 0) {
++count_sign_block[j][0];
else if (test_output_block[j] > 0)
} else if (test_output_block[j] > 0) {
++count_sign_block[j][1];
}
}
}
@ -177,25 +174,26 @@ class FwdTrans8x8TestBase {
<< 1. * max_diff / count_test_block * 100 << "%"
<< " for input range [-255, 255] at index " << j
<< " count0: " << count_sign_block[j][0]
<< " count1: " << count_sign_block[j][1]
<< " diff: " << diff;
<< " count1: " << count_sign_block[j][1] << " diff: " << diff;
}
memset(count_sign_block, 0, sizeof(count_sign_block));
for (int i = 0; i < count_test_block; ++i) {
// Initialize a test block with input range [-mask_ / 16, mask_ / 16].
for (int j = 0; j < 64; ++j)
test_input_block[j] = ((rnd.Rand16() & mask_) >> 4) -
((rnd.Rand16() & mask_) >> 4);
for (int j = 0; j < 64; ++j) {
test_input_block[j] =
((rnd.Rand16() & mask_) >> 4) - ((rnd.Rand16() & mask_) >> 4);
}
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(test_input_block, test_output_block, pitch_));
for (int j = 0; j < 64; ++j) {
if (test_output_block[j] < 0)
if (test_output_block[j] < 0) {
++count_sign_block[j][0];
else if (test_output_block[j] > 0)
} else if (test_output_block[j] > 0) {
++count_sign_block[j][1];
}
}
}
@ -207,8 +205,7 @@ class FwdTrans8x8TestBase {
<< 1. * max_diff / count_test_block * 100 << "%"
<< " for input range [-15, 15] at index " << j
<< " count0: " << count_sign_block[j][0]
<< " count1: " << count_sign_block[j][1]
<< " diff: " << diff;
<< " count1: " << count_sign_block[j][1] << " diff: " << diff;
}
}
@ -245,19 +242,18 @@ class FwdTrans8x8TestBase {
ASM_REGISTER_STATE_CHECK(
RunFwdTxfm(test_input_block, test_temp_block, pitch_));
for (int j = 0; j < 64; ++j) {
if (test_temp_block[j] > 0) {
test_temp_block[j] += 2;
test_temp_block[j] /= 4;
test_temp_block[j] *= 4;
} else {
test_temp_block[j] -= 2;
test_temp_block[j] /= 4;
test_temp_block[j] *= 4;
}
if (test_temp_block[j] > 0) {
test_temp_block[j] += 2;
test_temp_block[j] /= 4;
test_temp_block[j] *= 4;
} else {
test_temp_block[j] -= 2;
test_temp_block[j] /= 4;
test_temp_block[j] *= 4;
}
}
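// The add-2 / divide-by-4 / multiply-by-4 loop above rounds each forward
// coefficient to the nearest multiple of 4, halves away from zero
// (5 -> 4, 6 -> 8, 7 -> 8, -5 -> -4, -6 -> -8, -7 -> -8), i.e. a coarse
// quantize/dequantize round trip before the inverse transform is run.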
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(test_temp_block, dst, pitch_));
ASM_REGISTER_STATE_CHECK(RunInvTxfm(test_temp_block, dst, pitch_));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(
@ -273,19 +269,18 @@ class FwdTrans8x8TestBase {
const int diff = dst[j] - src[j];
#endif
const int error = diff * diff;
if (max_error < error)
max_error = error;
if (max_error < error) max_error = error;
total_error += error;
}
}
EXPECT_GE(1 << 2 * (bit_depth_ - 8), max_error)
<< "Error: 8x8 FDCT/IDCT or FHT/IHT has an individual"
<< " roundtrip error > 1";
<< "Error: 8x8 FDCT/IDCT or FHT/IHT has an individual"
<< " roundtrip error > 1";
EXPECT_GE((count_test_block << 2 * (bit_depth_ - 8))/5, total_error)
<< "Error: 8x8 FDCT/IDCT or FHT/IHT has average roundtrip "
<< "error > 1/5 per block";
EXPECT_GE((count_test_block << 2 * (bit_depth_ - 8)) / 5, total_error)
<< "Error: 8x8 FDCT/IDCT or FHT/IHT has average roundtrip "
<< "error > 1/5 per block";
}
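To make the two bounds above concrete, the shift in the EXPECT_GE expressions scales the allowed error with bit depth:

  1 << 2 * (8 - 8)  = 1    ->  |dst - src| <= 1  per pixel at 8 bits
  1 << 2 * (10 - 8) = 16   ->  |dst - src| <= 4  per pixel at 10 bits
  1 << 2 * (12 - 8) = 256  ->  |dst - src| <= 16 per pixel at 12 bits

and the second check caps the accumulated error at count_test_block * 4^(bit_depth_ - 8) / 5, i.e. an average squared round-trip error of at most 1/5 per block at 8 bits, scaled by the same factor at the higher depths.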
void RunExtremalCheck() {
@ -341,8 +336,7 @@ class FwdTrans8x8TestBase {
ASM_REGISTER_STATE_CHECK(
fwd_txfm_ref(test_input_block, ref_temp_block, pitch_, tx_type_));
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(test_temp_block, dst, pitch_));
ASM_REGISTER_STATE_CHECK(RunInvTxfm(test_temp_block, dst, pitch_));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(
@ -358,8 +352,7 @@ class FwdTrans8x8TestBase {
const int diff = dst[j] - src[j];
#endif
const int error = diff * diff;
if (max_error < error)
max_error = error;
if (max_error < error) max_error = error;
total_error += error;
const int coeff_diff = test_temp_block[j] - ref_temp_block[j];
@ -370,7 +363,7 @@ class FwdTrans8x8TestBase {
<< "Error: Extremal 8x8 FDCT/IDCT or FHT/IHT has"
<< "an individual roundtrip error > 1";
EXPECT_GE((count_test_block << 2 * (bit_depth_ - 8))/5, total_error)
EXPECT_GE((count_test_block << 2 * (bit_depth_ - 8)) / 5, total_error)
<< "Error: Extremal 8x8 FDCT/IDCT or FHT/IHT has average"
<< " roundtrip error > 1/5 per block";
@ -411,29 +404,29 @@ class FwdTrans8x8TestBase {
}
reference_8x8_dct_2d(in, out_r);
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
coeff[j] = static_cast<tran_low_t>(round(out_r[j]));
}
if (bit_depth_ == VPX_BITS_8) {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, dst, pitch_));
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16),
pitch_));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16), pitch_));
#endif
}
for (int j = 0; j < kNumCoeffs; ++j) {
#if CONFIG_VP9_HIGHBITDEPTH
const uint32_t diff =
const int diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - src[j] : dst16[j] - src16[j];
#else
const uint32_t diff = dst[j] - src[j];
const int diff = dst[j] - src[j];
#endif
const uint32_t error = diff * diff;
EXPECT_GE(1u << 2 * (bit_depth_ - 8), error)
<< "Error: 8x8 IDCT has error " << error
<< " at index " << j;
<< "Error: 8x8 IDCT has error " << error << " at index " << j;
}
}
}
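The uint32_t -> int change for diff in the hunk above (and in the similar hunks further down) is worth spelling out: subtracting two unsigned pixel values and storing the result in an unsigned 32-bit variable silently wraps when the difference is negative. The squared error still comes out right for small differences, because unsigned arithmetic is modulo 2^32 and the wrap cancels when squaring, but the signed type states the intent and keeps the intermediate value meaningful. A minimal standalone sketch, not part of the test and with made-up names:

  #include <cstdint>
  #include <cstdio>

  int main() {
    const uint8_t dst = 10, src = 12;
    // dst - src is promoted to int and evaluates to -2; converting the
    // result to uint32_t wraps it around.
    const uint32_t wrapped_diff = dst - src;  // 4294967294
    const int signed_diff = dst - src;        // -2
    // Both squares are 4: the modulo-2^32 wrap cancels when squaring, but
    // only because the true difference is small.
    const uint32_t wrapped_error = wrapped_diff * wrapped_diff;
    const uint32_t signed_error = signed_diff * signed_diff;
    std::printf("diff %u vs %d, error %u vs %u\n",
                static_cast<unsigned>(wrapped_diff), signed_diff,
                static_cast<unsigned>(wrapped_error),
                static_cast<unsigned>(signed_error));
    return 0;
  }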
@ -449,25 +442,26 @@ class FwdTrans8x8TestBase {
double out_r[kNumCoeffs];
// Initialize a test block with input range [-mask_, mask_].
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
in[j] = rnd.Rand8() % 2 == 0 ? mask_ : -mask_;
}
RunFwdTxfm(in, coeff, pitch_);
reference_8x8_dct_2d(in, out_r);
for (int j = 0; j < kNumCoeffs; ++j)
for (int j = 0; j < kNumCoeffs; ++j) {
coeff_r[j] = static_cast<tran_low_t>(round(out_r[j]));
}
for (int j = 0; j < kNumCoeffs; ++j) {
const uint32_t diff = coeff[j] - coeff_r[j];
const int32_t diff = coeff[j] - coeff_r[j];
const uint32_t error = diff * diff;
EXPECT_GE(9u << 2 * (bit_depth_ - 8), error)
<< "Error: 8x8 DCT has error " << error
<< " at index " << j;
<< "Error: 8x8 DCT has error " << error << " at index " << j;
}
}
}
void CompareInvReference(IdctFunc ref_txfm, int thresh) {
ACMRandom rnd(ACMRandom::DeterministicSeed());
const int count_test_block = 10000;
const int eob = 12;
@ -484,7 +478,7 @@ void CompareInvReference(IdctFunc ref_txfm, int thresh) {
for (int j = 0; j < kNumCoeffs; ++j) {
if (j < eob) {
// Random values less than the threshold, either positive or negative
coeff[scan[j]] = rnd(thresh) * (1-2*(i%2));
coeff[scan[j]] = rnd(thresh) * (1 - 2 * (i % 2));
} else {
coeff[scan[j]] = 0;
}
@ -504,22 +498,21 @@ void CompareInvReference(IdctFunc ref_txfm, int thresh) {
#if CONFIG_VP9_HIGHBITDEPTH
} else {
ref_txfm(coeff, CONVERT_TO_BYTEPTR(ref16), pitch_);
ASM_REGISTER_STATE_CHECK(RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16),
pitch_));
ASM_REGISTER_STATE_CHECK(
RunInvTxfm(coeff, CONVERT_TO_BYTEPTR(dst16), pitch_));
#endif
}
for (int j = 0; j < kNumCoeffs; ++j) {
#if CONFIG_VP9_HIGHBITDEPTH
const uint32_t diff =
const int diff =
bit_depth_ == VPX_BITS_8 ? dst[j] - ref[j] : dst16[j] - ref16[j];
#else
const uint32_t diff = dst[j] - ref[j];
const int diff = dst[j] - ref[j];
#endif
const uint32_t error = diff * diff;
EXPECT_EQ(0u, error)
<< "Error: 8x8 IDCT has error " << error
<< " at index " << j;
EXPECT_EQ(0u, error) << "Error: 8x8 IDCT has error " << error
<< " at index " << j;
}
}
}
@ -530,17 +523,16 @@ void CompareInvReference(IdctFunc ref_txfm, int thresh) {
int mask_;
};
class FwdTrans8x8DCT
: public FwdTrans8x8TestBase,
public ::testing::TestWithParam<Dct8x8Param> {
class FwdTrans8x8DCT : public FwdTrans8x8TestBase,
public ::testing::TestWithParam<Dct8x8Param> {
public:
virtual ~FwdTrans8x8DCT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
pitch_ = 8;
fwd_txfm_ref = fdct8x8_ref;
bit_depth_ = GET_PARAM(3);
mask_ = (1 << bit_depth_) - 1;
@ -560,37 +552,26 @@ class FwdTrans8x8DCT
IdctFunc inv_txfm_;
};
TEST_P(FwdTrans8x8DCT, SignBiasCheck) {
RunSignBiasCheck();
}
TEST_P(FwdTrans8x8DCT, SignBiasCheck) { RunSignBiasCheck(); }
TEST_P(FwdTrans8x8DCT, RoundTripErrorCheck) {
RunRoundTripErrorCheck();
}
TEST_P(FwdTrans8x8DCT, RoundTripErrorCheck) { RunRoundTripErrorCheck(); }
TEST_P(FwdTrans8x8DCT, ExtremalCheck) {
RunExtremalCheck();
}
TEST_P(FwdTrans8x8DCT, ExtremalCheck) { RunExtremalCheck(); }
TEST_P(FwdTrans8x8DCT, FwdAccuracyCheck) {
RunFwdAccuracyCheck();
}
TEST_P(FwdTrans8x8DCT, FwdAccuracyCheck) { RunFwdAccuracyCheck(); }
TEST_P(FwdTrans8x8DCT, InvAccuracyCheck) {
RunInvAccuracyCheck();
}
TEST_P(FwdTrans8x8DCT, InvAccuracyCheck) { RunInvAccuracyCheck(); }
class FwdTrans8x8HT
: public FwdTrans8x8TestBase,
public ::testing::TestWithParam<Ht8x8Param> {
class FwdTrans8x8HT : public FwdTrans8x8TestBase,
public ::testing::TestWithParam<Ht8x8Param> {
public:
virtual ~FwdTrans8x8HT() {}
virtual void SetUp() {
fwd_txfm_ = GET_PARAM(0);
inv_txfm_ = GET_PARAM(1);
tx_type_ = GET_PARAM(2);
pitch_ = 8;
fwd_txfm_ref = fht8x8_ref;
bit_depth_ = GET_PARAM(3);
mask_ = (1 << bit_depth_) - 1;
@ -610,21 +591,14 @@ class FwdTrans8x8HT
IhtFunc inv_txfm_;
};
TEST_P(FwdTrans8x8HT, SignBiasCheck) {
RunSignBiasCheck();
}
TEST_P(FwdTrans8x8HT, SignBiasCheck) { RunSignBiasCheck(); }
TEST_P(FwdTrans8x8HT, RoundTripErrorCheck) {
RunRoundTripErrorCheck();
}
TEST_P(FwdTrans8x8HT, RoundTripErrorCheck) { RunRoundTripErrorCheck(); }
TEST_P(FwdTrans8x8HT, ExtremalCheck) {
RunExtremalCheck();
}
TEST_P(FwdTrans8x8HT, ExtremalCheck) { RunExtremalCheck(); }
class InvTrans8x8DCT
: public FwdTrans8x8TestBase,
public ::testing::TestWithParam<Idct8x8Param> {
class InvTrans8x8DCT : public FwdTrans8x8TestBase,
public ::testing::TestWithParam<Idct8x8Param> {
public:
virtual ~InvTrans8x8DCT() {}
@ -664,10 +638,10 @@ INSTANTIATE_TEST_CASE_P(
make_tuple(&vpx_highbd_fdct8x8_c, &idct8x8_10, 0, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct8x8_c, &idct8x8_12, 0, VPX_BITS_12)));
#else
INSTANTIATE_TEST_CASE_P(
C, FwdTrans8x8DCT,
::testing::Values(
make_tuple(&vpx_fdct8x8_c, &vpx_idct8x8_64_add_c, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(C, FwdTrans8x8DCT,
::testing::Values(make_tuple(&vpx_fdct8x8_c,
&vpx_idct8x8_64_add_c, 0,
VPX_BITS_8)));
#endif // CONFIG_VP9_HIGHBITDEPTH
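Judging from SetUp() above, each Dct8x8Param tuple is (forward transform, inverse transform, tx_type, bit depth). A hypothetical extra instantiation following the same layout, purely illustrative and not part of this change (the "ExtraC" name is made up), would read:

  INSTANTIATE_TEST_CASE_P(ExtraC, FwdTrans8x8DCT,
                          ::testing::Values(make_tuple(&vpx_fdct8x8_c,
                                                       &vpx_idct8x8_64_add_c, 0,
                                                       VPX_BITS_8)));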
#if CONFIG_VP9_HIGHBITDEPTH
@ -697,11 +671,10 @@ INSTANTIATE_TEST_CASE_P(
#endif // CONFIG_VP9_HIGHBITDEPTH
#if HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
NEON, FwdTrans8x8DCT,
::testing::Values(
make_tuple(&vpx_fdct8x8_neon, &vpx_idct8x8_64_add_neon, 0,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(NEON, FwdTrans8x8DCT,
::testing::Values(make_tuple(&vpx_fdct8x8_neon,
&vpx_idct8x8_64_add_neon,
0, VPX_BITS_8)));
#endif // HAVE_NEON_ASM && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
@ -715,11 +688,10 @@ INSTANTIATE_TEST_CASE_P(
#endif // HAVE_NEON && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_SSE2 && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, FwdTrans8x8DCT,
::testing::Values(
make_tuple(&vpx_fdct8x8_sse2, &vpx_idct8x8_64_add_sse2, 0,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(SSE2, FwdTrans8x8DCT,
::testing::Values(make_tuple(&vpx_fdct8x8_sse2,
&vpx_idct8x8_64_add_sse2,
0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
SSE2, FwdTrans8x8HT,
::testing::Values(
@ -732,16 +704,16 @@ INSTANTIATE_TEST_CASE_P(
#if HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSE2, FwdTrans8x8DCT,
::testing::Values(
make_tuple(&vpx_fdct8x8_sse2, &vpx_idct8x8_64_add_c, 0, VPX_BITS_8),
make_tuple(&vpx_highbd_fdct8x8_c,
&idct8x8_64_add_10_sse2, 12, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct8x8_sse2,
&idct8x8_64_add_10_sse2, 12, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct8x8_c,
&idct8x8_64_add_12_sse2, 12, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct8x8_sse2,
&idct8x8_64_add_12_sse2, 12, VPX_BITS_12)));
::testing::Values(make_tuple(&vpx_fdct8x8_sse2, &vpx_idct8x8_64_add_c, 0,
VPX_BITS_8),
make_tuple(&vpx_highbd_fdct8x8_c, &idct8x8_64_add_10_sse2,
12, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct8x8_sse2,
&idct8x8_64_add_10_sse2, 12, VPX_BITS_10),
make_tuple(&vpx_highbd_fdct8x8_c, &idct8x8_64_add_12_sse2,
12, VPX_BITS_12),
make_tuple(&vpx_highbd_fdct8x8_sse2,
&idct8x8_64_add_12_sse2, 12, VPX_BITS_12)));
INSTANTIATE_TEST_CASE_P(
SSE2, FwdTrans8x8HT,
@ -756,30 +728,27 @@ INSTANTIATE_TEST_CASE_P(
INSTANTIATE_TEST_CASE_P(
SSE2, InvTrans8x8DCT,
::testing::Values(
make_tuple(&idct8x8_10_add_10_c,
&idct8x8_10_add_10_sse2, 6225, VPX_BITS_10),
make_tuple(&idct8x8_10,
&idct8x8_64_add_10_sse2, 6225, VPX_BITS_10),
make_tuple(&idct8x8_10_add_12_c,
&idct8x8_10_add_12_sse2, 6225, VPX_BITS_12),
make_tuple(&idct8x8_12,
&idct8x8_64_add_12_sse2, 6225, VPX_BITS_12)));
make_tuple(&idct8x8_10_add_10_c, &idct8x8_10_add_10_sse2, 6225,
VPX_BITS_10),
make_tuple(&idct8x8_10, &idct8x8_64_add_10_sse2, 6225, VPX_BITS_10),
make_tuple(&idct8x8_10_add_12_c, &idct8x8_10_add_12_sse2, 6225,
VPX_BITS_12),
make_tuple(&idct8x8_12, &idct8x8_64_add_12_sse2, 6225, VPX_BITS_12)));
#endif // HAVE_SSE2 && CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
#if HAVE_SSSE3 && CONFIG_USE_X86INC && ARCH_X86_64 && \
!CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
SSSE3, FwdTrans8x8DCT,
::testing::Values(
make_tuple(&vpx_fdct8x8_ssse3, &vpx_idct8x8_64_add_ssse3, 0,
VPX_BITS_8)));
#if HAVE_SSSE3 && ARCH_X86_64 && !CONFIG_VP9_HIGHBITDEPTH && \
!CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(SSSE3, FwdTrans8x8DCT,
::testing::Values(make_tuple(&vpx_fdct8x8_ssse3,
&vpx_idct8x8_64_add_ssse3,
0, VPX_BITS_8)));
#endif
#if HAVE_MSA && !CONFIG_VP9_HIGHBITDEPTH && !CONFIG_EMULATE_HARDWARE
INSTANTIATE_TEST_CASE_P(
MSA, FwdTrans8x8DCT,
::testing::Values(
make_tuple(&vpx_fdct8x8_msa, &vpx_idct8x8_64_add_msa, 0, VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(MSA, FwdTrans8x8DCT,
::testing::Values(make_tuple(&vpx_fdct8x8_msa,
&vpx_idct8x8_64_add_msa, 0,
VPX_BITS_8)));
INSTANTIATE_TEST_CASE_P(
MSA, FwdTrans8x8HT,
::testing::Values(
View File
@ -13,12 +13,11 @@
namespace {
class VP9FrameSizeTestsLarge
: public ::libvpx_test::EncoderTest,
public ::testing::Test {
class VP9FrameSizeTestsLarge : public ::libvpx_test::EncoderTest,
public ::testing::Test {
protected:
VP9FrameSizeTestsLarge() : EncoderTest(&::libvpx_test::kVP9),
expected_res_(VPX_CODEC_OK) {}
VP9FrameSizeTestsLarge()
: EncoderTest(&::libvpx_test::kVP9), expected_res_(VPX_CODEC_OK) {}
virtual ~VP9FrameSizeTestsLarge() {}
virtual void SetUp() {
@ -27,7 +26,7 @@ class VP9FrameSizeTestsLarge
}
virtual bool HandleDecodeResult(const vpx_codec_err_t res_dec,
const libvpx_test::VideoSource& /*video*/,
const libvpx_test::VideoSource & /*video*/,
libvpx_test::Decoder *decoder) {
EXPECT_EQ(expected_res_, res_dec) << decoder->DecodeError();
return !::testing::Test::HasFailure();
@ -67,13 +66,13 @@ TEST_F(VP9FrameSizeTestsLarge, ValidSizes) {
expected_res_ = VPX_CODEC_OK;
ASSERT_NO_FATAL_FAILURE(RunLoop(&video));
#else
// This test produces a pretty large single frame allocation (roughly
// 25 megabits). The encoder allocates a good number of these frames:
// one for each lag in frames (for 2 pass), and then one for each possible
// reference buffer (8) - we can end up with up to 30 buffers of roughly this
// size, or almost 1 gig of memory.
// In total the allocations will exceed 2GiB, which may cause a failure with
// mingw + wine; use a smaller size in that case.
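// Back-of-the-envelope, reading the "25 megabits" above as megabytes (which
// matches the totals quoted: 30 buffers * ~25 MB ~= 750 MB, i.e. "almost
// 1 gig"). Even the reduced 4096x3072 size chosen below for the 32-bit
// Windows / OS2 builds is 4096 * 3072 * 1.5 ~= 18.9 MiB per raw 8-bit I420
// frame before any border or alignment padding.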
#if defined(_WIN32) && !defined(_WIN64) || defined(__OS2__)
video.SetSize(4096, 3072);
#else
View File
@ -0,0 +1,220 @@
/*
* Copyright (c) 2016 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <algorithm>
#include "third_party/googletest/src/include/gtest/gtest.h"
#include "./vpx_dsp_rtcd.h"
#include "test/acm_random.h"
#include "test/register_state_check.h"
namespace {
using ::libvpx_test::ACMRandom;
typedef void (*HadamardFunc)(const int16_t *a, int a_stride, int16_t *b);
void hadamard_loop(const int16_t *a, int a_stride, int16_t *out) {
int16_t b[8];
for (int i = 0; i < 8; i += 2) {
b[i + 0] = a[i * a_stride] + a[(i + 1) * a_stride];
b[i + 1] = a[i * a_stride] - a[(i + 1) * a_stride];
}
int16_t c[8];
for (int i = 0; i < 8; i += 4) {
c[i + 0] = b[i + 0] + b[i + 2];
c[i + 1] = b[i + 1] + b[i + 3];
c[i + 2] = b[i + 0] - b[i + 2];
c[i + 3] = b[i + 1] - b[i + 3];
}
out[0] = c[0] + c[4];
out[7] = c[1] + c[5];
out[3] = c[2] + c[6];
out[4] = c[3] + c[7];
out[2] = c[0] - c[4];
out[6] = c[1] - c[5];
out[1] = c[2] - c[6];
out[5] = c[3] - c[7];
}
void reference_hadamard8x8(const int16_t *a, int a_stride, int16_t *b) {
int16_t buf[64];
for (int i = 0; i < 8; ++i) {
hadamard_loop(a + i, a_stride, buf + i * 8);
}
for (int i = 0; i < 8; ++i) {
hadamard_loop(buf + i, 8, b + i * 8);
}
}
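hadamard_loop() above is a three-stage add/subtract butterfly, i.e. an unnormalized 8-point Walsh-Hadamard transform: every output is a +/-1 combination of the eight inputs, written out in a fixed permutation of the natural butterfly order (visible in the out[0], out[7], out[3], ... assignments). In matrix terms, up to that permutation, it applies

  H_8 = H_2 (x) H_2 (x) H_2,  where H_2 = [[1, 1], [1, -1]] and (x) is the Kronecker product.

reference_hadamard8x8() then applies it separably, down the columns of the source block and across the rows of the intermediate, so each of the 64 outputs is a +/-1 sum of all 64 input samples. Since the tests below sort both outputs before comparing, only this multiset of coefficients has to match, not any particular ordering convention.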
void reference_hadamard16x16(const int16_t *a, int a_stride, int16_t *b) {
/* The source is a 16x16 block. The destination is rearranged to 8x32.
* Input is 9 bit. */
reference_hadamard8x8(a + 0 + 0 * a_stride, a_stride, b + 0);
reference_hadamard8x8(a + 8 + 0 * a_stride, a_stride, b + 64);
reference_hadamard8x8(a + 0 + 8 * a_stride, a_stride, b + 128);
reference_hadamard8x8(a + 8 + 8 * a_stride, a_stride, b + 192);
/* Overlay the 8x8 blocks and combine. */
for (int i = 0; i < 64; ++i) {
/* 8x8 steps the range up to 15 bits. */
const int16_t a0 = b[0];
const int16_t a1 = b[64];
const int16_t a2 = b[128];
const int16_t a3 = b[192];
/* Prevent the result from escaping int16_t. */
const int16_t b0 = (a0 + a1) >> 1;
const int16_t b1 = (a0 - a1) >> 1;
const int16_t b2 = (a2 + a3) >> 1;
const int16_t b3 = (a2 - a3) >> 1;
/* Store a 16 bit value. */
b[0] = b0 + b2;
b[64] = b1 + b3;
b[128] = b0 - b2;
b[192] = b1 - b3;
++b;
}
}
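The bit-width comments inside reference_hadamard16x16() work out as a rough bound, assuming the |x| <= 256 range of the Rand9Signed() inputs used by the tests below:

  |a0| .. |a3|          <= 64 * 256 = 2^14   (the "15 bits" above, counting the sign)
  |a0 + a1|, |a2 - a3|  <= 2^15              (computed as int; the >> 1 brings it back to 2^14)
  |b0 + b2|, |b1 - b3|  <= 2^15              (still fits an int16_t store)

Without the halving, the final combination of the four 8x8 blocks could reach 2^16 and wrap in the int16_t store.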
class HadamardTestBase : public ::testing::TestWithParam<HadamardFunc> {
public:
virtual void SetUp() {
h_func_ = GetParam();
rnd_.Reset(ACMRandom::DeterministicSeed());
}
protected:
HadamardFunc h_func_;
ACMRandom rnd_;
};
class Hadamard8x8Test : public HadamardTestBase {};
TEST_P(Hadamard8x8Test, CompareReferenceRandom) {
DECLARE_ALIGNED(16, int16_t, a[64]);
DECLARE_ALIGNED(16, int16_t, b[64]);
int16_t b_ref[64];
for (int i = 0; i < 64; ++i) {
a[i] = rnd_.Rand9Signed();
}
memset(b, 0, sizeof(b));
memset(b_ref, 0, sizeof(b_ref));
reference_hadamard8x8(a, 8, b_ref);
ASM_REGISTER_STATE_CHECK(h_func_(a, 8, b));
// The order of the output is not important. Sort before checking.
std::sort(b, b + 64);
std::sort(b_ref, b_ref + 64);
EXPECT_EQ(0, memcmp(b, b_ref, sizeof(b)));
}
TEST_P(Hadamard8x8Test, VaryStride) {
DECLARE_ALIGNED(16, int16_t, a[64 * 8]);
DECLARE_ALIGNED(16, int16_t, b[64]);
int16_t b_ref[64];
for (int i = 0; i < 64 * 8; ++i) {
a[i] = rnd_.Rand9Signed();
}
for (int i = 8; i < 64; i += 8) {
memset(b, 0, sizeof(b));
memset(b_ref, 0, sizeof(b_ref));
reference_hadamard8x8(a, i, b_ref);
ASM_REGISTER_STATE_CHECK(h_func_(a, i, b));
// The order of the output is not important. Sort before checking.
std::sort(b, b + 64);
std::sort(b_ref, b_ref + 64);
EXPECT_EQ(0, memcmp(b, b_ref, sizeof(b)));
}
}
INSTANTIATE_TEST_CASE_P(C, Hadamard8x8Test,
::testing::Values(&vpx_hadamard_8x8_c));
#if HAVE_SSE2
INSTANTIATE_TEST_CASE_P(SSE2, Hadamard8x8Test,
::testing::Values(&vpx_hadamard_8x8_sse2));
#endif // HAVE_SSE2
#if HAVE_SSSE3 && ARCH_X86_64
INSTANTIATE_TEST_CASE_P(SSSE3, Hadamard8x8Test,
::testing::Values(&vpx_hadamard_8x8_ssse3));
#endif // HAVE_SSSE3 && ARCH_X86_64
#if HAVE_NEON
INSTANTIATE_TEST_CASE_P(NEON, Hadamard8x8Test,
::testing::Values(&vpx_hadamard_8x8_neon));
#endif // HAVE_NEON
class Hadamard16x16Test : public HadamardTestBase {};
TEST_P(Hadamard16x16Test, CompareReferenceRandom) {
DECLARE_ALIGNED(16, int16_t, a[16 * 16]);
DECLARE_ALIGNED(16, int16_t, b[16 * 16]);
int16_t b_ref[16 * 16];
for (int i = 0; i < 16 * 16; ++i) {
a[i] = rnd_.Rand9Signed();
}
memset(b, 0, sizeof(b));
memset(b_ref, 0, sizeof(b_ref));
reference_hadamard16x16(a, 16, b_ref);
ASM_REGISTER_STATE_CHECK(h_func_(a, 16, b));
// The order of the output is not important. Sort before checking.
std::sort(b, b + 16 * 16);
std::sort(b_ref, b_ref + 16 * 16);
EXPECT_EQ(0, memcmp(b, b_ref, sizeof(b)));
}
TEST_P(Hadamard16x16Test, VaryStride) {
DECLARE_ALIGNED(16, int16_t, a[16 * 16 * 8]);
DECLARE_ALIGNED(16, int16_t, b[16 * 16]);
int16_t b_ref[16 * 16];
for (int i = 0; i < 16 * 16 * 8; ++i) {
a[i] = rnd_.Rand9Signed();
}
for (int i = 8; i < 64; i += 8) {
memset(b, 0, sizeof(b));
memset(b_ref, 0, sizeof(b_ref));
reference_hadamard16x16(a, i, b_ref);
ASM_REGISTER_STATE_CHECK(h_func_(a, i, b));
// The order of the output is not important. Sort before checking.
std::sort(b, b + 16 * 16);
std::sort(b_ref, b_ref + 16 * 16);
EXPECT_EQ(0, memcmp(b, b_ref, sizeof(b)));
}
}
INSTANTIATE_TEST_CASE_P(C, Hadamard16x16Test,
::testing::Values(&vpx_hadamard_16x16_c));
#if HAVE_SSE2
INSTANTIATE_TEST_CASE_P(SSE2, Hadamard16x16Test,
::testing::Values(&vpx_hadamard_16x16_sse2));
#endif // HAVE_SSE2
#if HAVE_NEON
INSTANTIATE_TEST_CASE_P(NEON, Hadamard16x16Test,
::testing::Values(&vpx_hadamard_16x16_neon));
#endif // HAVE_NEON
} // namespace
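Assuming the standard test_libvpx driver produced by the libvpx build, any of the instantiations above can be run in isolation with gtest's name filter, for example --gtest_filter='SSE2/Hadamard8x8Test.*' for just the SSE2 8x8 cases; the reference_hadamard8x8() and reference_hadamard16x16() functions in this file are the baseline every instantiation is checked against.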