switch_buffer_zero(context->vpx_packet_buffer);
}
- if (frame->img) {
- switch_set_flag(frame, SFF_USE_VIDEO_TIMESTAMP);
- } else {
+ if (!frame->img) {
+ //switch_set_flag(frame, SFF_USE_VIDEO_TIMESTAMP);
+ //} else {
status = SWITCH_STATUS_MORE_DATA;
}
//DFF
- if (0 && rtp_session->flags[SWITCH_RTP_FLAG_VIDEO] && *bytes > 94) {
+ if (rtp_session->flags[SWITCH_RTP_FLAG_VIDEO] && *bytes > 94) {
//(rtp_session->rtcp_recv_msg_p->header.type == 205 || //RTPFB
//rtp_session->rtcp_recv_msg_p->header.type == 206)) {//PSFB
}
/* TMP DISABLE DFF */
- if (0 && switch_rtp_test_flag(rtp_session, SWITCH_RTP_FLAG_VIDEO)) {
+ if (switch_rtp_test_flag(rtp_session, SWITCH_RTP_FLAG_VIDEO)) {
/* Normalize the timestamps to our own base by generating a made-up starting point, then adding the measured deltas to that base,
so that if the timestamps and SSRC of the source change, it will not break the other end's jitter buffer / decoder etc. *cough* CHROME *cough*
*/
rtp_session->ts_norm.last_ssrc = send_msg->header.ssrc;
rtp_session->ts_norm.last_frame = ntohl(send_msg->header.ts);
}
-
+
if (ntohl(send_msg->header.ts) != rtp_session->ts_norm.last_frame) {
rtp_session->ts_norm.delta = ntohl(send_msg->header.ts) - rtp_session->ts_norm.last_frame;
rtp_session->ts_norm.ts += rtp_session->ts_norm.delta;
+ //switch_core_timer_sync(&rtp_session->timer);
+ //printf("W00t %d\n", rtp_session->timer.samplecount);
+ //rtp_session->ts_norm.ts = rtp_session->timer.samplecount;
}
rtp_session->ts_norm.last_frame = ntohl(send_msg->header.ts);
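/* Illustrative sketch only, not the FreeSWITCH API: the struct and function
 * below are hypothetical, but they model the same normalization idea as
 * rtp_session->ts_norm above -- re-base the source's timestamps onto a
 * locally owned timeline so that SSRC changes or timestamp jumps from the
 * source never reach the remote jitter buffer / decoder.
 */
#include <stdint.h>

typedef struct {
	uint32_t last_ssrc;   /* SSRC seen on the previous packet           */
	uint32_t last_frame;  /* source timestamp of the previous packet    */
	uint32_t delta;       /* last measured source-side timestamp step   */
	uint32_t ts;          /* normalized timestamp on our own timeline   */
} ts_norm_sketch_t;

/* Return the timestamp to send for a packet that arrived with source
 * timestamp src_ts and SSRC src_ssrc (both already in host byte order). */
static uint32_t normalize_ts(ts_norm_sketch_t *n, uint32_t src_ssrc, uint32_t src_ts)
{
	if (n->last_ssrc != src_ssrc) {
		/* New source: remember where it starts, but leave our own
		 * timeline where it is so the receiver sees no discontinuity. */
		n->last_ssrc = src_ssrc;
		n->last_frame = src_ts;
	}

	if (src_ts != n->last_frame) {
		/* Advance our timeline by the same amount the source advanced.
		 * Unsigned arithmetic keeps this correct across 32-bit wrap. */
		n->delta = src_ts - n->last_frame;
		n->ts += n->delta;
	}

	n->last_frame = src_ts;
	return n->ts; /* the caller would htonl() this into the outgoing header */
}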