[xiph-commits] r12913 - in trunk/theora-exp: examples lib unix
tterribe at svn.xiph.org
Wed May 2 21:29:18 PDT 2007
Author: tterribe
Date: 2007-05-02 21:29:18 -0700 (Wed, 02 May 2007)
New Revision: 12913
Added:
trunk/theora-exp/examples/rehuff.c
trunk/theora-exp/lib/recode.c
trunk/theora-exp/lib/recode.h
Modified:
trunk/theora-exp/unix/Makefile
Log:
A first pass at a rehuff tool for Theora.
Compression improvement is only on the order of 1-2% for most files.
Build support is only included in the unix Makefile for now.
This should support Theora+Vorbis streams (with an arbitrary number of them
concurrently multiplexed and possibly chained as well; these features are,
however, totally untested).
No support for other codecs is planned, since it would require additional
dependencies.
The long-term solution to that problem is a real tool (possibly using liboggz)
in a separate package.
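
For anyone who wants to try it: based on the usage() string and option table
in rehuff.c below, an invocation looks roughly like this (the file names are
placeholders only):

  rehuff -s stats.txt in.ogg out.ogg

Here -s/--output-stats dumps the per-frame DCT token histograms, and
-o/--output can be used in place of the positional output file name.
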
Added: trunk/theora-exp/examples/rehuff.c
===================================================================
--- trunk/theora-exp/examples/rehuff.c (rev 0)
+++ trunk/theora-exp/examples/rehuff.c 2007-05-03 04:29:18 UTC (rev 12913)
@@ -0,0 +1,1192 @@
+/********************************************************************
+ * *
+ * THIS FILE IS PART OF THE OggTheora SOFTWARE CODEC SOURCE CODE. *
+ * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
+ * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
+ * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
+ * *
+ * THE Theora SOURCE CODE IS COPYRIGHT (C) 2002-2006 *
+ * by the Xiph.Org Foundation http://www.xiph.org/ *
+ * *
+ ********************************************************************
+
+ function: example rehuff application; optimizes the Huffman codes
+ for Theora streams.
+ last mod: $Id: rehuff.c,v 1.2 2004/03/24 19:12:42 derf Exp $
+
+ ********************************************************************/
+
+#if !defined(_REENTRANT)
+#define _REENTRANT
+#endif
+#if !defined(_GNU_SOURCE)
+#define _GNU_SOURCE
+#endif
+#if !defined(_LARGEFILE_SOURCE)
+#define _LARGEFILE_SOURCE
+#endif
+#if !defined(_LARGEFILE64_SOURCE)
+#define _LARGEFILE64_SOURCE
+#endif
+#if !defined(_FILE_OFFSET_BITS)
+#define _FILE_OFFSET_BITS 64
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <float.h>
+#include <limits.h>
+#include <math.h>
+#include <string.h>
+/*Yes, yes, we're going to hell.*/
+#if defined(_WIN32)
+#include <io.h>
+#include <fcntl.h>
+#endif
+#include "getopt.h"
+#include "../lib/recode.h"
+#include <vorbis/codec.h>
+
+
+
+#define OC_MIN(_a,_b) ((_a)<(_b)?(_a):(_b))
+#define OC_MAX(_a,_b) ((_a)>(_b)?(_a):(_b))
+/*The ANSI offsetof macro is broken on some platforms (e.g., older DECs).*/
+#define _ogg_offsetof(_type,_field)\
+ ((size_t)((char *)&((_type *)0)->_field-(char *)0))
+
+
+
+typedef struct oc_huff_entry oc_huff_entry;
+
+
+
+struct oc_huff_entry{
+ oc_huff_entry *next;
+ oc_huff_entry *children[2];
+ ogg_int64_t freq;
+ int token;
+ int min_token;
+};
+
+
+
+/*Fills in an array of Huffman codes given a Huffman tree.
+ _entry: The root of the current branch of the tree.
+ _codes: The array in which to store the codes.
+ _pattern: The prefix required to reach the current branch of the tree.
+ _nbits: The number of bits in the prefix.*/
+static void huff_codes_create(oc_huff_entry *_entry,
+ th_huff_code _codes[TH_NDCT_TOKENS],int _pattern,int _nbits){
+ if(_entry->children[0]==NULL&&_entry->children[1]==NULL){
+ th_huff_code *code;
+ code=_codes+_entry->token;
+ code->pattern=_pattern;
+ code->nbits=_nbits;
+ }
+ else{
+ _nbits++;
+ huff_codes_create(_entry->children[0],_codes,_pattern<<1,_nbits);
+ huff_codes_create(_entry->children[1],_codes,_pattern<<1|1,_nbits);
+ }
+}
+
+/*Inserts an element into a singly linked list of Huffman tree nodes.
+ The element is inserted in ascending order by frequency.
+ _root: The start of the list.
+ _entry: The entry to insert.
+  Return: The new head of the list.*/
+static oc_huff_entry *huff_list_ins(oc_huff_entry *_root,
+ oc_huff_entry *_entry){
+ oc_huff_entry **pnext;
+ oc_huff_entry *search;
+ for(pnext=&_root,search=_root;search!=NULL&&search->freq<_entry->freq;
+ pnext=&search->next,search=search->next);
+ _entry->next=search;
+ *pnext=_entry;
+ return _root;
+}
+
+/*Creates a list of Huffman tree nodes for the given frequency table, sorted in
+ ascending order.
+ _freqs: A list of frequency counts.
+ All counts less than 1 will be upgraded to 1.
+ Return: A pointer to the first tree node in the list.
+ At this point, the nodes just form a list, and are not arranged in a
+ single tree.*/
+static oc_huff_entry *huff_list_create(oc_huff_entry _entries[TH_NDCT_TOKENS],
+ const ogg_int64_t _freqs[TH_NDCT_TOKENS]){
+ oc_huff_entry *root;
+ int ti;
+ root=NULL;
+ for(ti=0;ti<TH_NDCT_TOKENS;ti++){
+ oc_huff_entry *entry;
+ /*Create a new entry for this value.*/
+ entry=_entries+ti;
+ entry->children[0]=entry->children[1]=NULL;
+ entry->token=ti;
+ entry->min_token=ti;
+ entry->freq=_freqs[ti]<<5;
+ if(entry->freq<=0)entry->freq=1;
+ root=huff_list_ins(root,entry);
+ }
+ return root;
+}
+
+/*Builds a complete Huffman code for a given frequency table.
+ _codes: The array to store the Huffman codes in.
+ _freqs: The frequency table to build the tree from.*/
+static void huff_code_build(th_huff_code _codes[TH_NDCT_TOKENS],
+ const ogg_int64_t _freqs[TH_NDCT_TOKENS]){
+ oc_huff_entry entries[(TH_NDCT_TOKENS<<1)-1];
+ oc_huff_entry *root;
+ oc_huff_entry *next;
+ /*Create an initial sorted list of what will become the leaf nodes of the
+ tree.*/
+ root=huff_list_create(entries,_freqs);
+ next=entries+TH_NDCT_TOKENS;
+ /*Merge pairs of nodes until the tree is complete.*/
+ while(root->next!=NULL){
+ oc_huff_entry *entry;
+ entry=next++;
+ entry->children[0]=root;
+ entry->children[1]=root->next;
+ /*Sort the codes so that the branch containing the smaller minimum token
+ value is on the left.
+ This ensures that a bitstream of all 0's decodes to an EOB token, which
+ is useful if a packet gets truncated.*/
+ if(root->next->min_token<root->min_token){
+ entry->children[0]=root->next;
+ entry->children[1]=root;
+ }
+ entry->min_token=entry->children[0]->min_token;
+ entry->token=-1;
+ entry->freq=root->freq+root->next->freq;
+ root=huff_list_ins(root->next->next,entry);
+ }
+ /*Create the Huffman codes corresponding to the generated tree.*/
+ huff_codes_create(root,_codes,0,0);
+}
+
+/*Builds a complete Huffman code for a given frequency table.
+ _codes: The array to store the Huffman codes in.
+ _freqs: The frequency table to build the tree from.*/
+static void huff_code_buildi(th_huff_code _codes[TH_NDCT_TOKENS],
+ const int _freqs[TH_NDCT_TOKENS]){
+ ogg_int64_t freqs[TH_NDCT_TOKENS];
+ int ti;
+ for(ti=0;ti<TH_NDCT_TOKENS;ti++)freqs[ti]=_freqs[ti];
+ huff_code_build(_codes,freqs);
+}
+
+/*Prints C code for an array of Huffman codes.*/
+static void huff_codes_print(const char *_symbol,
+ th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]){
+ int maxlen;
+ int ti;
+ int tj;
+ maxlen=0;
+ printf("th_huff_code %s[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]={",_symbol);
+ for(ti=0;ti<TH_NHUFFMAN_TABLES;ti++){
+ for(tj=0;tj<TH_NDCT_TOKENS;tj++){
+ if(_codes[ti][tj].nbits>maxlen)maxlen=_codes[ti][tj].nbits;
+ }
+ }
+ maxlen=maxlen+3>>2;
+ for(ti=0;ti<TH_NHUFFMAN_TABLES;ti++){
+ if(ti>0)printf(",");
+ printf("\n {");
+ for(tj=0;tj<TH_NDCT_TOKENS;tj++){
+ if(tj>0)printf(",");
+ if((tj&3)==0)printf("\n ");
+ printf("{0x%0*X,%2i}",maxlen,_codes[ti][tj].pattern,_codes[ti][tj].nbits);
+ }
+ printf("\n }");
+ }
+ printf("\n};\n");
+}
+
+
+
+/*Farthest point clustering and k-means clustering code based on work
+ originally by Nathan E. Egge.*/
+
+/*Compute the total number of tokens in a frequency count table.*/
+int count_tokens(const int *_freqs,int _nfreqs){
+ int ti;
+ int c;
+ c=0;
+ for(ti=0;ti<_nfreqs;ti++)c+=_freqs[ti];
+ return c;
+}
+
+/*Compute the quantized cross entropy of a given vector by counting the number
+ of bits it takes to encode it with the Huffman table from another vector.*/
+int count_bits(const int *_freqs,int _nfreqs,const th_huff_code *_codes){
+ int ti;
+ int c;
+ c=0;
+ for(ti=0;ti<_nfreqs;ti++)c+=_freqs[ti]*_codes[ti].nbits;
+ return c;
+}
+
+typedef struct oc_tok_vec oc_tok_vec;
+
+struct oc_tok_vec{
+ const int *freqs;
+ int entropy;
+ int dist;
+ int idx;
+};
+
+#define NDC_TOKENS (TH_NDCT_TOKENS)
+#define NAC_TOKENS (TH_NDCT_TOKENS<<2)
+#define NTOKENS_MAX (OC_MAX(NDC_TOKENS,NAC_TOKENS))
+
+void oc_tok_vec_init(oc_tok_vec *_vec,const int *_freqs,int _nfreqs){
+ th_huff_code codes[NTOKENS_MAX];
+ int ti;
+ _vec->freqs=_freqs;
+ for(ti=0;ti<_nfreqs;ti+=TH_NDCT_TOKENS)huff_code_buildi(codes+ti,_freqs+ti);
+ _vec->entropy=count_bits(_freqs,_nfreqs,codes);
+ _vec->dist=INT_MAX;
+ _vec->idx=-1;
+}
+
+/*Farthest point clustering.
+ At each step, a new cluster center is added that is the vector that is the
+ farthest from any of the existing cluster centers.
+ Return: The total number of wasted bits.*/
+ogg_int64_t tok_vecs_fpc(oc_tok_vec *_vecs,long _nvecs,int _nfreqs,
+ int _cis[16]){
+ ogg_int64_t ret;
+ long vi;
+ int best_ntoks;
+ int best_vi;
+ int ntoks;
+ int ti;
+ int cii;
+ /*Initialize the first cluster as the frame with the most tokens.*/
+ best_ntoks=best_vi=-1;
+ for(vi=0;vi<_nvecs;vi++){
+ ntoks=count_tokens(_vecs[vi].freqs,_nfreqs);
+ if(ntoks>best_ntoks){
+ best_ntoks=ntoks;
+ best_vi=vi;
+ }
+ }
+ for(cii=0;cii<16;cii++){
+ int dist;
+ int max_dist;
+ int max_ntoks;
+ th_huff_code codes[NTOKENS_MAX];
+ _cis[cii]=best_vi;
+ /*Build the optimal Huffman code for the new cluster center.*/
+ for(ti=0;ti<_nfreqs;ti+=TH_NDCT_TOKENS){
+ huff_code_buildi(codes+ti,_vecs[best_vi].freqs+ti);
+ }
+ /*Update cluster membership to see if the new cluster center is closer.*/
+ max_dist=0;
+ max_ntoks=0;
+ ret=0;
+ for(vi=0;vi<_nvecs;vi++){
+ dist=count_bits(_vecs[vi].freqs,_nfreqs,codes)-_vecs[vi].entropy;
+ if(dist<=_vecs[vi].dist){
+ _vecs[vi].dist=dist;
+ _vecs[vi].idx=cii;
+ }
+ else dist=_vecs[vi].dist;
+ ret+=dist;
+ ntoks=count_tokens(_vecs[vi].freqs,_nfreqs);
+      /*The distances are inversely weighted by the number of tokens, so that
+         we are measuring the average number of bits wasted, not the total.*/
+ if(dist*(ogg_int64_t)max_ntoks>=max_dist*(ogg_int64_t)ntoks){
+ max_dist=dist;
+ max_ntoks=ntoks;
+ best_vi=vi;
+ }
+ }
+ }
+ fprintf(stderr,"After FPC: %lli wasted bits.\n",ret);
+ return ret;
+}
+
+/*A single iteration of K-means clustering.
+ New cluster centers are computed from the current cluster members, and then
+  the vectors are redistributed to their new closest clusters.
+ Return: The total number of wasted bits.*/
+ogg_int64_t tok_vecs_kmeans(oc_tok_vec *_vecs,long _nvecs,int _nfreqs,
+ th_huff_code _codes[16][NTOKENS_MAX],int *_converged){
+ ogg_int64_t freqs[16][NTOKENS_MAX];
+ ogg_int64_t ret;
+ int converged;
+ long vi;
+ int ti;
+ int ci;
+ /*Gather the frequency statistics for all the clusters.*/
+ memset(freqs,0,sizeof(freqs));
+ for(vi=0;vi<_nvecs;vi++)for(ti=0;ti<_nfreqs;ti++){
+ freqs[_vecs[vi].idx][ti]+=_vecs[vi].freqs[ti];
+ }
+ /*Build the new Huffman codes for these clusters.*/
+ for(ci=0;ci<16;ci++)for(ti=0;ti<_nfreqs;ti+=TH_NDCT_TOKENS){
+ huff_code_build(_codes[ci]+ti,freqs[ci]+ti);
+ }
+ /*Re-assign points into the new clusters.*/
+ converged=1;
+ ret=0;
+ for(vi=0;vi<_nvecs;vi++){
+ int old_idx;
+ old_idx=_vecs[vi].idx;
+ _vecs[vi].dist=INT_MAX;
+ _vecs[vi].idx=0;
+ for(ci=0;ci<16;ci++){
+ int dist;
+ dist=count_bits(_vecs[vi].freqs,_nfreqs,_codes[ci])-_vecs[vi].entropy;
+ if(dist<=_vecs[vi].dist){
+ _vecs[vi].dist=dist;
+ _vecs[vi].idx=ci;
+ }
+ }
+ if(_vecs[vi].idx!=old_idx)converged=0;
+ ret+=_vecs[vi].dist;
+ }
+ fprintf(stderr,"After K-means: %lli wasted bits.\n",ret);
+ *_converged=converged;
+ return ret;
+}
+
+/*Initialize a set of token vectors from the given frame statistics.*/
+long tok_vecs_init(oc_tok_vec **_dc_vecs,oc_tok_vec **_ac_vecs,
+ const oc_frame_tok_hist *_tok_hists,long _ntok_hists){
+ oc_tok_vec *dc_vecs;
+ oc_tok_vec *ac_vecs;
+ long vi;
+ long fi;
+ dc_vecs=(oc_tok_vec *)_ogg_malloc((_ntok_hists<<1)*sizeof(*dc_vecs));
+ ac_vecs=(oc_tok_vec *)_ogg_malloc((_ntok_hists<<1)*sizeof(*ac_vecs));
+ for(fi=vi=0;fi<_ntok_hists;fi++){
+ oc_tok_vec_init(dc_vecs+vi,_tok_hists[fi].tok_hist[0][0],NDC_TOKENS);
+ oc_tok_vec_init(ac_vecs+vi,_tok_hists[fi].tok_hist[0][1],NAC_TOKENS);
+ vi++;
+ oc_tok_vec_init(dc_vecs+vi,_tok_hists[fi].tok_hist[1][0],NDC_TOKENS);
+ oc_tok_vec_init(ac_vecs+vi,_tok_hists[fi].tok_hist[1][1],NAC_TOKENS);
+ vi++;
+ }
+ *_dc_vecs=dc_vecs;
+ *_ac_vecs=ac_vecs;
+ return vi;
+}
+
+
+
+typedef struct th_rehuff_ctx th_rehuff_ctx;
+typedef struct page_queue page_queue;
+typedef struct ov_passthrough_ctx ov_passthrough_ctx;
+
+struct th_rehuff_ctx{
+ ogg_stream_state to;
+ ogg_stream_state tp;
+ ogg_page og;
+ unsigned char *page_data;
+ int cpage_data;
+ long serialno;
+ int processing_headers;
+ int page_ready;
+ th_info ti;
+ th_comment tc;
+ th_setup_info *ts;
+ th_rec_ctx *tr;
+};
+
+static void th_rehuff_init(th_rehuff_ctx *_ctx,const ogg_stream_state *_to,
+ const th_info *_ti,int _processing_headers){
+ /*Copy the preliminary data used to parse the BOS packet.*/
+ memcpy(&_ctx->to,_to,sizeof(_ctx->to));
+ _ctx->serialno=_to->serialno;
+ _ctx->processing_headers=_processing_headers;
+ memcpy(&_ctx->ti,_ti,sizeof(_ctx->ti));
+  /*Init supporting Theora structures needed in header parsing.*/
+ th_comment_init(&_ctx->tc);
+ _ctx->ts=NULL;
+ _ctx->page_ready=0;
+ _ctx->page_data=NULL;
+ _ctx->cpage_data=0;
+ ogg_stream_init(&_ctx->tp,_ctx->serialno);
+}
+
+static void th_rehuff_clear(th_rehuff_ctx *_ctx){
+ ogg_stream_clear(&_ctx->to);
+ ogg_stream_clear(&_ctx->tp);
+ _ogg_free(_ctx->page_data);
+ th_info_clear(&_ctx->ti);
+ th_comment_clear(&_ctx->tc);
+ th_setup_free(_ctx->ts);
+ th_recode_free(_ctx->tr);
+}
+
+static void th_rehuff_copy_page_data(th_rehuff_ctx *_ctx){
+ int npage_data;
+ npage_data=_ctx->og.header_len+_ctx->og.body_len;
+ if(_ctx->cpage_data<npage_data){
+ _ctx->page_data=(unsigned char *)_ogg_realloc(_ctx->page_data,npage_data);
+ _ctx->cpage_data=npage_data;
+ }
+ memcpy(_ctx->page_data,_ctx->og.header,_ctx->og.header_len);
+ memcpy(_ctx->page_data+_ctx->og.header_len,_ctx->og.body,_ctx->og.body_len);
+ _ctx->og.header=_ctx->page_data;
+ _ctx->og.body=_ctx->page_data+_ctx->og.header_len;
+}
+
+static int th_rehuff_pagein(th_rehuff_ctx *_ctx,ogg_page *_og){
+ ogg_packet op;
+ int ret;
+ ret=ogg_stream_pagein(&_ctx->to,_og);
+ if(ret<0)return ret;
+ /*Recode all the packets available.*/
+ while(ogg_stream_packetout(&_ctx->to,&op)>0){
+ int err;
+ if(!th_packet_isheader(&op)){
+ ogg_packet oq;
+ err=th_recode_packet_rewrite(_ctx->tr,&op,&oq);
+ if(err<0){
+ ret=err;
+ break;
+ }
+ ogg_stream_packetin(&_ctx->tp,&oq);
+ }
+ }
+ /*TODO: Also flush packets after a certain elapsed time.*/
+ if(!_ctx->page_ready){
+ _ctx->page_ready=ogg_stream_pageout(&_ctx->tp,&_ctx->og)>0;
+ if(_ctx->page_ready)th_rehuff_copy_page_data(_ctx);
+ }
+ return ret;
+}
+
+static double th_rehuff_pagetime(th_rehuff_ctx *_ctx,int _flush){
+ if(!_ctx->page_ready){
+ if(!_flush)return -2;
+ _ctx->page_ready=ogg_stream_flush(&_ctx->tp,&_ctx->og)>0;
+ if(_ctx->page_ready)th_rehuff_copy_page_data(_ctx);
+ else return -2;
+ }
+ return th_granule_time(_ctx->tr,ogg_page_granulepos(&_ctx->og));
+}
+
+static size_t th_rehuff_writepage(th_rehuff_ctx *_ctx,FILE *_out){
+ size_t ret;
+ ret=fwrite(_ctx->og.header,1,_ctx->og.header_len,_out)+
+ fwrite(_ctx->og.body,1,_ctx->og.body_len,_out);
+ if(_ctx->processing_headers){
+ _ctx->page_ready=ogg_stream_flush(&_ctx->tp,&_ctx->og)>0;
+ if(_ctx->page_ready)th_rehuff_copy_page_data(_ctx);
+ else _ctx->processing_headers=0;
+ }
+ else{
+ _ctx->page_ready=ogg_stream_pageout(&_ctx->tp,&_ctx->og)>0;
+ if(_ctx->page_ready)th_rehuff_copy_page_data(_ctx);
+ }
+ return ret;
+}
+
+struct page_queue{
+ ogg_page page;
+ page_queue *next;
+ page_queue *prev;
+ unsigned char data[1];
+};
+
+struct ov_passthrough_ctx{
+ vorbis_info vi;
+ vorbis_dsp_state vd;
+ long serialno;
+ page_queue *head;
+ page_queue *tail;
+};
+
+static void ov_passthrough_init(ov_passthrough_ctx *_ctx,
+ const vorbis_info *_vi,long _serialno);
+static void ov_passthrough_clear(ov_passthrough_ctx *_ctx);
+static int ov_passthrough_queuepage(ov_passthrough_ctx *_ctx,ogg_page *_og);
+static page_queue *ov_passthrough_dequeuepage(ov_passthrough_ctx *_ctx);
+
+static void ov_passthrough_init(ov_passthrough_ctx *_ctx,
+ const vorbis_info *_vi,long _serialno){
+ memcpy(&_ctx->vi,_vi,sizeof(_ctx->vi));
+ vorbis_analysis_init(&_ctx->vd,&_ctx->vi);
+ _ctx->serialno=_serialno;
+ _ctx->head=_ctx->tail=NULL;
+}
+
+static void ov_passthrough_clear(ov_passthrough_ctx *_ctx){
+ while(_ctx->tail!=NULL)_ogg_free(ov_passthrough_dequeuepage(_ctx));
+ vorbis_dsp_clear(&_ctx->vd);
+ vorbis_info_clear(&_ctx->vi);
+}
+
+static int ov_passthrough_queuepage(ov_passthrough_ctx *_ctx,ogg_page *_og){
+ page_queue *oq;
+ /*Only queue pages that belong to our stream.*/
+ if(ogg_page_serialno(_og)!=_ctx->serialno)return 0;
+ /*The page data gets trampled when the next page is read from the stream, so
+ make a copy of it.*/
+ oq=(page_queue *)_ogg_malloc(
+ _ogg_offsetof(page_queue,data)+_og->header_len+_og->body_len);
+ memcpy(&oq->page,_og,sizeof(oq->page));
+ oq->page.header=oq->data;
+ memcpy(oq->page.header,_og->header,_og->header_len);
+ oq->page.body=oq->data+_og->header_len;
+ memcpy(oq->page.body,_og->body,_og->body_len);
+ oq->next=_ctx->head;
+ oq->prev=NULL;
+ if(_ctx->tail==NULL)_ctx->tail=oq;
+ else oq->next->prev=oq;
+ _ctx->head=oq;
+ return 1;
+}
+
+static page_queue *ov_passthrough_dequeuepage(ov_passthrough_ctx *_ctx){
+ page_queue *oq;
+ oq=_ctx->tail;
+ _ctx->tail=oq->prev;
+ if(_ctx->tail==NULL)_ctx->head=NULL;
+ else oq->prev->next=NULL;
+ return oq;
+}
+
+static double ov_passthrough_pagetime(ov_passthrough_ctx *_ctx,int _flush){
+ if(_ctx->tail==NULL)return -2;
+ return vorbis_granule_time(&_ctx->vd,ogg_page_granulepos(&_ctx->tail->page));
+}
+
+static size_t ov_passthrough_writepage(ov_passthrough_ctx *_ctx,FILE *_out){
+ page_queue *oq;
+ size_t ret;
+ oq=ov_passthrough_dequeuepage(_ctx);
+ ret=fwrite(oq->data,1,oq->page.header_len+oq->page.body_len,_out);
+ _ogg_free(oq);
+ return ret;
+}
+
+typedef int (*stream_pagein_func)(void *_stream,ogg_page *_og);
+typedef double (*stream_pagetime_func)(void *_stream,int _flush);
+typedef size_t (*stream_writepage_func)(void *_stream,FILE *_out);
+typedef struct stream_ctx_vtbl stream_ctx_vtbl;
+typedef struct stream_ctx stream_ctx;
+
+struct stream_ctx_vtbl{
+ stream_pagein_func pagein;
+ stream_pagetime_func pagetime;
+ stream_writepage_func writepage;
+};
+
+struct stream_ctx{
+ const stream_ctx_vtbl *vtbl;
+ void *ctx;
+};
+
+static void write_pages(stream_ctx *_sos,int _nsos,int _flush,FILE *_out){
+ double min_time;
+ int min_si;
+ int nready;
+ int si;
+ for(;;){
+ min_time=DBL_MAX;
+ min_si=-1;
+ nready=0;
+ for(si=0;si<_nsos;si++){
+ double time;
+ /*Write out any header pages.
+        Technically, checking for time!=0 is not a reliable test of whether a
+         page is a header page, but because we process pages in the order they
+         appear in the input, it works out in this case even if the check
+         fails.*/
+ for(;;){
+ time=(*_sos[si].vtbl->pagetime)(_sos[si].ctx,_flush);
+ /*Write any header or intermediate pages immediately.*/
+ if(time!=0&&time!=-1)break;
+ (*_sos[si].vtbl->writepage)(_sos[si].ctx,_out);
+ }
+ if(time>0){
+ if(time<min_time){
+ min_time=time;
+ min_si=si;
+ }
+ nready++;
+ }
+ }
+ /*If everyone has a data page, or we are flushing the last pages and
+ there's at least one, write out the earliest.*/
+ if(nready==_nsos||_flush&&nready>0){
+ (*_sos[min_si].vtbl->writepage)(_sos[min_si].ctx,_out);
+ }
+ else break;
+ }
+}
+
+static const stream_ctx_vtbl STREAM_STATE_VTBL={
+ (stream_pagein_func)ogg_stream_pagein,
+ NULL,
+ NULL
+};
+
+static const stream_ctx_vtbl TH_REHUFF_VTBL={
+ (stream_pagein_func)th_rehuff_pagein,
+ (stream_pagetime_func)th_rehuff_pagetime,
+ (stream_writepage_func)th_rehuff_writepage
+};
+
+static const stream_ctx_vtbl OV_PASSTHROUGH_VTBL={
+ (stream_pagein_func)ov_passthrough_queuepage,
+ (stream_pagetime_func)ov_passthrough_pagetime,
+ (stream_writepage_func)ov_passthrough_writepage
+};
+
+static void stream_ctx_add(stream_ctx **_sos,int *_nsos,int *_csos,
+ const stream_ctx_vtbl *_vtbl,void *_ctx){
+ stream_ctx *sos;
+ int nsos;
+ int csos;
+ sos=*_sos;
+ nsos=*_nsos;
+ csos=*_csos;
+ if(nsos>=csos){
+ csos=csos<<1|1;
+ sos=(stream_ctx *)_ogg_realloc(sos,csos*sizeof(*sos));
+ }
+ sos[nsos].vtbl=_vtbl;
+ sos[nsos].ctx=_ctx;
+ nsos++;
+ *_sos=sos;
+ *_nsos=nsos;
+ *_csos=csos;
+}
+
+const char *optstring="o:s:";
+
+const struct option options[]={
+ {"output",required_argument,NULL,'o'},
+ {"output-stats",required_argument,NULL,'s'},
+ /*{"input-stats",required_argument,NULL,'S'},*/
+ {NULL,0,NULL,0}
+};
+
+
+
+/*Grab some more compressed bitstream and sync it for page extraction.*/
+size_t buffer_data(FILE *_fin,ogg_sync_state *_oy){
+ char *buf;
+ size_t bytes;
+ buf=ogg_sync_buffer(_oy,4096);
+ bytes=fread(buf,1,4096,_fin);
+ ogg_sync_wrote(_oy,(long)bytes);
+ return bytes;
+}
+
+/*Push a page into the appropriate stream.*/
+static int queue_page(stream_ctx *_sos,int _nsos,ogg_page *_og){
+ int si;
+ /*This can be done blindly; a stream won't accept a page that doesn't belong
+ to it.*/
+ for(si=0;si<_nsos;si++){
+ if(!(*_sos[si].vtbl->pagein)(_sos[si].ctx,_og))break;
+ }
+ return 0;
+}
+
+static void usage(void){
+ fprintf(stderr,"Usage: rehuff [-s <statsout.txt> ] "
+ "<infile.ogg> <outfile.ogg>\n");
+ exit(-1);
+}
+
+static void oc_process_bos0(ogg_page *_og,th_rehuff_ctx **_rehuffs,
+ int *_nrehuffs,int *_crehuffs,stream_ctx **_sos,int *_nsos,int *_csos){
+ th_rehuff_ctx *rehuffs;
+ int nrehuffs;
+ int crehuffs;
+ ogg_stream_state test;
+ ogg_packet op;
+ th_info ti;
+ int theorap;
+ rehuffs=*_rehuffs;
+ nrehuffs=*_nrehuffs;
+ crehuffs=*_crehuffs;
+ ogg_stream_init(&test,ogg_page_serialno(_og));
+ ogg_stream_pagein(&test,_og);
+ /*If we don't get a packet back, something is wrong.*/
+ if(!ogg_stream_packetpeek(&test,&op)){
+ fprintf(stderr,
+ "Warning: BOS page encountered without a complete packet.\n");
+ ogg_stream_clear(&test);
+ return;
+ }
+ /*Identify the codec: try Theora.*/
+ th_info_init(&ti);
+ theorap=th_decode_headerin(&ti,NULL,NULL,&op);
+ if(theorap>=0){
+ /*It is Theora.*/
+ if(nrehuffs>=crehuffs){
+ int ri;
+ crehuffs=crehuffs<<1|1;
+ rehuffs=(th_rehuff_ctx *)_ogg_realloc(rehuffs,
+ crehuffs*sizeof(*rehuffs));
+ for(ri=0;ri<nrehuffs;ri++)(*_sos)[ri].ctx=&rehuffs[ri].to;
+ }
+ th_rehuff_init(rehuffs+nrehuffs,&test,&ti,theorap);
+ /*Advance past the successfully processed header.*/
+ ogg_stream_packetout(&rehuffs[nrehuffs].to,NULL);
+ stream_ctx_add(_sos,_nsos,_csos,&STREAM_STATE_VTBL,&rehuffs[nrehuffs].to);
+ nrehuffs++;
+ }
+ else{
+ /*Whatever it is, we don't care about it.*/
+ th_info_clear(&ti);
+ ogg_stream_clear(&test);
+ }
+ *_rehuffs=rehuffs;
+ *_nrehuffs=nrehuffs;
+ *_crehuffs=crehuffs;
+}
+
+static void oc_process_bos1(ogg_page *_og,th_rehuff_ctx *_rehuffs,
+ int _nrehuffs,ov_passthrough_ctx **_passthroughs,int *_npassthroughs,
+ int *_cpassthroughs,stream_ctx **_sos,int *_nsos,int *_csos){
+ ov_passthrough_ctx *passthroughs;
+ int npassthroughs;
+ int cpassthroughs;
+ ogg_stream_state test;
+ ogg_packet op;
+ th_info ti;
+ int theorap;
+ int vorbisp;
+ int ri;
+ passthroughs=*_passthroughs;
+ npassthroughs=*_npassthroughs;
+ cpassthroughs=*_cpassthroughs;
+ ogg_stream_init(&test,ogg_page_serialno(_og));
+ ogg_stream_pagein(&test,_og);
+ /*If we don't get a packet back, something is wrong.*/
+ if(!ogg_stream_packetpeek(&test,&op)){
+ ogg_stream_clear(&test);
+ return;
+ }
+ /*Identify the codec: try Theora.*/
+ th_info_init(&ti);
+ theorap=th_decode_headerin(&ti,NULL,NULL,&op);
+ if(theorap>=0){
+ th_rehuff_ctx *rehuff;
+ for(ri=0;ri<_nrehuffs;ri++){
+ if(_rehuffs[ri].serialno==test.serialno)break;
+ }
+ if(ri>=_nrehuffs){
+ fprintf(stderr,"Error: Stream headers changed after first pass.\n");
+ return;
+ }
+ rehuff=_rehuffs+ri;
+ if(!rehuff->page_ready){
+ ogg_packet op;
+ ogg_stream_clear(&rehuff->to);
+ ogg_stream_init(&rehuff->to,rehuff->serialno);
+ th_recode_flushheader(rehuff->tr,&rehuff->tc,&op);
+ ogg_stream_packetin(&rehuff->tp,&op);
+ ogg_stream_pageout(&rehuff->tp,&rehuff->og);
+ th_rehuff_copy_page_data(rehuff);
+ rehuff->page_ready=1;
+ /*processing_headers now means "flush pages until we run out", to ensure
+ they all appear before any data pages.
+ We don't want to start doing that until all the BOS pages are written.*/
+ rehuff->processing_headers=0;
+ }
+ }
+ else{
+ vorbis_info vi;
+ int pi;
+ vorbis_info_init(&vi);
+ vorbisp=vorbis_synthesis_headerin(&vi,NULL,&op);
+ if(vorbisp>=0){
+ ov_passthrough_ctx *passthrough;
+ if(npassthroughs>=cpassthroughs){
+ cpassthroughs=cpassthroughs<<1|1;
+ passthroughs=(ov_passthrough_ctx *)_ogg_realloc(passthroughs,
+ cpassthroughs*sizeof(*passthroughs));
+ *_nsos=0;
+ for(ri=0;ri<_nrehuffs;ri++){
+ stream_ctx_add(_sos,_nsos,_csos,&TH_REHUFF_VTBL,_rehuffs+ri);
+ }
+ for(pi=0;pi<npassthroughs;pi++){
+ stream_ctx_add(_sos,_nsos,_csos,&OV_PASSTHROUGH_VTBL,passthroughs+pi);
+ }
+ }
+ passthrough=passthroughs+npassthroughs++;
+ ov_passthrough_init(passthrough,&vi,test.serialno);
+ stream_ctx_add(_sos,_nsos,_csos,&OV_PASSTHROUGH_VTBL,passthrough);
+ ov_passthrough_queuepage(passthrough,_og);
+ }
+ else{
+ fprintf(stderr,"Warning: Ignoring unknown stream with serialno 0x%08lX\n",
+ test.serialno);
+ }
+ }
+ *_passthroughs=passthroughs;
+ *_npassthroughs=npassthroughs;
+ *_cpassthroughs=cpassthroughs;
+}
+
+
+int main(int _argc,char **_argv){
+ th_rehuff_ctx *rehuffs;
+ int nrehuffs;
+ int crehuffs;
+ ov_passthrough_ctx *passthroughs;
+ int npassthroughs;
+ int cpassthroughs;
+ stream_ctx *sos;
+ int nsos;
+ int csos;
+ ogg_sync_state oy;
+ ogg_sync_state oz;
+ ogg_page og;
+ ogg_page og0;
+ ogg_page og1;
+ ogg_packet op;
+ FILE *infile;
+ FILE *outfile;
+ /*FILE *statsin;*/
+ FILE *statsout;
+ fpos_t chain_start;
+ int long_option_index;
+ int have_bos0;
+ int have_bos1;
+ int done;
+ int c;
+ int ret;
+ int ri;
+#if defined(_WIN32)
+ /*We need to set stdin/stdout to binary mode on windows.*/
+ /*Beware the evil #ifdef.
+ We avoid these where we can, but this one we cannot.
+ Don't add any more, you'll probably go to hell if you do.*/
+ _setmode(_fileno(stdin),_O_BINARY);
+ _setmode(_fileno(stdout),_O_BINARY);
+#endif
+ infile=NULL;
+ outfile=NULL;
+ /*statsin=NULL;*/
+ statsout=NULL;
+ /*Process option arguments.*/
+ for(;;){
+ c=getopt_long(_argc,_argv,optstring,options,&long_option_index);
+ if(c==EOF)break;
+ switch(c){
+ case 'o':{
+ if(strcmp(optarg,"-")){
+ outfile=fopen(optarg,"wb");
+ if(outfile==NULL){
+ fprintf(stderr,"Unable to open output file '%s'.\n",optarg);
+ exit(1);
+ }
+ }
+ else outfile=stdout;
+ }break;
+ case 's':{
+ if(strcmp(optarg,"-")){
+ statsout=fopen(optarg,"ab");
+ if(statsout==NULL){
+ fprintf(stderr,
+ "Unable to open statistics output file '%s'.\n",optarg);
+ exit(1);
+ }
+ }
+ else statsout=stdout;
+ }break;
+ /*case 'S':{
+ if(!strcmp(optarg,"-")){
+ statsin=fopen(optarg,"rb");
+ if(statsin==NULL){
+ fprintf(stderr,
+ "Unable to open statistics input file '%s'.\n",optarg);
+ exit(1);
+ }
+ }
+ else statsin=stdin;
+ }break;*/
+ default:usage();break;
+ }
+ }
+ if(optind<_argc){
+ infile=fopen(_argv[optind],"rb");
+ if(infile==NULL){
+ fprintf(stderr,"Unable to open input file '%s'.\n",_argv[optind]);
+ exit(1);
+ }
+ optind++;
+ }
+ if(outfile==NULL&&optind<_argc){
+ outfile=fopen(_argv[optind],"wb");
+ if(outfile==NULL){
+ fprintf(stderr,"Unable to open output file '%s'.\n",_argv[optind]);
+ exit(1);
+ }
+ optind++;
+ }
+  if(infile==NULL||outfile==NULL||optind<_argc)usage();
+ if(fseek(infile,0,SEEK_END)==-1||fseek(infile,0,SEEK_SET)==-1){
+ fprintf(stderr,"Cannot seek on input file.\n");
+ exit(1);
+ }
+ sos=NULL;
+ csos=0;
+ rehuffs=NULL;
+ crehuffs=0;
+ passthroughs=NULL;
+ cpassthroughs=0;
+ /*Start up Ogg stream synchronization layer.*/
+ ogg_sync_init(&oy);
+ ogg_sync_init(&oz);
+ have_bos0=0;
+ have_bos1=0;
+ /*Loop over the links of a chained file.*/
+ do{
+ fgetpos(infile,&chain_start);
+ /*Parse the headers.*/
+ nsos=0;
+ nrehuffs=0;
+    /*Only interested in Theora streams.
+ All others are ignored.*/
+ for(done=0;!done;){
+ /*We have a BOS page encountered during a previous chain segment.*/
+ if(have_bos0){
+ oc_process_bos0(&og0,&rehuffs,&nrehuffs,&crehuffs,&sos,&nsos,&csos);
+ have_bos0=0;
+ }
+ else{
+ if(!buffer_data(infile,&oy))break;
+ while(ogg_sync_pageout(&oy,&og)>0){
+ /*Is this a mandated initial header? If not, stop parsing.*/
+ if(!ogg_page_bos(&og)){
+ /*Don't leak the page; get it into the appropriate stream.*/
+ queue_page(sos,nsos,&og);
+ done=1;
+ break;
+ }
+ oc_process_bos0(&og,&rehuffs,&nrehuffs,&crehuffs,&sos,&nsos,&csos);
+ }
+ }
+ }
+ for(;;){
+ /*Try to read in any available headers for any streams.*/
+ for(ri=0;ri<nrehuffs;ri++){
+ while(rehuffs[ri].processing_headers){
+ /*We're expecting more header packets.*/
+ ret=ogg_stream_packetpeek(&rehuffs[ri].to,&op);
+ /*We're ignoring any gaps.
+ If it's a problem, we'll find it soon enough.*/
+ if(ret<0)continue;
+ /*No packet -> stop.*/
+ if(!ret)break;
+ rehuffs[ri].processing_headers=th_decode_headerin(&rehuffs[ri].ti,
+ &rehuffs[ri].tc,&rehuffs[ri].ts,&op);
+ if(rehuffs[ri].processing_headers<0){
+ printf("Error parsing Theora stream headers; corrupt stream?\n");
+ exit(1);
+ }
+ else if(rehuffs[ri].processing_headers>0){
+ /*Advance past the successfully processed header.*/
+ ogg_stream_packetout(&rehuffs[ri].to,NULL);
+ }
+ }
+ }
+ /*If all Theora streams have all their header packets, stop now so we don't
+ fail if there aren't enough pages in a short stream.*/
+ for(ri=0;ri<nrehuffs;ri++)if(rehuffs[ri].processing_headers)break;
+ if(ri>=nrehuffs)break;
+ /*The header pages/packets will arrive before anything else we care about,
+ or the stream is not obeying spec.*/
+ /*Demux into the appropriate stream.*/
+ if(ogg_sync_pageout(&oy,&og)>0)queue_page(sos,nsos,&og);
+ else{
+ /*Someone needs more data.*/
+ if(!buffer_data(infile,&oy)){
+ fprintf(stderr,"End of file while searching for codec headers.\n");
+ exit(1);
+ }
+ }
+ }
+ /*And now we have it all.
+ Initialize recoders.*/
+ for(ri=0;ri<nrehuffs;ri++){
+ rehuffs[ri].tr=th_recode_alloc(&rehuffs[ri].ti,rehuffs[ri].ts);
+ fprintf(stderr,"Ogg logical stream %lx is Theora %dx%d %.02f fps video\n"
+ "Encoded frame content is %dx%d with %dx%d offset\n",
+ rehuffs[ri].to.serialno,
+ rehuffs[ri].ti.frame_width,rehuffs[ri].ti.frame_height,
+ (double)rehuffs[ri].ti.fps_numerator/rehuffs[ri].ti.fps_denominator,
+ rehuffs[ri].ti.pic_width,rehuffs[ri].ti.pic_height,
+ rehuffs[ri].ti.pic_x,rehuffs[ri].ti.pic_y);
+ }
+ /*Queue any remaining pages from data we buffered but that did not contain
+ headers.*/
+ while(ogg_sync_pageout(&oy,&og)>0)queue_page(sos,nsos,&og);
+ /*On to the main decode loop.*/
+ for(;;){
+ /*Process all the packets from all the Theora streams.*/
+ for(ri=0;ri<nrehuffs;ri++){
+ while(ogg_stream_packetout(&rehuffs[ri].to,&op)>0){
+ th_recode_packetin(rehuffs[ri].tr,&op,NULL);
+ }
+ }
+ if(!buffer_data(infile,&oy))break;
+ while(ogg_sync_pageout(&oy,&og)>0){
+ /*Stop if we encounter a new chain segment.*/
+ if(ogg_page_bos(&og)){
+ /*Save the page for the next chain segment to use.*/
+ memcpy(&og0,&og,sizeof(og0));
+ have_bos0=1;
+ break;
+ }
+ else queue_page(sos,nsos,&og);
+ }
+ if(have_bos0)break;
+ }
+ /*Decode of this chain segment complete.*/
+ for(ri=0;ri<nrehuffs;ri++){
+ th_huff_code codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
+ th_huff_code cluster_codes[16][NTOKENS_MAX];
+ int cluster_centers[16];
+ oc_frame_tok_hist *tok_hists;
+ long ntok_hists;
+ oc_tok_vec *dc_vecs;
+ oc_tok_vec *ac_vecs;
+ ogg_int64_t bits_wasted;
+ ogg_int64_t granpos;
+ int converged;
+ long nvecs;
+ long fi;
+ int pli;
+ int hgi;
+ int ci;
+ /*Get the token statistics for all the frames.*/
+ th_recode_ctl(rehuffs[ri].tr,TH_RECCTL_GET_TOK_NSTATS,
+ &ntok_hists,sizeof(ntok_hists));
+ th_recode_ctl(rehuffs[ri].tr,TH_RECCTL_GET_TOK_STATS,
+ &tok_hists,sizeof(tok_hists));
+ /*If the user requested output statistics, write some.*/
+ if(statsout!=NULL)for(fi=0;fi<ntok_hists;fi++){
+ for(pli=0;pli<2;pli++){
+ for(hgi=0;hgi<5;hgi++){
+ for(ci=0;ci<TH_NDCT_TOKENS;ci++){
+ fprintf(statsout,"%i%c",tok_hists[fi].tok_hist[pli][hgi][ci],
+ ci+1<TH_NDCT_TOKENS?' ':hgi+1<5?'\t':'\n');
+ }
+ }
+ }
+ }
+ nvecs=tok_vecs_init(&dc_vecs,&ac_vecs,tok_hists,ntok_hists);
+ bits_wasted=tok_vecs_fpc(dc_vecs,nvecs,NDC_TOKENS,cluster_centers);
+ do{
+ bits_wasted=tok_vecs_kmeans(dc_vecs,nvecs,NDC_TOKENS,cluster_codes,
+ &converged);
+ }
+ while(!converged);
+ for(ci=0;ci<16;ci++)memcpy(codes[ci],cluster_codes[ci],sizeof(codes[ci]));
+ bits_wasted=tok_vecs_fpc(ac_vecs,nvecs,NAC_TOKENS,cluster_centers);
+ do{
+ bits_wasted=tok_vecs_kmeans(ac_vecs,nvecs,NAC_TOKENS,cluster_codes,
+ &converged);
+ }
+ while(!converged);
+ for(ci=0;ci<16;ci++){
+ memcpy(codes[ci+16],cluster_codes[ci]+TH_NDCT_TOKENS*0,
+ sizeof(codes[ci+16]));
+ memcpy(codes[ci+32],cluster_codes[ci]+TH_NDCT_TOKENS*1,
+ sizeof(codes[ci+32]));
+ memcpy(codes[ci+48],cluster_codes[ci]+TH_NDCT_TOKENS*2,
+ sizeof(codes[ci+48]));
+ memcpy(codes[ci+64],cluster_codes[ci]+TH_NDCT_TOKENS*3,
+ sizeof(codes[ci+64]));
+ }
+ huff_codes_print("CODES",codes);
+ th_recode_ctl(rehuffs[ri].tr,TH_ENCCTL_SET_HUFFMAN_CODES,
+ codes,sizeof(codes));
+ /*TODO: Detect start offset in input video and correct for it.*/
+ granpos=0;
+ th_recode_ctl(rehuffs[ri].tr,TH_DECCTL_SET_GRANPOS,
+ &granpos,sizeof(granpos));
+ }
+ /*Now read the chain segment a second time and rewrite the packets.*/
+ fsetpos(infile,&chain_start);
+ /*Parse the headers.*/
+ nsos=0;
+ npassthroughs=0;
+ for(ri=0;ri<nrehuffs;ri++){
+ stream_ctx_add(&sos,&nsos,&csos,&TH_REHUFF_VTBL,rehuffs+ri);
+ }
+ /*Only interested in Theora streams.
+ Vorbis streams are passed through.
+ All others are ignored, since we don't know how to re-mux them.*/
+ for(done=0;!done;){
+ /*We have a BOS page encountered during a previous chain segment.*/
+ if(have_bos1){
+ oc_process_bos1(&og1,rehuffs,nrehuffs,&passthroughs,
+ &npassthroughs,&cpassthroughs,&sos,&nsos,&csos);
+ have_bos1=0;
+ }
+ else{
+ if(!buffer_data(infile,&oz))break;
+ while(ogg_sync_pageout(&oz,&og)>0){
+ /*Is this a mandated initial header? If not, stop parsing.*/
+ if(!ogg_page_bos(&og)){
+ /*Don't leak the page; get it into the appropriate stream.*/
+ queue_page(sos,nsos,&og);
+ done=1;
+ break;
+ }
+ oc_process_bos1(&og,rehuffs,nrehuffs,&passthroughs,
+ &npassthroughs,&cpassthroughs,&sos,&nsos,&csos);
+ }
+ }
+ /*Write the new BOS page.*/
+ write_pages(sos,nsos,0,outfile);
+ }
+ /*Queue up the rest of the Theora headers.
+ The rest of the streams will write them out as they come to them.*/
+ for(ri=0;ri<nrehuffs;ri++){
+ ogg_packet op;
+ while(th_recode_flushheader(rehuffs[ri].tr,&rehuffs[ri].tc,&op)>0){
+ ogg_stream_packetin(&rehuffs[ri].tp,&op);
+ }
+ rehuffs[ri].processing_headers=1;
+ }
+ write_pages(sos,nsos,0,outfile);
+ /*Main re-coding loop.*/
+ for(;;){
+ if(!buffer_data(infile,&oz))break;
+ while(ogg_sync_pageout(&oz,&og)>0){
+ /*Stop if we encounter a new chain segment.*/
+ if(ogg_page_bos(&og)){
+ /*Save the page for the next chain segment to use.*/
+ memcpy(&og1,&og,sizeof(og1));
+ have_bos1=1;
+ break;
+ }
+ else{
+ queue_page(sos,nsos,&og);
+ write_pages(sos,nsos,0,outfile);
+ }
+ }
+ if(have_bos1)break;
+ }
+ /*Flush out any remaining pages.*/
+ write_pages(sos,nsos,1,outfile);
+ /*Tear down the structures for this chain segment.*/
+ for(ri=0;ri<nrehuffs;ri++)th_rehuff_clear(rehuffs+ri);
+ for(ri=0;ri<npassthroughs;ri++)ov_passthrough_clear(passthroughs+ri);
+ }
+ while(have_bos0||!(feof(infile)||ferror(infile)));
+ ogg_sync_clear(&oy);
+ ogg_sync_clear(&oz);
+ fclose(outfile);
+ return(0);
+}
Added: trunk/theora-exp/lib/recode.c
===================================================================
--- trunk/theora-exp/lib/recode.c (rev 0)
+++ trunk/theora-exp/lib/recode.c 2007-05-03 04:29:18 UTC (rev 12913)
@@ -0,0 +1,1245 @@
+#include <stdlib.h>
+#include <string.h>
+#include <limits.h>
+#include "recode.h"
+/*For th_setup_info, packet state, idct, huffdec, dequant.*/
+#include "decint.h"
+/*For oc_huff_codes_pack, oc_state_flushheader.*/
+#include "encint.h"
+
+typedef struct th_rec_ctx oc_rec_ctx;
+typedef oc_tok_hist oc_tok_hist_table[5];
+
+
+
+/*Reading packet statistics.*/
+#define OC_PACKET_ANALYZE (1)
+/*Waiting for Huffman tables to be set.*/
+#define OC_PACKET_HUFFTABLES (2)
+/*Rewriting data packets.*/
+#define OC_PACKET_REWRITE (0)
+
+
+
+struct th_rec_ctx{
+ /*Shared encoder/decoder state.*/
+ oc_theora_state state;
+ /*The next four fields must be in the given positions in order to be
+ compatible with some encoder functions we call.*/
+ /*Whether or not packets are ready to be emitted.
+ This takes on negative values while there are remaining header packets to
+ be emitted, reaches 0 when the codec is ready for input, and goes to 1
+ when a frame has been processed and a data packet is ready.*/
+ int packet_state;
+ /*Buffer in which to assemble packets.*/
+ oggpack_buffer enc_opb;
+ /*Huffman encode tables.*/
+ th_huff_code enc_huff_codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS];
+ /*Quantization parameters.*/
+ th_quant_info qinfo;
+ /*The previous four fields must be in the given positions in order to be
+ compatible with some encoder functions we call.*/
+ /*Buffer from which to decode packets.*/
+ oggpack_buffer dec_opb;
+ /*Huffman decode trees.*/
+ oc_huff_node *dec_huff_tables[TH_NHUFFMAN_TABLES];
+ /*The index of one past the last token in each plane for each coefficient.
+ The final entries are the total number of tokens for each coefficient.*/
+ int ti0[3][64];
+ /*The index of one past the last extra bits entry in each plane for each
+ coefficient.
+ The final entries are the total number of extra bits entries for each
+ coefficient.*/
+ int ebi0[3][64];
+ /*The number of outstanding EOB runs at the start of each coefficient in each
+ plane.*/
+ int eob_runs[3][64];
+ /*The DCT token lists.*/
+ unsigned char **dct_tokens;
+ /*The extra bits associated with DCT tokens.*/
+ ogg_uint16_t **extra_bits;
+ /*The DCT token counts for the last decoded frame.*/
+ oc_tok_hist tok_hist[2][5];
+ /*The DCT token counts for all decoded frames.*/
+ oc_frame_tok_hist *tok_hists;
+ long ntok_hists;
+ long ctok_hists;
+ /*The index of the set of token counts used for the current frame while
+ rewriting.*/
+ long cur_tok_histi;
+};
+
+
+
+/*The mode alphabets for the various mode coding schemes.
+ Scheme 0 uses a custom alphabet, which is not stored in this table.*/
+static const int OC_MODE_ALPHABETS[7][OC_NMODES]={
+ /*Last MV dominates */
+ {
+ OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV_LAST2,OC_MODE_INTER_MV,
+ OC_MODE_INTER_NOMV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
+ OC_MODE_INTER_MV_FOUR
+ },
+ {
+ OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV_LAST2,OC_MODE_INTER_NOMV,
+ OC_MODE_INTER_MV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
+ OC_MODE_INTER_MV_FOUR
+ },
+ {
+ OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV,OC_MODE_INTER_MV_LAST2,
+ OC_MODE_INTER_NOMV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
+ OC_MODE_INTER_MV_FOUR
+ },
+ {
+ OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV,OC_MODE_INTER_NOMV,
+ OC_MODE_INTER_MV_LAST2,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,
+ OC_MODE_GOLDEN_MV,OC_MODE_INTER_MV_FOUR
+ },
+ /*No MV dominates.*/
+ {
+ OC_MODE_INTER_NOMV,OC_MODE_INTER_MV_LAST,OC_MODE_INTER_MV_LAST2,
+ OC_MODE_INTER_MV,OC_MODE_INTRA,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
+ OC_MODE_INTER_MV_FOUR
+ },
+ {
+ OC_MODE_INTER_NOMV,OC_MODE_GOLDEN_NOMV,OC_MODE_INTER_MV_LAST,
+ OC_MODE_INTER_MV_LAST2,OC_MODE_INTER_MV,OC_MODE_INTRA,OC_MODE_GOLDEN_MV,
+ OC_MODE_INTER_MV_FOUR
+ },
+ /*Default ordering.*/
+ {
+ OC_MODE_INTER_NOMV,OC_MODE_INTRA,OC_MODE_INTER_MV,OC_MODE_INTER_MV_LAST,
+ OC_MODE_INTER_MV_LAST2,OC_MODE_GOLDEN_NOMV,OC_MODE_GOLDEN_MV,
+ OC_MODE_INTER_MV_FOUR
+ }
+};
+
+
+
+static int oc_sb_run_unpack(oggpack_buffer *_opb){
+ long bits;
+ int ret;
+ /*Coding scheme:
+ Codeword Run Length
+ 0 1
+ 10x 2-3
+ 110x 4-5
+ 1110xx 6-9
+ 11110xxx 10-17
+ 111110xxxx 18-33
+ 111111xxxxxxxxxxxx 34-4129*/
+ theora_read1(_opb,&bits);
+ if(bits==0)return 1;
+ theora_read(_opb,2,&bits);
+ if((bits&2)==0)return 2+(int)bits;
+ else if((bits&1)==0){
+ theora_read1(_opb,&bits);
+ return 4+(int)bits;
+ }
+ theora_read(_opb,3,&bits);
+ if((bits&4)==0)return 6+(int)bits;
+ else if((bits&2)==0){
+ ret=10+((bits&1)<<2);
+ theora_read(_opb,2,&bits);
+ return ret+(int)bits;
+ }
+ else if((bits&1)==0){
+ theora_read(_opb,4,&bits);
+ return 18+(int)bits;
+ }
+ theora_read(_opb,12,&bits);
+ return 34+(int)bits;
+}
+
+static int oc_block_run_unpack(oggpack_buffer *_opb){
+ long bits;
+ long bits2;
+ /*Coding scheme:
+ Codeword Run Length
+ 0x 1-2
+ 10x 3-4
+ 110x 5-6
+ 1110xx 7-10
+ 11110xx 11-14
+ 11111xxxx 15-30*/
+ theora_read(_opb,2,&bits);
+ if((bits&2)==0)return 1+(int)bits;
+ else if((bits&1)==0){
+ theora_read1(_opb,&bits);
+ return 3+(int)bits;
+ }
+ theora_read(_opb,2,&bits);
+ if((bits&2)==0)return 5+(int)bits;
+ else if((bits&1)==0){
+ theora_read(_opb,2,&bits);
+ return 7+(int)bits;
+ }
+ theora_read(_opb,3,&bits);
+ if((bits&4)==0)return 11+bits;
+ theora_read(_opb,2,&bits2);
+ return 15+((bits&3)<<2)+bits2;
+}
+
+static void oc_quant_params_copy(th_quant_info *_qdst,
+ const th_quant_info *_qsrc){
+ int i;
+ memcpy(_qdst,_qsrc,sizeof(*_qdst));
+ for(i=0;i<6;i++){
+ int qti;
+ int pli;
+ int qtj;
+ int plj;
+ qti=i/3;
+ pli=i%3;
+ qtj=(i-1)/3;
+ plj=(i-1)%3;
+ if(i>0&&_qsrc->qi_ranges[qti][pli].sizes==
+ _qsrc->qi_ranges[qtj][plj].sizes){
+ _qdst->qi_ranges[qti][pli].sizes=_qdst->qi_ranges[qtj][plj].sizes;
+ }
+ else if(qti>0&&_qsrc->qi_ranges[1][pli].sizes==
+ _qsrc->qi_ranges[0][pli].sizes){
+ _qdst->qi_ranges[1][pli].sizes=_qdst->qi_ranges[0][pli].sizes;
+ }
+ else{
+ int *sizes;
+ sizes=(int *)_ogg_malloc(
+ _qsrc->qi_ranges[qti][pli].nranges*sizeof(*sizes));
+ memcpy(sizes,_qsrc->qi_ranges[qti][pli].sizes,
+ _qsrc->qi_ranges[qti][pli].nranges*sizeof(*sizes));
+ _qdst->qi_ranges[qti][pli].sizes=sizes;
+ }
+ if(i>0&&_qsrc->qi_ranges[qti][pli].base_matrices==
+ _qsrc->qi_ranges[qtj][plj].base_matrices){
+ _qdst->qi_ranges[qti][pli].base_matrices=
+ _qdst->qi_ranges[qtj][plj].base_matrices;
+ }
+ else if(qti>0&&_qsrc->qi_ranges[1][pli].base_matrices==
+ _qsrc->qi_ranges[0][pli].base_matrices){
+ _qdst->qi_ranges[1][pli].base_matrices=
+ _qdst->qi_ranges[0][pli].base_matrices;
+ }
+ else{
+ th_quant_base *base_matrices;
+ base_matrices=(th_quant_base *)_ogg_malloc(
+ (_qsrc->qi_ranges[qti][pli].nranges+1)*sizeof(*base_matrices));
+ memcpy(base_matrices,_qsrc->qi_ranges[qti][pli].base_matrices,
+ (_qsrc->qi_ranges[qti][pli].nranges+1)*sizeof(*base_matrices));
+ _qdst->qi_ranges[qti][pli].base_matrices=
+ (const th_quant_base *)base_matrices;
+ }
+ }
+}
+
+
+static int oc_rec_init(oc_rec_ctx *_rec,const th_info *_info,
+ const th_setup_info *_setup){
+ int ret;
+ ret=oc_state_init(&_rec->state,_info);
+ if(ret<0)return ret;
+ oc_huff_trees_copy(_rec->dec_huff_tables,
+ (const oc_huff_node *const *)_setup->huff_tables);
+ /*Do a deep copy of the quant params, since we will need to refer to this
+ data again (unlike in the normal decoder).*/
+ oc_quant_params_copy(&_rec->qinfo,&_setup->qinfo);
+ _rec->dct_tokens=(unsigned char **)oc_calloc_2d(64,
+ _rec->state.nfrags,sizeof(_rec->dct_tokens[0][0]));
+ _rec->extra_bits=(ogg_uint16_t **)oc_calloc_2d(64,
+ _rec->state.nfrags,sizeof(_rec->extra_bits[0][0]));
+ _rec->tok_hists=NULL;
+ _rec->ntok_hists=_rec->ctok_hists=0;
+ _rec->cur_tok_histi=0;
+ _rec->packet_state=OC_PACKET_ANALYZE;
+ oggpackB_writeinit(&_rec->enc_opb);
+ return 0;
+}
+
+static void oc_rec_clear(oc_rec_ctx *_rec){
+ _ogg_free(_rec->tok_hists);
+ oc_free_2d(_rec->extra_bits);
+ oc_free_2d(_rec->dct_tokens);
+ oc_quant_params_clear(&_rec->qinfo);
+ oc_huff_trees_clear(_rec->dec_huff_tables);
+ oggpackB_writeclear(&_rec->enc_opb);
+ oc_state_clear(&_rec->state);
+}
+
+
+static int oc_rec_frame_header_unpack(oc_rec_ctx *_rec){
+ long val;
+ /*Check to make sure this is a data packet.*/
+ theora_read1(&_rec->dec_opb,&val);
+ if(val!=0)return TH_EBADPACKET;
+ /*Read in the frame type (I or P).*/
+ theora_read1(&_rec->dec_opb,&val);
+ _rec->state.frame_type=(int)val;
+ /*Read in the current qi.*/
+ theora_read(&_rec->dec_opb,6,&val);
+ _rec->state.qis[0]=(int)val;
+ theora_read1(&_rec->dec_opb,&val);
+ if(!val)_rec->state.nqis=1;
+ else{
+ theora_read(&_rec->dec_opb,6,&val);
+ _rec->state.qis[1]=(int)val;
+ theora_read1(&_rec->dec_opb,&val);
+ if(!val)_rec->state.nqis=2;
+ else{
+ theora_read(&_rec->dec_opb,6,&val);
+ _rec->state.qis[2]=(int)val;
+ _rec->state.nqis=3;
+ }
+ }
+ if(_rec->state.frame_type==OC_INTRA_FRAME){
+ /*Keyframes have 3 unused configuration bits, holdovers from VP3 days.
+ Most of the other unused bits in the VP3 headers were eliminated.
+ I don't know why these remain.*/
+ theora_read(&_rec->dec_opb,3,&val);
+ if(val!=0)return TH_EIMPL;
+ }
+ return 0;
+}
+
+/*Mark all fragments as coded and in OC_MODE_INTRA.
+ This also builds up the coded fragment list (in coded order), and clears the
+ uncoded fragment list.
+ It does not update the coded macro block list, as that is not used when
+ decoding INTRA frames.*/
+static void oc_rec_mark_all_intra(oc_rec_ctx *_rec){
+ oc_sb *sb;
+ oc_sb *sb_end;
+ int pli;
+ int ncoded_fragis;
+ int prev_ncoded_fragis;
+ prev_ncoded_fragis=ncoded_fragis=0;
+ sb=sb_end=_rec->state.sbs;
+ for(pli=0;pli<3;pli++){
+ const oc_fragment_plane *fplane;
+ fplane=_rec->state.fplanes+pli;
+ sb_end+=fplane->nsbs;
+ for(;sb<sb_end;sb++){
+ int quadi;
+ for(quadi=0;quadi<4;quadi++)if(sb->quad_valid&1<<quadi){
+ int bi;
+ for(bi=0;bi<4;bi++){
+ int fragi;
+ fragi=sb->map[quadi][bi];
+ if(fragi>=0){
+ oc_fragment *frag;
+ frag=_rec->state.frags+fragi;
+ frag->coded=1;
+ frag->mbmode=OC_MODE_INTRA;
+ _rec->state.coded_fragis[ncoded_fragis++]=fragi;
+ }
+ }
+ }
+ }
+ _rec->state.ncoded_fragis[pli]=ncoded_fragis-prev_ncoded_fragis;
+ prev_ncoded_fragis=ncoded_fragis;
+ _rec->state.nuncoded_fragis[pli]=0;
+ }
+}
+
+/*Decodes the bit flags for whether or not each super block is partially
+   coded.
+ Return: The number of partially coded super blocks.*/
+static int oc_rec_partial_sb_flags_unpack(oc_rec_ctx *_rec){
+ oc_sb *sb;
+ oc_sb *sb_end;
+ long val;
+ int flag;
+ int npartial;
+ int run_count;
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ sb=_rec->state.sbs;
+ sb_end=sb+_rec->state.nsbs;
+ run_count=npartial=0;
+ while(sb<sb_end){
+ int full_run;
+ run_count=oc_sb_run_unpack(&_rec->dec_opb);
+ full_run=run_count>=4129;
+ do{
+ sb->coded_partially=flag;
+ sb->coded_fully=0;
+ npartial+=flag;
+ sb++;
+ }
+ while(--run_count>0&&sb<sb_end);
+ if(full_run&&sb<sb_end){
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ }
+ else flag=!flag;
+ }
+ /*TODO: run_count should be 0 here.
+ If it's not, we should issue a warning of some kind.*/
+ return npartial;
+}
+
+/*Decodes the bit flags for whether or not each non-partially-coded super
+   block is fully coded.
+  This function should only be called if there is at least one
+   non-partially-coded super block.*/
+static void oc_rec_coded_sb_flags_unpack(oc_rec_ctx *_rec){
+ oc_sb *sb;
+ oc_sb *sb_end;
+ long val;
+ int flag;
+ int run_count;
+ sb=_rec->state.sbs;
+ sb_end=sb+_rec->state.nsbs;
+ /*Skip partially coded super blocks.*/
+ for(;sb->coded_partially;sb++);
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ while(sb<sb_end){
+ int full_run;
+ run_count=oc_sb_run_unpack(&_rec->dec_opb);
+ full_run=run_count>=4129;
+ for(;sb<sb_end;sb++){
+ if(sb->coded_partially)continue;
+ if(run_count--<=0)break;
+ sb->coded_fully=flag;
+ }
+ if(full_run&&sb<sb_end){
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ }
+ else flag=!flag;
+ }
+ /*TODO: run_count should be 0 here.
+ If it's not, we should issue a warning of some kind.*/
+}
+
+static void oc_rec_coded_flags_unpack(oc_rec_ctx *_rec){
+ oc_sb *sb;
+ oc_sb *sb_end;
+ long val;
+ int npartial;
+ int pli;
+ int flag;
+ int run_count;
+ int ncoded_fragis;
+ int prev_ncoded_fragis;
+ int nuncoded_fragis;
+ int prev_nuncoded_fragis;
+ npartial=oc_rec_partial_sb_flags_unpack(_rec);
+ if(npartial<_rec->state.nsbs)oc_rec_coded_sb_flags_unpack(_rec);
+ if(npartial>0){
+ theora_read1(&_rec->dec_opb,&val);
+ flag=!(int)val;
+ }
+ else flag=0;
+ run_count=0;
+ prev_ncoded_fragis=ncoded_fragis=prev_nuncoded_fragis=nuncoded_fragis=0;
+ sb=sb_end=_rec->state.sbs;
+ for(pli=0;pli<3;pli++){
+ const oc_fragment_plane *fplane;
+ fplane=_rec->state.fplanes+pli;
+ sb_end+=fplane->nsbs;
+ for(;sb<sb_end;sb++){
+ int quadi;
+ for(quadi=0;quadi<4;quadi++)if(sb->quad_valid&1<<quadi){
+ int bi;
+ for(bi=0;bi<4;bi++){
+ int fragi;
+ fragi=sb->map[quadi][bi];
+ if(fragi>=0){
+ oc_fragment *frag;
+ frag=_rec->state.frags+fragi;
+ if(sb->coded_fully)frag->coded=1;
+ else if(!sb->coded_partially)frag->coded=0;
+ else{
+ if(run_count<=0){
+ run_count=oc_block_run_unpack(&_rec->dec_opb);
+ flag=!flag;
+ }
+ run_count--;
+ frag->coded=flag;
+ }
+ if(frag->coded)_rec->state.coded_fragis[ncoded_fragis++]=fragi;
+ else *(_rec->state.uncoded_fragis-++nuncoded_fragis)=fragi;
+ }
+ }
+ }
+ }
+ _rec->state.ncoded_fragis[pli]=ncoded_fragis-prev_ncoded_fragis;
+ prev_ncoded_fragis=ncoded_fragis;
+ _rec->state.nuncoded_fragis[pli]=nuncoded_fragis-prev_nuncoded_fragis;
+ prev_nuncoded_fragis=nuncoded_fragis;
+ }
+ /*TODO: run_count should be 0 here.
+ If it's not, we should issue a warning of some kind.*/
+}
+
+
+
+typedef int (*oc_mode_unpack_func)(oggpack_buffer *_opb);
+
+static int oc_vlc_mode_unpack(oggpack_buffer *_opb){
+ long val;
+ int i;
+ for(i=0;i<7;i++){
+ theora_read1(_opb,&val);
+ if(!val)break;
+ }
+ return i;
+}
+
+static int oc_clc_mode_unpack(oggpack_buffer *_opb){
+ long val;
+ theora_read(_opb,3,&val);
+ return (int)val;
+}
+
+/*Unpacks the list of macro block modes for INTER frames.*/
+void oc_rec_mb_modes_unpack(oc_rec_ctx *_rec){
+ oc_mode_unpack_func mode_unpack;
+ oc_mb *mb;
+ oc_mb *mb_end;
+ const int *alphabet;
+ long val;
+ int scheme0_alphabet[8];
+ int mode_scheme;
+ theora_read(&_rec->dec_opb,3,&val);
+ mode_scheme=(int)val;
+ if(mode_scheme==0){
+ int mi;
+ /*Just in case, initialize the modes to something.
+ If the bitstream doesn't contain each index exactly once, it's likely
+ corrupt and the rest of the packet is garbage anyway, but this way we
+ won't crash, and we'll decode SOMETHING.*/
+ for(mi=0;mi<OC_NMODES;mi++)scheme0_alphabet[mi]=OC_MODE_INTER_NOMV;
+ for(mi=0;mi<OC_NMODES;mi++){
+ theora_read(&_rec->dec_opb,3,&val);
+ scheme0_alphabet[val]=OC_MODE_ALPHABETS[6][mi];
+ }
+ alphabet=scheme0_alphabet;
+ }
+ else alphabet=OC_MODE_ALPHABETS[mode_scheme-1];
+ if(mode_scheme==7)mode_unpack=oc_clc_mode_unpack;
+ else mode_unpack=oc_vlc_mode_unpack;
+ mb=_rec->state.mbs;
+ mb_end=mb+_rec->state.nmbs;
+ for(;mb<mb_end;mb++)if(mb->mode!=OC_MODE_INVALID){
+ int bi;
+ for(bi=0;bi<4;bi++){
+ int fragi;
+ fragi=mb->map[0][bi];
+ if(fragi>=0&&_rec->state.frags[fragi].coded)break;
+ }
+ if(bi<4)mb->mode=alphabet[(*mode_unpack)(&_rec->dec_opb)];
+ else mb->mode=OC_MODE_INTER_NOMV;
+ }
+}
+
+
+
+typedef int (*oc_mv_comp_unpack_func)(oggpack_buffer *_opb);
+
+static int oc_vlc_mv_comp_unpack(oggpack_buffer *_opb){
+ long bits;
+ int mvsigned[2];
+ theora_read(_opb,3,&bits);
+ switch(bits){
+ case 0:return 0;
+ case 1:return 1;
+ case 2:return -1;
+ case 3:{
+ mvsigned[0]=2;
+ theora_read1(_opb,&bits);
+ }break;
+ case 4:{
+ mvsigned[0]=3;
+ theora_read1(_opb,&bits);
+ }break;
+ case 5:{
+ theora_read(_opb,3,&bits);
+ mvsigned[0]=4+(bits>>1);
+ bits&=1;
+ }break;
+ case 6:{
+ theora_read(_opb,4,&bits);
+ mvsigned[0]=8+(bits>>1);
+ bits&=1;
+ }break;
+ case 7:{
+ theora_read(_opb,5,&bits);
+ mvsigned[0]=16+(bits>>1);
+ bits&=1;
+ }break;
+ }
+ mvsigned[1]=-mvsigned[0];
+ return mvsigned[bits];
+}
+
+static int oc_clc_mv_comp_unpack(oggpack_buffer *_opb){
+ long bits;
+ int mvsigned[2];
+ theora_read(_opb,6,&bits);
+ mvsigned[0]=bits>>1;
+ mvsigned[1]=-mvsigned[0];
+ return mvsigned[bits&1];
+}
+
+/*Unpacks the list of motion vectors for INTER frames.
+  Does not propagate the macro block modes and motion vectors to the individual
+ fragments.
+ The purpose of this function is solely to skip these bits in the packet.*/
+static void oc_rec_mv_unpack(oc_rec_ctx *_rec){
+ oc_mv_comp_unpack_func mv_comp_unpack;
+ oc_mb *mb;
+ oc_mb *mb_end;
+ const int *map_idxs;
+ long val;
+ int map_nidxs;
+ theora_read1(&_rec->dec_opb,&val);
+ mv_comp_unpack=val?oc_clc_mv_comp_unpack:oc_vlc_mv_comp_unpack;
+ map_idxs=OC_MB_MAP_IDXS[_rec->state.info.pixel_fmt];
+ map_nidxs=OC_MB_MAP_NIDXS[_rec->state.info.pixel_fmt];
+ mb=_rec->state.mbs;
+ mb_end=mb+_rec->state.nmbs;
+ for(;mb<mb_end;mb++)if(mb->mode!=OC_MODE_INVALID){
+ int coded[13];
+ int codedi;
+ int ncoded;
+ int mapi;
+ int mapii;
+ int fragi;
+ /*Search for at least one coded fragment.*/
+ ncoded=mapii=0;
+ do{
+ mapi=map_idxs[mapii];
+ fragi=mb->map[mapi>>2][mapi&3];
+ if(fragi>=0&&_rec->state.frags[fragi].coded)coded[ncoded++]=mapi;
+ }
+ while(++mapii<map_nidxs);
+ if(ncoded<=0)continue;
+ switch(mb->mode){
+ case OC_MODE_INTER_MV_FOUR:{
+ int bi;
+ /*Mark the tail of the list, so we don't accidentally go past it.*/
+ coded[ncoded]=-1;
+ for(bi=codedi=0;bi<4;bi++)if(coded[codedi]==bi){
+ codedi++;
+ (*mv_comp_unpack)(&_rec->dec_opb);
+ (*mv_comp_unpack)(&_rec->dec_opb);
+ }
+ }break;
+ case OC_MODE_INTER_MV:{
+ (*mv_comp_unpack)(&_rec->dec_opb);
+ (*mv_comp_unpack)(&_rec->dec_opb);
+ }break;
+ case OC_MODE_GOLDEN_MV:{
+ (*mv_comp_unpack)(&_rec->dec_opb);
+ (*mv_comp_unpack)(&_rec->dec_opb);
+ }break;
+ }
+ }
+}
+
+static void oc_rec_block_qis_unpack(oc_rec_ctx *_rec){
+ int *coded_fragi;
+ int *coded_fragi_end;
+ int ncoded_fragis;
+ ncoded_fragis=_rec->state.ncoded_fragis[0]+
+ _rec->state.ncoded_fragis[1]+_rec->state.ncoded_fragis[2];
+ if(ncoded_fragis<=0)return;
+ coded_fragi=_rec->state.coded_fragis;
+ coded_fragi_end=coded_fragi+ncoded_fragis;
+ if(_rec->state.nqis>1){
+ long val;
+ int flag;
+ int nqi0;
+ int run_count;
+ /*If this frame has more than one qi value, we decode a qi index for each
+ fragment, using two passes of the same binary RLE scheme used for
+ super-block coded bits.
+ The first pass marks each fragment as having a qii of 0 or greater than
+ 0, and the second pass (if necessary), distinguishes between a qii of
+ 1 and 2.
+ We just store the qii in the fragment.*/
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ run_count=nqi0=0;
+ while(coded_fragi<coded_fragi_end){
+ int full_run;
+ run_count=oc_sb_run_unpack(&_rec->dec_opb);
+ full_run=run_count>=4129;
+ do{
+ _rec->state.frags[*coded_fragi++].qi=flag;
+ nqi0+=!flag;
+ }
+ while(--run_count>0&&coded_fragi<coded_fragi_end);
+ if(full_run&&coded_fragi<coded_fragi_end){
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ }
+ else flag=!flag;
+ }
+ /*TODO: run_count should be 0 here.
+ If it's not, we should issue a warning of some kind.*/
+ /*If we have 3 different qi's for this frame, and there was at least one
+ fragment with a non-zero qi, make the second pass.*/
+ if(_rec->state.nqis==3&&nqi0<ncoded_fragis){
+ /*Skip qii==0 fragments.*/
+ for(coded_fragi=_rec->state.coded_fragis;
+ _rec->state.frags[*coded_fragi].qi==0;coded_fragi++);
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ while(coded_fragi<coded_fragi_end){
+ int full_run;
+ run_count=oc_sb_run_unpack(&_rec->dec_opb);
+ full_run=run_count>=4129;
+ for(;coded_fragi<coded_fragi_end;coded_fragi++){
+ oc_fragment *frag;
+ frag=_rec->state.frags+*coded_fragi;
+ if(frag->qi==0)continue;
+ if(run_count--<=0)break;
+ frag->qi+=flag;
+ }
+ if(full_run&&coded_fragi<coded_fragi_end){
+ theora_read1(&_rec->dec_opb,&val);
+ flag=(int)val;
+ }
+ else flag=!flag;
+ }
+ /*TODO: run_count should be 0 here.
+ If it's not, we should issue a warning of some kind.*/
+ }
+ }
+}
+
+/*Unpacks the DC coefficient tokens.
+ Unlike when unpacking the AC coefficient tokens, we actually need to decode
+ the DC coefficient values now so that we can do DC prediction.
+  _huff_idxs: The index of the Huffman table to use for each color plane.
+ _ntoks_left: The number of tokens left to be decoded in each color plane for
+ each coefficient.
+ This is updated as EOB tokens and zero run tokens are decoded.
+ Return: The length of any outstanding EOB run.*/
+static int oc_rec_dc_coeff_unpack(oc_rec_ctx *_rec,int _huff_idxs[3],
+ int *_tok_hists[3],int _ntoks_left[3][64]){
+ long val;
+ int *coded_fragi;
+ int *coded_fragi_end;
+ int run_counts[64];
+ int cfi;
+ int eobi;
+ int eobs;
+ int ti;
+ int ebi;
+ int pli;
+ int rli;
+ eobs=0;
+ ti=ebi=0;
+ coded_fragi_end=coded_fragi=_rec->state.coded_fragis;
+ for(pli=0;pli<3;pli++){
+ coded_fragi_end+=_rec->state.ncoded_fragis[pli];
+ memset(run_counts,0,sizeof(run_counts));
+ _rec->eob_runs[pli][0]=eobs;
+ /*Continue any previous EOB run, if there was one.*/
+ for(eobi=eobs;eobi-->0&&coded_fragi<coded_fragi_end;coded_fragi++);
+ cfi=0;
+ while(eobs<_ntoks_left[pli][0]-cfi){
+ int token;
+ int neb;
+ int eb;
+ int skip;
+ cfi+=eobs;
+ run_counts[63]+=eobs;
+ token=oc_huff_token_decode(&_rec->dec_opb,
+ _rec->dec_huff_tables[_huff_idxs[pli]]);
+ _rec->dct_tokens[0][ti++]=(char)token;
+ _tok_hists[pli][token]++;
+ neb=OC_DCT_TOKEN_EXTRA_BITS[token];
+ if(neb){
+ theora_read(&_rec->dec_opb,neb,&val);
+ eb=(int)val;
+ _rec->extra_bits[0][ebi++]=(ogg_int16_t)eb;
+ }
+ else eb=0;
+ skip=oc_dct_token_skip(token,eb);
+ if(skip<0){
+ eobs=eobi=-skip;
+ while(eobi-->0&&coded_fragi<coded_fragi_end)coded_fragi++;
+ }
+ else{
+ run_counts[skip-1]++;
+ cfi++;
+ eobs=0;
+ coded_fragi++;
+ }
+ }
+ _rec->ti0[pli][0]=ti;
+ _rec->ebi0[pli][0]=ebi;
+ /*Set the EOB count to the portion of the last EOB run which extends past
+ this coefficient.*/
+ eobs=eobs+cfi-_ntoks_left[pli][0];
+ /*Add the portion of the last EOB which was included in this coefficient
+ to the longest run length.*/
+ run_counts[63]+=_ntoks_left[pli][0]-cfi;
+ /*And convert the run_counts array to a moment table.*/
+ for(rli=63;rli-->0;)run_counts[rli]+=run_counts[rli+1];
+ /*Finally, subtract off the number of coefficients that have been
+ accounted for by runs started in this coefficient.*/
+ for(rli=64;rli-->0;)_ntoks_left[pli][rli]-=run_counts[rli];
+ }
+ return eobs;
+}
+
+/*Unpacks the AC coefficient tokens.
+ This can completely discard coefficient values while unpacking, and so is
+ somewhat simpler than unpacking the DC coefficient tokens.
+ _huff_idxs: The indices of the Huffman tables to use for each color plane.
+ _tok_hists: The token histograms to update for each color plane.
+ _ntoks_left: The number of tokens left to be decoded in each color plane for
+ each coefficient.
+ This is updated as EOB tokens and zero run tokens are decoded.
+ _eobs: The length of any outstanding EOB run from previous
+ coefficients.
+ Return: The length of any outstanding EOB run.*/
+static int oc_rec_ac_coeff_unpack(oc_rec_ctx *_rec,int _zzi,int _huff_idxs[3],
+ int *_tok_hists[3],int _ntoks_left[3][64],int _eobs){
+ long val;
+ int run_counts[64];
+ int cfi;
+ int ti;
+ int ebi;
+ int pli;
+ int rli;
+ ti=ebi=0;
+ for(pli=0;pli<3;pli++){
+ memset(run_counts,0,sizeof(run_counts));
+ _rec->eob_runs[pli][_zzi]=_eobs;
+ cfi=0;
+ while(_eobs<_ntoks_left[pli][_zzi]-cfi){
+ int token;
+ int neb;
+ int eb;
+ int skip;
+ cfi+=_eobs;
+ run_counts[63]+=_eobs;
+ token=oc_huff_token_decode(&_rec->dec_opb,
+ _rec->dec_huff_tables[_huff_idxs[pli]]);
+ _rec->dct_tokens[_zzi][ti++]=(char)token;
+ _tok_hists[pli][token]++;
+ neb=OC_DCT_TOKEN_EXTRA_BITS[token];
+ if(neb){
+ theora_read(&_rec->dec_opb,neb,&val);
+ eb=(int)val;
+ _rec->extra_bits[_zzi][ebi++]=(ogg_int16_t)eb;
+ }
+ else eb=0;
+ skip=oc_dct_token_skip(token,eb);
+ if(skip<0)_eobs=-skip;
+ else{
+ run_counts[skip-1]++;
+ cfi++;
+ _eobs=0;
+ }
+ }
+ _rec->ti0[pli][_zzi]=ti;
+ _rec->ebi0[pli][_zzi]=ebi;
+ /*Set the EOB count to the portion of the last EOB run which extends past
+ this coefficient.*/
+ _eobs=_eobs+cfi-_ntoks_left[pli][_zzi];
+ /*Add the portion of the last EOB which was included in this coefficient
+ to the longest run length.*/
+ run_counts[63]+=_ntoks_left[pli][_zzi]-cfi;
+ /*And convert the run_counts array to a moment table.*/
+ for(rli=63;rli-->0;)run_counts[rli]+=run_counts[rli+1];
+ /*Finally, subtract off the number of coefficients that have been
+ accounted for by runs started in this coefficient.*/
+ for(rli=64-_zzi;rli-->0;)_ntoks_left[pli][_zzi+rli]-=run_counts[rli];
+ }
+ return _eobs;
+}
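As a worked example of the run-count bookkeeping that closes both unpack loops: a token with skip==3 increments run_counts[2]; the descending suffix sum then folds it into run_counts[0], run_counts[1], and run_counts[2], so _ntoks_left is reduced by one at the current coefficient and at the next two, exactly the coefficients the run covers.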
+
+/*Tokens describing the DCT coefficients that belong to each fragment are
+ stored in the bitstream grouped by coefficient, not by fragment.
+ This means that we either decode all the tokens in order, building up a
+ separate coefficient list for each fragment as we go, and then go back and
+ do the iDCT on each fragment, or we have to create separate lists of tokens
+ for each coefficient, so that we can pull the next token required off the
+ head of the appropriate list when decoding a specific fragment.
+ The former was VP3's choice, and it meant 2*w*h extra storage for all the
+ decoded coefficient values.
+ We take the latter option, which lets us store just one or three bytes per
+ token (generally far fewer than the number of coefficients, due to EOB
+ tokens and zero runs), and which requires us to only maintain a counter for
+ each of the 64 coefficients, instead of a counter for every fragment to
+ determine where the next token goes.
+ Actually, we use 3 counters per coefficient, one for each color plane, so we
+ can decode all color planes simultaneously.
+ This lets color conversion, etc., be done as soon as a full MCU (one or
+ two super block rows) is decoded, while the image data is still in cache.*/
+static void oc_rec_residual_tokens_unpack(oc_rec_ctx *_rec){
+ static const int OC_HUFF_LIST_MAX[5]={1,6,15,28,64};
+ long val;
+ int ntoks_left[3][64];
+ int huff_idxs[3];
+ int *tok_hists[3];
+ int pli;
+ int zzi;
+ int hgi;
+ int huffi_y;
+ int huffi_c;
+ int eobs;
+ memset(_rec->tok_hist,0,sizeof(_rec->tok_hist));
+ for(pli=0;pli<3;pli++)for(zzi=0;zzi<64;zzi++){
+ ntoks_left[pli][zzi]=_rec->state.ncoded_fragis[pli];
+ }
+ theora_read(&_rec->dec_opb,4,&val);
+ huffi_y=(int)val;
+ theora_read(&_rec->dec_opb,4,&val);
+ huffi_c=(int)val;
+ huff_idxs[0]=huffi_y;
+ huff_idxs[1]=huff_idxs[2]=huffi_c;
+ tok_hists[0]=_rec->tok_hist[0][0];
+ tok_hists[1]=tok_hists[2]=_rec->tok_hist[1][0];
+ _rec->eob_runs[0][0]=0;
+ eobs=oc_rec_dc_coeff_unpack(_rec,huff_idxs,tok_hists,ntoks_left);
+ theora_read(&_rec->dec_opb,4,&val);
+ huffi_y=(int)val;
+ theora_read(&_rec->dec_opb,4,&val);
+ huffi_c=(int)val;
+ zzi=1;
+ for(hgi=1;hgi<5;hgi++){
+ huff_idxs[0]=huffi_y+(hgi<<4);
+ huff_idxs[1]=huff_idxs[2]=huffi_c+(hgi<<4);
+ tok_hists[0]=_rec->tok_hist[0][hgi];
+ tok_hists[1]=tok_hists[2]=_rec->tok_hist[1][hgi];
+ for(;zzi<OC_HUFF_LIST_MAX[hgi];zzi++){
+ eobs=oc_rec_ac_coeff_unpack(_rec,zzi,huff_idxs,tok_hists,ntoks_left,eobs);
+ }
+ }
+ /*TODO: eobs should be exactly zero, or 4096 or greater.
+ The second case occurs when an EOB run of size zero is encountered, which
+ gets treated as an infinite EOB run (where infinity is INT_MAX).
+ If neither of these conditions holds, then a warning should be issued.*/
+}
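The group loop above pairs each zig-zag range with a Huffman group; as a small sketch (not part of the commit, using only constants visible in this file), the mapping from a coefficient index and the 4-bit table choice to the final table index could be written as:

/*Sketch only: which Huffman table index codes the tokens of a given zig-zag
  coefficient, given the 4-bit table choice read from the bitstream for the
  corresponding plane.
  The group boundaries mirror OC_HUFF_LIST_MAX above.*/
static int oc_zzi_to_huff_idx(int _zzi,int _huffi){
  static const int OC_HUFF_LIST_MAX[5]={1,6,15,28,64};
  int hgi;
  for(hgi=0;hgi<5;hgi++)if(_zzi<OC_HUFF_LIST_MAX[hgi])break;
  return _huffi+(hgi<<4);
}

Note that the 4-bit choices for the DC group and for the AC groups are read separately from the bitstream, so _huffi here is whichever of the two applies.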
+
+static int oc_rec_set_huffman_codes(oc_rec_ctx *_rec,
+ const th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]){
+ int ret;
+ if(_rec==NULL)return TH_EFAULT;
+ /*If we've already emitted the setup header, then don't let the user set the
+ tables again.*/
+ if(_rec->packet_state>=OC_PACKET_SETUP_HDR&&
+ _rec->packet_state<=OC_PACKET_REWRITE){
+ return TH_EINVAL;
+ }
+ if(_codes==NULL)_codes=TH_VP31_HUFF_CODES;
+ /*Validate the codes.*/
+ oggpackB_reset(&_rec->enc_opb);
+ ret=oc_huff_codes_pack(&_rec->enc_opb,_codes);
+ if(ret<0)return ret;
+ memcpy(_rec->enc_huff_codes,_codes,sizeof(_rec->enc_huff_codes));
+ _rec->packet_state=OC_PACKET_INFO_HDR;
+ return 0;
+}
+
+/*Computes the number of bits used for each of the potential Huffman codes for
+ the given list of token counts.
+ The bits are added to whatever the current bit counts are.*/
+static void oc_rec_count_bits(oc_rec_ctx *_rec,int _hgi,
+ const int _token_counts[TH_NDCT_TOKENS],int _bit_counts[16]){
+ int huffi;
+ int huff_base;
+ int token;
+ huff_base=_hgi<<4;
+ for(huffi=huff_base;huffi<huff_base+16;huffi++){
+ for(token=0;token<TH_NDCT_TOKENS;token++){
+ _bit_counts[huffi-huff_base]+=
+ _token_counts[token]*_rec->enc_huff_codes[huffi][token].nbits;
+ }
+ }
+}
+
+/*Returns the Huffman index using the fewest number of bits.*/
+static int oc_rec_select_huffi(int _bit_counts[16]){
+ int best_huffi;
+ int huffi;
+ best_huffi=0;
+ for(huffi=1;huffi<16;huffi++)if(_bit_counts[huffi]<_bit_counts[best_huffi]){
+ best_huffi=huffi;
+ }
+ return best_huffi;
+}
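Together, oc_rec_count_bits() and oc_rec_select_huffi() perform an exhaustive search over the 16 candidate tables of a group. A self-contained sketch of the same selection (assuming only th_huff_code and TH_NDCT_TOKENS from the public headers):

/*Sketch only: given one token histogram and the 16 candidate code sets of a
  Huffman group, return the index of the set that codes the tokens in the
  fewest total bits.*/
static int oc_cheapest_table(const th_huff_code _codes[16][TH_NDCT_TOKENS],
 const int _token_counts[TH_NDCT_TOKENS]){
  long best_bits;
  int  best_huffi;
  int  huffi;
  best_huffi=-1;
  best_bits=0;
  for(huffi=0;huffi<16;huffi++){
    long bits;
    int  token;
    bits=0;
    for(token=0;token<TH_NDCT_TOKENS;token++){
      bits+=(long)_token_counts[token]*_codes[huffi][token].nbits;
    }
    if(best_huffi<0||bits<best_bits){
      best_bits=bits;
      best_huffi=huffi;
    }
  }
  return best_huffi;
}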
+
+/*Packs the DCT tokens for the given range of coefficient indices in zig-zag
+ order using the given Huffman tables.*/
+static void oc_rec_huff_group_pack(oc_rec_ctx *_rec,int _zzi_start,
+ int _zzi_end,int _huff_idxs[3]){
+ int zzi;
+ for(zzi=_zzi_start;zzi<_zzi_end;zzi++){
+ int pli;
+ int ti;
+ int ebi;
+ ti=0;
+ ebi=0;
+ for(pli=0;pli<3;pli++){
+ const th_huff_code *huff_codes;
+ int token;
+ int ti_end;
+ /*Step 2: Write the tokens using these tables.*/
+ huff_codes=_rec->enc_huff_codes[_huff_idxs[pli]];
+ /*Note: dct_token_offs[3] is really the ndct_tokens table.
+ Yes, this seems like a horrible hack, yet it's strangely elegant.*/
+ ti_end=_rec->ti0[pli][zzi];
+ for(;ti<ti_end;ti++){
+ token=_rec->dct_tokens[zzi][ti];
+ oggpackB_write(&_rec->enc_opb,huff_codes[token].pattern,
+ huff_codes[token].nbits);
+ if(OC_DCT_TOKEN_EXTRA_BITS[token]){
+ oggpackB_write(&_rec->enc_opb,_rec->extra_bits[zzi][ebi++],
+ OC_DCT_TOKEN_EXTRA_BITS[token]);
+ }
+ }
+ }
+ }
+}
+
+static void oc_rec_residual_tokens_pack(oc_rec_ctx *_rec,
+ const oc_tok_hist _tok_hist[2][5]){
+ static const int OC_HUFF_LIST_MIN[6]={0,1,6,15,28,64};
+ static const int *OC_HUFF_LIST_MAX=OC_HUFF_LIST_MIN+1;
+ int bits_y[16];
+ int bits_c[16];
+ int huff_idxs[5][3];
+ int huffi_y;
+ int huffi_c;
+ int hgi;
+ /*Step 1a: Select Huffman tables for the DC token list.*/
+ memset(bits_y,0,sizeof(bits_y));
+ memset(bits_c,0,sizeof(bits_c));
+ oc_rec_count_bits(_rec,0,_tok_hist[0][0],bits_y);
+ oc_rec_count_bits(_rec,0,_tok_hist[1][0],bits_c);
+ huffi_y=oc_rec_select_huffi(bits_y);
+ huffi_c=oc_rec_select_huffi(bits_c);
+ huff_idxs[0][0]=huffi_y;
+ huff_idxs[0][1]=huff_idxs[0][2]=huffi_c;
+ /*Step 1b: Write the DC token list with the chosen tables.*/
+ oggpackB_write(&_rec->enc_opb,huffi_y,4);
+ oggpackB_write(&_rec->enc_opb,huffi_c,4);
+ oc_rec_huff_group_pack(_rec,0,1,huff_idxs[0]);
+ /*Step 2a: Select Huffman tables for the AC token lists.*/
+ memset(bits_y,0,sizeof(bits_y));
+ memset(bits_c,0,sizeof(bits_c));
+ for(hgi=1;hgi<5;hgi++){
+ oc_rec_count_bits(_rec,hgi,_tok_hist[0][hgi],bits_y);
+ oc_rec_count_bits(_rec,hgi,_tok_hist[1][hgi],bits_c);
+ }
+ huffi_y=oc_rec_select_huffi(bits_y);
+ huffi_c=oc_rec_select_huffi(bits_c);
+ /*Step 2b: Write the AC token lists using the chosen tables.*/
+ oggpackB_write(&_rec->enc_opb,huffi_y,4);
+ oggpackB_write(&_rec->enc_opb,huffi_c,4);
+ for(hgi=1;hgi<5;hgi++){
+ huff_idxs[hgi][0]=huffi_y+(hgi<<4);
+ huff_idxs[hgi][1]=huff_idxs[hgi][2]=huffi_c+(hgi<<4);
+ oc_rec_huff_group_pack(_rec,OC_HUFF_LIST_MIN[hgi],OC_HUFF_LIST_MAX[hgi],
+ huff_idxs[hgi]);
+ }
+}
+
+
+
+th_rec_ctx *th_recode_alloc(const th_info *_info,const th_setup_info *_setup){
+ oc_rec_ctx *dec;
+ if(_info==NULL||_setup==NULL)return NULL;
+ dec=_ogg_malloc(sizeof(*dec));
+ if(oc_rec_init(dec,_info,_setup)<0){
+ _ogg_free(dec);
+ return NULL;
+ }
+ dec->state.curframe_num=0;
+ return dec;
+}
+
+void th_recode_free(th_rec_ctx *_rec){
+ if(_rec!=NULL){
+ oc_rec_clear(_rec);
+ _ogg_free(_rec);
+ }
+}
+
+int th_recode_packetin(th_rec_ctx *_rec,const ogg_packet *_op,
+ ogg_int64_t *_granpos){
+ int ret;
+ if(_rec==NULL||_op==NULL)return TH_EFAULT;
+ /*If the user has already retrieved the statistics, we can't update them any
+ longer.*/
+ if(_rec->packet_state!=OC_PACKET_ANALYZE)return TH_EINVAL;
+ /*A completely empty packet indicates a dropped frame and is treated exactly
+ like an inter frame with no coded blocks.
+ Only proceed if we have a non-empty packet.*/
+ if(_op->bytes!=0){
+ oc_frame_tok_hist *tok_hist;
+ oggpackB_readinit(&_rec->dec_opb,_op->packet,_op->bytes);
+ ret=oc_rec_frame_header_unpack(_rec);
+ if(ret<0)return ret;
+ if(_rec->state.frame_type==OC_INTRA_FRAME){
+ oc_rec_mark_all_intra(_rec);
+ _rec->state.keyframe_num=_rec->state.curframe_num;
+ }
+ else{
+ oc_rec_coded_flags_unpack(_rec);
+ oc_rec_mb_modes_unpack(_rec);
+ oc_rec_mv_unpack(_rec);
+ }
+ oc_rec_block_qis_unpack(_rec);
+ if(_rec->ntok_hists>=_rec->ctok_hists){
+ _rec->ctok_hists=_rec->ctok_hists<<1|1;
+ _rec->tok_hists=(oc_frame_tok_hist *)_ogg_realloc(_rec->tok_hists,
+ _rec->ctok_hists*sizeof(*_rec->tok_hists));
+ }
+ tok_hist=_rec->tok_hists+_rec->ntok_hists++;
+ tok_hist->pkt_sz=_op->bytes;
+ tok_hist->dct_offs=oggpackB_bits(&_rec->dec_opb);
+ memcpy(tok_hist->ncoded_fragis,_rec->state.ncoded_fragis,
+ sizeof(tok_hist->ncoded_fragis));
+ oc_rec_residual_tokens_unpack(_rec);
+ /*Update granule position.*/
+ _rec->state.granpos=
+ (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
+ (_rec->state.curframe_num-_rec->state.keyframe_num);
+ tok_hist->granpos=_rec->state.granpos;
+ /*Save the statistics for this frame.*/
+ memcpy(tok_hist->tok_hist,_rec->tok_hist,sizeof(tok_hist->tok_hist));
+ _rec->state.curframe_num++;
+ if(_granpos!=NULL)*_granpos=_rec->state.granpos;
+ return 0;
+ }
+ else{
+ /*Just update the granule position and return.*/
+ _rec->state.granpos=
+ (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
+ (_rec->state.curframe_num-_rec->state.keyframe_num);
+ _rec->state.curframe_num++;
+ if(_granpos!=NULL)*_granpos=_rec->state.granpos;
+ return TH_DUPFRAME;
+ }
+}
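For a concrete instance of the granule position computed above: with a keyframe_granule_shift of 6, a keyframe at frame number 2 followed by an inter frame at frame number 5 yields a granule position of (2<<6)+(5-2)==131.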
+
+int th_recode_ctl(th_rec_ctx *_rec,int _req,void *_buf,size_t _buf_sz){
+ switch(_req){
+ case TH_ENCCTL_SET_HUFFMAN_CODES:{
+ if(_buf==NULL&&_buf_sz!=0||_buf!=NULL&&
+ _buf_sz!=sizeof(th_huff_code)*TH_NHUFFMAN_TABLES*TH_NDCT_TOKENS){
+ return TH_EINVAL;
+ }
+ return oc_rec_set_huffman_codes(_rec,(const th_huff_table *)_buf);
+ }break;
+ case TH_DECCTL_SET_GRANPOS:{
+ ogg_int64_t granpos;
+ if(_rec==NULL||_buf==NULL)return TH_EFAULT;
+ if(_buf_sz!=sizeof(ogg_int64_t))return TH_EINVAL;
+ granpos=*(ogg_int64_t *)_buf;
+ if(granpos<0)return TH_EINVAL;
+ _rec->state.granpos=granpos;
+ _rec->state.keyframe_num=
+ granpos>>_rec->state.info.keyframe_granule_shift;
+ _rec->state.curframe_num=_rec->state.keyframe_num+
+ (granpos&(1<<_rec->state.info.keyframe_granule_shift)-1);
+ return 0;
+ }break;
+ case TH_RECCTL_GET_TOK_NSTATS:{
+ if(_rec==NULL||_buf==NULL)return TH_EFAULT;
+ if(_buf_sz!=sizeof(long))return TH_EINVAL;
+ *((long *)_buf)=_rec->ntok_hists;
+ return 0;
+ }break;
+ case TH_RECCTL_GET_TOK_STATS:{
+ if(_rec==NULL||_buf==NULL)return TH_EFAULT;
+ if(_buf_sz!=sizeof(const oc_frame_tok_hist **))return TH_EINVAL;
+ if(_rec->packet_state<OC_PACKET_ANALYZE)return TH_EINVAL;
+ /*Update the state to prevent us from invalidating this pointer.*/
+ _rec->packet_state=OC_PACKET_HUFFTABLES;
+ *((const oc_frame_tok_hist **)_buf)=_rec->tok_hists;
+ return 0;
+ }break;
+ default:return TH_EIMPL;
+ }
+}
+
+int th_recode_flushheader(th_rec_ctx *_rec,th_comment *_tc,ogg_packet *_op){
+ return oc_state_flushheader(&_rec->state,&_rec->packet_state,&_rec->enc_opb,
+ &_rec->qinfo,(const th_huff_table *)_rec->enc_huff_codes,_tc->vendor,
+ _tc,_op);
+}
+
+#include <stdio.h>
+
+int th_recode_packet_rewrite(th_rec_ctx *_rec,const ogg_packet *_op_in,
+ ogg_packet *_op_out){
+ int ret;
+ if(_rec==NULL||_op_in==NULL||_op_out==NULL)return TH_EFAULT;
+ /*If we've used all our decoded token histograms, please stop calling us.*/
+ if(_rec->cur_tok_histi>=_rec->ntok_hists)return TH_EINVAL;
+ /*A completely empty packet indicates a dropped frame and is treated exactly
+ like an inter frame with no coded blocks.
+ Only proceed if we have a non-empty packet.*/
+ if(_op_in->bytes!=0){
+ oc_frame_tok_hist *tok_hist;
+ /*Read enough of the packet to figure out what kind of frame we have.
+ This also validates the packet to be sure we can decode it, which is why
+ we don't just use th_packet_iskeyframe().*/
+ oggpackB_readinit(&_rec->dec_opb,_op_in->packet,_op_in->bytes);
+ ret=oc_rec_frame_header_unpack(_rec);
+ if(ret<0)return ret;
+ /*Update granule position.*/
+ if(_rec->state.frame_type==OC_INTRA_FRAME){
+ _rec->state.keyframe_num=_rec->state.curframe_num;
+ }
+ _rec->state.granpos=
+ (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
+ (_rec->state.curframe_num-_rec->state.keyframe_num);
+ _rec->state.curframe_num++;
+ /*Sanity checks to see if the next piece of frame data corresponds to this
+ packet.
+ This isn't a guarantee if someone rewrote the file out from under us, but
+ it at least ensures that we have enough bytes in the packet.
+ TODO: We could re-decode this packet to get the info we need, instead of
+ failing, but that would be more code.*/
+ tok_hist=_rec->tok_hists+_rec->cur_tok_histi;
+ if(tok_hist->granpos!=_rec->state.granpos||
+ tok_hist->pkt_sz!=_op_in->bytes){
+ return TH_EBADPACKET;
+ }
+ _rec->cur_tok_histi++;
+ /*Copy the contents of the input packet up to the DCT tokens.*/
+ oggpackB_reset(&_rec->enc_opb);
+ oggpackB_writecopy(&_rec->enc_opb,_op_in->packet,tok_hist->dct_offs);
+ /*Read the DCT tokens using the old codes.*/
+ oggpackB_readinit(&_rec->dec_opb,_op_in->packet,_op_in->bytes);
+ oggpackB_adv(&_rec->dec_opb,tok_hist->dct_offs);
+ memcpy(_rec->state.ncoded_fragis,tok_hist->ncoded_fragis,
+ sizeof(_rec->state.ncoded_fragis));
+ oc_rec_residual_tokens_unpack(_rec);
+ /*Write the DCT tokens using the new codes.*/
+ memcpy(_rec->state.ncoded_fragis,tok_hist->ncoded_fragis,
+ sizeof(_rec->state.ncoded_fragis));
+ oc_rec_residual_tokens_pack(_rec,
+ (const oc_tok_hist_table *)tok_hist->tok_hist);
+ ret=0;
+ }
+ else{
+ oggpackB_reset(&_rec->enc_opb);
+ /*Just update the granule position and return.*/
+ _rec->state.granpos=
+ (_rec->state.keyframe_num<<_rec->state.info.keyframe_granule_shift)+
+ (_rec->state.curframe_num-_rec->state.keyframe_num);
+ _rec->state.curframe_num++;
+ ret=TH_DUPFRAME;
+ }
+ _op_out->packet=oggpackB_get_buffer(&_rec->enc_opb);
+ _op_out->bytes=oggpackB_bytes(&_rec->enc_opb);
+ _op_out->b_o_s=0;
+ _op_out->e_o_s=_op_in->e_o_s;
+ _op_out->packetno=_rec->state.curframe_num;
+ _op_out->granulepos=_rec->state.granpos;
+ if(_op_out->e_o_s)_rec->packet_state=OC_PACKET_DONE;
+ return ret;
+}
Added: trunk/theora-exp/lib/recode.h
===================================================================
--- trunk/theora-exp/lib/recode.h (rev 0)
+++ trunk/theora-exp/lib/recode.h 2007-05-03 04:29:18 UTC (rev 12913)
@@ -0,0 +1,37 @@
+#include <ogg/ogg.h>
+#if !defined(_recode_H)
+# define _recode_H (1)
+# include "theora/theoradec.h"
+# include "theora/theoraenc.h"
+
+#define TH_RECCTL_GET_TOK_NSTATS (0x8000)
+#define TH_RECCTL_GET_TOK_STATS (0x8001)
+
+typedef int oc_tok_hist[TH_NDCT_TOKENS];
+typedef struct oc_frame_tok_hist oc_frame_tok_hist;
+typedef struct th_rec_ctx th_rec_ctx;
+
+
+
+/*The DCT token histograms for a single frame.*/
+struct oc_frame_tok_hist{
+ oc_tok_hist tok_hist[2][5];
+ ogg_int64_t granpos;
+ long pkt_sz;
+ long dct_offs;
+ int ncoded_fragis[3];
+};
+
+
+
+th_rec_ctx *th_recode_alloc(const th_info *_info,const th_setup_info *_setup);
+void th_recode_free(th_rec_ctx *_rec);
+
+int th_recode_packetin(th_rec_ctx *_rec,const ogg_packet *_op,
+ ogg_int64_t *_granpos);
+int th_recode_ctl(th_rec_ctx *_rec,int _req,void *_buf,size_t _buf_sz);
+int th_recode_flushheader(th_rec_ctx *_enc,th_comment *_tc,ogg_packet *_op);
+int th_recode_packet_rewrite(th_rec_ctx *_rec,const ogg_packet *_op_in,
+ ogg_packet *_op_out);
+
+#endif
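Taken together, these declarations describe a two-pass interface: analyze every packet to gather token histograms, install better codes, then rewrite each packet. A minimal sketch of a caller (optimize_codes() is a hypothetical placeholder for whatever optimizer the rehuff tool uses to build new tables from the histograms; error handling and the Ogg muxing are omitted):

#include "recode.h"

/*Hypothetical helper: build improved codes from the gathered histograms.*/
extern void optimize_codes(const oc_frame_tok_hist *_stats,long _nstats,
 th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]);

static void rehuff_stream(th_rec_ctx *_rec,const ogg_packet *_pkts,int _npkts,
 th_huff_code _codes[TH_NHUFFMAN_TABLES][TH_NDCT_TOKENS]){
  const oc_frame_tok_hist *stats;
  long                     nstats;
  int                      pki;
  /*Pass 1: gather DCT token statistics from every video packet.*/
  for(pki=0;pki<_npkts;pki++)th_recode_packetin(_rec,_pkts+pki,NULL);
  th_recode_ctl(_rec,TH_RECCTL_GET_TOK_NSTATS,&nstats,sizeof(nstats));
  th_recode_ctl(_rec,TH_RECCTL_GET_TOK_STATS,&stats,sizeof(stats));
  optimize_codes(stats,nstats,_codes);
  th_recode_ctl(_rec,TH_ENCCTL_SET_HUFFMAN_CODES,_codes,
   sizeof(th_huff_code)*TH_NHUFFMAN_TABLES*TH_NDCT_TOKENS);
  /*The caller would also re-emit the stream headers with
    th_recode_flushheader() so the new codes land in the setup header;
    that step is omitted here.*/
  /*Pass 2: rewrite each packet's DCT tokens with the new codes.*/
  for(pki=0;pki<_npkts;pki++){
    ogg_packet op;
    th_recode_packet_rewrite(_rec,_pkts+pki,&op);
    /*...hand op to the output Ogg stream...*/
  }
}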
Modified: trunk/theora-exp/unix/Makefile
===================================================================
--- trunk/theora-exp/unix/Makefile 2007-05-03 04:22:07 UTC (rev 12912)
+++ trunk/theora-exp/unix/Makefile 2007-05-03 04:29:18 UTC (rev 12913)
@@ -8,6 +8,7 @@
DUMP_VIDEO_TARGET = dump_video
PLAYER_EXAMPLE_TARGET = player_example
ENCODER_EXAMPLE_TARGET = encoder_example
+REHUFF_EXAMPLE_TARGET = rehuff
# The compiler to use
CC = gcc
# The command to use to generate dependency information
@@ -20,9 +21,11 @@
# Extra compilation flags.
# You may get speed increases by including flags such as -O2 or -O3 or
# -ffast-math, or additional flags, depending on your system and compiler.
+# The correct -march=<architecture> flag will also generate much better code
+# on newer architectures.
+CFLAGS = -O3 -Wall -Wno-parentheses -fomit-frame-pointer -fforce-addr -finline-functions -DOC_X86ASM
# The -g flag will generally include debugging information.
-CFLAGS = -O3 -fforce-addr -fomit-frame-pointer -DOC_X86ASM
-#CFLAGS = -g
+#CFLAGS = -g -Wall -Wno-parentheses
# Libraries to link with, and the location of library files.
# Add -lpng -lz if you want to use -DOC_DUMP_IMAGES.
LIBS = -logg -lvorbis -lvorbisenc `sdl-config --libs`
@@ -95,6 +98,32 @@
DUMP_VIDEO_CSOURCES = dump_video.c
ENCODER_EXAMPLE_CSOURCES = encoder_example.c
PLAYER_EXAMPLE_CSOURCES = player_example.c
+#rehuff links statically against these objects, since it uses symbols which
+# we may not want to publicly export from the libraries.
+#This is pretty ugly; feel free to try to clean it up.
+REHUFF_EXAMPLE_CSOURCES = rehuff.c
+REHUFF_CSOURCES = \
+decinfo.c \
+decode.c \
+dequant.c \
+encinfo.c \
+enquant.c \
+fragment.c \
+huffdec.c \
+huffenc.c \
+idct.c \
+info.c \
+internal.c \
+quant.c \
+recode.c \
+state.c \
+$(if $(findstring -DOC_X86ASM,${CFLAGS}), \
+x86/mmxstate.c \
+x86/x86state.c \
+x86/mmxidct.c \
+x86/mmxfrag.c \
+x86/cpu.c \
+)
# Create object file list.
LIBTHEORADEC_OBJS:= ${LIBTHEORADEC_CSOURCES:%.c=${WORKDIR}/%.o}
@@ -102,8 +131,11 @@
DUMP_VIDEO_OBJS:= ${DUMP_VIDEO_CSOURCES:%.c=${WORKDIR}/%.o}
ENCODER_EXAMPLE_OBJS:= ${ENCODER_EXAMPLE_CSOURCES:%.c=${WORKDIR}/%.o}
PLAYER_EXAMPLE_OBJS:= ${PLAYER_EXAMPLE_CSOURCES:%.c=${WORKDIR}/%.o}
+REHUFF_EXAMPLE_OBJS:= ${REHUFF_EXAMPLE_CSOURCES:%.c=${WORKDIR}/%.o} \
+ ${REHUFF_CSOURCES:%.c=${WORKDIR}/%.o}
ALL_OBJS:= ${LIBTHEORADEC_OBJS} ${LIBTHEORAENC_OBJS} \
- ${DUMP_VIDEO_OBJS} ${ENCODER_EXAMPLE_OBJS} ${PLAYER_EXAMPLE_OBJS}
+ ${DUMP_VIDEO_OBJS} ${ENCODER_EXAMPLE_OBJS} ${PLAYER_EXAMPLE_OBJS} \
+ ${REHUFF_EXAMPLE_OBJS}
# Create the dependency file list
ALL_DEPS:= ${ALL_OBJS:%.o=%.d}
# Prepend source path to file names.
@@ -114,16 +146,21 @@
DUMP_VIDEO_CSOURCES:= ${DUMP_VIDEO_CSOURCES:%=${BINSRCDIR}/%}
ENCODER_EXAMPLE_CSOURCES:= ${ENCODER_EXAMPLE_CSOURCES:%=${BINSRCDIR}/%}
PLAYER_EXAMPLE_CSOURCES:= ${PLAYER_EXAMPLE_CSOURCES:%=${BINSRCDIR}/%}
+REHUFF_EXAMPLE_CSOURCES:= ${REHUFF_EXAMPLE_CSOURCES:%=${BINSRCDIR}/%} \
+ ${REHUFF_CSOURCES:%=${LIBSRCDIR}/%}
ALL_CSOURCES:= ${LIBTHEORADEC_CSOURCES} ${LIBTHEORAENC_CSOURCES} \
- ${DUMP_VIDEO_CSOURCES} ${ENCODER_EXAMPLE_CSOURCES} ${PLAYER_EXAMPLE_CSOURCES}
+ ${DUMP_VIDEO_CSOURCES} ${ENCODER_EXAMPLE_CSOURCES} \
+ ${PLAYER_EXAMPLE_CSOURCES} ${REHUFF_EXAMPLE_CSOURCES}
# Prepend target path to file names.
LIBTHEORADEC_TARGET:= ${TARGETLIBDIR}/${LIBTHEORADEC_TARGET}
LIBTHEORAENC_TARGET:= ${TARGETLIBDIR}/${LIBTHEORAENC_TARGET}
DUMP_VIDEO_TARGET:= ${TARGETBINDIR}/${DUMP_VIDEO_TARGET}
ENCODER_EXAMPLE_TARGET:= ${TARGETBINDIR}/${ENCODER_EXAMPLE_TARGET}
PLAYER_EXAMPLE_TARGET:= ${TARGETBINDIR}/${PLAYER_EXAMPLE_TARGET}
+REHUFF_EXAMPLE_TARGET:= ${TARGETBINDIR}/${REHUFF_EXAMPLE_TARGET}
ALL_TARGETS:= ${LIBTHEORADEC_TARGET} ${LIBTHEORAENC_TARGET} \
- ${DUMP_VIDEO_TARGET} ${ENCODER_EXAMPLE_TARGET} ${PLAYER_EXAMPLE_TARGET}
+ ${DUMP_VIDEO_TARGET} ${ENCODER_EXAMPLE_TARGET} ${PLAYER_EXAMPLE_TARGET} \
+ ${REHUFF_EXAMPLE_TARGET}
# Targets:
# Everything (default)
@@ -157,6 +194,11 @@
${CC} ${CFLAGS} -o $@ ${PLAYER_EXAMPLE_OBJS} ${LIBS} \
${LIBTHEORADEC_TARGET}
+# rehuff
+${REHUFF_EXAMPLE_TARGET}: ${REHUFF_EXAMPLE_OBJS}
+ mkdir -p ${TARGETBINDIR}
+ ${CC} ${CFLAGS} -o $@ ${REHUFF_EXAMPLE_OBJS} ${LIBS}
+
# Remove all targets.
clean:
-rm ${ALL_OBJS} ${ALL_DEPS} ${ALL_TARGETS}
@@ -183,4 +225,4 @@
${CC} ${CINCLUDE} ${CFLAGS} -c -o $@ $<
# Include header file dependencies
-include ${ALL_DEPS}
+-include ${ALL_DEPS}