/*
 * AVS (Audio Video Standard) video decoder.
 */
#include "avcodec.h"
#include "bitstream.h"


typedef struct {
    AVFrame picture; /* previously decoded frame, reused as reference */
} AvsContext;

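/* Block types that can appear in an AVS packet; only the palette and
 * video blocks are handled by this decoder. */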
typedef enum {
    AVS_VIDEO     = 0x01,
    AVS_AUDIO     = 0x02,
    AVS_PALETTE   = 0x03,
    AVS_GAME_DATA = 0x04,
} AvsBlockType;

typedef enum {
    AVS_I_FRAME     = 0x00, /* intra frame: every block is coded */
    AVS_P_FRAME_3X3 = 0x01, /* inter frame, 3x3 pixel vectors */
    AVS_P_FRAME_2X2 = 0x02, /* inter frame, 2x2 pixel vectors */
    AVS_P_FRAME_2X3 = 0x03, /* inter frame, 2x3 pixel vectors */
} AvsVideoSubType;

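/*
 * A packet starts with a 4-byte block header: sub_type, type and two
 * bytes that are skipped here.  An optional palette block may come
 * first, in which case a second block header follows the palette data.
 * The frame is a fixed 318x198 pixels, decoded in vect_w x vect_h blocks.
 */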
static int
avs_decode_frame(AVCodecContext * avctx,
                 void *data, int *data_size, const uint8_t * buf, int buf_size)
{
    AvsContext *const avs = avctx->priv_data;
    AVFrame *picture = data;
    AVFrame *const p = &avs->picture;
    const uint8_t *table, *vect;
    uint8_t *out;
    int i, j, x, y, stride, vect_w = 3, vect_h = 3;
    AvsVideoSubType sub_type;
    AvsBlockType type;
    GetBitContext change_map;

    if (buf_size < 4) /* too small to hold even the block header */
        return -1;

    if (avctx->reget_buffer(avctx, p)) {
        av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
        return -1;
    }
    p->reference = 1;
    p->pict_type = FF_P_TYPE;
    p->key_frame = 0;

    out = avs->picture.data[0];
    stride = avs->picture.linesize[0];

    sub_type = buf[0];
    type = buf[1];
    buf += 4; /* skip the 4-byte block header */

    if (type == AVS_PALETTE) {
        int first, last;
        uint32_t *pal = (uint32_t *) avs->picture.data[1];

        first = AV_RL16(buf);
        last = first + AV_RL16(buf + 2);
        buf += 4;
        /* 6-bit VGA palette components, scaled up to 8 bits per channel */
        for (i=first; i<last; i++, buf+=3)
            pal[i] = (buf[0] << 18) | (buf[1] << 10) | (buf[2] << 2);

        /* the palette block is followed by another block header */
        sub_type = buf[0];
        type = buf[1];
        buf += 4;
    }

    if (type != AVS_VIDEO)
        return -1;

    switch (sub_type) {
    case AVS_I_FRAME:
        p->pict_type = FF_I_TYPE;
        p->key_frame = 1;
        /* fall through: intra frames also use 3x3 vectors */
    case AVS_P_FRAME_3X3:
        vect_w = 3;
        vect_h = 3;
        break;

    case AVS_P_FRAME_2X2:
        vect_w = 2;
        vect_h = 2;
        break;

    case AVS_P_FRAME_2X3:
        vect_w = 2;
        vect_h = 3;
        break;

    default:
        return -1;
    }

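    /*
     * The video payload is a codebook of 256 vectors of vect_w * vect_h
     * pixels each.  For P frames it is followed by a change map with one
     * bit per block (rows padded to a byte boundary) marking the blocks
     * that are updated; the remaining bytes are one codebook index per
     * coded block.
     */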
    table = buf + (256 * vect_w * vect_h);
    if (sub_type != AVS_I_FRAME) {
        int map_size = ((318 / vect_w + 7) / 8) * (198 / vect_h);
        init_get_bits(&change_map, table, map_size * 8); /* size in bits */
        table += map_size;
    }

    for (y=0; y<198; y+=vect_h) {
        for (x=0; x<318; x+=vect_w) {
            if (sub_type == AVS_I_FRAME || get_bits1(&change_map)) {
                vect = &buf[*table++ * (vect_w * vect_h)];
                for (j=0; j<vect_w; j++) {
                    out[(y + 0) * stride + x + j] = vect[(0 * vect_w) + j];
                    out[(y + 1) * stride + x + j] = vect[(1 * vect_w) + j];
                    if (vect_h == 3)
                        out[(y + 2) * stride + x + j] =
                            vect[(2 * vect_w) + j];
                }
            }
        }
        if (sub_type != AVS_I_FRAME)
            align_get_bits(&change_map); /* change-map rows are byte aligned */
    }

    *picture = avs->picture;
    *data_size = sizeof(AVPicture);

    return buf_size;
}

static av_cold int avs_decode_init(AVCodecContext * avctx)
{
    avctx->pix_fmt = PIX_FMT_PAL8;
    return 0;
}

AVCodec avs_decoder = {
    "avs",
    CODEC_TYPE_VIDEO,
    CODEC_ID_AVS,
    sizeof(AvsContext),
    avs_decode_init,
    NULL, /* encode */
    NULL, /* close */
    avs_decode_frame,
    CODEC_CAP_DR1,
    .long_name = NULL_IF_CONFIG_SMALL("AVS (Audio Video Standard) video"),
};