/*
 * Interplay MVE Video Decoder
 * Copyright (C) 2003 The FFmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Interplay MVE Video Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the Interplay MVE format, visit:
 *   http://www.pcisys.net/~melanson/codecs/interplay-mve.txt
 * This code is written in such a way that the identifiers match up
 * with the encoding descriptions in the document.
 *
 * This decoder outputs a PAL8 colorspace for 8bpp MVEs and RGB555
 * for 16bpp MVEs.
 *
 * An Interplay video frame consists of 2 parts: the decoding map and
 * the video data. A demuxer must load these 2 parts together in a single
 * buffer before sending it through the stream to this decoder.
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/intreadwrite.h"

#define BITSTREAM_READER_LE
#include "avcodec.h"
#include "bytestream.h"
#include "codec_internal.h"
#include "decode.h"
#include "get_bits.h"
#include "hpeldsp.h"
#include "internal.h"

#define PALETTE_COUNT 256

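/* Decoder context. Two reference frames are kept because several opcodes
 * copy blocks from either the previous frame or the frame before that;
 * format 0x10 additionally reconstructs into a separate pair of
 * intermediate decode frames. */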
typedef struct IpvideoContext {

    AVCodecContext *avctx;
    HpelDSPContext hdsp;
    AVFrame *second_last_frame;
    AVFrame *last_frame;

    /* For format 0x10 */
    AVFrame *cur_decode_frame;
    AVFrame *prev_decode_frame;

    const unsigned char *decoding_map;
    int decoding_map_size;
    const unsigned char *skip_map;
    int skip_map_size;

    int is_16bpp;
    GetByteContext stream_ptr, mv_ptr;
    unsigned char *pixel_ptr;
    int line_inc;
    int stride;
    int upper_motion_limit_offset;

    uint32_t pal[256];
} IpvideoContext;

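/* Copy one 8x8 block into dst at the position addressed by s->pixel_ptr,
 * taking the source pixels from src at a (delta_x, delta_y) pixel offset.
 * Horizontal offsets that run past the frame width wrap into the adjacent
 * row; offsets outside [0, upper_motion_limit_offset] are rejected. */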
static int copy_from(IpvideoContext *s, AVFrame *src, AVFrame *dst, int delta_x, int delta_y)
{
    int width = dst->width;
    int current_offset = s->pixel_ptr - dst->data[0];
    int x = (current_offset % dst->linesize[0]) / (1 + s->is_16bpp);
    int y = current_offset / dst->linesize[0];
    int dx = delta_x + x - ((delta_x + x >= width) - (delta_x + x < 0)) * width;
    int dy = delta_y + y + (delta_x + x >= width) - (delta_x + x < 0);
    int motion_offset = dy * src->linesize[0] + dx * (1 + s->is_16bpp);

    if (motion_offset < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset < 0 (%d)\n", motion_offset);
        return AVERROR_INVALIDDATA;
    } else if (motion_offset > s->upper_motion_limit_offset) {
        av_log(s->avctx, AV_LOG_ERROR, "motion offset above limit (%d >= %d)\n",
               motion_offset, s->upper_motion_limit_offset);
        return AVERROR_INVALIDDATA;
    }
    if (!src->data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid decode type, corrupted header?\n");
        return AVERROR(EINVAL);
    }
    s->hdsp.put_pixels_tab[!s->is_16bpp][0](s->pixel_ptr, src->data[0] + motion_offset,
                                            dst->linesize[0], 8);
    return 0;
}

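/* Opcodes 0x0 and 0x1: copy the block unchanged from the previous frame
 * or from the frame before that, respectively. */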
static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame)
{
    return copy_from(s, s->second_last_frame, frame, 0, 0);
}

static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy block from 2 frames ago using a motion vector; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = 8 + (B % 7);
        y = B / 7;
    } else {
        x = -14 + ((B - 56) % 29);
        y =   8 + ((B - 56) / 29);
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame)
{
    unsigned char B;
    int x, y;

    /* copy 8x8 block from current frame from an up/left block */

    /* need 1 more byte for motion */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    if (B < 56) {
        x = -(8 + (B % 7));
        y = -(B / 7);
    } else {
        x = -(-14 + ((B - 56) % 29));
        y = -(  8 + ((B - 56) / 29));
    }

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char B, BL, BH;

    /* copy a block from the previous frame; need 1 more byte */
    if (!s->is_16bpp) {
        B = bytestream2_get_byte(&s->stream_ptr);
    } else {
        B = bytestream2_get_byte(&s->mv_ptr);
    }

    BL = B & 0x0F;
    BH = (B >> 4) & 0x0F;
    x = -8 + BL;
    y = -8 + BH;

    ff_tlog(s->avctx, "motion byte = %d, (x, y) = (%d, %d)\n", B, x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the previous frame using an expanded range;
     * need 2 more bytes */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->last_frame, frame, x, y);
}

static int ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame)
{
    /* mystery opcode? skip multiple blocks? */
    av_log(s->avctx, AV_LOG_ERROR, "Help! Mystery opcode 0x6 seen\n");

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[2];
    unsigned int flags;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x7\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {

        /* need 8 more bytes from the stream */
        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->line_inc;
        }

    } else {

        /* need 2 more bytes from the stream */
        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                s->pixel_ptr[x                ] =
                s->pixel_ptr[x + 1            ] =
                s->pixel_ptr[x +     s->stride] =
                s->pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            s->pixel_ptr += s->stride * 2;
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];
    unsigned int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 12) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x8\n");
        return AVERROR_INVALIDDATA;
    }

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_byte(&s->stream_ptr);
    P[1] = bytestream2_get_byte(&s->stream_ptr);

    if (P[0] <= P[1]) {
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_byte(&s->stream_ptr);
                    P[1] = bytestream2_get_byte(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *s->pixel_ptr++ = P[flags & 1];
            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_byte(&s->stream_ptr);
        P[3] = bytestream2_get_byte(&s->stream_ptr);

        if (P[2] <= P[3]) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    s->pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *s->pixel_ptr++ = P[flags & 1];
                s->pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[4];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 8) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0x9\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color encoding */
    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    if (P[0] <= P[1]) {
        if (P[2] <= P[3]) {

            /* 1 of 4 colors for each pixel, need 16 more bytes */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *s->pixel_ptr++ = P[flags & 0x03];
                s->pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block, need 4 more bytes */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x                ] =
                    s->pixel_ptr[x + 1            ] =
                    s->pixel_ptr[x +     s->stride] =
                    s->pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block, need 8 more bytes */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (P[2] <= P[3]) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    s->pixel_ptr[x    ] =
                    s->pixel_ptr[x + 1] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    s->pixel_ptr[x            ] =
                    s->pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                s->pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char P[8];
    int flags = 0;

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 16) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xA\n");
        return AVERROR_INVALIDDATA;
    }

    bytestream2_get_buffer(&s->stream_ptr, P, 4);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (P[0] <= P[1]) {

        /* 4-color encoding for each quadrant; need 32 bytes */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) bytestream2_get_buffer(&s->stream_ptr, P, 4);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            s->pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        bytestream2_get_buffer(&s->stream_ptr, P + 4, 4);
        vert = P[4] <= P[5];

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *s->pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                s->pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) s->pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) s->pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 4);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame)
{
    int y;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame)
{
    int x, y;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            s->pixel_ptr[x                ] =
            s->pixel_ptr[x + 1            ] =
            s->pixel_ptr[x +     s->stride] =
            s->pixel_ptr[x + 1 + s->stride] = bytestream2_get_byte(&s->stream_ptr);
        }
        s->pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char P[2];

    if (bytestream2_get_bytes_left(&s->stream_ptr) < 4) {
        av_log(s->avctx, AV_LOG_ERROR, "too little data for opcode 0xD\n");
        return AVERROR_INVALIDDATA;
    }

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_byte(&s->stream_ptr);
            P[1] = bytestream2_get_byte(&s->stream_ptr);
        }
        memset(s->pixel_ptr,     P[0], 4);
        memset(s->pixel_ptr + 4, P[1], 4);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame)
{
    int y;
    unsigned char pix;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        memset(s->pixel_ptr, pix, 8);
        s->pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char sample[2];

    /* dithered encoding */
    sample[0] = bytestream2_get_byte(&s->stream_ptr);
    sample[1] = bytestream2_get_byte(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x += 2) {
            *s->pixel_ptr++ = sample[  y & 1 ];
            *s->pixel_ptr++ = sample[!(y & 1)];
        }
        s->pixel_ptr += s->line_inc;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame)
{
    signed char x, y;

    /* copy a block from the second last frame using an expanded range */
    x = bytestream2_get_byte(&s->stream_ptr);
    y = bytestream2_get_byte(&s->stream_ptr);

    ff_tlog(s->avctx, "motion bytes = %d, %d\n", x, y);
    return copy_from(s, s->second_last_frame, frame, x, y);
}

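/* 16bpp variants of the pattern opcodes. RGB555 pixels leave the top bit of
 * each 16-bit color unused, so the P[0] & 0x8000 tests below play the same
 * role as the P[0] <= P[1] comparisons in the 8bpp decoders: they select
 * which encoding variant follows. */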
static int ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    unsigned int flags;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 8; y++) {
            flags = bytestream2_get_byte(&s->stream_ptr) | 0x100;
            for (; flags != 1; flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->line_inc;
        }

    } else {

        flags = bytestream2_get_le16(&s->stream_ptr);
        for (y = 0; y < 8; y += 2) {
            for (x = 0; x < 8; x += 2, flags >>= 1) {
                pixel_ptr[x                ] =
                pixel_ptr[x + 1            ] =
                pixel_ptr[x +     s->stride] =
                pixel_ptr[x + 1 + s->stride] = P[flags & 1];
            }
            pixel_ptr += s->stride * 2;
        }
    }

    return 0;
}

static int ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    unsigned int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 2-color encoding for each 4x4 quadrant, or 2-color encoding on
     * either top and bottom or left and right halves */
    P[0] = bytestream2_get_le16(&s->stream_ptr);
    P[1] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {

        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y) {
                    P[0] = bytestream2_get_le16(&s->stream_ptr);
                    P[1] = bytestream2_get_le16(&s->stream_ptr);
                }
                flags = bytestream2_get_le16(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 1)
                *pixel_ptr++ = P[flags & 1];
            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {

        flags = bytestream2_get_le32(&s->stream_ptr);
        P[2] = bytestream2_get_le16(&s->stream_ptr);
        P[3] = bytestream2_get_le16(&s->stream_ptr);

        if (!(P[2] & 0x8000)) {

            /* vertical split; left & right halves are 2-color encoded */

            for (y = 0; y < 16; y++) {
                for (x = 0; x < 4; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) {
                    pixel_ptr -= 8 * s->stride - 4;
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }
            }

        } else {

            /* horizontal split; top & bottom halves are 2-color encoded */

            for (y = 0; y < 8; y++) {
                if (y == 4) {
                    P[0]  = P[2];
                    P[1]  = P[3];
                    flags = bytestream2_get_le32(&s->stream_ptr);
                }

                for (x = 0; x < 8; x++, flags >>= 1)
                    *pixel_ptr++ = P[flags & 1];
                pixel_ptr += s->line_inc;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[4];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color encoding */
    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    if (!(P[0] & 0x8000)) {
        if (!(P[2] & 0x8000)) {

            /* 1 of 4 colors for each pixel */
            for (y = 0; y < 8; y++) {
                /* get the next set of 8 2-bit flags */
                int flags = bytestream2_get_le16(&s->stream_ptr);
                for (x = 0; x < 8; x++, flags >>= 2)
                    *pixel_ptr++ = P[flags & 0x03];
                pixel_ptr += s->line_inc;
            }

        } else {
            uint32_t flags;

            /* 1 of 4 colors for each 2x2 block */
            flags = bytestream2_get_le32(&s->stream_ptr);

            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x                ] =
                    pixel_ptr[x + 1            ] =
                    pixel_ptr[x +     s->stride] =
                    pixel_ptr[x + 1 + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }

        }
    } else {
        uint64_t flags;

        /* 1 of 4 colors for each 2x1 or 1x2 block */
        flags = bytestream2_get_le64(&s->stream_ptr);
        if (!(P[2] & 0x8000)) {
            for (y = 0; y < 8; y++) {
                for (x = 0; x < 8; x += 2, flags >>= 2) {
                    pixel_ptr[x    ] =
                    pixel_ptr[x + 1] = P[flags & 0x03];
                }
                pixel_ptr += s->stride;
            }
        } else {
            for (y = 0; y < 8; y += 2) {
                for (x = 0; x < 8; x++, flags >>= 2) {
                    pixel_ptr[x            ] =
                    pixel_ptr[x + s->stride] = P[flags & 0x03];
                }
                pixel_ptr += s->stride * 2;
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[8];
    int flags = 0;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    for (x = 0; x < 4; x++)
        P[x] = bytestream2_get_le16(&s->stream_ptr);

    /* 4-color encoding for each 4x4 quadrant, or 4-color encoding on
     * either top and bottom or left and right halves */
    if (!(P[0] & 0x8000)) {

        /* 4-color encoding for each quadrant */
        for (y = 0; y < 16; y++) {
            // new values for each 4x4 block
            if (!(y & 3)) {
                if (y)
                    for (x = 0; x < 4; x++)
                        P[x] = bytestream2_get_le16(&s->stream_ptr);
                flags = bytestream2_get_le32(&s->stream_ptr);
            }

            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            pixel_ptr += s->stride - 4;
            // switch to right half
            if (y == 7) pixel_ptr -= 8 * s->stride - 4;
        }

    } else {
        // vertical split?
        int vert;
        uint64_t flags = bytestream2_get_le64(&s->stream_ptr);

        for (x = 4; x < 8; x++)
            P[x] = bytestream2_get_le16(&s->stream_ptr);
        vert = !(P[4] & 0x8000);

        /* 4-color encoding for either left and right or top and bottom
         * halves */

        for (y = 0; y < 16; y++) {
            for (x = 0; x < 4; x++, flags >>= 2)
                *pixel_ptr++ = P[flags & 0x03];

            if (vert) {
                pixel_ptr += s->stride - 4;
                // switch to right half
                if (y == 7) pixel_ptr -= 8 * s->stride - 4;
            } else if (y & 1) pixel_ptr += s->line_inc;

            // load values for second half
            if (y == 7) {
                memcpy(P, P + 4, 8);
                flags = bytestream2_get_le64(&s->stream_ptr);
            }
        }
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 64-color encoding (each pixel in block is a different color) */
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = bytestream2_get_le16(&s->stream_ptr);
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 16-color block encoding: each 2x2 block is a different color */
    for (y = 0; y < 8; y += 2) {
        for (x = 0; x < 8; x += 2) {
            pixel_ptr[x                ] =
            pixel_ptr[x + 1            ] =
            pixel_ptr[x +     s->stride] =
            pixel_ptr[x + 1 + s->stride] = bytestream2_get_le16(&s->stream_ptr);
        }
        pixel_ptr += s->stride * 2;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t P[2];
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 4-color block encoding: each 4x4 block is a different color */
    for (y = 0; y < 8; y++) {
        if (!(y & 3)) {
            P[0] = bytestream2_get_le16(&s->stream_ptr);
            P[1] = bytestream2_get_le16(&s->stream_ptr);
        }
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = P[x >> 2];
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

static int ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    uint16_t pix;
    uint16_t *pixel_ptr = (uint16_t*)s->pixel_ptr;

    /* 1-color encoding: the whole block is 1 solid color */
    pix = bytestream2_get_le16(&s->stream_ptr);

    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            pixel_ptr[x] = pix;
        pixel_ptr += s->stride;
    }

    /* report success */
    return 0;
}

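/* Dispatch tables indexed by the 4-bit opcode read from the decoding map
 * (format 0x11): one table for PAL8 streams, one for 16bpp streams. The
 * motion-compensation opcodes 0x0-0x5 are shared; slot 0xF of the 16bpp
 * table reuses the opcode 0x1 handler. */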
static int (* const ipvideo_decode_block[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6,    ipvideo_decode_block_opcode_0x7,
    ipvideo_decode_block_opcode_0x8,    ipvideo_decode_block_opcode_0x9,
    ipvideo_decode_block_opcode_0xA,    ipvideo_decode_block_opcode_0xB,
    ipvideo_decode_block_opcode_0xC,    ipvideo_decode_block_opcode_0xD,
    ipvideo_decode_block_opcode_0xE,    ipvideo_decode_block_opcode_0xF,
};

static int (* const ipvideo_decode_block16[])(IpvideoContext *s, AVFrame *frame) = {
    ipvideo_decode_block_opcode_0x0,    ipvideo_decode_block_opcode_0x1,
    ipvideo_decode_block_opcode_0x2,    ipvideo_decode_block_opcode_0x3,
    ipvideo_decode_block_opcode_0x4,    ipvideo_decode_block_opcode_0x5,
    ipvideo_decode_block_opcode_0x6_16, ipvideo_decode_block_opcode_0x7_16,
    ipvideo_decode_block_opcode_0x8_16, ipvideo_decode_block_opcode_0x9_16,
    ipvideo_decode_block_opcode_0xA_16, ipvideo_decode_block_opcode_0xB_16,
    ipvideo_decode_block_opcode_0xC_16, ipvideo_decode_block_opcode_0xD_16,
    ipvideo_decode_block_opcode_0xE_16, ipvideo_decode_block_opcode_0x1,
};

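/* Format 0x06 helpers: the frame is decoded in two passes over a decoding
 * map of one little-endian 16-bit opcode per 8x8 block. The first pass
 * either reads a raw 8x8 block from the stream (opcode 0) or copies the
 * block from two frames back; the second pass treats nonzero opcodes as
 * block offsets (biased by 0x4000 or 0xC000) and copies from the previous
 * frame or the current frame accordingly. */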
static void ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int line;

    if (!opcode) {
        for (line = 0; line < 8; ++line) {
            bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
            s->pixel_ptr += s->stride;
        }
    } else {
        /* Don't try to copy second_last_frame data on the first frames */
        if (s->avctx->frame_number > 2)
            copy_from(s, s->second_last_frame, frame, 0, 0);
    }
}

static void ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int off_x, off_y;

    if (opcode < 0) {
        off_x = ((uint16_t)opcode - 0xC000) % frame->width;
        off_y = ((uint16_t)opcode - 0xC000) / frame->width;
        copy_from(s, s->last_frame, frame, off_x, off_y);
    } else if (opcode > 0) {
        off_x = ((uint16_t)opcode - 0x4000) % frame->width;
        off_y = ((uint16_t)opcode - 0x4000) / frame->width;
        copy_from(s, frame, frame, off_x, off_y);
    }
}

static void (* const ipvideo_format_06_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
    ipvideo_format_06_firstpass, ipvideo_format_06_secondpass,
};

static void ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y;
    int16_t opcode;
    GetByteContext decoding_map_ptr;

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);

    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                opcode = bytestream2_get_le16(&decoding_map_ptr);

                ff_tlog(s->avctx,
                        "  block @ (%3d, %3d): opcode 0x%X, data ptr offset %d\n",
                        x, y, opcode, bytestream2_tell(&s->stream_ptr));

                s->pixel_ptr = frame->data[0] + x + y * frame->linesize[0];
                ipvideo_format_06_passes[pass](s, frame, opcode);
            }
        }
    }

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

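/* Format 0x10 helpers: analogous to the format 0x06 passes, but operating on
 * the intermediate cur/prev decode frames. The first pass only fills raw 8x8
 * blocks (opcode 0); the second pass applies the 0x4000/0xC000-biased offsets
 * between the intermediate frames. */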
static void ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int line;

    if (!opcode) {
        for (line = 0; line < 8; ++line) {
            bytestream2_get_buffer(&s->stream_ptr, s->pixel_ptr, 8);
            s->pixel_ptr += s->stride;
        }
    }
}

static void ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode)
{
    int off_x, off_y;

    if (opcode < 0) {
        off_x = ((uint16_t)opcode - 0xC000) % s->cur_decode_frame->width;
        off_y = ((uint16_t)opcode - 0xC000) / s->cur_decode_frame->width;
        copy_from(s, s->prev_decode_frame, s->cur_decode_frame, off_x, off_y);
    } else if (opcode > 0) {
        off_x = ((uint16_t)opcode - 0x4000) % s->cur_decode_frame->width;
        off_y = ((uint16_t)opcode - 0x4000) / s->cur_decode_frame->width;
        copy_from(s, s->cur_decode_frame, s->cur_decode_frame, off_x, off_y);
    }
}

static void (* const ipvideo_format_10_passes[])(IpvideoContext *s, AVFrame *frame, int16_t op) = {
    ipvideo_format_10_firstpass, ipvideo_format_10_secondpass,
};

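/* Format 0x10: a skip map selects which 8x8 blocks carry data. The map is
 * consumed as a sign-bit-first bitstream of little-endian 16-bit words
 * (skip *= 2 advances one bit; values 0 and -0x8000 trigger a reload),
 * with a set bit marking a changed block. Changed blocks are decoded into
 * cur_decode_frame over two passes and then composited into the output
 * frame; unchanged blocks are copied from the previous output frame. */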
static void ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int pass, x, y, changed_block;
    int16_t opcode, skip;
    GetByteContext decoding_map_ptr;
    GetByteContext skip_map_ptr;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */

    /* this is PAL8, so make the palette available */
    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
    s->stride = frame->linesize[0];

    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);

    bytestream2_init(&decoding_map_ptr, s->decoding_map, s->decoding_map_size);
    bytestream2_init(&skip_map_ptr, s->skip_map, s->skip_map_size);

    for (pass = 0; pass < 2; ++pass) {
        bytestream2_seek(&decoding_map_ptr, 0, SEEK_SET);
        bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
        skip = bytestream2_get_le16(&skip_map_ptr);

        for (y = 0; y < s->avctx->height; y += 8) {
            for (x = 0; x < s->avctx->width; x += 8) {
                s->pixel_ptr = s->cur_decode_frame->data[0] + x + y * s->cur_decode_frame->linesize[0];

                while (skip <= 0) {
                    if (skip != -0x8000 && skip) {
                        opcode = bytestream2_get_le16(&decoding_map_ptr);
                        ipvideo_format_10_passes[pass](s, frame, opcode);
                        break;
                    }
                    if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                        return;
                    skip = bytestream2_get_le16(&skip_map_ptr);
                }
                skip *= 2;
            }
        }
    }

    bytestream2_seek(&skip_map_ptr, 0, SEEK_SET);
    skip = bytestream2_get_le16(&skip_map_ptr);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            changed_block = 0;
            s->pixel_ptr = frame->data[0] + x + y*frame->linesize[0];

            while (skip <= 0) {
                if (skip != -0x8000 && skip) {
                    changed_block = 1;
                    break;
                }
                if (bytestream2_get_bytes_left(&skip_map_ptr) < 2)
                    return;
                skip = bytestream2_get_le16(&skip_map_ptr);
            }

            if (changed_block) {
                copy_from(s, s->cur_decode_frame, frame, 0, 0);
            } else {
                /* Don't try to copy last_frame data on the first frame */
                if (s->avctx->frame_number)
                    copy_from(s, s->last_frame, frame, 0, 0);
            }
            skip *= 2;
        }
    }

    FFSWAP(AVFrame*, s->prev_decode_frame, s->cur_decode_frame);

    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

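/* Format 0x11: the classic MVE frame format. The decoding map holds one
 * 4-bit opcode per 8x8 block, dispatched through the tables above; 16bpp
 * streams keep their motion bytes in a separate area addressed by mv_ptr,
 * whose offset is read from the start of the pixel data. */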
static void ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame)
{
    int x, y;
    unsigned char opcode;
    int ret;
    GetBitContext gb;

    bytestream2_skip(&s->stream_ptr, 14); /* data starts 14 bytes in */
    if (!s->is_16bpp) {
        /* this is PAL8, so make the palette available */
        memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);

        s->stride = frame->linesize[0];
    } else {
        s->stride = frame->linesize[0] >> 1;
        s->mv_ptr = s->stream_ptr;
        bytestream2_skip(&s->mv_ptr, bytestream2_get_le16(&s->stream_ptr));
    }
    s->line_inc = s->stride - 8;
    s->upper_motion_limit_offset = (s->avctx->height - 8) * frame->linesize[0]
                                   + (s->avctx->width - 8) * (1 + s->is_16bpp);

    init_get_bits(&gb, s->decoding_map, s->decoding_map_size * 8);
    for (y = 0; y < s->avctx->height; y += 8) {
        for (x = 0; x < s->avctx->width; x += 8) {
            if (get_bits_left(&gb) < 4)
                return;
            opcode = get_bits(&gb, 4);

            ff_tlog(s->avctx,
                    "  block @ (%3d, %3d): encoding 0x%X, data ptr offset %d\n",
                    x, y, opcode, bytestream2_tell(&s->stream_ptr));

            if (!s->is_16bpp) {
                s->pixel_ptr = frame->data[0] + x
                               + y*frame->linesize[0];
                ret = ipvideo_decode_block[opcode](s, frame);
            } else {
                s->pixel_ptr = frame->data[0] + x*2
                               + y*frame->linesize[0];
                ret = ipvideo_decode_block16[opcode](s, frame);
            }
            if (ret != 0) {
                av_log(s->avctx, AV_LOG_ERROR, "decode problem on frame %d, @ block (%d, %d)\n",
                       s->avctx->frame_number, x, y);
                return;
            }
        }
    }
    if (bytestream2_get_bytes_left(&s->stream_ptr) > 1) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "decode finished with %d bytes left over\n",
               bytestream2_get_bytes_left(&s->stream_ptr));
    }
}

static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    s->avctx = avctx;

    s->is_16bpp = avctx->bits_per_coded_sample == 16;
    avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;

    ff_hpeldsp_init(&s->hdsp, avctx->flags);

    s->last_frame        = av_frame_alloc();
    s->second_last_frame = av_frame_alloc();
    s->cur_decode_frame  = av_frame_alloc();
    s->prev_decode_frame = av_frame_alloc();
    if (!s->last_frame || !s->second_last_frame ||
        !s->cur_decode_frame || !s->prev_decode_frame) {
        return AVERROR(ENOMEM);
    }

    s->cur_decode_frame->width   = avctx->width;
    s->prev_decode_frame->width  = avctx->width;
    s->cur_decode_frame->height  = avctx->height;
    s->prev_decode_frame->height = avctx->height;
    s->cur_decode_frame->format  = avctx->pix_fmt;
    s->prev_decode_frame->format = avctx->pix_fmt;

    return 0;
}

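/* Each packet starts with an 8-byte header, parsed below:
 *   byte  0     frame format (0x06, 0x10 or 0x11)
 *   byte  1     send-buffer flag (copied to got_frame)
 *   bytes 2-3   video data size   (little-endian)
 *   bytes 4-5   decoding map size (little-endian)
 *   bytes 6-7   skip map size     (little-endian)
 * The layout of the remaining data depends on the frame format. */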
static int ipvideo_decode_frame(AVCodecContext *avctx, AVFrame *frame,
                                int *got_frame, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    IpvideoContext *s = avctx->priv_data;
    int ret;
    int send_buffer;
    int frame_format;
    int video_data_size;

    if (av_packet_get_side_data(avpkt, AV_PKT_DATA_PARAM_CHANGE, NULL)) {
        av_frame_unref(s->last_frame);
        av_frame_unref(s->second_last_frame);
        av_frame_unref(s->cur_decode_frame);
        av_frame_unref(s->prev_decode_frame);
    }

    if (!s->cur_decode_frame->data[0]) {
        ret = ff_get_buffer(avctx, s->cur_decode_frame, 0);
        if (ret < 0)
            return ret;

        ret = ff_get_buffer(avctx, s->prev_decode_frame, 0);
        if (ret < 0) {
            av_frame_unref(s->cur_decode_frame);
            return ret;
        }
    }

    if (buf_size < 8)
        return AVERROR_INVALIDDATA;

    frame_format         = AV_RL8(buf);
    send_buffer          = AV_RL8(buf + 1);
    video_data_size      = AV_RL16(buf + 2);
    s->decoding_map_size = AV_RL16(buf + 4);
    s->skip_map_size     = AV_RL16(buf + 6);

    switch (frame_format) {
    case 0x06:
        if (s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Decoding map for format 0x06\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x06\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->is_16bpp) {
            av_log(avctx, AV_LOG_ERROR, "Video format 0x06 does not support 16bpp movies\n");
            return AVERROR_INVALIDDATA;
        }

        /* Decoding map for 0x06 frame format is at the top of pixeldata */
        s->decoding_map_size = ((s->avctx->width / 8) * (s->avctx->height / 8)) * 2;
        s->decoding_map = buf + 8 + 14; /* 14 bytes of op data */
        video_data_size -= s->decoding_map_size + 14;
        if (video_data_size <= 0 || s->decoding_map_size == 0)
            return AVERROR_INVALIDDATA;

        if (buf_size < 8 + s->decoding_map_size + 14 + video_data_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8 + s->decoding_map_size + 14, video_data_size);

        break;

    case 0x10:
        if (! s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x10\n");
            return AVERROR_INVALIDDATA;
        }

        if (! s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty skip map for format 0x10\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->is_16bpp) {
            av_log(avctx, AV_LOG_ERROR, "Video format 0x10 does not support 16bpp movies\n");
            return AVERROR_INVALIDDATA;
        }

        if (buf_size < 8 + video_data_size + s->decoding_map_size + s->skip_map_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
        s->decoding_map = buf + 8 + video_data_size;
        s->skip_map = buf + 8 + video_data_size + s->decoding_map_size;

        break;

    case 0x11:
        if (! s->decoding_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Empty decoding map for format 0x11\n");
            return AVERROR_INVALIDDATA;
        }

        if (s->skip_map_size) {
            av_log(avctx, AV_LOG_ERROR, "Skip map for format 0x11\n");
            return AVERROR_INVALIDDATA;
        }

        if (buf_size < 8 + video_data_size + s->decoding_map_size)
            return AVERROR_INVALIDDATA;

        bytestream2_init(&s->stream_ptr, buf + 8, video_data_size);
        s->decoding_map = buf + 8 + video_data_size;

        break;

    default:
        av_log(avctx, AV_LOG_ERROR, "Frame type 0x%02X unsupported\n", frame_format);
    }

    /* ensure we can't overread the packet */
    if (buf_size < 8 + s->decoding_map_size + video_data_size + s->skip_map_size) {
        av_log(avctx, AV_LOG_ERROR, "Invalid IP packet size\n");
        return AVERROR_INVALIDDATA;
    }

    if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
        return ret;

    if (!s->is_16bpp) {
        frame->palette_has_changed = ff_copy_palette(s->pal, avpkt, avctx);
    }

    switch (frame_format) {
    case 0x06:
        ipvideo_decode_format_06_opcodes(s, frame);
        break;
    case 0x10:
        ipvideo_decode_format_10_opcodes(s, frame);
        break;
    case 0x11:
        ipvideo_decode_format_11_opcodes(s, frame);
        break;
    }

    *got_frame = send_buffer;

    /* shuffle frames */
    av_frame_unref(s->second_last_frame);
    FFSWAP(AVFrame*, s->second_last_frame, s->last_frame);
    if ((ret = av_frame_ref(s->last_frame, frame)) < 0)
        return ret;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int ipvideo_decode_end(AVCodecContext *avctx)
{
    IpvideoContext *s = avctx->priv_data;

    av_frame_free(&s->last_frame);
    av_frame_free(&s->second_last_frame);
    av_frame_free(&s->cur_decode_frame);
    av_frame_free(&s->prev_decode_frame);

    return 0;
}

const FFCodec ff_interplay_video_decoder = {
    .p.name         = "interplayvideo",
    .p.long_name    = NULL_IF_CONFIG_SMALL("Interplay MVE video"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_INTERPLAY_VIDEO,
    .priv_data_size = sizeof(IpvideoContext),
    .init           = ipvideo_decode_init,
    .close          = ipvideo_decode_end,
    FF_CODEC_DECODE_CB(ipvideo_decode_frame),
    .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_PARAM_CHANGE,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};