⚠️ VeridianOS Kernel Documentation - This is low-level kernel code. All functions are unsafe unless explicitly marked otherwise. no_std

veridian_kernel/media/
video_processing.rs

1//! Video processing module for VeridianOS
2//!
3//! Provides four major subsystems:
4//! 1. **AVI container parser** -- RIFF/AVI header parsing, stream demuxing,
5//!    index (idx1) parsing, and frame extraction.
6//! 2. **Frame rate conversion** -- Frame duplication, frame dropping, 3:2
7//!    pulldown (telecine), timestamp-based selection, and motion-compensated
8//!    linear blend interpolation. All math is integer-only.
9//! 3. **Subtitle overlay** -- SRT parser, timestamp matching, 8x16 bitmap font
10//!    text rendering with semi-transparent background, multi-line word
11//!    wrapping, and configurable margins.
12//! 4. **Real-time audio scheduling** -- Deadline scheduler integration for
13//!    audio threads with period-based wake scheduling, latency/jitter tracking,
14//!    underrun/overrun statistics, and CPU reservation.
15//!
16//! All arithmetic is integer or fixed-point. No floating-point is used
17//! anywhere.
18
19#![allow(dead_code)]
20
21#[cfg(feature = "alloc")]
22extern crate alloc;
23#[cfg(feature = "alloc")]
24use alloc::{string::String, vec::Vec};
25use core::sync::atomic::{AtomicU64, Ordering};
26
27// ============================================================================
28// AVI Container Parser
29// ============================================================================
30
/// Flag word from the AVI main header (avih chunk).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct AviFlags(pub u32);

impl AviFlags {
    /// File carries an idx1 index chunk.
    pub const AVIF_HASINDEX: u32 = 0x0000_0010;
    /// The index must be used to determine the presentation order of the
    /// data (do not rely on the physical chunk order).
    pub const AVIF_MUSTUSEINDEX: u32 = 0x0000_0020;
    /// Audio and video data are interleaved.
    pub const AVIF_ISINTERLEAVED: u32 = 0x0000_0100;
    /// File contains copyrighted material.
    pub const AVIF_COPYRIGHTED: u32 = 0x0002_0000;

    /// True when the given flag bit(s) intersect the stored flag word.
    pub(crate) fn has_flag(&self, flag: u32) -> bool {
        (self.0 & flag) != 0
    }
}
50
/// Four-character code (FourCC) identifying RIFF/AVI chunks and lists.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct FourCC(pub [u8; 4]);

impl FourCC {
    pub const RIFF: Self = Self(*b"RIFF");
    pub const AVI: Self = Self(*b"AVI ");
    pub const LIST: Self = Self(*b"LIST");
    pub const AVIH: Self = Self(*b"avih");
    pub const STRH: Self = Self(*b"strh");
    pub const STRF: Self = Self(*b"strf");
    pub const IDX1: Self = Self(*b"idx1");
    pub const MOVI: Self = Self(*b"movi");
    pub const HDRL: Self = Self(*b"hdrl");
    pub const STRL: Self = Self(*b"strl");
    pub const VIDS: Self = Self(*b"vids");
    pub const AUDS: Self = Self(*b"auds");

    /// Build a FourCC from the first four bytes of `data`; None when the
    /// slice holds fewer than four bytes.
    pub(crate) fn from_bytes(data: &[u8]) -> Option<Self> {
        match data {
            [a, b, c, d, ..] => Some(Self([*a, *b, *c, *d])),
            _ => None,
        }
    }
}

impl core::fmt::Debug for FourCC {
    /// Renders as `FourCC('xxxx')`, mapping each raw byte to a char
    /// (bytes >= 0x80 come out as the corresponding Latin-1 char).
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let [a, b, c, d] = self.0;
        write!(
            f,
            "FourCC('{}{}{}{}')",
            a as char, b as char, c as char, d as char
        )
    }
}
88
89/// AVI main header (avih chunk) -- 56 bytes.
90#[derive(Debug, Clone, Copy, Default)]
91pub struct AviMainHeader {
92    /// Microseconds per frame (frame period).
93    pub microseconds_per_frame: u32,
94    /// Maximum bytes per second (approximate data rate).
95    pub max_bytes_per_sec: u32,
96    /// Padding granularity in bytes.
97    pub padding_granularity: u32,
98    /// AVI flags (see [`AviFlags`]).
99    pub flags: AviFlags,
100    /// Total number of frames in the video stream.
101    pub total_frames: u32,
102    /// Number of streams that require initial frames before playback.
103    pub initial_frames: u32,
104    /// Number of streams in the file.
105    pub streams: u32,
106    /// Suggested buffer size for reading the file.
107    pub suggested_buffer_size: u32,
108    /// Video width in pixels.
109    pub width: u32,
110    /// Video height in pixels.
111    pub height: u32,
112}
113
114impl AviMainHeader {
115    /// Parse from a byte buffer (little-endian, expects >= 40 bytes).
116    pub(crate) fn parse(data: &[u8]) -> Option<Self> {
117        if data.len() < 40 {
118            return None;
119        }
120        Some(Self {
121            microseconds_per_frame: read_u32_le(data, 0),
122            max_bytes_per_sec: read_u32_le(data, 4),
123            padding_granularity: read_u32_le(data, 8),
124            flags: AviFlags(read_u32_le(data, 12)),
125            total_frames: read_u32_le(data, 16),
126            initial_frames: read_u32_le(data, 20),
127            streams: read_u32_le(data, 24),
128            suggested_buffer_size: read_u32_le(data, 28),
129            width: read_u32_le(data, 32),
130            height: read_u32_le(data, 36),
131        })
132    }
133
134    /// Compute frame rate as a rational number (numerator, denominator).
135    /// Returns (fps_num, fps_den) such that fps = fps_num / fps_den.
136    pub(crate) fn frame_rate(&self) -> (u32, u32) {
137        if self.microseconds_per_frame == 0 {
138            return (0, 1);
139        }
140        // fps = 1_000_000 / microseconds_per_frame
141        (1_000_000, self.microseconds_per_frame)
142    }
143}
144
/// Stream type tag, derived from the fccType field of a strh chunk.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StreamType {
    /// Video stream (strh fccType == "vids").
    Video,
    /// Audio stream (strh fccType == "auds").
    Audio,
    /// Any other fccType value -- unknown / unsupported stream type.
    Unknown,
}
155
156/// AVI stream header (strh chunk) -- 56 bytes.
157#[derive(Debug, Clone, Copy, Default)]
158pub struct AviStreamHeader {
159    /// Stream type FourCC (vids, auds, ...).
160    pub stream_type: [u8; 4],
161    /// Codec handler FourCC (e.g. DIB for uncompressed, MJPG, etc.).
162    pub handler: [u8; 4],
163    /// Stream flags.
164    pub flags: u32,
165    /// Priority (used for language selection, etc.).
166    pub priority: u16,
167    /// Language tag.
168    pub language: u16,
169    /// Initial frames (delay before interleave).
170    pub initial_frames: u32,
171    /// Time scale (denominator of sample rate).
172    pub scale: u32,
173    /// Rate (numerator of sample rate). sample_rate = rate / scale.
174    pub rate: u32,
175    /// Start time of the stream.
176    pub start: u32,
177    /// Length of the stream (in `scale` units).
178    pub length: u32,
179    /// Suggested buffer size.
180    pub suggested_buffer_size: u32,
181    /// Quality indicator (-1 = default).
182    pub quality: u32,
183    /// Sample size (0 for variable-size, else fixed).
184    pub sample_size: u32,
185    /// Frame rectangle: left.
186    pub frame_left: u16,
187    /// Frame rectangle: top.
188    pub frame_top: u16,
189    /// Frame rectangle: right.
190    pub frame_right: u16,
191    /// Frame rectangle: bottom.
192    pub frame_bottom: u16,
193}
194
195impl AviStreamHeader {
196    /// Parse from a byte buffer (little-endian, expects >= 56 bytes).
197    pub(crate) fn parse(data: &[u8]) -> Option<Self> {
198        if data.len() < 56 {
199            return None;
200        }
201        let mut stream_type = [0u8; 4];
202        stream_type.copy_from_slice(&data[0..4]);
203        let mut handler = [0u8; 4];
204        handler.copy_from_slice(&data[4..8]);
205        Some(Self {
206            stream_type,
207            handler,
208            flags: read_u32_le(data, 8),
209            priority: read_u16_le(data, 12),
210            language: read_u16_le(data, 14),
211            initial_frames: read_u32_le(data, 16),
212            scale: read_u32_le(data, 20),
213            rate: read_u32_le(data, 24),
214            start: read_u32_le(data, 28),
215            length: read_u32_le(data, 32),
216            suggested_buffer_size: read_u32_le(data, 36),
217            quality: read_u32_le(data, 40),
218            sample_size: read_u32_le(data, 44),
219            frame_left: read_u16_le(data, 48),
220            frame_top: read_u16_le(data, 50),
221            frame_right: read_u16_le(data, 52),
222            frame_bottom: read_u16_le(data, 54),
223        })
224    }
225
226    /// Determine the stream type from the FourCC tag.
227    pub(crate) fn get_stream_type(&self) -> StreamType {
228        if self.stream_type == *b"vids" {
229            StreamType::Video
230        } else if self.stream_type == *b"auds" {
231            StreamType::Audio
232        } else {
233            StreamType::Unknown
234        }
235    }
236
237    /// Compute sample rate as a rational (rate / scale).
238    pub(crate) fn sample_rate(&self) -> (u32, u32) {
239        if self.scale == 0 {
240            return (0, 1);
241        }
242        (self.rate, self.scale)
243    }
244}
245
246/// BitmapInfoHeader (BITMAPINFOHEADER) -- 40 bytes.
247/// Used in video strf chunks to describe the video format.
248#[derive(Debug, Clone, Copy, Default)]
249pub struct BitmapInfoHeader {
250    /// Size of this structure (should be >= 40).
251    pub size: u32,
252    /// Image width in pixels.
253    pub width: i32,
254    /// Image height in pixels (positive = bottom-up, negative = top-down).
255    pub height: i32,
256    /// Number of color planes (must be 1).
257    pub planes: u16,
258    /// Bits per pixel (1, 4, 8, 16, 24, 32).
259    pub bit_count: u16,
260    /// Compression FourCC (0 = BI_RGB = uncompressed).
261    pub compression: u32,
262    /// Size of the image data (may be 0 for BI_RGB).
263    pub image_size: u32,
264    /// Horizontal resolution (pixels per meter).
265    pub x_pels_per_meter: i32,
266    /// Vertical resolution (pixels per meter).
267    pub y_pels_per_meter: i32,
268    /// Number of colors used (0 = all).
269    pub colors_used: u32,
270    /// Number of important colors (0 = all).
271    pub colors_important: u32,
272}
273
274impl BitmapInfoHeader {
275    /// Parse from a byte buffer (little-endian, expects >= 40 bytes).
276    pub(crate) fn parse(data: &[u8]) -> Option<Self> {
277        if data.len() < 40 {
278            return None;
279        }
280        Some(Self {
281            size: read_u32_le(data, 0),
282            width: read_i32_le(data, 4),
283            height: read_i32_le(data, 8),
284            planes: read_u16_le(data, 12),
285            bit_count: read_u16_le(data, 14),
286            compression: read_u32_le(data, 16),
287            image_size: read_u32_le(data, 20),
288            x_pels_per_meter: read_i32_le(data, 24),
289            y_pels_per_meter: read_i32_le(data, 28),
290            colors_used: read_u32_le(data, 32),
291            colors_important: read_u32_le(data, 36),
292        })
293    }
294
295    /// Whether the image is stored bottom-up (positive height).
296    pub(crate) fn is_bottom_up(&self) -> bool {
297        self.height > 0
298    }
299
300    /// Absolute height (always positive).
301    pub(crate) fn abs_height(&self) -> u32 {
302        if self.height < 0 {
303            (-(self.height as i64)) as u32
304        } else {
305            self.height as u32
306        }
307    }
308}
309
310/// WaveFormatEx (WAVEFORMATEX) -- 18 bytes minimum.
311/// Used in audio strf chunks to describe the audio format.
312#[derive(Debug, Clone, Copy, Default)]
313pub struct WaveFormatEx {
314    /// Format tag (1 = PCM, 3 = IEEE Float, etc.).
315    pub format_tag: u16,
316    /// Number of channels (1 = mono, 2 = stereo).
317    pub channels: u16,
318    /// Samples per second (Hz).
319    pub samples_per_sec: u32,
320    /// Average bytes per second.
321    pub avg_bytes_per_sec: u32,
322    /// Block alignment (channels * bits_per_sample / 8).
323    pub block_align: u16,
324    /// Bits per sample (8, 16, 24, 32).
325    pub bits_per_sample: u16,
326    /// Size of extra format data following this structure.
327    pub cb_size: u16,
328}
329
330impl WaveFormatEx {
331    /// PCM format tag.
332    pub const WAVE_FORMAT_PCM: u16 = 1;
333
334    /// Parse from a byte buffer (little-endian, expects >= 16 bytes).
335    /// The cb_size field is optional (only present if data >= 18 bytes).
336    pub(crate) fn parse(data: &[u8]) -> Option<Self> {
337        if data.len() < 16 {
338            return None;
339        }
340        let cb_size = if data.len() >= 18 {
341            read_u16_le(data, 16)
342        } else {
343            0
344        };
345        Some(Self {
346            format_tag: read_u16_le(data, 0),
347            channels: read_u16_le(data, 2),
348            samples_per_sec: read_u32_le(data, 4),
349            avg_bytes_per_sec: read_u32_le(data, 8),
350            block_align: read_u16_le(data, 12),
351            bits_per_sample: read_u16_le(data, 14),
352            cb_size,
353        })
354    }
355
356    /// Whether this is PCM (uncompressed) audio.
357    pub(crate) fn is_pcm(&self) -> bool {
358        self.format_tag == Self::WAVE_FORMAT_PCM
359    }
360}
361
362/// An entry in the AVI index (idx1 chunk).
363#[derive(Debug, Clone, Copy, PartialEq, Eq)]
364pub struct AviIndexEntry {
365    /// Stream chunk identifier (e.g., "00dc" for video, "01wb" for audio).
366    pub chunk_id: [u8; 4],
367    /// Flags -- bit 4 (0x10) = AVIIF_KEYFRAME.
368    pub flags: u32,
369    /// Byte offset of the chunk (from start of movi list or file).
370    pub offset: u32,
371    /// Size of the chunk data in bytes.
372    pub size: u32,
373}
374
375impl AviIndexEntry {
376    /// AVIIF_KEYFRAME flag.
377    pub const AVIIF_KEYFRAME: u32 = 0x0000_0010;
378
379    /// Parse a single index entry (16 bytes).
380    pub(crate) fn parse(data: &[u8]) -> Option<Self> {
381        if data.len() < 16 {
382            return None;
383        }
384        let mut chunk_id = [0u8; 4];
385        chunk_id.copy_from_slice(&data[0..4]);
386        Some(Self {
387            chunk_id,
388            flags: read_u32_le(data, 4),
389            offset: read_u32_le(data, 8),
390            size: read_u32_le(data, 12),
391        })
392    }
393
394    /// Whether this entry is a keyframe.
395    pub(crate) fn is_keyframe(&self) -> bool {
396        self.flags & Self::AVIIF_KEYFRAME != 0
397    }
398
399    /// Get the stream number from the chunk_id (first two ASCII digits).
400    /// E.g., "00dc" -> 0, "01wb" -> 1.
401    pub(crate) fn stream_number(&self) -> u8 {
402        let d0 = self.chunk_id[0].wrapping_sub(b'0');
403        let d1 = self.chunk_id[1].wrapping_sub(b'0');
404        if d0 <= 9 && d1 <= 9 {
405            d0 * 10 + d1
406        } else {
407            0
408        }
409    }
410
411    /// Whether this is a video chunk (ends with "dc" or "db").
412    pub(crate) fn is_video(&self) -> bool {
413        self.chunk_id[2] == b'd' && (self.chunk_id[3] == b'c' || self.chunk_id[3] == b'b')
414    }
415
416    /// Whether this is an audio chunk (ends with "wb").
417    pub(crate) fn is_audio(&self) -> bool {
418        self.chunk_id[2] == b'w' && self.chunk_id[3] == b'b'
419    }
420}
421
/// Information about one parsed AVI stream (one strl list).
#[cfg(feature = "alloc")]
#[derive(Debug, Clone)]
pub struct AviStreamInfo {
    /// Zero-based stream index (order of successfully parsed strl lists).
    pub index: u32,
    /// Stream type, derived from the strh fccType.
    pub stream_type: StreamType,
    /// Stream header (strh).
    pub header: AviStreamHeader,
    /// Video format from strf (Some only when stream_type == Video and
    /// the strf payload parsed successfully).
    pub video_format: Option<BitmapInfoHeader>,
    /// Audio format from strf (Some only when stream_type == Audio and
    /// the strf payload parsed successfully).
    pub audio_format: Option<WaveFormatEx>,
}
437
/// Parsed AVI container.
///
/// Holds headers and the idx1 index only; frame payloads stay in the
/// caller's buffer and are resolved on demand (see `extract_frame`).
#[cfg(feature = "alloc")]
#[derive(Debug, Clone)]
pub struct AviContainer {
    /// Main AVI header.
    pub main_header: AviMainHeader,
    /// Stream information.
    pub streams: Vec<AviStreamInfo>,
    /// Index entries from idx1 chunk.
    pub index: Vec<AviIndexEntry>,
    /// Byte offset of the movi list data start within the file -- the
    /// position just past the "movi" type tag (0 if no movi list found).
    pub movi_offset: u32,
    /// Total file size in bytes (length of the parsed buffer, not the
    /// RIFF size field).
    pub file_size: u32,
}
453
#[cfg(feature = "alloc")]
impl AviContainer {
    /// Parse an AVI container from a byte buffer.
    ///
    /// Reads RIFF header, avih, all strh/strf pairs, and the idx1 index.
    /// Does NOT load frame data -- use [`extract_frame`] for that.
    ///
    /// Returns None when the buffer is too small or the RIFF/AVI magic is
    /// missing. Truncated chunks are tolerated: declared sizes are clamped
    /// to the buffer end instead of rejecting the file.
    pub(crate) fn parse(data: &[u8]) -> Option<Self> {
        // RIFF header: "RIFF" + size(4) + "AVI "
        if data.len() < 12 {
            return None;
        }
        let riff = FourCC::from_bytes(data)?;
        if riff != FourCC::RIFF {
            return None;
        }
        // RIFF size field is read but deliberately not validated against
        // data.len(); file_size below is taken from the actual buffer.
        let _file_size = read_u32_le(data, 4);
        let form = FourCC::from_bytes(&data[8..])?;
        if form != FourCC::AVI {
            return None;
        }

        let mut main_header = AviMainHeader::default();
        let mut streams = Vec::new();
        let mut index = Vec::new();
        let mut movi_offset: u32 = 0;
        let mut pos: usize = 12;

        // Walk top-level chunks
        while pos + 8 <= data.len() {
            let chunk_id = FourCC::from_bytes(&data[pos..])?;
            let chunk_size = read_u32_le(data, pos + 4) as usize;
            let chunk_data_start = pos + 8;
            // Clamp so a lying size field cannot index past the buffer.
            let chunk_data_end = chunk_data_start.saturating_add(chunk_size).min(data.len());

            if chunk_id == FourCC::LIST {
                if chunk_data_end < chunk_data_start + 4 {
                    // LIST too short to hold even its type tag; skip it.
                    pos = aligned_next(chunk_data_end);
                    continue;
                }
                let list_type = FourCC::from_bytes(&data[chunk_data_start..])?;

                if list_type == FourCC::HDRL {
                    // Parse header list
                    Self::parse_hdrl(
                        &data[chunk_data_start + 4..chunk_data_end],
                        &mut main_header,
                        &mut streams,
                    );
                } else if list_type == FourCC::MOVI {
                    // Record the position just past the "movi" type tag;
                    // see extract_frame for how idx1 offsets apply to it.
                    movi_offset = (chunk_data_start + 4) as u32;
                }
            } else if chunk_id == FourCC::IDX1 {
                // Parse index
                Self::parse_idx1(&data[chunk_data_start..chunk_data_end], &mut index);
            }

            // RIFF chunks are word (2-byte) aligned.
            pos = aligned_next(chunk_data_end);
        }

        Some(Self {
            main_header,
            streams,
            index,
            movi_offset,
            file_size: data.len() as u32,
        })
    }

    /// Parse the hdrl LIST contents (avih + strl lists).
    ///
    /// NOTE(review): stream_index only advances for strl lists that parse
    /// successfully, so a malformed strl shifts the indices of all later
    /// streams relative to their on-disk "00"/"01" chunk-id numbers --
    /// verify callers match streams by chunk id, not by this index.
    fn parse_hdrl(data: &[u8], main_header: &mut AviMainHeader, streams: &mut Vec<AviStreamInfo>) {
        let mut pos: usize = 0;
        let mut stream_index: u32 = 0;

        while pos + 8 <= data.len() {
            let chunk_id_opt = FourCC::from_bytes(&data[pos..]);
            let chunk_id = match chunk_id_opt {
                Some(id) => id,
                None => break,
            };
            let chunk_size = read_u32_le(data, pos + 4) as usize;
            let chunk_data_start = pos + 8;
            let chunk_data_end = chunk_data_start.saturating_add(chunk_size).min(data.len());

            if chunk_id == FourCC::AVIH {
                if let Some(hdr) = AviMainHeader::parse(&data[chunk_data_start..chunk_data_end]) {
                    *main_header = hdr;
                }
            } else if chunk_id == FourCC::LIST {
                // Check for strl sub-list
                if chunk_data_end >= chunk_data_start + 4 {
                    let list_type = FourCC::from_bytes(&data[chunk_data_start..]);
                    if list_type == Some(FourCC::STRL) {
                        if let Some(info) = Self::parse_strl(
                            &data[chunk_data_start + 4..chunk_data_end],
                            stream_index,
                        ) {
                            streams.push(info);
                            stream_index += 1;
                        }
                    }
                }
            }

            // Advance past the chunk (word-aligned)
            pos = aligned_next_rel(chunk_data_end);
        }
    }

    /// Parse a stream list (strl) containing strh + strf.
    ///
    /// Returns None when no strh chunk is found (the stream is unusable).
    fn parse_strl(data: &[u8], stream_index: u32) -> Option<AviStreamInfo> {
        let mut header: Option<AviStreamHeader> = None;
        let mut video_format: Option<BitmapInfoHeader> = None;
        let mut audio_format: Option<WaveFormatEx> = None;
        let mut pos: usize = 0;

        while pos + 8 <= data.len() {
            let chunk_id = FourCC::from_bytes(&data[pos..])?;
            let chunk_size = read_u32_le(data, pos + 4) as usize;
            let chunk_data_start = pos + 8;
            let chunk_data_end = chunk_data_start.saturating_add(chunk_size).min(data.len());

            if chunk_id == FourCC::STRH {
                header = AviStreamHeader::parse(&data[chunk_data_start..chunk_data_end]);
            } else if chunk_id == FourCC::STRF {
                // strf interpretation depends on the stream type, so strh
                // must precede strf; an strf seen first is silently ignored.
                if let Some(ref hdr) = header {
                    match hdr.get_stream_type() {
                        StreamType::Video => {
                            video_format =
                                BitmapInfoHeader::parse(&data[chunk_data_start..chunk_data_end]);
                        }
                        StreamType::Audio => {
                            audio_format =
                                WaveFormatEx::parse(&data[chunk_data_start..chunk_data_end]);
                        }
                        StreamType::Unknown => {}
                    }
                }
            }

            pos = aligned_next_rel(chunk_data_end);
        }

        let hdr = header?;
        let stream_type = hdr.get_stream_type();
        Some(AviStreamInfo {
            index: stream_index,
            stream_type,
            header: hdr,
            video_format,
            audio_format,
        })
    }

    /// Parse the idx1 chunk into 16-byte entries. A trailing partial
    /// entry (< 16 bytes) is silently ignored.
    fn parse_idx1(data: &[u8], index: &mut Vec<AviIndexEntry>) {
        let mut pos: usize = 0;
        while pos + 16 <= data.len() {
            if let Some(entry) = AviIndexEntry::parse(&data[pos..]) {
                index.push(entry);
            }
            pos += 16;
        }
    }

    /// Get the first video stream info, if any.
    pub(crate) fn video_stream(&self) -> Option<&AviStreamInfo> {
        self.streams
            .iter()
            .find(|s| s.stream_type == StreamType::Video)
    }

    /// Get the first audio stream info, if any.
    pub(crate) fn audio_stream(&self) -> Option<&AviStreamInfo> {
        self.streams
            .iter()
            .find(|s| s.stream_type == StreamType::Audio)
    }

    /// Extract frame data by index from the original AVI data buffer.
    ///
    /// Returns a slice into the provided data pointing to the frame payload.
    /// `frame_index` is the zero-based video frame number in the idx1.
    ///
    /// NOTE(review): idx1 offsets appear in two conventions in the wild --
    /// relative to the 'movi' list (most files) or absolute from the file
    /// start (some muxers). This code assumes movi-relative AND assumes
    /// the offset points at the chunk *header* (hence the +8 skip). Files
    /// whose offsets are measured from the 'movi' tag itself would land 4
    /// bytes off, and absolute-offset files will not resolve -- verify
    /// against real sample files.
    ///
    /// Performance: rebuilds the filtered video-entry list on every call
    /// (O(index len)); consider caching for sequential playback.
    pub(crate) fn extract_frame<'a>(&self, data: &'a [u8], frame_index: usize) -> Option<&'a [u8]> {
        let video_entries: Vec<&AviIndexEntry> =
            self.index.iter().filter(|e| e.is_video()).collect();
        let entry = video_entries.get(frame_index)?;

        // Offset is relative to movi list start (after "movi" tag)
        // Each chunk has an 8-byte header (fourcc + size)
        let abs_offset = (self.movi_offset as usize)
            .checked_add(entry.offset as usize)?
            .checked_add(8)?; // skip chunk header
        let end = abs_offset.checked_add(entry.size as usize)?;

        if end > data.len() {
            return None;
        }
        Some(&data[abs_offset..end])
    }

    /// Count video frames in the index.
    pub(crate) fn video_frame_count(&self) -> usize {
        self.index.iter().filter(|e| e.is_video()).count()
    }

    /// Count audio chunks in the index.
    pub(crate) fn audio_chunk_count(&self) -> usize {
        self.index.iter().filter(|e| e.is_audio()).count()
    }

    /// Get all video index entries.
    pub(crate) fn video_index_entries(&self) -> Vec<&AviIndexEntry> {
        self.index.iter().filter(|e| e.is_video()).collect()
    }

    /// Get all audio index entries.
    pub(crate) fn audio_index_entries(&self) -> Vec<&AviIndexEntry> {
        self.index.iter().filter(|e| e.is_audio()).collect()
    }

    /// Demux: separate video and audio index entries for interleaved playback.
    /// Returns (video_entries, audio_entries); entries that are neither
    /// video nor audio are dropped.
    pub(crate) fn demux_streams(&self) -> (Vec<AviIndexEntry>, Vec<AviIndexEntry>) {
        let mut video = Vec::new();
        let mut audio = Vec::new();
        for entry in &self.index {
            if entry.is_video() {
                video.push(*entry);
            } else if entry.is_audio() {
                audio.push(*entry);
            }
        }
        (video, audio)
    }
}
689
690// ============================================================================
691// Frame Rate Conversion
692// ============================================================================
693
/// Frame rate conversion mode.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FrameRateMode {
    /// Duplicate frames to increase frame rate (nearest-earlier source
    /// frame is repeated as needed).
    Duplicate,
    /// Drop frames to decrease frame rate.
    Drop,
    /// 3:2 pulldown (telecine) for 24fps -> ~30fps (29.97 interlaced).
    /// Pattern repeats every 5 output frames from 4 source frames.
    Pulldown32,
    /// Timestamp-based selection (nearest source frame).
    TimestampSelect,
    /// Linear blend between adjacent frames (integer weighted average
    /// with an 8-bit blend weight).
    LinearBlend,
}
709
710/// Frame rate converter state.
711#[derive(Debug, Clone)]
712pub struct FrameRateConverter {
713    /// Source frame rate numerator.
714    pub src_fps_num: u32,
715    /// Source frame rate denominator.
716    pub src_fps_den: u32,
717    /// Target frame rate numerator.
718    pub dst_fps_num: u32,
719    /// Target frame rate denominator.
720    pub dst_fps_den: u32,
721    /// Conversion mode.
722    pub mode: FrameRateMode,
723}
724
725impl FrameRateConverter {
726    /// Create a new frame rate converter.
727    pub fn new(
728        src_fps_num: u32,
729        src_fps_den: u32,
730        dst_fps_num: u32,
731        dst_fps_den: u32,
732        mode: FrameRateMode,
733    ) -> Self {
734        Self {
735            src_fps_num,
736            src_fps_den,
737            dst_fps_num,
738            dst_fps_den,
739            mode,
740        }
741    }
742
743    /// Compute the source frame index for a given output frame index.
744    ///
745    /// Uses timestamp-based selection:
746    ///   source_index = output_index * src_fps_den * dst_fps_num
747    ///                  / (dst_fps_den * src_fps_num)
748    ///
749    /// All integer arithmetic; rounds down to nearest source frame.
750    pub(crate) fn source_frame_for_output(&self, output_index: u32) -> u32 {
751        if self.src_fps_num == 0 || self.dst_fps_den == 0 {
752            return 0;
753        }
754        // output_pts = output_index * dst_fps_den / dst_fps_num (in seconds *
755        // dst_fps_den) source_index = output_pts * src_fps_num / src_fps_den
756        // Combined: output_index * dst_fps_den * src_fps_num / (dst_fps_num *
757        // src_fps_den) Wait, we want: output_index * src_fps_den * dst_fps_num
758        // / (dst_fps_den * src_fps_num) Actually: source_pts * src_fps =
759        // output_pts * dst_fps => source_index = output_index * dst_fps /
760        // src_fps (when fps = num/den) => source_index = output_index *
761        // (dst_fps_num / dst_fps_den) / (src_fps_num / src_fps_den)
762        // => source_index = output_index * dst_fps_num * src_fps_den / (dst_fps_den *
763        // src_fps_num) Wrong direction for pulldown/duplication -- re-derive:
764        // If we output more frames, each output frame maps to an earlier source frame.
765        // source_index = output_index * src_fps / dst_fps
766        //              = output_index * (src_fps_num / src_fps_den) / (dst_fps_num /
767        // dst_fps_den)              = output_index * src_fps_num * dst_fps_den
768        // / (src_fps_den * dst_fps_num)
769        let numerator = (output_index as u64)
770            .checked_mul(self.src_fps_num as u64)
771            .and_then(|v| v.checked_mul(self.dst_fps_den as u64))
772            .unwrap_or(u64::MAX);
773        let denominator = (self.src_fps_den as u64)
774            .checked_mul(self.dst_fps_num as u64)
775            .max(Some(1))
776            .unwrap_or(1);
777        (numerator / denominator) as u32
778    }
779
780    /// Compute the 3:2 pulldown pattern for a given output frame index.
781    ///
782    /// 3:2 pulldown maps 4 source frames to 5 output frames:
783    ///   Output 0 -> Source 0  (A)
784    ///   Output 1 -> Source 0  (A) -- repeated
785    ///   Output 2 -> Source 1  (B)
786    ///   Output 3 -> Source 2  (C)
787    ///   Output 4 -> Source 2  (C) -- repeated
788    ///   ... then pattern repeats with next 4 source frames.
789    ///
790    /// Returns (source_frame_index, is_repeated_frame).
791    pub(crate) fn pulldown_32_source(&self, output_index: u32) -> (u32, bool) {
792        let cycle = output_index / 5;
793        let phase = output_index % 5;
794        let base = cycle * 4;
795        match phase {
796            0 => (base, false),
797            1 => (base, true),      // A repeated
798            2 => (base + 1, false), // B
799            3 => (base + 2, false), // C
800            4 => (base + 2, true),  // C repeated
801            _ => unreachable!(),
802        }
803    }
804
805    /// Generate the output frame sequence for `total_output_frames`.
806    ///
807    /// Returns a list of (source_frame_index, blend_weight) pairs.
808    /// For non-blend modes, blend_weight is always 256 (fully opaque = source
809    /// frame). For LinearBlend, blend_weight is 0..256 indicating how much
810    /// of the *next* source frame to blend (0 = 100% current, 256 = 100%
811    /// next).
812    #[cfg(feature = "alloc")]
813    pub(crate) fn build_frame_map(
814        &self,
815        total_source_frames: u32,
816        total_output_frames: u32,
817    ) -> Vec<FrameMapEntry> {
818        let mut map = Vec::with_capacity(total_output_frames as usize);
819
820        for out_idx in 0..total_output_frames {
821            let entry = match self.mode {
822                FrameRateMode::Duplicate | FrameRateMode::Drop | FrameRateMode::TimestampSelect => {
823                    let src = self
824                        .source_frame_for_output(out_idx)
825                        .min(total_source_frames.saturating_sub(1));
826                    FrameMapEntry {
827                        source_index: src,
828                        blend_weight: 256,
829                    }
830                }
831                FrameRateMode::Pulldown32 => {
832                    let (src, _repeated) = self.pulldown_32_source(out_idx);
833                    FrameMapEntry {
834                        source_index: src.min(total_source_frames.saturating_sub(1)),
835                        blend_weight: 256,
836                    }
837                }
838                FrameRateMode::LinearBlend => {
839                    self.compute_blend_entry(out_idx, total_source_frames)
840                }
841            };
842            map.push(entry);
843        }
844
845        map
846    }
847
848    /// Compute a linear blend frame map entry.
849    ///
850    /// Determines which two source frames to blend and the blend weight.
851    /// Uses 8.8 fixed-point for sub-frame position.
852    fn compute_blend_entry(&self, output_index: u32, total_source_frames: u32) -> FrameMapEntry {
853        if self.dst_fps_num == 0 || self.src_fps_den == 0 || total_source_frames == 0 {
854            return FrameMapEntry {
855                source_index: 0,
856                blend_weight: 256,
857            };
858        }
859
860        // Source position in 8.8 fixed-point
861        let numerator = (output_index as u64)
862            .checked_mul(self.src_fps_num as u64)
863            .and_then(|v| v.checked_mul(self.dst_fps_den as u64))
864            .and_then(|v| v.checked_mul(256)) // 8.8 scale
865            .unwrap_or(u64::MAX);
866        let denominator = (self.src_fps_den as u64)
867            .checked_mul(self.dst_fps_num as u64)
868            .max(Some(1))
869            .unwrap_or(1);
870        let src_pos_fp = (numerator / denominator) as u32;
871
872        let src_index = (src_pos_fp >> 8).min(total_source_frames.saturating_sub(1));
873        let frac = src_pos_fp & 0xFF; // 0..255
874
875        FrameMapEntry {
876            source_index: src_index,
877            blend_weight: frac as u16, // 0 = 100% current, 255 = ~100% next
878        }
879    }
880}
881
/// A single entry in the frame map produced by
/// [`FrameRateConverter::build_frame_map`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct FrameMapEntry {
    /// Index of the (primary) source frame.
    pub source_index: u32,
    /// Blend weight toward the *next* source frame (0..256).
    /// 0 means 100% this frame, 256 means 100% next frame.
    /// For non-blend modes this is always 256 (use source_index as-is).
    ///
    /// NOTE(review): the non-blend sentinel 256 collides with the blend
    /// meaning "100% next frame" (see `blend_frames`, where weight 256
    /// yields frame_b). Renderers must special-case 256 by copying
    /// `source_index` directly instead of feeding it to a blend -- confirm
    /// against the actual consumers of this map.
    pub blend_weight: u16,
}
893
/// Blend two pixel buffers using an integer weighted average.
///
/// `weight` is 0..=256 (0 = 100% `frame_a`, 256 = 100% `frame_b`); values
/// above 256 are clamped to 256 so a malformed weight cannot wrap the
/// 8-bit result (e.g. weight 512 with b = 255 previously produced 254).
/// Only the common prefix of the three slices is written.
///
/// Note: this function touches no heap types, so it is not gated on the
/// `alloc` feature (the file-level `#![allow(dead_code)]` covers builds
/// where its callers are compiled out).
pub(crate) fn blend_frames(frame_a: &[u8], frame_b: &[u8], out: &mut [u8], weight: u16) {
    let w = (weight as u32).min(256);
    let inv_w = 256 - w;
    let len = frame_a.len().min(frame_b.len()).min(out.len());

    // max term: 255 * 256 + 0 = 65280, comfortably inside u32.
    for ((o, &a), &b) in out[..len].iter_mut().zip(&frame_a[..len]).zip(&frame_b[..len]) {
        *o = ((a as u32 * inv_w + b as u32 * w) >> 8) as u8;
    }
}
910
911// ============================================================================
912// Subtitle (SRT) Parser and Overlay
913// ============================================================================
914
/// A single subtitle entry parsed from SRT format.
///
/// Produced by [`SubtitleTrack::parse_srt`]; the times come from the
/// "HH:MM:SS,mmm --> HH:MM:SS,mmm" cue line.
#[cfg(feature = "alloc")]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SubtitleEntry {
    /// Sequence number (1-based, as written in the SRT file).
    pub sequence: u32,
    /// Start time in milliseconds.
    pub start_ms: u64,
    /// End time in milliseconds. The entry is considered active while
    /// start_ms <= t < end_ms (see `SubtitleTrack::active_at`).
    pub end_ms: u64,
    /// Text content (may contain multiple lines separated by '\n').
    pub text: String,
}
928
/// Subtitle overlay configuration.
#[derive(Debug, Clone, Copy)]
pub struct SubtitleConfig {
    /// Bottom margin in pixels from the bottom of the frame.
    pub bottom_margin: u32,
    /// Left/right margin in pixels.
    pub horizontal_margin: u32,
    /// Font width in pixels (8 for the 8x16 bitmap font).
    pub font_width: u32,
    /// Font height in pixels (16 for the 8x16 bitmap font).
    pub font_height: u32,
    /// Background box opacity: 0 = transparent, 255 = fully opaque.
    pub bg_opacity: u8,
    /// Background color (R, G, B).
    pub bg_color: (u8, u8, u8),
    /// Text color (R, G, B).
    pub text_color: (u8, u8, u8),
    /// Padding inside background box in pixels.
    pub padding: u32,
}

impl Default for SubtitleConfig {
    /// White 8x16 text on a mostly-opaque black box, 40px above the bottom
    /// edge, 20px side margins, 4px of inner padding.
    fn default() -> Self {
        SubtitleConfig {
            font_width: 8,
            font_height: 16,
            bottom_margin: 40,
            horizontal_margin: 20,
            padding: 4,
            bg_opacity: 180,
            bg_color: (0, 0, 0),
            text_color: (255, 255, 255),
        }
    }
}
964
/// Subtitle track holding all parsed entries.
#[cfg(feature = "alloc")]
#[derive(Debug, Clone)]
pub struct SubtitleTrack {
    /// All subtitle entries, sorted by start_ms (`parse_srt` sorts after
    /// parsing). Entries may overlap in time; see `all_active_at`.
    pub entries: Vec<SubtitleEntry>,
}
972
#[cfg(feature = "alloc")]
impl SubtitleTrack {
    /// Parse an SRT document into a track.
    ///
    /// Expected cue layout (cues separated by blank lines):
    /// ```text
    /// 1
    /// 00:00:01,000 --> 00:00:04,000
    /// Hello, world!
    ///
    /// 2
    /// 00:00:05,500 --> 00:00:08,000
    /// Second subtitle
    /// with multiple lines.
    /// ```
    ///
    /// Cues with an unparsable sequence number or timestamp line are
    /// skipped. Entries are sorted by start time before being returned.
    pub(crate) fn parse_srt(input: &str) -> Self {
        let mut entries = Vec::new();
        let mut lines = input.lines().peekable();

        while lines.peek().is_some() {
            // Consume blank separator lines before the next cue.
            while let Some(&candidate) = lines.peek() {
                if !candidate.trim().is_empty() {
                    break;
                }
                lines.next();
            }

            // Cue sequence number.
            let sequence = match lines.next() {
                None => break,
                Some(raw) => match parse_u32_from_str(raw.trim()) {
                    Some(n) => n,
                    None => continue, // malformed cue header -- resync
                },
            };

            // Timestamp line: "HH:MM:SS,mmm --> HH:MM:SS,mmm".
            let (start_ms, end_ms) = match lines.next() {
                None => break,
                Some(raw) => match parse_srt_timestamp_line(raw.trim()) {
                    Some(pair) => pair,
                    None => continue,
                },
            };

            // Body: every following non-blank line, joined with '\n'.
            let mut text = String::new();
            while let Some(&body_line) = lines.peek() {
                if body_line.trim().is_empty() {
                    break;
                }
                if !text.is_empty() {
                    text.push('\n');
                }
                text.push_str(body_line);
                lines.next();
            }

            entries.push(SubtitleEntry {
                sequence,
                start_ms,
                end_ms,
                text,
            });
        }

        // Valid SRT is already ordered; sort defensively anyway.
        entries.sort_by_key(|e| e.start_ms);

        Self { entries }
    }

    /// First entry active at `time_ms` (start_ms <= time_ms < end_ms).
    pub(crate) fn active_at(&self, time_ms: u64) -> Option<&SubtitleEntry> {
        self.entries
            .iter()
            .find(|e| (e.start_ms..e.end_ms).contains(&time_ms))
    }

    /// Every entry active at `time_ms`, in track order.
    pub(crate) fn all_active_at(&self, time_ms: u64) -> Vec<&SubtitleEntry> {
        self.entries
            .iter()
            .filter(|e| (e.start_ms..e.end_ms).contains(&time_ms))
            .collect()
    }

    /// Number of subtitle entries.
    pub(crate) fn len(&self) -> usize {
        self.entries.len()
    }

    /// Whether the track has no entries.
    pub(crate) fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
}
1075
/// Render a subtitle onto a pixel buffer (XRGB8888 / BGRX8888 / ARGB8888
/// format).
///
/// `buf` is the framebuffer in 32-bit pixel format (4 bytes per pixel).
/// `stride` is the row stride in bytes.
/// `width` and `height` are the frame dimensions.
/// `text` is the subtitle text (may contain newlines).
/// `config` controls positioning, colors, and opacity.
///
/// Uses a simple 8x16 bitmap font renderer. Characters outside printable
/// ASCII are skipped. Multi-line text is word-wrapped at the frame width
/// boundary, and the resulting box is bottom-centered.
#[cfg(feature = "alloc")]
pub(crate) fn render_subtitle_overlay(
    buf: &mut [u8],
    stride: u32,
    width: u32,
    height: u32,
    text: &str,
    config: &SubtitleConfig,
) {
    if width == 0 || height == 0 || text.is_empty() {
        return;
    }

    let glyph_w = config.font_width;
    let glyph_h = config.font_height;
    let pad = config.padding;

    // Width usable for glyphs once margins and box padding are removed.
    let usable = width
        .saturating_sub(config.horizontal_margin * 2)
        .saturating_sub(pad * 2);
    if usable < glyph_w {
        return;
    }
    let chars_per_line = usable / glyph_w;
    if chars_per_line == 0 {
        return;
    }

    // Word-wrap into renderable lines.
    let lines = wrap_text(text, chars_per_line as usize);
    let line_count = lines.len() as u32;
    if line_count == 0 {
        return;
    }

    // Box geometry around the wrapped text.
    let widest = lines.iter().map(|l| l.len() as u32).max().unwrap_or(0);
    let text_w = widest * glyph_w;
    let text_h = line_count * glyph_h;
    let box_w = text_w + pad * 2;
    let box_h = text_h + pad * 2;

    // Bottom-centered; degrade to the top-left corner when it cannot fit.
    let box_x = width.checked_sub(box_w).map_or(0, |slack| slack / 2);
    let box_y = if height > box_h + config.bottom_margin {
        height - box_h - config.bottom_margin
    } else {
        0
    };

    // Semi-transparent backdrop first, then the glyphs on top.
    draw_bg_box(buf, stride, width, height, box_x, box_y, box_w, box_h, config);

    let left = box_x + pad;
    for (row, line) in lines.iter().enumerate() {
        let y = box_y + pad + (row as u32) * glyph_h;
        let line_w = line.len() as u32 * glyph_w;
        // Center each line horizontally within the box's text area.
        let x = if text_w > line_w {
            left + (text_w - line_w) / 2
        } else {
            left
        };
        draw_text_line(buf, stride, width, height, x, y, line, config);
    }
}
1168
1169/// Draw a semi-transparent background rectangle.
1170#[allow(clippy::too_many_arguments)]
1171fn draw_bg_box(
1172    buf: &mut [u8],
1173    stride: u32,
1174    _width: u32,
1175    height: u32,
1176    bx: u32,
1177    by: u32,
1178    bw: u32,
1179    bh: u32,
1180    config: &SubtitleConfig,
1181) {
1182    let alpha = config.bg_opacity as u32;
1183    let inv_alpha = 255u32.saturating_sub(alpha);
1184    let (br, bg, bb) = config.bg_color;
1185
1186    for dy in 0..bh {
1187        let py = by + dy;
1188        if py >= height {
1189            break;
1190        }
1191        let row_offset = (py * stride) as usize;
1192
1193        for dx in 0..bw {
1194            let px = bx + dx;
1195            let pixel_offset = row_offset + (px as usize) * 4;
1196            if pixel_offset + 3 >= buf.len() {
1197                continue;
1198            }
1199
1200            // Alpha blend: out = bg * alpha + existing * (255 - alpha), all / 255
1201            let existing_b = buf[pixel_offset] as u32;
1202            let existing_g = buf[pixel_offset + 1] as u32;
1203            let existing_r = buf[pixel_offset + 2] as u32;
1204
1205            buf[pixel_offset] = ((bb as u32 * alpha + existing_b * inv_alpha) / 255) as u8;
1206            buf[pixel_offset + 1] = ((bg as u32 * alpha + existing_g * inv_alpha) / 255) as u8;
1207            buf[pixel_offset + 2] = ((br as u32 * alpha + existing_r * inv_alpha) / 255) as u8;
1208            buf[pixel_offset + 3] = 0xFF;
1209        }
1210    }
1211}
1212
1213/// Draw a single line of text using an 8x16 bitmap font.
1214fn draw_text_line(
1215    buf: &mut [u8],
1216    stride: u32,
1217    _width: u32,
1218    height: u32,
1219    start_x: u32,
1220    start_y: u32,
1221    text: &str,
1222    config: &SubtitleConfig,
1223) {
1224    let (tr, tg, tb) = config.text_color;
1225
1226    for (i, ch) in text.chars().enumerate() {
1227        let gx = start_x + (i as u32) * config.font_width;
1228        let glyph = get_glyph(ch);
1229
1230        for row in 0..config.font_height.min(16) {
1231            let py = start_y + row;
1232            if py >= height {
1233                break;
1234            }
1235            let bits = glyph[row as usize];
1236
1237            for col in 0..config.font_width.min(8) {
1238                if bits & (0x80 >> col) != 0 {
1239                    let px = gx + col;
1240                    let pixel_offset = (py * stride) as usize + (px as usize) * 4;
1241                    if pixel_offset + 3 < buf.len() {
1242                        buf[pixel_offset] = tb;
1243                        buf[pixel_offset + 1] = tg;
1244                        buf[pixel_offset + 2] = tr;
1245                        buf[pixel_offset + 3] = 0xFF;
1246                    }
1247                }
1248            }
1249        }
1250    }
1251}
1252
/// Minimal 8x16 glyph lookup.
///
/// Returns a 16-byte bitmap, one byte per row of 8 pixels, bit 7 being the
/// leftmost pixel. Only printable ASCII (0x20..=0x7E) is supported; every
/// other character maps to a blank glyph. Characters in that range without
/// a dedicated bitmap fall back to a generic boxed-block glyph.
fn get_glyph(ch: char) -> [u8; 16] {
    const BLANK: [u8; 16] = [0u8; 16];

    // `is_ascii_graphic` covers 0x21..=0x7E; together with ' ' that is
    // exactly the printable range 0x20..=0x7E.
    if ch != ' ' && !ch.is_ascii_graphic() {
        return BLANK;
    }

    // Tiny built-in font for a few essential characters. In production this
    // would reference the kernel's full font8x16 table.
    match ch {
        ' ' => BLANK,
        'A' => [
            0x00, 0x00, 0x18, 0x3C, 0x66, 0x66, 0x7E, 0x66, 0x66, 0x66, 0x66, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
        'H' => [
            0x00, 0x00, 0x66, 0x66, 0x66, 0x66, 0x7E, 0x66, 0x66, 0x66, 0x66, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
        'e' => [
            0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x66, 0x7E, 0x60, 0x3C, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
        'l' => [
            0x00, 0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x0E, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
        'o' => [
            0x00, 0x00, 0x00, 0x00, 0x00, 0x3C, 0x66, 0x66, 0x66, 0x3C, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
        '!' => [
            0x00, 0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
        // Generic block glyph for characters without specific definitions.
        _ => [
            0x00, 0x00, 0x7E, 0x42, 0x42, 0x42, 0x42, 0x42, 0x42, 0x7E, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00,
        ],
    }
}
1302
/// Word-wrap text to fit within `max_chars` characters per line.
/// Splits on existing newlines first, then wraps long lines at word
/// boundaries; words longer than a line are force-broken.
///
/// All measuring and force-breaking is done in *characters*, not bytes:
/// the previous implementation sliced words with `&word[start..end]`,
/// which panics on a UTF-8 multibyte boundary, and compared byte lengths
/// against a character budget (the renderer draws per `char`).
#[cfg(feature = "alloc")]
fn wrap_text(text: &str, max_chars: usize) -> Vec<String> {
    let mut result = Vec::new();
    if max_chars == 0 {
        return result;
    }

    for raw_line in text.split('\n') {
        if raw_line.chars().count() <= max_chars {
            result.push(String::from(raw_line));
            continue;
        }

        // Word-wrap this line, tracking the char count of the current line.
        let mut current_line = String::new();
        let mut current_len = 0usize;
        for word in raw_line.split(' ') {
            let word_len = word.chars().count();
            if current_line.is_empty() {
                if word_len > max_chars {
                    // Word longer than a line -- force-break on char
                    // boundaries, max_chars characters per chunk.
                    let mut chunk = String::new();
                    let mut chunk_len = 0usize;
                    for ch in word.chars() {
                        chunk.push(ch);
                        chunk_len += 1;
                        if chunk_len == max_chars {
                            result.push(chunk);
                            chunk = String::new();
                            chunk_len = 0;
                        }
                    }
                    if !chunk.is_empty() {
                        result.push(chunk);
                    }
                } else {
                    current_line.push_str(word);
                    current_len = word_len;
                }
            } else if current_len + 1 + word_len <= max_chars {
                current_line.push(' ');
                current_line.push_str(word);
                current_len += 1 + word_len;
            } else {
                result.push(current_line);
                current_line = String::from(word);
                current_len = word_len;
            }
        }
        if !current_line.is_empty() {
            result.push(current_line);
        }
    }

    result
}
1348
1349// ============================================================================
1350// Real-Time Audio Scheduling
1351// ============================================================================
1352
/// Audio thread priority class, mapped to the kernel deadline scheduler.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AudioPriorityClass {
    /// Critical audio path (lowest latency, highest priority).
    /// Period: 1ms, runtime budget: 500us.
    Critical,
    /// Normal audio processing (standard latency).
    /// Period: 5ms, runtime budget: 2ms.
    Normal,
    /// Background audio tasks (bulk processing, not latency-sensitive).
    /// Period: 20ms, runtime budget: 10ms.
    Background,
}

impl AudioPriorityClass {
    /// Wake period in nanoseconds for this priority class.
    pub(crate) fn period_ns(&self) -> u64 {
        const ONE_MS: u64 = 1_000_000;
        match self {
            AudioPriorityClass::Critical => ONE_MS,
            AudioPriorityClass::Normal => 5 * ONE_MS,
            AudioPriorityClass::Background => 20 * ONE_MS,
        }
    }

    /// Runtime budget in nanoseconds per period for this priority class.
    pub(crate) fn runtime_ns(&self) -> u64 {
        match self {
            AudioPriorityClass::Critical => 500_000,      // 500us
            AudioPriorityClass::Normal => 2_000_000,      // 2ms
            AudioPriorityClass::Background => 10_000_000, // 10ms
        }
    }

    /// Relative deadline in nanoseconds; audio uses deadline == period.
    pub(crate) fn deadline_ns(&self) -> u64 {
        self.period_ns()
    }

    /// CPU utilization in permille (runtime * 1000 / period), saturating
    /// to 1000 (full CPU) on a zero period or arithmetic overflow.
    pub(crate) fn utilization_permille(&self) -> u64 {
        match self.period_ns() {
            0 => 1000,
            period => self
                .runtime_ns()
                .checked_mul(1000)
                .map(|scaled| scaled / period)
                .unwrap_or(1000),
        }
    }
}
1403
/// Audio scheduling parameters for a single audio thread.
#[derive(Debug, Clone, Copy)]
pub struct AudioSchedParams {
    /// Process/thread ID (unique key within `AudioScheduler`).
    pub pid: u64,
    /// Priority class these parameters were derived from. Informational
    /// for `custom`-built params, which always record `Normal`.
    pub priority: AudioPriorityClass,
    /// Period in nanoseconds (wake interval for buffer fill).
    pub period_ns: u64,
    /// Runtime budget in nanoseconds per period.
    pub runtime_ns: u64,
    /// CPU reservation in permille (0..1000), i.e. runtime * 1000 / period.
    pub cpu_reservation_permille: u32,
}
1418
1419impl AudioSchedParams {
1420    /// Create scheduling parameters for a given priority class.
1421    pub(crate) fn from_priority(pid: u64, priority: AudioPriorityClass) -> Self {
1422        Self {
1423            pid,
1424            priority,
1425            period_ns: priority.period_ns(),
1426            runtime_ns: priority.runtime_ns(),
1427            cpu_reservation_permille: priority.utilization_permille() as u32,
1428        }
1429    }
1430
1431    /// Create custom scheduling parameters.
1432    pub(crate) fn custom(pid: u64, period_ns: u64, runtime_ns: u64) -> Self {
1433        let cpu_reservation_permille = if period_ns > 0 {
1434            (runtime_ns.saturating_mul(1000) / period_ns) as u32
1435        } else {
1436            1000
1437        };
1438        Self {
1439            pid,
1440            priority: AudioPriorityClass::Normal,
1441            period_ns,
1442            runtime_ns,
1443            cpu_reservation_permille,
1444        }
1445    }
1446}
1447
/// Statistics for a single audio thread's scheduling behavior.
#[derive(Debug, Clone, Copy, Default)]
pub struct AudioSchedStats {
    /// Total number of scheduling periods completed.
    pub periods_completed: u64,
    /// Number of times the thread was woken on time (at or before the
    /// expected wake).
    pub on_time_wakes: u64,
    /// Number of times the thread was woken late (missed deadline).
    pub late_wakes: u64,
    /// Number of buffer underruns (thread did not fill buffer in time).
    pub underruns: u64,
    /// Number of buffer overruns (buffer full, data lost).
    pub overruns: u64,
    /// Maximum observed scheduling jitter in nanoseconds.
    pub max_jitter_ns: u64,
    /// Minimum observed scheduling jitter in nanoseconds.
    /// 0 doubles as the "nothing recorded yet" sentinel.
    pub min_jitter_ns: u64,
    /// Cumulative jitter (for computing average).
    pub total_jitter_ns: u64,
    /// Last wake timestamp (nanoseconds since boot).
    pub last_wake_ns: u64,
    /// Expected next wake timestamp.
    pub next_expected_wake_ns: u64,
}

impl AudioSchedStats {
    /// Average jitter in nanoseconds over all completed periods (0 if none).
    pub(crate) fn avg_jitter_ns(&self) -> u64 {
        if self.periods_completed == 0 {
            return 0;
        }
        self.total_jitter_ns / self.periods_completed
    }

    /// Record a wake event.
    ///
    /// `actual_wake_ns` is when the thread actually woke;
    /// `expected_wake_ns` is when the wake was scheduled. Their absolute
    /// difference is the jitter sample.
    pub(crate) fn record_wake(&mut self, actual_wake_ns: u64, expected_wake_ns: u64) {
        self.periods_completed += 1;

        let jitter = actual_wake_ns.abs_diff(expected_wake_ns);
        self.total_jitter_ns = self.total_jitter_ns.saturating_add(jitter);
        if jitter > self.max_jitter_ns {
            self.max_jitter_ns = jitter;
        }
        if self.min_jitter_ns == 0 || jitter < self.min_jitter_ns {
            self.min_jitter_ns = jitter;
        }

        if actual_wake_ns <= expected_wake_ns {
            self.on_time_wakes += 1;
        } else {
            self.late_wakes += 1;
        }

        // Estimate the schedule period as "previous wake -> this period's
        // scheduled wake", then project the next expected wake from the
        // actual wake. This must read last_wake_ns BEFORE updating it; the
        // previous code overwrote last_wake_ns first, so the subtraction
        // always saw the new value and the prediction degenerated to
        // max(actual, expected), never using the previous wake at all.
        let inferred_period_ns = expected_wake_ns.saturating_sub(self.last_wake_ns);
        self.last_wake_ns = actual_wake_ns;
        self.next_expected_wake_ns = actual_wake_ns.saturating_add(inferred_period_ns);
    }

    /// Record a buffer underrun event.
    pub(crate) fn record_underrun(&mut self) {
        self.underruns += 1;
    }

    /// Record a buffer overrun event.
    pub(crate) fn record_overrun(&mut self) {
        self.overruns += 1;
    }
}
1521
/// Real-time audio scheduler manager.
///
/// Tracks all registered audio threads and their scheduling statistics,
/// and enforces an admission cap on total CPU reservation. Integrates with
/// the kernel's EDF deadline scheduler for actual scheduling; this type
/// itself only does bookkeeping.
#[cfg(feature = "alloc")]
#[derive(Debug)]
pub struct AudioScheduler {
    /// Registered audio threads and their parameters.
    threads: Vec<AudioSchedParams>,
    /// Per-thread scheduling statistics (kept index-parallel to `threads`).
    stats: Vec<AudioSchedStats>,
    /// Sum of `cpu_reservation_permille` across all registered threads.
    total_reservation_permille: u32,
    /// Maximum total CPU reservation allowed (default: 800 = 80%).
    max_reservation_permille: u32,
}
1538
#[cfg(feature = "alloc")]
impl Default for AudioScheduler {
    /// Equivalent to [`AudioScheduler::new`] (800 permille CPU cap).
    fn default() -> Self {
        Self::new()
    }
}
1545
#[cfg(feature = "alloc")]
impl AudioScheduler {
    /// Maximum number of concurrent audio threads.
    const MAX_AUDIO_THREADS: usize = 32;

    /// Create a new audio scheduler with default settings (80% CPU cap).
    pub fn new() -> Self {
        Self {
            threads: Vec::new(),
            stats: Vec::new(),
            total_reservation_permille: 0,
            max_reservation_permille: 800, // 80% max for audio
        }
    }

    /// Create with a custom maximum CPU reservation, clamped to 1000.
    pub(crate) fn with_max_reservation(max_permille: u32) -> Self {
        Self {
            threads: Vec::new(),
            stats: Vec::new(),
            total_reservation_permille: 0,
            max_reservation_permille: max_permille.min(1000),
        }
    }

    /// Register an audio thread for real-time scheduling.
    ///
    /// Returns `Err` with:
    /// - `AlreadyRegistered` if `params.pid` is already present,
    /// - `TooManyThreads` if `MAX_AUDIO_THREADS` is reached,
    /// - `InsufficientCpuBudget` if the reservation would exceed the cap.
    pub(crate) fn register_thread(
        &mut self,
        params: AudioSchedParams,
    ) -> Result<(), AudioSchedError> {
        if self.threads.iter().any(|t| t.pid == params.pid) {
            return Err(AudioSchedError::AlreadyRegistered);
        }
        if self.threads.len() >= Self::MAX_AUDIO_THREADS {
            return Err(AudioSchedError::TooManyThreads);
        }

        let new_total = self
            .total_reservation_permille
            .saturating_add(params.cpu_reservation_permille);
        if new_total > self.max_reservation_permille {
            return Err(AudioSchedError::InsufficientCpuBudget);
        }

        self.total_reservation_permille = new_total;
        self.threads.push(params);
        self.stats.push(AudioSchedStats::default());
        Ok(())
    }

    /// Unregister an audio thread and release its CPU reservation.
    pub(crate) fn unregister_thread(&mut self, pid: u64) -> Result<(), AudioSchedError> {
        let idx = self
            .threads
            .iter()
            .position(|t| t.pid == pid)
            .ok_or(AudioSchedError::NotFound)?;

        let params = self.threads.remove(idx);
        // Remove the matching entry to keep `stats` index-parallel.
        self.stats.remove(idx);
        self.total_reservation_permille = self
            .total_reservation_permille
            .saturating_sub(params.cpu_reservation_permille);
        Ok(())
    }

    /// Scheduling parameters for `pid`, if registered.
    pub(crate) fn get_params(&self, pid: u64) -> Option<&AudioSchedParams> {
        self.threads.iter().find(|t| t.pid == pid)
    }

    /// Mutable scheduling statistics for `pid`, if registered.
    pub(crate) fn get_stats_mut(&mut self, pid: u64) -> Option<&mut AudioSchedStats> {
        let idx = self.threads.iter().position(|t| t.pid == pid)?;
        self.stats.get_mut(idx)
    }

    /// Scheduling statistics for `pid`, if registered.
    pub(crate) fn get_stats(&self, pid: u64) -> Option<&AudioSchedStats> {
        let idx = self.threads.iter().position(|t| t.pid == pid)?;
        self.stats.get(idx)
    }

    /// Record a wake event for `pid` (see `AudioSchedStats::record_wake`).
    pub(crate) fn record_wake(
        &mut self,
        pid: u64,
        actual_ns: u64,
        expected_ns: u64,
    ) -> Result<(), AudioSchedError> {
        let stats = self.get_stats_mut(pid).ok_or(AudioSchedError::NotFound)?;
        stats.record_wake(actual_ns, expected_ns);
        Ok(())
    }

    /// Total CPU reservation in permille across registered threads.
    pub(crate) fn total_reservation(&self) -> u32 {
        self.total_reservation_permille
    }

    /// Number of registered audio threads.
    pub(crate) fn thread_count(&self) -> usize {
        self.threads.len()
    }

    /// Remaining CPU budget in permille.
    pub(crate) fn available_budget(&self) -> u32 {
        self.max_reservation_permille
            .saturating_sub(self.total_reservation_permille)
    }

    /// Next wake time for `pid`: one period after `current_ns`.
    pub(crate) fn next_wake_time(&self, pid: u64, current_ns: u64) -> Option<u64> {
        let params = self.get_params(pid)?;
        Some(current_ns.saturating_add(params.period_ns))
    }

    /// Aggregate statistics across all audio threads.
    ///
    /// Wake/underrun/overrun counters and total jitter are summed;
    /// `max_jitter_ns` and `min_jitter_ns` are the extremes across threads
    /// (the minimum was previously omitted from aggregation). The
    /// timestamp fields stay 0 -- they are per-thread quantities with no
    /// aggregate meaning.
    pub(crate) fn aggregate_stats(&self) -> AudioSchedStats {
        let mut agg = AudioSchedStats::default();
        for stats in &self.stats {
            agg.periods_completed = agg
                .periods_completed
                .saturating_add(stats.periods_completed);
            agg.on_time_wakes = agg.on_time_wakes.saturating_add(stats.on_time_wakes);
            agg.late_wakes = agg.late_wakes.saturating_add(stats.late_wakes);
            agg.underruns = agg.underruns.saturating_add(stats.underruns);
            agg.overruns = agg.overruns.saturating_add(stats.overruns);
            if stats.max_jitter_ns > agg.max_jitter_ns {
                agg.max_jitter_ns = stats.max_jitter_ns;
            }
            // Fold in the per-thread minimum as well, preserving 0 as the
            // "nothing recorded" sentinel used by record_wake.
            if stats.min_jitter_ns != 0
                && (agg.min_jitter_ns == 0 || stats.min_jitter_ns < agg.min_jitter_ns)
            {
                agg.min_jitter_ns = stats.min_jitter_ns;
            }
            agg.total_jitter_ns = agg.total_jitter_ns.saturating_add(stats.total_jitter_ns);
        }
        agg
    }
}
1691
/// Errors from the audio real-time scheduler.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AudioSchedError {
    /// A thread with the same pid is already registered
    /// (`AudioScheduler::register_thread`).
    AlreadyRegistered,
    /// Maximum audio thread count exceeded (`register_thread`).
    TooManyThreads,
    /// Not enough CPU budget for the requested reservation
    /// (`register_thread`).
    InsufficientCpuBudget,
    /// Thread not found (`unregister_thread`, `record_wake`).
    NotFound,
    /// Invalid scheduling parameters.
    /// NOTE(review): not constructed anywhere in this module -- confirm it
    /// is produced by external callers before relying on it.
    InvalidParams,
}
1706
/// Global audio scheduling statistics counters (lock-free).
pub(crate) static AUDIO_TOTAL_UNDERRUNS: AtomicU64 = AtomicU64::new(0);
pub(crate) static AUDIO_TOTAL_OVERRUNS: AtomicU64 = AtomicU64::new(0);
pub(crate) static AUDIO_TOTAL_LATE_WAKES: AtomicU64 = AtomicU64::new(0);

/// Relaxed increment shared by the global audio counters; relaxed ordering
/// suffices because these are statistics, not synchronization points.
#[inline]
fn bump_counter(counter: &AtomicU64) {
    counter.fetch_add(1, Ordering::Relaxed);
}

/// Increment the global underrun counter.
pub(crate) fn count_audio_underrun() {
    bump_counter(&AUDIO_TOTAL_UNDERRUNS);
}

/// Increment the global overrun counter.
pub(crate) fn count_audio_overrun() {
    bump_counter(&AUDIO_TOTAL_OVERRUNS);
}

/// Increment the global late wake counter.
pub(crate) fn count_audio_late_wake() {
    bump_counter(&AUDIO_TOTAL_LATE_WAKES);
}
1726
1727// ============================================================================
1728// Helper Functions
1729// ============================================================================
1730
/// Read a little-endian u32 from a byte slice at the given offset.
///
/// Returns 0 when fewer than 4 bytes are available at `offset`, matching
/// the forgiving-parser convention used throughout this module.
#[inline]
fn read_u32_le(data: &[u8], offset: usize) -> u32 {
    // Bounds-check via `get` rather than `offset + 4 > data.len()`: the
    // addition can overflow `usize` for huge offsets (panicking in debug
    // builds, wrapping in release and then panicking at the index).
    match data.get(offset..).and_then(|rest| rest.get(..4)) {
        Some(b) => u32::from_le_bytes([b[0], b[1], b[2], b[3]]),
        None => 0,
    }
}
1744
1745/// Read a little-endian i32 from a byte slice at the given offset.
1746#[inline]
1747fn read_i32_le(data: &[u8], offset: usize) -> i32 {
1748    read_u32_le(data, offset) as i32
1749}
1750
/// Read a little-endian u16 from a byte slice at the given offset.
///
/// Returns 0 when fewer than 2 bytes are available at `offset`, matching
/// the forgiving-parser convention used throughout this module.
#[inline]
fn read_u16_le(data: &[u8], offset: usize) -> u16 {
    // Bounds-check via `get` rather than `offset + 2 > data.len()`: the
    // addition can overflow `usize` for huge offsets (panicking in debug
    // builds, wrapping in release and then panicking at the index).
    match data.get(offset..).and_then(|rest| rest.get(..2)) {
        Some(b) => u16::from_le_bytes([b[0], b[1]]),
        None => 0,
    }
}
1759
/// Advance to the next 2-byte aligned position.
///
/// Even positions are returned unchanged; odd positions are bumped up
/// by one (RIFF chunks are padded to 16-bit boundaries).
fn aligned_next(pos: usize) -> usize {
    // Adding the low bit rounds odd values up and leaves even values alone.
    pos + (pos & 1)
}
1764
/// Advance to the next 2-byte aligned position (relative offset).
///
/// Identical to [`aligned_next`]; kept as a separate name so call sites
/// document whether the offset is absolute or movi-relative.
fn aligned_next_rel(pos: usize) -> usize {
    if pos % 2 == 0 {
        pos
    } else {
        pos + 1
    }
}
1769
/// Parse a u32 from a decimal string.
///
/// Leading/trailing whitespace is ignored. Returns `None` for an empty
/// string, any non-ASCII-digit character (including a leading sign), or
/// a value that overflows `u32`.
fn parse_u32_from_str(s: &str) -> Option<u32> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    trimmed.bytes().try_fold(0u32, |acc, byte| {
        let digit = (byte as char).to_digit(10)?;
        acc.checked_mul(10)?.checked_add(digit)
    })
}
1785
/// Parse an SRT timestamp "HH:MM:SS,mmm" to milliseconds.
///
/// Accepts either ',' or '.' before the millisecond field. Returns
/// `None` for strings shorter than 12 bytes (after trimming), wrong
/// separators, or non-digit characters. Bytes past index 11 are ignored.
fn parse_srt_timestamp(s: &str) -> Option<u64> {
    let s = s.trim();
    let b = s.as_bytes();
    if b.len() < 12 {
        return None;
    }
    // Separator layout: HH:MM:SS then ',' or '.' before the millis.
    if b[2] != b':' || b[5] != b':' || (b[8] != b',' && b[8] != b'.') {
        return None;
    }
    // Single ASCII digit at index `i`.
    let digit = |i: usize| -> Option<u64> {
        let d = b[i].wrapping_sub(b'0');
        if d <= 9 { Some(d as u64) } else { None }
    };
    // Two-digit field starting at index `i`.
    let field = |i: usize| -> Option<u64> { Some(digit(i)? * 10 + digit(i + 1)?) };

    let hours = field(0)?;
    let minutes = field(3)?;
    let seconds = field(6)?;
    let millis = digit(9)? * 100 + digit(10)? * 10 + digit(11)?;

    hours
        .checked_mul(3_600_000)?
        .checked_add(minutes.checked_mul(60_000)?)?
        .checked_add(seconds.checked_mul(1_000)?)?
        .checked_add(millis)
}
1817
1818/// Parse a timestamp line "start --> end".
1819fn parse_srt_timestamp_line(line: &str) -> Option<(u64, u64)> {
1820    let parts: Vec<&str> = line.split("-->").collect();
1821    if parts.len() != 2 {
1822        return None;
1823    }
1824    let start = parse_srt_timestamp(parts[0])?;
1825    let end = parse_srt_timestamp(parts[1])?;
1826    Some((start, end))
1827}
1828
/// Parse two ASCII decimal digits at `offset` into a value in 0..=99.
///
/// Returns `None` when the slice holds fewer than 2 bytes at `offset`
/// (including offsets so large that `offset + 2` would overflow `usize`)
/// or when either byte is not an ASCII digit.
fn parse_two_digits(bytes: &[u8], offset: usize) -> Option<u32> {
    // Two-step `get` avoids the `offset + 2` addition, which could
    // overflow (panic in debug builds) for huge offsets.
    let window = bytes.get(offset..)?.get(..2)?;
    let hi = (window[0] as char).to_digit(10)?;
    let lo = (window[1] as char).to_digit(10)?;
    Some(hi * 10 + lo)
}
1841
/// Parse three ASCII decimal digits at `offset` into a value in 0..=999.
///
/// Returns `None` when the slice holds fewer than 3 bytes at `offset`
/// (including offsets so large that `offset + 3` would overflow `usize`)
/// or when any byte is not an ASCII digit.
fn parse_three_digits(bytes: &[u8], offset: usize) -> Option<u32> {
    // Two-step `get` avoids the `offset + 3` addition, which could
    // overflow (panic in debug builds) for huge offsets.
    let window = bytes.get(offset..)?.get(..3)?;
    let d0 = (window[0] as char).to_digit(10)?;
    let d1 = (window[1] as char).to_digit(10)?;
    let d2 = (window[2] as char).to_digit(10)?;
    // Maximum is 999, so plain arithmetic cannot overflow a u32.
    Some(d0 * 100 + d1 * 10 + d2)
}
1860
1861// ============================================================================
1862// Unit Tests
1863// ============================================================================
1864
1865#[cfg(test)]
1866mod tests {
1867    #[allow(unused_imports)]
1868    use alloc::vec;
1869
1870    use super::*;
1871
1872    // ---- AVI Parser Tests ----
1873
1874    #[test]
1875    fn test_fourcc_from_bytes() {
1876        let data = b"RIFF";
1877        let fcc = FourCC::from_bytes(data).unwrap();
1878        assert_eq!(fcc, FourCC::RIFF);
1879    }
1880
1881    #[test]
1882    fn test_fourcc_from_bytes_too_short() {
1883        let data = b"RI";
1884        assert!(FourCC::from_bytes(data).is_none());
1885    }
1886
1887    #[test]
1888    fn test_avi_flags() {
1889        let flags = AviFlags(AviFlags::AVIF_HASINDEX | AviFlags::AVIF_ISINTERLEAVED);
1890        assert!(flags.has_flag(AviFlags::AVIF_HASINDEX));
1891        assert!(flags.has_flag(AviFlags::AVIF_ISINTERLEAVED));
1892        assert!(!flags.has_flag(AviFlags::AVIF_COPYRIGHTED));
1893    }
1894
1895    #[test]
1896    fn test_avi_main_header_parse() {
1897        let mut data = [0u8; 40];
1898        // microseconds_per_frame = 33333 (~30fps)
1899        data[0..4].copy_from_slice(&33333u32.to_le_bytes());
1900        // total_frames = 100
1901        data[16..20].copy_from_slice(&100u32.to_le_bytes());
1902        // streams = 2
1903        data[24..28].copy_from_slice(&2u32.to_le_bytes());
1904        // width = 640
1905        data[32..36].copy_from_slice(&640u32.to_le_bytes());
1906        // height = 480
1907        data[36..40].copy_from_slice(&480u32.to_le_bytes());
1908
1909        let hdr = AviMainHeader::parse(&data).unwrap();
1910        assert_eq!(hdr.microseconds_per_frame, 33333);
1911        assert_eq!(hdr.total_frames, 100);
1912        assert_eq!(hdr.streams, 2);
1913        assert_eq!(hdr.width, 640);
1914        assert_eq!(hdr.height, 480);
1915    }
1916
1917    #[test]
1918    fn test_avi_main_header_frame_rate() {
1919        let hdr = AviMainHeader {
1920            microseconds_per_frame: 33333,
1921            ..Default::default()
1922        };
1923        let (num, den) = hdr.frame_rate();
1924        // 1_000_000 / 33333 ~= 30.0003 fps
1925        assert_eq!(num, 1_000_000);
1926        assert_eq!(den, 33333);
1927    }
1928
1929    #[test]
1930    fn test_avi_main_header_parse_too_short() {
1931        let data = [0u8; 20];
1932        assert!(AviMainHeader::parse(&data).is_none());
1933    }
1934
1935    #[test]
1936    fn test_avi_stream_header_parse() {
1937        let mut data = [0u8; 56];
1938        data[0..4].copy_from_slice(b"vids");
1939        data[4..8].copy_from_slice(b"DIB ");
1940        // scale = 1
1941        data[20..24].copy_from_slice(&1u32.to_le_bytes());
1942        // rate = 30
1943        data[24..28].copy_from_slice(&30u32.to_le_bytes());
1944
1945        let hdr = AviStreamHeader::parse(&data).unwrap();
1946        assert_eq!(hdr.get_stream_type(), StreamType::Video);
1947        assert_eq!(hdr.sample_rate(), (30, 1));
1948    }
1949
1950    #[test]
1951    fn test_avi_stream_header_audio() {
1952        let mut data = [0u8; 56];
1953        data[0..4].copy_from_slice(b"auds");
1954
1955        let hdr = AviStreamHeader::parse(&data).unwrap();
1956        assert_eq!(hdr.get_stream_type(), StreamType::Audio);
1957    }
1958
1959    #[test]
1960    fn test_bitmap_info_header_parse() {
1961        let mut data = [0u8; 40];
1962        data[0..4].copy_from_slice(&40u32.to_le_bytes()); // size
1963        data[4..8].copy_from_slice(&320i32.to_le_bytes()); // width
1964        data[8..12].copy_from_slice(&240i32.to_le_bytes()); // height (positive = bottom-up)
1965        data[12..14].copy_from_slice(&1u16.to_le_bytes()); // planes
1966        data[14..16].copy_from_slice(&24u16.to_le_bytes()); // bit_count
1967
1968        let bih = BitmapInfoHeader::parse(&data).unwrap();
1969        assert_eq!(bih.width, 320);
1970        assert_eq!(bih.height, 240);
1971        assert_eq!(bih.bit_count, 24);
1972        assert!(bih.is_bottom_up());
1973        assert_eq!(bih.abs_height(), 240);
1974    }
1975
1976    #[test]
1977    fn test_bitmap_info_header_top_down() {
1978        let mut data = [0u8; 40];
1979        data[0..4].copy_from_slice(&40u32.to_le_bytes());
1980        data[4..8].copy_from_slice(&320i32.to_le_bytes());
1981        data[8..12].copy_from_slice(&(-240i32).to_le_bytes()); // negative = top-down
1982
1983        let bih = BitmapInfoHeader::parse(&data).unwrap();
1984        assert!(!bih.is_bottom_up());
1985        assert_eq!(bih.abs_height(), 240);
1986    }
1987
1988    #[test]
1989    fn test_wave_format_ex_parse() {
1990        let mut data = [0u8; 18];
1991        data[0..2].copy_from_slice(&1u16.to_le_bytes()); // PCM
1992        data[2..4].copy_from_slice(&2u16.to_le_bytes()); // stereo
1993        data[4..8].copy_from_slice(&44100u32.to_le_bytes()); // 44.1kHz
1994        data[8..12].copy_from_slice(&176400u32.to_le_bytes()); // byte rate
1995        data[12..14].copy_from_slice(&4u16.to_le_bytes()); // block align
1996        data[14..16].copy_from_slice(&16u16.to_le_bytes()); // 16-bit
1997
1998        let wfx = WaveFormatEx::parse(&data).unwrap();
1999        assert!(wfx.is_pcm());
2000        assert_eq!(wfx.channels, 2);
2001        assert_eq!(wfx.samples_per_sec, 44100);
2002        assert_eq!(wfx.bits_per_sample, 16);
2003    }
2004
2005    #[test]
2006    fn test_avi_index_entry() {
2007        let mut data = [0u8; 16];
2008        data[0..4].copy_from_slice(b"00dc");
2009        data[4..8].copy_from_slice(&0x10u32.to_le_bytes()); // keyframe
2010        data[8..12].copy_from_slice(&1024u32.to_le_bytes()); // offset
2011        data[12..16].copy_from_slice(&4096u32.to_le_bytes()); // size
2012
2013        let entry = AviIndexEntry::parse(&data).unwrap();
2014        assert!(entry.is_keyframe());
2015        assert!(entry.is_video());
2016        assert!(!entry.is_audio());
2017        assert_eq!(entry.stream_number(), 0);
2018        assert_eq!(entry.offset, 1024);
2019        assert_eq!(entry.size, 4096);
2020    }
2021
2022    #[test]
2023    fn test_avi_index_entry_audio() {
2024        let mut data = [0u8; 16];
2025        data[0..4].copy_from_slice(b"01wb");
2026
2027        let entry = AviIndexEntry::parse(&data).unwrap();
2028        assert!(!entry.is_video());
2029        assert!(entry.is_audio());
2030        assert_eq!(entry.stream_number(), 1);
2031    }
2032
2033    // ---- Frame Rate Conversion Tests ----
2034
2035    #[test]
2036    fn test_source_frame_for_output_identity() {
2037        // Same rate: 30fps -> 30fps, should map 1:1
2038        let conv = FrameRateConverter::new(30, 1, 30, 1, FrameRateMode::Duplicate);
2039        assert_eq!(conv.source_frame_for_output(0), 0);
2040        assert_eq!(conv.source_frame_for_output(1), 1);
2041        assert_eq!(conv.source_frame_for_output(10), 10);
2042    }
2043
2044    #[test]
2045    fn test_source_frame_for_output_downsample() {
2046        // 60fps -> 30fps: every other source frame
2047        let conv = FrameRateConverter::new(60, 1, 30, 1, FrameRateMode::Drop);
2048        assert_eq!(conv.source_frame_for_output(0), 0);
2049        assert_eq!(conv.source_frame_for_output(1), 2);
2050        assert_eq!(conv.source_frame_for_output(2), 4);
2051    }
2052
2053    #[test]
2054    fn test_source_frame_for_output_upsample() {
2055        // 24fps -> 48fps: each source frame used twice
2056        let conv = FrameRateConverter::new(24, 1, 48, 1, FrameRateMode::Duplicate);
2057        assert_eq!(conv.source_frame_for_output(0), 0);
2058        assert_eq!(conv.source_frame_for_output(1), 0);
2059        assert_eq!(conv.source_frame_for_output(2), 1);
2060        assert_eq!(conv.source_frame_for_output(3), 1);
2061    }
2062
2063    #[test]
2064    fn test_pulldown_32_pattern() {
2065        let conv = FrameRateConverter::new(24, 1, 30, 1, FrameRateMode::Pulldown32);
2066        // First cycle of 5 output frames from 4 source frames
2067        assert_eq!(conv.pulldown_32_source(0), (0, false)); // A
2068        assert_eq!(conv.pulldown_32_source(1), (0, true)); // A repeated
2069        assert_eq!(conv.pulldown_32_source(2), (1, false)); // B
2070        assert_eq!(conv.pulldown_32_source(3), (2, false)); // C
2071        assert_eq!(conv.pulldown_32_source(4), (2, true)); // C repeated
2072
2073        // Second cycle
2074        assert_eq!(conv.pulldown_32_source(5), (4, false)); // D
2075        assert_eq!(conv.pulldown_32_source(6), (4, true)); // D repeated
2076    }
2077
2078    #[test]
2079    fn test_blend_frames_50_50() {
2080        let frame_a = vec![0u8, 100, 200, 50];
2081        let frame_b = vec![100u8, 200, 0, 150];
2082        let mut out = vec![0u8; 4];
2083
2084        blend_frames(&frame_a, &frame_b, &mut out, 128);
2085        // 50/50 blend: (0*128 + 100*128)/256 = 50
2086        assert_eq!(out[0], 50);
2087        // (100*128 + 200*128)/256 = 150
2088        assert_eq!(out[1], 150);
2089        // (200*128 + 0*128)/256 = 100
2090        assert_eq!(out[2], 100);
2091        // (50*128 + 150*128)/256 = 100
2092        assert_eq!(out[3], 100);
2093    }
2094
2095    #[test]
2096    fn test_blend_frames_all_a() {
2097        let frame_a = vec![100u8; 4];
2098        let frame_b = vec![200u8; 4];
2099        let mut out = vec![0u8; 4];
2100
2101        blend_frames(&frame_a, &frame_b, &mut out, 0);
2102        // weight=0: 100% frame_a
2103        assert_eq!(out, vec![100u8; 4]);
2104    }
2105
2106    #[test]
2107    fn test_blend_frames_all_b() {
2108        let frame_a = vec![100u8; 4];
2109        let frame_b = vec![200u8; 4];
2110        let mut out = vec![0u8; 4];
2111
2112        blend_frames(&frame_a, &frame_b, &mut out, 256);
2113        // weight=256: 100% frame_b
2114        assert_eq!(out, vec![200u8; 4]);
2115    }
2116
2117    #[test]
2118    fn test_frame_map_duplicate() {
2119        let conv = FrameRateConverter::new(24, 1, 48, 1, FrameRateMode::Duplicate);
2120        let map = conv.build_frame_map(4, 8);
2121        assert_eq!(map.len(), 8);
2122        // Each source frame should appear twice
2123        assert_eq!(map[0].source_index, 0);
2124        assert_eq!(map[1].source_index, 0);
2125        assert_eq!(map[2].source_index, 1);
2126        assert_eq!(map[3].source_index, 1);
2127    }
2128
2129    // ---- SRT Parser Tests ----
2130
2131    #[test]
2132    fn test_parse_srt_timestamp() {
2133        let ts = parse_srt_timestamp("01:23:45,678").unwrap();
2134        // 1*3600000 + 23*60000 + 45*1000 + 678 = 3600000 + 1380000 + 45000 + 678 =
2135        // 5025678
2136        assert_eq!(ts, 5_025_678);
2137    }
2138
2139    #[test]
2140    fn test_parse_srt_timestamp_zero() {
2141        let ts = parse_srt_timestamp("00:00:00,000").unwrap();
2142        assert_eq!(ts, 0);
2143    }
2144
2145    #[test]
2146    fn test_parse_srt_timestamp_dot_separator() {
2147        let ts = parse_srt_timestamp("00:00:01.500").unwrap();
2148        assert_eq!(ts, 1500);
2149    }
2150
2151    #[test]
2152    fn test_parse_srt_basic() {
2153        let srt_text = "1\n00:00:01,000 --> 00:00:04,000\nHello, world!\n\n2\n00:00:05,500 --> \
2154                        00:00:08,000\nSecond subtitle\nwith two lines.\n";
2155        let track = SubtitleTrack::parse_srt(srt_text);
2156        assert_eq!(track.len(), 2);
2157
2158        assert_eq!(track.entries[0].sequence, 1);
2159        assert_eq!(track.entries[0].start_ms, 1000);
2160        assert_eq!(track.entries[0].end_ms, 4000);
2161        assert_eq!(track.entries[0].text, "Hello, world!");
2162
2163        assert_eq!(track.entries[1].sequence, 2);
2164        assert_eq!(track.entries[1].start_ms, 5500);
2165        assert_eq!(track.entries[1].end_ms, 8000);
2166        assert_eq!(track.entries[1].text, "Second subtitle\nwith two lines.");
2167    }
2168
2169    #[test]
2170    fn test_subtitle_active_at() {
2171        let srt_text =
2172            "1\n00:00:01,000 --> 00:00:04,000\nFirst\n\n2\n00:00:05,000 --> 00:00:08,000\nSecond\n";
2173        let track = SubtitleTrack::parse_srt(srt_text);
2174
2175        assert!(track.active_at(0).is_none());
2176        assert_eq!(track.active_at(1000).unwrap().text, "First");
2177        assert_eq!(track.active_at(3999).unwrap().text, "First");
2178        assert!(track.active_at(4000).is_none());
2179        assert_eq!(track.active_at(5000).unwrap().text, "Second");
2180        assert!(track.active_at(8000).is_none());
2181    }
2182
2183    #[test]
2184    fn test_wrap_text_short() {
2185        let lines = wrap_text("Hello world", 20);
2186        assert_eq!(lines.len(), 1);
2187        assert_eq!(lines[0], "Hello world");
2188    }
2189
2190    #[test]
2191    fn test_wrap_text_multiline() {
2192        let lines = wrap_text("This is a longer line of text", 15);
2193        assert!(lines.len() >= 2);
2194        for line in &lines {
2195            assert!(line.len() <= 15);
2196        }
2197    }
2198
2199    #[test]
2200    fn test_wrap_text_existing_newlines() {
2201        let lines = wrap_text("Line one\nLine two", 50);
2202        assert_eq!(lines.len(), 2);
2203        assert_eq!(lines[0], "Line one");
2204        assert_eq!(lines[1], "Line two");
2205    }
2206
2207    // ---- Audio Scheduling Tests ----
2208
2209    #[test]
2210    fn test_audio_priority_class_params() {
2211        assert_eq!(AudioPriorityClass::Critical.period_ns(), 1_000_000);
2212        assert_eq!(AudioPriorityClass::Critical.runtime_ns(), 500_000);
2213        assert_eq!(AudioPriorityClass::Critical.utilization_permille(), 500);
2214
2215        assert_eq!(AudioPriorityClass::Normal.period_ns(), 5_000_000);
2216        assert_eq!(AudioPriorityClass::Normal.runtime_ns(), 2_000_000);
2217        assert_eq!(AudioPriorityClass::Normal.utilization_permille(), 400);
2218
2219        assert_eq!(AudioPriorityClass::Background.period_ns(), 20_000_000);
2220        assert_eq!(AudioPriorityClass::Background.runtime_ns(), 10_000_000);
2221        assert_eq!(AudioPriorityClass::Background.utilization_permille(), 500);
2222    }
2223
2224    #[test]
2225    fn test_audio_scheduler_register() {
2226        let mut sched = AudioScheduler::new();
2227        let params = AudioSchedParams::from_priority(1, AudioPriorityClass::Normal);
2228        assert!(sched.register_thread(params).is_ok());
2229        assert_eq!(sched.thread_count(), 1);
2230        assert_eq!(sched.total_reservation(), 400);
2231    }
2232
2233    #[test]
2234    fn test_audio_scheduler_register_duplicate() {
2235        let mut sched = AudioScheduler::new();
2236        let params = AudioSchedParams::from_priority(1, AudioPriorityClass::Normal);
2237        assert!(sched.register_thread(params).is_ok());
2238        assert_eq!(
2239            sched.register_thread(params),
2240            Err(AudioSchedError::AlreadyRegistered)
2241        );
2242    }
2243
2244    #[test]
2245    fn test_audio_scheduler_cpu_budget() {
2246        let mut sched = AudioScheduler::with_max_reservation(500);
2247        // Normal = 400 permille, should fit
2248        let p1 = AudioSchedParams::from_priority(1, AudioPriorityClass::Normal);
2249        assert!(sched.register_thread(p1).is_ok());
2250        assert_eq!(sched.available_budget(), 100);
2251
2252        // Another Normal = 400, total would be 800 > 500
2253        let p2 = AudioSchedParams::from_priority(2, AudioPriorityClass::Normal);
2254        assert_eq!(
2255            sched.register_thread(p2),
2256            Err(AudioSchedError::InsufficientCpuBudget)
2257        );
2258    }
2259
2260    #[test]
2261    fn test_audio_scheduler_unregister() {
2262        let mut sched = AudioScheduler::new();
2263        let params = AudioSchedParams::from_priority(1, AudioPriorityClass::Normal);
2264        assert!(sched.register_thread(params).is_ok());
2265        assert!(sched.unregister_thread(1).is_ok());
2266        assert_eq!(sched.thread_count(), 0);
2267        assert_eq!(sched.total_reservation(), 0);
2268    }
2269
2270    #[test]
2271    fn test_audio_scheduler_unregister_not_found() {
2272        let mut sched = AudioScheduler::new();
2273        assert_eq!(sched.unregister_thread(42), Err(AudioSchedError::NotFound));
2274    }
2275
2276    #[test]
2277    fn test_audio_sched_stats_record_wake() {
2278        let mut stats = AudioSchedStats::default();
2279
2280        // On-time wake (actual <= expected)
2281        stats.record_wake(1_000_000, 1_000_000);
2282        assert_eq!(stats.periods_completed, 1);
2283        assert_eq!(stats.on_time_wakes, 1);
2284        assert_eq!(stats.late_wakes, 0);
2285        assert_eq!(stats.max_jitter_ns, 0);
2286
2287        // Late wake (actual > expected)
2288        stats.record_wake(2_100_000, 2_000_000);
2289        assert_eq!(stats.periods_completed, 2);
2290        assert_eq!(stats.on_time_wakes, 1);
2291        assert_eq!(stats.late_wakes, 1);
2292        assert_eq!(stats.max_jitter_ns, 100_000);
2293    }
2294
2295    #[test]
2296    fn test_audio_sched_stats_underrun_overrun() {
2297        let mut stats = AudioSchedStats::default();
2298        stats.record_underrun();
2299        stats.record_underrun();
2300        stats.record_overrun();
2301        assert_eq!(stats.underruns, 2);
2302        assert_eq!(stats.overruns, 1);
2303    }
2304
2305    #[test]
2306    fn test_audio_sched_stats_avg_jitter() {
2307        let mut stats = AudioSchedStats::default();
2308        stats.record_wake(1_000_100, 1_000_000); // 100ns jitter
2309        stats.record_wake(2_000_200, 2_000_000); // 200ns jitter
2310        assert_eq!(stats.avg_jitter_ns(), 150); // (100 + 200) / 2
2311    }
2312
2313    #[test]
2314    fn test_audio_scheduler_next_wake_time() {
2315        let mut sched = AudioScheduler::new();
2316        let params = AudioSchedParams::from_priority(1, AudioPriorityClass::Normal);
2317        assert!(sched.register_thread(params).is_ok());
2318
2319        let next = sched.next_wake_time(1, 10_000_000).unwrap();
2320        assert_eq!(next, 15_000_000); // current + 5ms period
2321    }
2322
2323    #[test]
2324    fn test_audio_scheduler_aggregate_stats() {
2325        let mut sched = AudioScheduler::with_max_reservation(1000);
2326        let p1 = AudioSchedParams::from_priority(1, AudioPriorityClass::Normal);
2327        let p2 = AudioSchedParams::from_priority(2, AudioPriorityClass::Background);
2328        assert!(sched.register_thread(p1).is_ok());
2329        assert!(sched.register_thread(p2).is_ok());
2330
2331        // Record events
2332        assert!(sched.record_wake(1, 1_000_000, 1_000_000).is_ok());
2333        assert!(sched.record_wake(2, 2_100_000, 2_000_000).is_ok());
2334        sched.get_stats_mut(1).unwrap().record_underrun();
2335
2336        let agg = sched.aggregate_stats();
2337        assert_eq!(agg.periods_completed, 2);
2338        assert_eq!(agg.on_time_wakes, 1);
2339        assert_eq!(agg.late_wakes, 1);
2340        assert_eq!(agg.underruns, 1);
2341    }
2342
2343    #[test]
2344    fn test_global_audio_counters() {
2345        // These are global atomics, just verify they increment
2346        let before = AUDIO_TOTAL_UNDERRUNS.load(Ordering::Relaxed);
2347        count_audio_underrun();
2348        let after = AUDIO_TOTAL_UNDERRUNS.load(Ordering::Relaxed);
2349        assert_eq!(after, before + 1);
2350    }
2351
2352    // ---- AVI Container Integration Test ----
2353
2354    #[test]
2355    fn test_avi_container_parse_minimal() {
2356        // Build a minimal valid AVI file in memory
2357        let mut avi = Vec::new();
2358
2359        // RIFF header
2360        avi.extend_from_slice(b"RIFF");
2361        let size_pos = avi.len();
2362        avi.extend_from_slice(&0u32.to_le_bytes()); // placeholder
2363        avi.extend_from_slice(b"AVI ");
2364
2365        // hdrl LIST
2366        avi.extend_from_slice(b"LIST");
2367        let hdrl_size_pos = avi.len();
2368        avi.extend_from_slice(&0u32.to_le_bytes()); // placeholder
2369        avi.extend_from_slice(b"hdrl");
2370
2371        // avih chunk
2372        avi.extend_from_slice(b"avih");
2373        avi.extend_from_slice(&40u32.to_le_bytes()); // chunk size
2374        let mut avih_data = [0u8; 40];
2375        avih_data[0..4].copy_from_slice(&33333u32.to_le_bytes()); // ~30fps
2376        avih_data[16..20].copy_from_slice(&1u32.to_le_bytes()); // total_frames
2377        avih_data[24..28].copy_from_slice(&1u32.to_le_bytes()); // streams
2378        avih_data[32..36].copy_from_slice(&320u32.to_le_bytes()); // width
2379        avih_data[36..40].copy_from_slice(&240u32.to_le_bytes()); // height
2380        avi.extend_from_slice(&avih_data);
2381
2382        // Fix hdrl LIST size
2383        let hdrl_size = (avi.len() - hdrl_size_pos - 4) as u32;
2384        avi[hdrl_size_pos..hdrl_size_pos + 4].copy_from_slice(&hdrl_size.to_le_bytes());
2385
2386        // movi LIST (empty)
2387        avi.extend_from_slice(b"LIST");
2388        avi.extend_from_slice(&4u32.to_le_bytes());
2389        avi.extend_from_slice(b"movi");
2390
2391        // Fix RIFF size
2392        let riff_size = (avi.len() - 8) as u32;
2393        avi[size_pos..size_pos + 4].copy_from_slice(&riff_size.to_le_bytes());
2394
2395        let container = AviContainer::parse(&avi).unwrap();
2396        assert_eq!(container.main_header.width, 320);
2397        assert_eq!(container.main_header.height, 240);
2398        assert_eq!(container.main_header.microseconds_per_frame, 33333);
2399        assert_eq!(container.main_header.total_frames, 1);
2400    }
2401
2402    #[test]
2403    fn test_avi_container_parse_invalid() {
2404        // Not a RIFF file
2405        let data = b"NOT_RIFF_DATA";
2406        assert!(AviContainer::parse(data).is_none());
2407    }
2408
2409    #[test]
2410    fn test_avi_demux_streams() {
2411        let mut container = AviContainer {
2412            main_header: AviMainHeader::default(),
2413            streams: Vec::new(),
2414            index: vec![
2415                AviIndexEntry {
2416                    chunk_id: *b"00dc",
2417                    flags: 0x10,
2418                    offset: 0,
2419                    size: 100,
2420                },
2421                AviIndexEntry {
2422                    chunk_id: *b"01wb",
2423                    flags: 0,
2424                    offset: 108,
2425                    size: 50,
2426                },
2427                AviIndexEntry {
2428                    chunk_id: *b"00dc",
2429                    flags: 0,
2430                    offset: 166,
2431                    size: 100,
2432                },
2433            ],
2434            movi_offset: 0,
2435            file_size: 0,
2436        };
2437
2438        let (video, audio) = container.demux_streams();
2439        assert_eq!(video.len(), 2);
2440        assert_eq!(audio.len(), 1);
2441        assert_eq!(container.video_frame_count(), 2);
2442        assert_eq!(container.audio_chunk_count(), 1);
2443    }
2444}