Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion kidfile/src/archive_formats/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ mod afs;
mod lnk;
mod concat2k;
mod infdatabin;
mod p2t;

pub struct ArchiveEntry {
pub data: FileData,
Expand All @@ -22,5 +23,6 @@ pub const ARCHIVE_DECODERS: LazyLock<Vec<Decoder<Archive>>> = LazyLock::new(|| [
afs::ENTRY_AFS,
lnk::ENTRY_LNK,
concat2k::ENTRY_CONCAT2K,
infdatabin::ENTRY_SLPS02669_DATABIN
infdatabin::ENTRY_SLPS02669_DATABIN,
p2t::ENTRY_P2T
].into());
167 changes: 167 additions & 0 deletions kidfile/src/archive_formats/p2t.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
use crate::{Certainty, Decoder};
use crate::file_data::FileData;
use super::{Archive, ArchiveEntry};

/// Decoder for the KID "P2T" archive container.
///
/// NOTE(review): layout below is inferred from the reads in this file — confirm
/// against a format spec. Assumed: u32 at 0x08 = end of header / start of the
/// entry table, u32 at 0x0C = entry count, u32 at 0x10 = base offset of the
/// data area. Each 64-byte table entry begins with a 48-byte partial TIM2
/// header followed by offset/length words.
pub const ENTRY_P2T: Decoder<Archive> = Decoder {
    id: "p2t",
    desc: "KID P2T archive",
    // Probe: validate the header fields plus two fixed marker words that sit
    // inside the first table entry (0xFFFFFFFF at header_end+48, 1 at +56).
    detect: |file| {
        if file.len() < 0x20 {
            return Certainty::Impossible;
        }

        // End of header / start of the entry table.
        let header_end = match file.read_u32(0x08) {
            Ok(val) => val as usize,
            Err(_) => return Certainty::Impossible,
        };

        // Number of entries in the table.
        let num_files = match file.read_u32(0x0C) {
            Ok(val) => val as usize,
            Err(_) => return Certainty::Impossible,
        };

        if num_files == 0 || num_files > 0xFFFF || header_end >= file.len() {
            return Certainty::Impossible;
        }

        // Two fixed marker words expected inside the first table entry.
        let check_ff = file.read_u32(header_end + 48);
        let check_01 = file.read_u32(header_end + 56);

        match (check_ff, check_01) {
            (Ok(0xFFFFFFFF), Ok(1)) => Certainty::Certain,
            _ => Certainty::Impossible,
        }
    },
    decode: |file| {
        let header_end = file.read_u32(0x08)? as usize;
        let num_files = file.read_u32(0x0C)? as usize;
        let data_start_base = file.read_u32(0x10)? as usize;

        if num_files >= 0xFFFF {
            return Err("impossibly large entry count".into());
        }

        let mut entries = Vec::with_capacity(num_files);
        let mut current_entry_ptr = header_end;

        // Each table entry is 64 bytes: 48-byte partial TIM header + metadata.
        const ENTRY_SIZE: usize = 64;

        // TIM2 magic bytes, prepended to every extracted entry so the output
        // is a standalone .tm2 file.
        let tim2_magic: [u8; 16] = [
            0x54, 0x49, 0x4D, 0x32, 0x04, 0x00, 0x01, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
        ];

        for i in 0..num_files {
            // 1. Read the partial TIM header (48 bytes) from the table entry.
            let mut tim_header_chunk = vec![0u8; 48];
            file.read_chunk_exact(&mut tim_header_chunk, current_entry_ptr)
                .map_err(|_| "failed to read entry header")?;

            // Offset/length words inside the 64-byte entry (at +52 and +60).
            let offset_ptr = current_entry_ptr + 48 + 4;
            let length_ptr = offset_ptr + 4 + 4;

            let raw_offset = file.read_u32(offset_ptr)? as usize;
            let compressed_len = file.read_u32(length_ptr)? as usize;

            // The offset points at a 4-byte "uncompressed size" word; the
            // compressed payload only starts after it.
            let size_info_offset = data_start_base + raw_offset;
            let actual_data_offset = size_info_offset + 4;

            // Read the true uncompressed size (used to avoid crashes and to
            // trim trailing garbage after decompression).
            let real_uncompressed_size = file.read_u32(size_info_offset)? as usize;

            // Sanity check: skip absurd sizes (> 100 MB) rather than aborting
            // the whole decode.
            if real_uncompressed_size > 100 * 1024 * 1024 {
                current_entry_ptr += ENTRY_SIZE;
                continue;
            }

            // 2. Read the compressed payload (past the initial 4 size bytes).
            let mut compressed_data = vec![0u8; compressed_len];
            file.read_chunk_exact(&mut compressed_data, actual_data_offset)
                .map_err(|_| "failed to read compressed data")?;

            // 3. Decompress (standard LZSS, threshold 2).
            let mut decompressed_body = decompress_lzss(&compressed_data)
                .map_err(|_| format!("failed to decompress file index {}", i))?;

            // 4. Trim or zero-pad so the body is EXACTLY the declared size;
            // this keeps the color palette (CLUT) aligned where expected.
            decompressed_body.resize(real_uncompressed_size, 0);

            // 5. Assemble the final file: TIM2 magic + table header + body.
            let mut full_file = Vec::with_capacity(16 + 48 + decompressed_body.len());
            full_file.extend_from_slice(&tim2_magic);
            full_file.extend_from_slice(&tim_header_chunk);
            full_file.extend_from_slice(&decompressed_body);

            let name = format!("{}.tm2", i);

            entries.push(ArchiveEntry {
                name,
                data: FileData::Memory {
                    buf: full_file.into_boxed_slice()
                },
                timestamp: None
            });

            current_entry_ptr += ENTRY_SIZE;
        }

        Ok(Archive {format: "p2t", entries: entries.into()})
    }
};

/// Decompresses a standard LZSS stream (4 KiB window, threshold 2).
///
/// Each flag byte supplies eight bits, LSB first: a `1` bit means the next
/// input byte is a literal, a `0` bit means the next two bytes encode a
/// 12-bit window offset plus 4-bit match length. Decoding stops as soon as
/// the input runs out, so a truncated stream yields a truncated (but valid)
/// prefix rather than an error.
fn decompress_lzss(inp: &[u8]) -> Result<Vec<u8>, ()> {
    const WINDOW: usize = 4096;  // N: sliding-window size (power of two)
    const MAX_MATCH: usize = 18; // F: longest back-reference
    const THRESHOLD: usize = 2;  // encoded lengths are biased by this

    let mut out = Vec::with_capacity(inp.len() * 4);
    let mut window = [0u8; WINDOW + MAX_MATCH - 1];
    let mut write_pos = WINDOW - MAX_MATCH;
    let mut bits: u32 = 0;
    let mut bytes = inp.iter().copied();

    loop {
        bits >>= 1;
        // Once the sentinel bits are exhausted, fetch a fresh flag byte;
        // 0xFF00 tracks how many of its eight bits remain to be consumed.
        if bits & 0x100 == 0 {
            match bytes.next() {
                Some(b) => bits = u32::from(b) | 0xFF00,
                None => break,
            }
        }

        if bits & 1 == 1 {
            // Literal: copy one byte through and into the sliding window.
            let Some(b) = bytes.next() else { break };
            out.push(b);
            window[write_pos] = b;
            write_pos = (write_pos + 1) & (WINDOW - 1);
        } else {
            // Back-reference: low offset byte, then high-nibble-of-offset
            // combined with the length nibble.
            let (lo, hi) = match (bytes.next(), bytes.next()) {
                (Some(lo), Some(hi)) => (lo as usize, hi as usize),
                _ => break,
            };
            let start = lo | ((hi & 0xF0) << 4);
            // The stored nibble encodes (count - THRESHOLD - 1), so the
            // copy covers THRESHOLD+1 ..= MAX_MATCH bytes.
            let count = (hi & 0x0F) + THRESHOLD + 1;
            for k in 0..count {
                let b = window[(start + k) & (WINDOW - 1)];
                out.push(b);
                window[write_pos] = b;
                write_pos = (write_pos + 1) & (WINDOW - 1);
            }
        }
    }

    Ok(out)
}