diff --git a/Cargo.toml b/Cargo.toml
index 29a9ce3..690defc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "itunesdb"
-version = "0.1.38"
+version = "0.1.39"
 edition = "2021"
 authors = ["alterwain"]
 
diff --git a/src/artworkdb.rs b/src/artworkdb.rs
index a62bdbc..73f3ce5 100644
--- a/src/artworkdb.rs
+++ b/src/artworkdb.rs
@@ -110,6 +110,16 @@ pub mod serializer {
         for i in 0..(adb.children.len()) {
             let data_set = adb.children.get(i).unwrap();
             let mut entry_bytes = Vec::new();
+
+            match data_set.data.data_type {
+                1 => {
+                    entry_bytes.append(&mut generate_header_raw(ChunkType::ImageList, 80, data_set.child.len()));
+                    entry_bytes.append(&mut 1u32.to_le_bytes().to_vec());
+                    entry_bytes.append(&mut [0; 76].to_vec());
+                } // Image List
+                _ => { info!("Unknown data_set type!"); }
+            }
+
             for img in data_set.child.iter() {
                 let mut args = Vec::new();
                 for arg in img.args.iter() {
@@ -154,14 +164,6 @@ pub mod serializer {
                 entry_bytes.append(&mut args);
             }
 
-            match data_set.data.data_type {
-                1 => {
-                    entry_bytes.append(&mut generate_header(ChunkType::ImageList, 80, data_set.child.len()));
-                    entry_bytes.append(&mut [0; 76].to_vec());
-                } // Image List
-                _ => { info!("Unknown data_set type!"); }
-            }
-
             bytes.append(&mut generate_header(ChunkType::DataSet, 84, entry_bytes.len()));
             bytes.append(&mut [0; 80].to_vec());
             bytes.append(&mut bincode::serialize(&data_set.data).unwrap());
@@ -181,6 +183,12 @@ pub mod serializer {
         let header = ChunkHeader{ chunk_type: ct.into(), end_of_chunk: header_size, children_count: header_size + data_len as u32};
         bincode::serialize(&header).unwrap()
     }
+
+    fn generate_header_raw(ct: ChunkType, header_size: usize, child_cnt: usize) -> Vec<u8> {
+        let header_size = 12 + header_size as u32;
+        let header = ChunkHeader{ chunk_type: ct.into(), end_of_chunk: header_size, children_count: child_cnt as u32};
+        bincode::serialize(&header).unwrap()
+    }
 }
 
 pub mod aobjects {