formats/cassimg.cpp: prevent regression in cass images (#13292)

holub 2025-01-28 08:22:59 -05:00 committed by GitHub
parent 46fc75b6b1
commit 8552dcc017
2 changed files with 15 additions and 3 deletions


@@ -820,7 +820,6 @@ cassette_image::error cassette_image::legacy_construct(const LegacyWaveFiller *l
goto done;
}
LOG_FORMATS("Image size: %x\n", size);
std::vector<uint8_t> bytes(size);
image_read(&bytes[0], 0, size);
sample_count = args.chunk_sample_calc(&bytes[0], (int)size);
@@ -865,7 +864,17 @@ cassette_image::error cassette_image::legacy_construct(const LegacyWaveFiller *l
image_read(&chunk[0], offset, args.chunk_size);
offset += args.chunk_size;
length = args.fill_wave(&samples[pos], args.chunk_size, &chunk[0]);
/*
    This approach is problematic because we have no control over the incoming image size when
    processing the data (at least in the TAP implementation).
    The method passes the output size (calculated by 'chunk_sample_calc' above), which uses the
    same data as input but without knowing how much data is actually available in the image.
    A bad header declaring a size bigger than the image causes illegal access beyond the image data.
    The desired state is:
    length = args.fill_wave(&samples[pos], args.chunk_size, &chunk[0]);
    The corresponding fix for TAP is commented out in 'tap_cas_fill_wave'.
*/
length = args.fill_wave(&samples[pos], sample_count - pos, &chunk[0]);
if (length < 0)
{
err = error::INVALID_IMAGE;
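
The essence of the change above is that the per-chunk writer is now bounded by the space remaining in the sample buffer (`sample_count - pos`) rather than by the nominal chunk size, so a header that overstates the data size can no longer push writes past the allocation. Below is a minimal, self-contained sketch of that pattern; it is not the MAME code, and the names `write_chunk_samples` and `total_samples` are illustrative only.

```cpp
#include <algorithm>
#include <cstdint>
#include <vector>

// Illustrative only: a chunk writer that may produce up to 'max_out' samples.
// A negative return would signal a malformed chunk, mirroring how the caller
// above maps a negative 'length' to error::INVALID_IMAGE.
static int write_chunk_samples(int16_t *out, int max_out, const uint8_t *chunk, int chunk_size)
{
	// Never emit more samples than the caller has room for.
	int to_write = std::min(max_out, chunk_size * 8); // assume 8 samples per byte
	for (int i = 0; i < to_write; i++)
		out[i] = (chunk[(i / 8) % chunk_size] & (0x80 >> (i % 8))) ? 0x7fff : -0x7fff;
	return to_write;
}

int main()
{
	const int total_samples = 1024;             // analogue of 'sample_count'
	std::vector<int16_t> samples(total_samples);
	std::vector<uint8_t> chunk(64, 0xAA);

	int pos = 0;
	while (pos < total_samples)
	{
		// Pass the *remaining* space, as the patched call does with 'sample_count - pos'.
		int length = write_chunk_samples(&samples[pos], total_samples - pos, chunk.data(), (int)chunk.size());
		if (length <= 0)
			break;
		pos += length;
	}
	return 0;
}
```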


@@ -833,16 +833,19 @@ static int tap_cas_fill_wave( int16_t *buffer, int length, const uint8_t *bytes
int16_t *p = buffer;
int size = 0;
while (length > 0)
//while (length > 0)
while (size < length)
{
int data_size = get_u16le(&bytes[0]);
int pilot_length = (bytes[2] == 0x00) ? 8063 : 3223;
LOG_FORMATS("tap_cas_fill_wave: Handling TAP block containing 0x%X bytes\n", data_size);
/*
length -= data_size;
if (length < 0)
{
data_size += length; // Take as much as we can.
}
*/
bytes += 2;
size += tzx_cas_handle_block(&p, bytes, 1000, data_size, 2168, pilot_length, 667, 735, 855, 1710, 8);
bytes += data_size;
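
On the TAP side, the loop condition now tracks the number of samples produced (`size < length`) instead of decrementing a byte budget, and the original clamp of `data_size` against the remaining input is left commented out. The idea behind that clamp, taking only as many bytes as the image actually contains when a block header overstates its size, is sketched below under assumed names (`read_u16le`, `clamp_block_size`); this is an illustration, not the shipped code.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

// Read a little-endian 16-bit block length, as get_u16le() does in the format code.
static int read_u16le(const uint8_t *p)
{
	return p[0] | (p[1] << 8);
}

// Illustrative clamp: never trust a header size larger than what is left in the image.
static int clamp_block_size(int declared, int remaining)
{
	return std::min(declared, remaining);
}

int main()
{
	// A tiny fake TAP stream: the header claims 0x0100 bytes but only 4 bytes follow.
	const uint8_t image[] = { 0x00, 0x01, 0xDE, 0xAD, 0xBE, 0xEF };
	const int image_size = (int)sizeof(image);

	int offset = 0;
	while (offset + 2 <= image_size)
	{
		int declared = read_u16le(&image[offset]);
		offset += 2;
		int usable = clamp_block_size(declared, image_size - offset);
		std::printf("block declares %d bytes, using %d\n", declared, usable);
		offset += usable;
	}
	return 0;
}
```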