Add BASIC Thomson tokenizer (#5479)

* [Imgtool] Add write support for Thomson BASIC

* [Imgtool] Fix passing --filter= to imgtool get command

This command should work:
imgtool get thom_fd inondation-d-additions.fd INONDATI.BAS TEST.BAS --filter=thombas7
as it matches the expected syntax:
Usage: imgtool get <format> <imagename> <filename> [newname] [--filter=filter] [--fork=fork]
but did not, because imgtool's "maxargs" for the get command was too small.

Increase the maximum number of arguments by 2 to cater for --filter and
--fork being passed.
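
For context, a minimal sketch of how a minargs/maxargs pair from a table like the one in the diff below can gate a parsed command line; the struct layout, the dispatch() helper and its error output here are illustrative assumptions, not the actual imgtool code:

#include <cstdio>

/* Illustrative only: a command entry with argument limits, assumed to
   work like the cmds[] table below.  argc counts everything after the
   command name, including optional switches such as --filter= and
   --fork=, so a maxargs that ignores them rejects an otherwise valid
   invocation before the handler ever runs. */
struct command
{
    const char *name;
    int (*fn)(int argc, char *argv[]);
    const char *usage;
    int minargs;
    int maxargs;
};

static int dispatch(const command &cmd, int argc, char *argv[])
{
    if (argc < cmd.minargs || argc > cmd.maxargs)
    {
        std::fprintf(stderr, "Usage: imgtool %s %s\n", cmd.name, cmd.usage);
        return -1;
    }
    return cmd.fn(argc, argv);
}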

* [Imgtool] Fix handling multiple tokens in BASIC tokenizer

The line:
10 LIMIT$=STR$(LIMIT(N))
was not getting tokenised properly because the loop looking for tokens
was not exited after a match, so it went on to consume consecutive tokens.

So $ was detected, token_shift and token_value were set, and the
cursor position was incremented; then = was detected on the next
iteration of the loop.

We should instead exit the loop, and write what we already have.
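
A compact restructuring of the fixed scan, for illustration only; the signature and the early return are assumptions, while the actual change uses a found flag and breaks out of both loops, as the diff below shows:

#include <cstring>

/* Illustrative sketch, not the committed code: scan both 128-entry
   token tables and stop at the first match, advancing pos past it.
   Returning immediately is equivalent to the found/break pair in the
   real fix: exactly one token is consumed per call. */
static const char *match_token(const char *const table[2][128], const char *buf, int &pos)
{
    for (int i = 0; i < 2; i++)
    {
        for (int j = 0; j < 128; j++)
        {
            const char *tok = table[i][j];
            if (tok && !strncmp(&buf[pos], tok, strlen(tok)))
            {
                pos += strlen(tok);
                return tok;   /* stop scanning; caller emits this token */
            }
        }
    }
    return nullptr;           /* plain character; caller emits buf[pos++] */
}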

Closes: #5478
hadess 2019-08-13 20:59:14 +02:00 committed by R. Belmont
parent deb05745b0
commit 82644e50ff
3 changed files with 161 additions and 8 deletions


@@ -277,6 +277,7 @@ static imgtoolerr_t basic_writefile(const basictokens *tokens,
{
for (i = 0; (token == nullptr) && (i < tokens->num_entries); i++)
{
bool found = false;
token_table = &tokens->entries[i];
for (j = 0; (token == nullptr) && (j < token_table->num_tokens); j++)
{
@@ -286,8 +287,12 @@ static imgtoolerr_t basic_writefile(const basictokens *tokens,
token_shift = token_table->shift;
token_value = token_table->base + j;
pos += strlen(token);
found = true;
break;
}
}
if (found)
break;
}
}


@@ -850,7 +850,7 @@ static const struct command cmds[] =
{
{ "create", cmd_create, "<format> <imagename> [--(createoption)=value]", 2, 8, 0},
{ "dir", cmd_dir, "<format> <imagename> [path]", 2, 3, 0 },
{ "get", cmd_get, "<format> <imagename> <filename> [newname] [--filter=filter] [--fork=fork]", 3, 4, 0 },
{ "get", cmd_get, "<format> <imagename> <filename> [newname] [--filter=filter] [--fork=fork]", 3, 6, 0 },
{ "put", cmd_put, "<format> <imagename> <filename>... <destname> [--(fileoption)==value] [--filter=filter] [--fork=fork]", 3, 0xffff, 0 },
{ "getall", cmd_getall, "<format> <imagename> [path] [--filter=filter]", 2, 3, 0 },
{ "del", cmd_del, "<format> <imagename> <filename>...", 3, 3, 1 },


@@ -113,6 +113,7 @@
#include "imgtool.h"
#include "iflopimg.h"
#include "formats/imageutl.h"
#include <time.h>
@@ -1460,14 +1461,161 @@ static imgtoolerr_t thom_basic_read_file(imgtool::partition &part,
return IMGTOOLERR_SUCCESS;
}
static imgtoolerr_t thom_basic_write_file(imgtool::partition &part,
const char *name,
static imgtoolerr_t thom_basic_write_file(imgtool::partition &partition,
const char *filename,
const char *fork,
imgtool::stream &src,
imgtool::stream &sourcef,
util::option_resolution *opts,
const char *const table[2][128])
{
return IMGTOOLERR_UNIMPLEMENTED;
imgtool::stream::ptr mem_stream;
char buf[1024];
int eof = false;
uint32_t len;
char c;
int i, j, pos, in_quotes;
uint16_t line_number;
uint8_t line_header[4];
uint8_t file_header[3];
const char *token;
uint8_t token_shift, token_value;
uint64_t line_header_loc, end_line_loc;
/* open a memory stream */
mem_stream = imgtool::stream::open_mem(nullptr, 0);
if (!mem_stream)
return IMGTOOLERR_OUTOFMEMORY;
/* skip past 3-byte header */
mem_stream->fill(0x00, 3);
/* loop until the file is complete */
while(!eof)
{
/* read a line */
pos = 0;
while((len = sourcef.read(&c, 1)) > 0)
{
/* break if at end of line */
if ((c == '\r') || (c == '\n'))
break;
if (pos <= ARRAY_LENGTH(buf) - 1)
{
buf[pos++] = c;
}
}
eof = (len == 0);
buf[pos] = '\0';
/* ignore lines that don't start with digits */
if (isdigit(buf[0]))
{
/* start at beginning of line */
pos = 0;
/* read line number */
line_number = 0;
while(isdigit(buf[pos]))
{
line_number *= 10;
line_number += (buf[pos++] - '0');
}
/* set up line header */
memset(&line_header, 0, sizeof(line_header));
/* linelength or offset (2-bytes) will be rewritten */
place_integer_be(line_header, 0, 2, 0xffff);
place_integer_be(line_header, 2, 2, line_number);
/* emit line header */
line_header_loc = mem_stream->tell();
mem_stream->write(line_header, sizeof(line_header));
/* skip spaces */
while(isspace(buf[pos]))
pos++;
/* when we start out, we are not within quotation marks */
in_quotes = false;
/* read until end of line */
while(buf[pos] != '\0')
{
token = nullptr;
token_shift = 0;
token_value = 0;
if (buf[pos] == '\"')
{
/* flip quotation status */
in_quotes = !in_quotes;
}
else if (!in_quotes)
{
for (i = 0; i < 2; i++)
{
bool found = false;
for (j = 0; j < 128; j++)
{
if (table[i][j] == nullptr)
continue;
if (!strncmp(&buf[pos], table[i][j], strlen(table[i][j])))
{
token = table[i][j];
token_shift = (i == 0) ? 0x00 : 0xff;
token_value = 0x80 + j;
pos += strlen(token);
found = true;
break;
}
}
if (found)
break;
}
}
/* did we find a token? */
if (token != nullptr)
{
/* emit the token */
if (token_shift != 0)
mem_stream->write(&token_shift, 1);
mem_stream->write(&token_value, 1);
}
else
{
/* no token; emit the byte */
mem_stream->write(&buf[pos++], 1);
}
}
/* emit line terminator */
mem_stream->fill(0x00, 1);
/* rewrite the line length */
end_line_loc = mem_stream->tell();
mem_stream->seek(line_header_loc, SEEK_SET);
place_integer_be(line_header, 0, 2, end_line_loc - line_header_loc);
mem_stream->write(line_header, sizeof(line_header));
mem_stream->seek(end_line_loc, SEEK_SET);
}
}
/* emit program terminator */
mem_stream->fill(0x00, 2);
/* reset stream */
mem_stream->seek(0, SEEK_SET);
/* Write file header */
place_integer_be(file_header, 0, 1, 0xFF);
place_integer_be(file_header, 1, 2, mem_stream->size());
mem_stream->write(file_header, 3);
mem_stream->seek(0, SEEK_SET);
/* write actual file */
return partition.write_file(filename, fork, *mem_stream, opts, nullptr);
}
@@ -1507,11 +1655,11 @@ static imgtoolerr_t thom_basic_write_file(imgtool::partition &part,
}
FILTER( thombas5,
"Thomson MO5 w/ BASIC 1.0, Tokenized Files (read-only, auto-decrypt)" )
"Thomson MO5 w/ BASIC 1.0, Tokenized Files (auto-decrypt)" )
FILTER( thombas7,
"Thomson TO7 w/ BASIC 1.0, Tokenized Files (read-only, auto-decrypt)" )
"Thomson TO7 w/ BASIC 1.0, Tokenized Files (auto-decrypt)" )
FILTER( thombas128,
"Thomson w/ BASIC 128/512, Tokenized Files (read-only, auto-decrypt)" )
"Thomson w/ BASIC 128/512, Tokenized Files (auto-decrypt)" )
/************************* driver ***************************/