netlist: Refactored ptokenizer.
* Separated tokenizing and reading of tokens.
* This enables caching of parsing results on the token level.
* Implemented caching of token stream.
* Overall this significantly improves parsing performance (~10x).
* Next step towards a bare-bone nltool which does not depend on macro devices.
This commit is contained in:
parent bca7016758
commit d524688df1
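The change below splits parsing into two stages: the ptokenizer runs once over a character stream and appends its output to a token_store, and a separate ptoken_reader then consumes tokens from that store; nlparse_t keeps the stores in m_source_cache keyed by file name, so repeated parses replay cached tokens instead of re-tokenizing. The following is a minimal standalone sketch of that idea, not the plib/netlist API; the names Token, TokenStore, TokenReader and the whitespace-only tokenizer are simplified assumptions for illustration.

// Sketch only: tokenize once, cache the token stream, replay it on later parses.
#include <iostream>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>

struct Token { std::string text; };            // trivial stand-in for a real token

using TokenStore = std::vector<Token>;         // the cached "token stream"

// Stage 1: turn a character stream into tokens (done once per source).
TokenStore tokenize(std::istream &is)
{
    TokenStore store;
    std::string word;
    while (is >> word)                          // whitespace-separated for brevity
        store.push_back(Token{word});
    return store;
}

// Stage 2: a reader that only consumes tokens from an already-built store.
class TokenReader
{
public:
    explicit TokenReader(const TokenStore &store) : m_store(store) {}
    bool next(Token &tok)
    {
        if (m_idx >= m_store.size())
            return false;
        tok = m_store[m_idx++];
        return true;
    }
private:
    const TokenStore &m_store;
    std::size_t m_idx = 0;
};

int main()
{
    // Cache keyed by source name: tokenization happens only on a miss;
    // later parses of the same source replay the stored tokens.
    std::unordered_map<std::string, TokenStore> cache;

    auto parse = [&](const std::string &key, const std::string &content) {
        auto it = cache.find(key);
        if (it == cache.end()) {
            std::istringstream is(content);
            it = cache.emplace(key, tokenize(is)).first;   // tokenize once
        }
        TokenReader reader(it->second);                    // read from cache
        Token tok;
        std::size_t count = 0;
        while (reader.next(tok))
            ++count;
        std::cout << key << ": " << count << " tokens\n";
    };

    parse("nlm_base.cpp", "NET_C ( a , b )");   // miss: runs the tokenizer
    parse("nlm_base.cpp", "NET_C ( a , b )");   // hit: replays cached tokens
}

The same shape appears in the diff: append_to_store() fills a token_store, set_token_source() points the reader at it, and nlparse_t::m_source_cache avoids re-running the tokenizer for sources that are parsed repeatedly.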
@@ -978,8 +978,8 @@ std::unique_ptr<netlist::netlist_state_t> netlist_mame_device::base_validity_che
{
try
{
//plib::chrono::timer<plib::chrono::system_ticks> t;
//t.start();
plib::chrono::timer<plib::chrono::system_ticks> t;
t.start();
auto lnetlist = std::make_unique<netlist::netlist_state_t>("netlist",
plib::make_unique<netlist_validate_callbacks_t, netlist::host_arena>());
// enable validation mode
@@ -997,7 +997,7 @@ std::unique_ptr<netlist::netlist_state_t> netlist_mame_device::base_validity_che
}
}

//t.stop();
t.stop();
//printf("time %s %f\n", this->mconfig().gamedrv().name, t.as_seconds<double>());
return lnetlist;
}
@@ -283,6 +283,22 @@ namespace netlist
pstring m_filename;
};

class source_pattern_t : public source_netlist_t
{
public:

explicit source_pattern_t(const pstring &pat)
: m_pattern(pat)
{
}

protected:
stream_ptr stream(const pstring &name) override;

private:
pstring m_pattern;
};

class source_mem_t : public source_netlist_t
{
public:
@@ -115,7 +115,6 @@ namespace netlist
, m_extended_validation(false)
, m_dummy_version(1)
{

m_lib = m_callbacks->static_solver_lib();

m_setup = plib::make_unique<setup_t, host_arena>(*this);
@@ -130,7 +129,6 @@ namespace netlist
devices::initialize_factory(m_setup->parser().factory());

// Add default include file
using a = plib::psource_str_t;
const pstring content =
"#define RES_R(res) (res) \n"
"#define RES_K(res) ((res) * 1e3) \n"
@@ -141,12 +139,16 @@ namespace netlist
"#define IND_U(ind) ((ind) * 1e-6) \n"
"#define IND_N(ind) ((ind) * 1e-9) \n"
"#define IND_P(ind) ((ind) * 1e-12) \n";
m_setup->parser().add_include<a>("netlist/devices/net_lib.h", content);
m_setup->parser().add_include<plib::psource_str_t>("netlist/devices/net_lib.h", content);
#if 1
NETLIST_NAME(base)(m_setup->parser());
#else
// FIXME: This is very slow - need optimized parsing scanning
#if 0
m_setup->parser().register_source<source_pattern_t>("src/lib/netlist/macro/nlm_{}.cpp");
#else
pstring dir = "src/lib/netlist/macro/";
//m_setup->parser().register_source<source_pattern_t>("src/lib/netlist/macro/nlm_{}.cpp");
m_setup->parser().register_source<source_file_t>(dir + "nlm_base.cpp");
m_setup->parser().register_source<source_file_t>(dir + "nlm_opamp.cpp");
m_setup->parser().register_source<source_file_t>(dir + "nlm_roms.cpp");
@@ -154,6 +156,7 @@ namespace netlist
m_setup->parser().register_source<source_file_t>(dir + "nlm_other.cpp");
m_setup->parser().register_source<source_file_t>(dir + "nlm_ttl74xx.cpp");
m_setup->parser().include("base");
#endif
#endif
}
@@ -43,6 +43,7 @@ namespace netlist
PERRMSGV(MF_UNEXPECTED_NETLIST_END, 0, "Unexpected NETLIST_END")
PERRMSGV(MF_UNEXPECTED_END_OF_FILE, 0, "Unexpected end of file, missing NETLIST_END")
PERRMSGV(MF_UNEXPECTED_NETLIST_START, 0, "Unexpected NETLIST_START")
PERRMSGV(MF_EXPECTED_NETLIST_START_1, 1, "Expected NETLIST_START but got {1}")
PERRMSGV(MF_EXPECTED_IDENTIFIER_GOT_1, 1, "Expected an identifier, but got {1}")
PERRMSGV(MF_EXPECTED_COMMA_OR_RP_1, 1, "Expected comma or right parenthesis but found <{1}>")
PERRMSGV(MF_DIPPINS_EQUAL_NUMBER_1, 1, "DIPPINS requires equal number of pins to DIPPINS, first pin is {}")
@@ -3,6 +3,7 @@

#include "nl_parser.h"
#include "nl_base.h"
#include "nl_setup.h"
#include "nl_errstr.h"
#include "nl_factory.h"

@@ -18,43 +19,64 @@ void parser_t::verror(const pstring &msg)
throw nl_exception(plib::pfmt("{1}")(msg));
}

bool parser_t::parse(const pstring &nlname)
parser_t::parser_t(nlparse_t &setup)
: m_setup(setup)
{
this->identifier_chars("abcdefghijklmnopqrstuvwvxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890_.-$@")
m_tokenizer.identifier_chars("abcdefghijklmnopqrstuvwvxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890_.-$@")
.number_chars(".0123456789", "0123456789eE-.") //FIXME: processing of numbers
.whitespace(pstring("") + ' ' + static_cast<char>(9) + static_cast<char>(10) + static_cast<char>(13))
.comment("/*", "*/", "//");
m_tok_paren_left = register_token("(");
m_tok_paren_right = register_token(")");
m_tok_comma = register_token(",");
m_tok_paren_left = m_tokenizer.register_token("(");
m_tok_paren_right = m_tokenizer.register_token(")");
m_tok_comma = m_tokenizer.register_token(",");

m_tok_ALIAS = register_token("ALIAS");
m_tok_DIPPINS = register_token("DIPPINS");
m_tok_NET_C = register_token("NET_C");
m_tok_FRONTIER = register_token("OPTIMIZE_FRONTIER");
m_tok_PARAM = register_token("PARAM");
m_tok_DEFPARAM = register_token("DEFPARAM");
m_tok_HINT = register_token("HINT");
m_tok_NET_MODEL = register_token("NET_MODEL");
m_tok_NET_REGISTER_DEV = register_token("NET_REGISTER_DEV");
m_tok_INCLUDE = register_token("INCLUDE");
m_tok_LOCAL_SOURCE = register_token("LOCAL_SOURCE");
m_tok_LOCAL_LIB_ENTRY = register_token("LOCAL_LIB_ENTRY");
m_tok_SUBMODEL = register_token("SUBMODEL");
m_tok_NETLIST_START = register_token("NETLIST_START");
m_tok_NETLIST_END = register_token("NETLIST_END");
m_tok_TRUTHTABLE_START = register_token("TRUTHTABLE_START");
m_tok_TRUTHTABLE_END = register_token("TRUTHTABLE_END");
m_tok_TT_HEAD = register_token("TT_HEAD");
m_tok_TT_LINE = register_token("TT_LINE");
m_tok_TT_FAMILY = register_token("TT_FAMILY");
m_tok_static = m_tokenizer.register_token("static");
m_tok_ALIAS = m_tokenizer.register_token("ALIAS");
m_tok_DIPPINS = m_tokenizer.register_token("DIPPINS");
m_tok_NET_C = m_tokenizer.register_token("NET_C");
m_tok_FRONTIER = m_tokenizer.register_token("OPTIMIZE_FRONTIER");
m_tok_PARAM = m_tokenizer.register_token("PARAM");
m_tok_DEFPARAM = m_tokenizer.register_token("DEFPARAM");
m_tok_HINT = m_tokenizer.register_token("HINT");
m_tok_NET_MODEL = m_tokenizer.register_token("NET_MODEL");
m_tok_NET_REGISTER_DEV = m_tokenizer.register_token("NET_REGISTER_DEV");
m_tok_INCLUDE = m_tokenizer.register_token("INCLUDE");
m_tok_LOCAL_SOURCE = m_tokenizer.register_token("LOCAL_SOURCE");
m_tok_LOCAL_LIB_ENTRY = m_tokenizer.register_token("LOCAL_LIB_ENTRY");
m_tok_SUBMODEL = m_tokenizer.register_token("SUBMODEL");
m_tok_NETLIST_START = m_tokenizer.register_token("NETLIST_START");
m_tok_NETLIST_END = m_tokenizer.register_token("NETLIST_END");
m_tok_TRUTHTABLE_START = m_tokenizer.register_token("TRUTHTABLE_START");
m_tok_TRUTHTABLE_END = m_tokenizer.register_token("TRUTHTABLE_END");
m_tok_TT_HEAD = m_tokenizer.register_token("TT_HEAD");
m_tok_TT_LINE = m_tokenizer.register_token("TT_LINE");
m_tok_TT_FAMILY = m_tokenizer.register_token("TT_FAMILY");

register_token("RES_R");
register_token("RES_K");
register_token("RES_M");
register_token("CAP_U");
register_token("CAP_N");
register_token("CAP_P");
m_tokenizer.register_token("RES_R");
m_tokenizer.register_token("RES_K");
m_tokenizer.register_token("RES_M");
m_tokenizer.register_token("CAP_U");
m_tokenizer.register_token("CAP_N");
m_tokenizer.register_token("CAP_P");

}

bool parser_t::parse(plib::psource_t::stream_ptr &&strm, const pstring &nlname)
{
token_store tokstor;
parse_tokens(std::move(strm), tokstor);
return parse(tokstor, nlname);
}

void parser_t::parse_tokens(plib::psource_t::stream_ptr &&strm, token_store &tokstor)
{
plib::putf8_reader u8reader(strm.release_stream());
m_tokenizer.append_to_store(&u8reader, tokstor);
}

bool parser_t::parse(token_store &tokstor, const pstring &nlname)
{
set_token_source(&tokstor);

bool in_nl = false;

@@ -92,6 +114,11 @@ bool parser_t::parse(const pstring &nlname)

in_nl = true;
}
else if (!in_nl)
{
if (!token.is(m_tok_static))
error(MF_EXPECTED_NETLIST_START_1(token.str()));
}
}
}
@@ -8,22 +8,24 @@
#ifndef NL_PARSER_H_
#define NL_PARSER_H_

#include "nl_setup.h"
#include "nltypes.h" // for setup_t
#include "plib/ptokenizer.h"

namespace netlist
{
class parser_t : public plib::ptokenizer
class parser_t : public plib::ptoken_reader
{
public:
template <typename T>
parser_t(T &&strm, nlparse_t &setup)
: plib::ptokenizer(std::forward<T>(strm))
, m_setup(setup)
{
}
using token_t = plib::ptokenizer::token_t;
using token_type = plib::ptokenizer::token_type;
using token_id_t = plib::ptokenizer::token_id_t;
using token_store = plib::ptokenizer::token_store;

bool parse(const pstring &nlname);
parser_t(nlparse_t &setup);

bool parse(plib::psource_t::stream_ptr &&strm, const pstring &nlname);
bool parse(token_store &tokstor, const pstring &nlname);
void parse_tokens(plib::psource_t::stream_ptr &&strm, token_store &tokstor);

protected:
void parse_netlist(const pstring &nlname);
@@ -49,6 +51,7 @@ namespace netlist
token_id_t m_tok_paren_left;
token_id_t m_tok_paren_right;
token_id_t m_tok_comma;
token_id_t m_tok_static;
token_id_t m_tok_ALIAS;
token_id_t m_tok_NET_C;
token_id_t m_tok_DIPPINS;
@@ -70,6 +73,7 @@ namespace netlist
token_id_t m_tok_TT_LINE;
token_id_t m_tok_TT_FAMILY;

plib::ptokenizer m_tokenizer;
nlparse_t &m_setup;
};
@@ -16,6 +16,8 @@

#include "solver/nld_solver.h"

#include <sstream>

namespace netlist
{
// ----------------------------------------------------------------------------------------
@@ -352,10 +354,26 @@ namespace netlist

bool nlparse_t::parse_stream(plib::psource_t::stream_ptr &&istrm, const pstring &name)
{
auto y = std::make_unique<plib::ppreprocessor>(m_includes, &m_defines);
y->process(std::move(istrm), "<stream>");
return parser_t(std::move(y), *this).parse(name);
//return parser_t(std::move(plib::ppreprocessor(&m_defines).process(std::move(istrm))), *this).parse(name);
auto key = istrm.filename();

if (m_source_cache.find(key) != m_source_cache.end())
{
return parser_t(*this).parse(m_source_cache[key], name);
}
else
{
//printf("searching %s\n", name.c_str());
plib::ppreprocessor y(m_includes, &m_defines);
y.process(std::move(istrm), istrm.filename());

auto abc = std::make_unique<std::stringstream>();
plib::copystream(*abc, y);

parser_t::token_store &st = m_source_cache[key];
parser_t parser(*this);
parser.parse_tokens(plib::psource_t::stream_ptr(std::move(abc), key), st);
return parser.parse(st, name);
}
}

void nlparse_t::add_define(const pstring &defstr)
@@ -1690,6 +1708,19 @@ source_file_t::stream_ptr source_file_t::stream(const pstring &name)
return stream_ptr();
}

source_file_t::stream_ptr source_pattern_t::stream(const pstring &name)
{
pstring filename = plib::pfmt(m_pattern)(name);
auto f = std::make_unique<plib::ifstream>(plib::filesystem::u8path(filename));
if (f->is_open())
{
return stream_ptr(std::move(f), filename);
}
else
return stream_ptr();
}


bool source_proc_t::parse(nlparse_t &setup, const pstring &name)
{
if (name == m_setup_func_name)
@@ -17,6 +17,7 @@

#include "nl_config.h"
#include "nltypes.h"
#include "nl_parser.h"
// FIXME: avoid including factory
//#include "nl_factory.h"

@@ -270,6 +271,7 @@ namespace netlist
plib::psource_collection_t<> m_sources;
detail::abstract_t & m_abstract;

std::unordered_map<pstring, parser_t::token_store> m_source_cache;
log_type &m_log;
unsigned m_frontier_cnt;
};
@@ -68,6 +68,8 @@ public:
putf8_reader(std::unique_ptr<std::istream> &&rhs) noexcept
: m_strm(std::move(rhs))
{
// no bad surprises
m_strm->imbue(std::locale::classic());
}

bool eof() const { return m_strm->eof(); }
@@ -1,9 +1,9 @@
// license:GPL-2.0+
// copyright-holders:Couriersud

#include "ptokenizer.h"
#include "palloc.h"
#include "pstonum.h"
#include "ptokenizer.h"
#include "putil.h"

namespace plib {
@@ -21,11 +21,6 @@ namespace plib {
// A simple tokenizer
// ----------------------------------------------------------------------------------------

pstring ptokenizer::currentline_str() const
{
return m_cur_line;
}

void ptokenizer::skipeol()
{
pstring::value_type c = getc();
@@ -52,9 +47,13 @@ namespace plib {
}
if (m_px == m_cur_line.end())
{
++m_source_location.back();
if (m_strm.readline(m_cur_line))
//++m_source_location.back();
if (m_strm->readline(m_cur_line))
{
m_px = m_cur_line.begin();
if (*m_px != '#')
m_token_queue->push_back(token_t(token_type::SOURCELINE, m_cur_line));
}
else
return 0;
return '\n';
@@ -68,23 +67,19 @@ namespace plib {
m_unget = c;
}

void ptokenizer::require_token(const token_id_t &token_num)
void ptoken_reader::require_token(const token_id_t &token_num)
{
require_token(get_token(), token_num);
}
void ptokenizer::require_token(const token_t &tok, const token_id_t &token_num)
void ptoken_reader::require_token(const token_t &tok, const token_id_t &token_num)
{
if (!tok.is(token_num))
{
pstring val("");
for (auto &i : m_tokens)
if (i.second.id() == token_num.id())
val = i.first;
error(MF_EXPECTED_TOKEN_1_GOT_2(val, tok.str()));
error(MF_EXPECTED_TOKEN_1_GOT_2(token_num.name(), tok.str()));
}
}

pstring ptokenizer::get_string()
pstring ptoken_reader::get_string()
{
token_t tok = get_token();
if (!tok.is_type(token_type::STRING))
@@ -95,7 +90,7 @@ namespace plib {
}


pstring ptokenizer::get_identifier()
pstring ptoken_reader::get_identifier()
{
token_t tok = get_token();
if (!tok.is_type(token_type::IDENTIFIER))
@@ -105,7 +100,7 @@ namespace plib {
return tok.str();
}

pstring ptokenizer::get_identifier_or_number()
pstring ptoken_reader::get_identifier_or_number()
{
token_t tok = get_token();
if (!(tok.is_type(token_type::IDENTIFIER) || tok.is_type(token_type::NUMBER)))
@@ -116,7 +111,7 @@ namespace plib {
}

// FIXME: combine into template
double ptokenizer::get_number_double()
double ptoken_reader::get_number_double()
{
token_t tok = get_token();
if (!tok.is_type(token_type::NUMBER))
@@ -130,7 +125,7 @@ namespace plib {
return ret;
}

long ptokenizer::get_number_long()
long ptoken_reader::get_number_long()
{
token_t tok = get_token();
if (!tok.is_type(token_type::NUMBER))
@@ -144,30 +139,31 @@ namespace plib {
return ret;
}

ptokenizer::token_t ptokenizer::get_token()
ptoken_reader::token_t ptoken_reader::get_token()
{
token_t ret = get_token_internal();
token_t ret = get_token_queue();
while (true)
{
if (ret.is_type(token_type::token_type::ENDOFFILE))
return ret;

if (m_support_line_markers && ret.is_type(token_type::LINEMARKER))
//printf("%s\n", ret.str().c_str());
if (ret.is_type(token_type::LINEMARKER))
{
bool benter(false);
bool bexit(false);
pstring file;
unsigned lineno(0);

ret = get_token_internal();
ret = get_token_queue();
if (!ret.is_type(token_type::NUMBER))
error(MF_EXPECTED_LINENUM_GOT_1(ret.str()));
lineno = pstonum<unsigned>(ret.str());
ret = get_token_internal();
ret = get_token_queue();
if (!ret.is_type(token_type::STRING))
error(MF_EXPECTED_FILENAME_GOT_1(ret.str()));
file = ret.str();
ret = get_token_internal();
ret = get_token_queue();
while (ret.is_type(token_type::NUMBER))
{
if (ret.str() == "1")
@@ -175,7 +171,7 @@ namespace plib {
if (ret.str() == "2")
bexit = false;
// FIXME: process flags; actually only 1 (file enter) and 2 (after file exit)
ret = get_token_internal();
ret = get_token_queue();
}
if (bexit) // pop the last location
m_source_location.pop_back();
@@ -183,17 +179,11 @@ namespace plib {
m_source_location.pop_back();
m_source_location.emplace_back(plib::source_location(file, lineno));
}
else if (ret.is(m_tok_comment_start))
else if (ret.is_type(token_type::SOURCELINE))
{
do {
ret = get_token_internal();
} while (ret.is_not(m_tok_comment_end));
ret = get_token_internal();
}
else if (ret.is(m_tok_line_comment))
{
skipeol();
ret = get_token_internal();
m_line = ret.str();
++m_source_location.back();
ret = get_token_queue();
}
else
{
@@ -202,7 +192,7 @@ namespace plib {
}
}

ptokenizer::token_t ptokenizer::get_token_internal()
ptoken_reader::token_t ptokenizer::get_token_internal()
{
// skip ws
pstring::value_type c = getc();
@@ -287,7 +277,35 @@ namespace plib {
}
}

void ptokenizer::error(const perrmsg &errs)
ptoken_reader::token_t ptokenizer::get_token_comment()
{
token_t ret = get_token_internal();
while (true)
{
if (ret.is_type(token_type::token_type::ENDOFFILE))
return ret;

if (ret.is(m_tok_comment_start))
{
do {
ret = get_token_internal();
} while (ret.is_not(m_tok_comment_end));
ret = get_token_internal();
}
else if (ret.is(m_tok_line_comment))
{
skipeol();
ret = get_token_internal();
}
else
{
return ret;
}
}
}


void ptoken_reader::error(const perrmsg &errs)
{
pstring s("");
pstring trail (" from ");
@@ -302,7 +320,7 @@ namespace plib {
s = plib::pfmt("{1}{2}:{3}:0\n{4}")(trail, m_source_location.back().file_name(), m_source_location.back().line(), s);
m_source_location.pop_back();
}
verror("\n" + s + e + " " + currentline_str() + "\n");
verror("\n" + s + e + " " + m_line + "\n");
}

} // namespace plib
@@ -22,17 +22,13 @@ namespace plib {
class ptokenizer
{
public:
template <typename T>
explicit ptokenizer(T &&strm) // NOLINT(misc-forwarding-reference-overload, bugprone-forwarding-reference-overload)
: m_strm(std::forward<T>(strm))
, m_cur_line("")
, m_px(m_cur_line.begin())
, m_unget(0)
explicit ptokenizer() // NOLINT(misc-forwarding-reference-overload, bugprone-forwarding-reference-overload)
: m_strm(nullptr)
, m_string('"')
, m_support_line_markers(true) // FIXME
, m_token_queue(nullptr)
{
// add a first entry to the stack
m_source_location.emplace_back(plib::source_location("Unknown", 0));
clear();
}

PCOPYASSIGNMOVE(ptokenizer, delete)
@@ -46,6 +42,7 @@ namespace plib {
STRING,
COMMENT,
LINEMARKER,
SOURCELINE,
UNKNOWN,
ENDOFFILE
)
@@ -57,28 +54,33 @@ namespace plib {
static constexpr std::size_t npos = static_cast<std::size_t>(-1);

token_id_t() : m_id(npos) {}
explicit token_id_t(const std::size_t id) : m_id(id) {}
explicit token_id_t(std::size_t id, const pstring &name)
: m_id(id)
, m_name(name)
{}
std::size_t id() const { return m_id; }
pstring name() const { return m_name; }
private:
std::size_t m_id;
pstring m_name;
};

struct token_t
{
explicit token_t(token_type type)
: m_type(type), m_token("")
: m_type(type), m_id(token_id_t::npos), m_token("")
{
}
token_t(token_type type, const pstring &str)
: m_type(type), m_token(str)
: m_type(type), m_id(token_id_t::npos), m_token(str)
{
}
token_t(const token_id_t &id, const pstring &str)
: m_type(token_type::TOKEN), m_id(id), m_token(str)
: m_type(token_type::TOKEN), m_id(id.id()), m_token(str)
{
}

bool is(const token_id_t &tok_id) const noexcept { return m_id.id() == tok_id.id(); }
bool is(const token_id_t &tok_id) const noexcept { return m_id == tok_id.id(); }
bool is_not(const token_id_t &tok_id) const noexcept { return !is(tok_id); }

bool is_type(const token_type type) const noexcept { return m_type == type; }
@@ -89,28 +91,17 @@ namespace plib {

private:
token_type m_type;
token_id_t m_id;
std::size_t m_id;
pstring m_token;
};

pstring currentline_str() const;
using token_store = std::vector<token_t>;

// tokenizer stuff follows ...

token_t get_token();
pstring get_string();
pstring get_identifier();
pstring get_identifier_or_number();

double get_number_double();
long get_number_long();

void require_token(const token_id_t &token_num);
void require_token(const token_t &tok, const token_id_t &token_num);

token_id_t register_token(const pstring &token)
{
token_id_t ret(m_tokens.size());
token_id_t ret(m_tokens.size(), token);
m_tokens.emplace(token, ret);
return ret;
}
@@ -127,22 +118,42 @@ namespace plib {
return *this;
}

token_t get_token_internal();
void error(const perrmsg &errs);

putf8_reader &stream() { return m_strm; }
protected:
virtual void verror(const pstring &msg) = 0;
void append_to_store(putf8_reader *reader, token_store &tokstor)
{
clear();
m_strm = reader;
// Process tokens into queue
token_t ret(token_type::UNKNOWN);
m_token_queue = &tokstor;
do {
ret = get_token_comment();
tokstor.push_back(ret);
} while (!ret.is_type(token_type::token_type::ENDOFFILE));
m_token_queue = nullptr;
}

private:

void clear()
{
m_cur_line = "";
m_px = m_cur_line.begin();
m_unget = 0;
}

token_t get_token_internal();

// get internal token with comment processing
token_t get_token_comment();

void skipeol();

pstring::value_type getc();
void ungetc(pstring::value_type c);

bool eof() const { return m_strm.eof(); }
bool eof() const { return m_strm->eof(); }

putf8_reader m_strm;
putf8_reader *m_strm;

pstring m_cur_line;
pstring::const_iterator m_px;
@@ -161,10 +172,71 @@ namespace plib {
token_id_t m_tok_comment_end;
token_id_t m_tok_line_comment;

protected:
bool m_support_line_markers;
token_store *m_token_queue;
};

class ptoken_reader
{
public:

using token_t = ptokenizer::token_t;
using token_type = ptokenizer::token_type;
using token_id_t = ptokenizer::token_id_t;
using token_store = ptokenizer::token_store;

explicit ptoken_reader()
: m_idx(0)
, m_token_store(nullptr)
{
// add a first entry to the stack
m_source_location.emplace_back(plib::source_location("Unknown", 0));
}

PCOPYASSIGNMOVE(ptoken_reader, delete)

virtual ~ptoken_reader() = default;

void set_token_source(const token_store *tokstor)
{
m_token_store = tokstor;
}

pstring currentline_str() const;

// tokenizer stuff follows ...

token_t get_token();
pstring get_string();
pstring get_identifier();
pstring get_identifier_or_number();

double get_number_double();
long get_number_long();

void require_token(const token_id_t &token_num);
void require_token(const token_t &tok, const token_id_t &token_num);

void error(const perrmsg &errs);

protected:
virtual void verror(const pstring &msg) = 0;

private:
token_t get_token_queue()
{
if (m_idx < m_token_store->size())
return (*m_token_store)[m_idx++];
return token_t(token_type::ENDOFFILE);
}

// source locations, vector used as stack because we need to loop through stack

bool m_support_line_markers;
std::vector<plib::source_location> m_source_location;
pstring m_line;
std::size_t m_idx;
const token_store * m_token_store;
};

} // namespace plib
@@ -177,7 +177,7 @@ namespace plib

bool empty() { return m_strm == nullptr; }

// FIXME: workaround input conext should accept stream_ptr
// FIXME: workaround input context should accept stream_ptr

std::unique_ptr<std::istream> release_stream() { return std::move(m_strm); }
private:
@@ -731,8 +731,8 @@ void nl_convert_spice_t::process_line(const pstring &line)
// Eagle converter
// -------------------------------------------------

nl_convert_eagle_t::tokenizer::tokenizer(nl_convert_eagle_t &convert, plib::putf8_reader &&strm)
: plib::ptokenizer(std::move(strm))
nl_convert_eagle_t::tokenizer::tokenizer(nl_convert_eagle_t &convert)
: plib::ptokenizer()
, m_convert(convert)
{
this->identifier_chars("abcdefghijklmnopqrstuvwvxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890_.-")
@@ -759,8 +759,13 @@ void nl_convert_eagle_t::tokenizer::verror(const pstring &msg)
void nl_convert_eagle_t::convert(const pstring &contents)
{

tokenizer tok(*this, plib::putf8_reader(std::make_unique<std::istringstream>(contents)));
tok.stream().stream().imbue(std::locale::classic());
tokenizer tok(*this);

tokenizer::token_store tokstor;
plib::putf8_reader u8reader(std::make_unique<std::istringstream>(contents));

tok.append_to_store(&u8reader, tokstor);
tok.set_token_source(&tokstor);

out("NETLIST_START(dummy)\n");
add_term("GND", "GND");
@@ -869,8 +874,8 @@ void nl_convert_eagle_t::convert(const pstring &contents)
// RINF converter
// -------------------------------------------------

nl_convert_rinf_t::tokenizer::tokenizer(nl_convert_rinf_t &convert, plib::putf8_reader &&strm)
: plib::ptokenizer(std::move(strm))
nl_convert_rinf_t::tokenizer::tokenizer(nl_convert_rinf_t &convert)
: plib::ptokenizer()
, m_convert(convert)
{
this->identifier_chars(".abcdefghijklmnopqrstuvwvxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890_-")
@@ -906,8 +911,14 @@ void nl_convert_rinf_t::tokenizer::verror(const pstring &msg)

void nl_convert_rinf_t::convert(const pstring &contents)
{
tokenizer tok(*this, plib::putf8_reader(std::make_unique<std::istringstream>(contents)));
tok.stream().stream().imbue(std::locale::classic());
tokenizer tok(*this);

tokenizer::token_store tokstor;
plib::putf8_reader u8reader(std::make_unique<std::istringstream>(contents));

tok.append_to_store(&u8reader, tokstor);
tok.set_token_source(&tokstor);

auto lm = read_lib_map(s_lib_map);

out("NETLIST_START(dummy)\n");
@@ -1009,7 +1020,7 @@ void nl_convert_rinf_t::convert(const pstring &contents)
if (token.is(tok.m_tok_TER))
{
token = tok.get_token();
while (token.is_type(plib::ptokenizer::token_type::IDENTIFIER))
while (token.is_type(plib::ptoken_reader::token_type::IDENTIFIER))
{
pin = tok.get_identifier_or_number();
add_term(net, token.str() + "." + pin);
@@ -10,11 +10,12 @@

#include "plib/palloc.h"
#include "plib/pstring.h"
#include "plib/ptokenizer.h"
#include "plib/ptypes.h"

#include <memory>

#include "../plib/ptokenizer.h"

// -------------------------------------------------
// convert - convert a spice netlist
// -------------------------------------------------
@@ -212,10 +213,15 @@ public:

nl_convert_eagle_t() = default;

class tokenizer : public plib::ptokenizer
class tokenizer : public plib::ptokenizer, public plib::ptoken_reader
{
public:
tokenizer(nl_convert_eagle_t &convert, plib::putf8_reader &&strm);
using token_t = ptokenizer::token_t;
using token_type = ptokenizer::token_type;
using token_id_t = ptokenizer::token_id_t;
using token_store = ptokenizer::token_store;

tokenizer(nl_convert_eagle_t &convert);

token_id_t m_tok_ADD; // NOLINT
token_id_t m_tok_VALUE; // NOLINT
@@ -244,10 +250,14 @@ public:

nl_convert_rinf_t() = default;

class tokenizer : public plib::ptokenizer
class tokenizer : public plib::ptokenizer, public plib::ptoken_reader
{
public:
tokenizer(nl_convert_rinf_t &convert, plib::putf8_reader &&strm);
using token_t = ptokenizer::token_t;
using token_type = ptokenizer::token_type;
using token_id_t = ptokenizer::token_id_t;
using token_store = ptokenizer::token_store;
tokenizer(nl_convert_rinf_t &convert);

token_id_t m_tok_HEA; // NOLINT
token_id_t m_tok_APP; // NOLINT