#include <boost/algorithm/string/predicate.hpp>
#include <boost/algorithm/string/trim.hpp>

// ...

Tokenizer::Tokenizer(const FilePath &fp, Separator separator)
: // ...
  separator_(separator),
  line_tokenizer_(line_, separator_)
{
    own_stream_ = new ifstream;
    // ...
}
Tokenizer::Tokenizer(std::istream &in, Separator separator)
: f_name_("__anonymous_stream__"),
  // ...
  separator_(separator),
  line_tokenizer_(line_, separator_)
{
    // ...
}
void Tokenizer::set_comment_pattern(const std::string &pattern) {
    comment_pattern_ = pattern;
}
bool Tokenizer::skip_to(const std::string &pattern, const std::string &end_search_pattern)
{
    bool end_search = (end_search_pattern.size() > 0);
    // ...
        if ( line_.find(pattern) != string::npos ) {
            // ...
        }
        if ( end_search && line_.find(end_search_pattern) != string::npos )
            return false;
    // ...
}
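// --- Usage sketch (illustrative only, not part of tokenizer.cc) ---
// skip_to() is typically combined with next_line(): first find the line that
// contains a section header, then advance to the data lines behind it. The
// stream content and the "$Nodes" section name below are hypothetical; only
// members that appear in this listing are used (needs <sstream>).
static void skip_to_example() {
    std::istringstream data("# header comment\n$Nodes\n1 0.0 0.0 0.0\n$EndNodes\n");
    Tokenizer tok(data);
    tok.set_comment_pattern("#");
    if ( tok.skip_to("$Nodes", "") ) {
        tok.next_line(false);   // read the first data line after the "$Nodes" header
    }
}
// --- end of sketch ---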
bool Tokenizer::next_line(bool assert_for_remaining_tokens) {
    if (assert_for_remaining_tokens && ( ! eol() )) {
        WarningOut().fmt("Remaining token '{}', file '{}', line {} after token #{}\n",
                *tok_, f_name_, line_num(), position_.line_position_);
    }
    // ...
    if (eof()) return false;
    // ...
    while ( ! eof() && line_ == "") {
        std::getline( *in_, line_);
        position_.line_counter_++;
        // ...
        if (in_->bad())
            xprintf(Err, "Can not read from stream, file: '%s', line: '%d'\n",
                    f_name_.c_str(), line_num());
        // ...
        if (comment_pattern_.size() && 0 == line_.compare(0, comment_pattern_.size(), comment_pattern_))
            line_ = "";
        // ...
    }
    if ( ! in_->fail() ) {
        // ...
    }
    // ...
}
// ... (enclosing function not shown in this listing)
    if ( eol() )
        xprintf(UsrErr, "Missing token, file: '%s', line: '%d', position: '%d'.\n",
                f_name_.c_str(), line_num(), position_.line_position_);
// ...
void Tokenizer::set_tokenizer()
{
    line_tokenizer_.assign(line_);
    tok_ = line_tokenizer_.begin();
    position_.line_position_ = 0;
    // ...
    while ( ! eol() && (*tok_).size() == 0 ) { position_.line_position_++; ++tok_; }
}
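// --- Sketch of the underlying boost::tokenizer pattern (illustrative only) ---
// set_tokenizer() above rebinds line_tokenizer_ to the freshly read line and then
// skips leading empty tokens. The snippet below shows the same idiom with a plain
// boost::char_separator; the actual Separator type is defined in the header (not
// shown here) and may keep empty tokens, which is what the skip loop handles.
// Needs <string>, <iostream> and <boost/tokenizer.hpp>.
static void boost_tokenizer_example() {
    std::string line = "  12  3.5   abc";
    boost::char_separator<char> sep(" \t");
    boost::tokenizer<boost::char_separator<char>> tokens(line, sep);
    for (const std::string &t : tokens)
        std::cout << t << "\n";     // prints "12", "3.5", "abc" on separate lines
}
// --- end of sketch ---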
string Tokenizer::position_msg() const {
    // ...
    ss << "token: " << pos() << ", line: " << line_num()
       << ", in file '" << f_name() << "'";
    // ...
}
const Tokenizer::Position Tokenizer::get_position()
{
    position_.file_position_ = in_->tellg();
    // ...
}
void Tokenizer::set_position(const Tokenizer::Position pos)
{
    // ...
    in_->seekg(pos.file_position_);
    // ...
}
Tokenizer::~Tokenizer() {
    if (own_stream_ != NULL)
        delete own_stream_;
}
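// --- End-to-end usage sketch (illustrative only, not part of tokenizer.cc) ---
// Reads a small in-memory "file" line by line, skipping '#' comments, and uses the
// get_position()/set_position() pair to return to a remembered spot. Only members
// that appear in this listing are used; the data, the helper name and the exact
// rewind semantics (set_position is only partially shown above) are assumptions.
static void tokenizer_usage_example() {
    std::istringstream data("# comment\nfirst 1 2\nsecond 3 4\n");
    Tokenizer tok(data);
    tok.set_comment_pattern("#");

    tok.next_line(false);                              // now on line "first 1 2"
    Tokenizer::Position saved = tok.get_position();    // remember the stream position

    tok.next_line(false);                              // now on line "second 3 4"
    std::cout << tok.position_msg() << "\n";

    tok.set_position(saved);                           // rewind to the remembered position
    tok.next_line(false);                              // continue reading from there
}
// --- end of sketch ---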