#include <string>
#include <fstream>
#include <sstream>

#include <boost/algorithm/string/predicate.hpp>
#include <boost/algorithm/string/trim.hpp>

// project headers providing Tokenizer, FilePath, xprintf, INPUT_CHECK, DBGMSG and ASSERT
#include "system/global_defs.h"
#include "system/system.hh"
#include "system/file_path.hh"
#include "system/tokenizer.hh"

using namespace std;
Tokenizer::Tokenizer(const FilePath &fp)
: f_name_( string(fp) ),
  comment_pattern_(""),
  line_counter_(0), position_(0),
  separator_("\\", " \t", "\""),
  line_tokenizer_(line_, separator_)
{
    in_ = own_stream_ = new ifstream;
    own_stream_->open( string(fp).c_str() );
    // check that the input file was opened successfully
    INPUT_CHECK(! own_stream_->fail(), "Can not open input file '%s'.\n", f_name_.c_str() );
}
Tokenizer::Tokenizer( std::istream &in)
: f_name_("__anonymous_stream__"),
  comment_pattern_(""),
  own_stream_(NULL),
  in_(&in),
  line_counter_(0), position_(0),
  separator_("\\", " \t", "\""),
  line_tokenizer_(line_, separator_)
{}
void Tokenizer::set_comment_pattern( const std::string &pattern) {
    comment_pattern_ = pattern;
}
bool Tokenizer::skip_to(const std::string &pattern, const std::string &end_search_pattern)
{
    ASSERT( in_->good(),
            "Tokenizer stream (for file: %s) is not ready for i/o operations. Perhaps missing check about correct open.\n", f_name_.c_str());
    bool end_search = (end_search_pattern.size() > 0);

    // read lines until the pattern (or the optional end pattern) is found
    while ( ! eof() ) {
        if (line_.find(pattern) != string::npos ) {
            set_tokenizer();
            return true;
        }
        if ( end_search && line_.find(end_search_pattern) != string::npos ) return false;
        next_line(false);
    }
    return false;
}
bool Tokenizer::next_line(bool assert_for_remaining_tokens) {
    // warn if some tokens of the current line were left unread
    if (assert_for_remaining_tokens && (! eol() )) {
        xprintf(Warn, "Remaining tokens, file '%s', line '%d', after token #%d.\n", f_name_.c_str(), line_num(), position_);
    }

    if (eof()) return false;

    line_ = "";
    // skip empty lines and lines matching the comment pattern
    while ( ! eof() && line_ == "" ) {
        std::getline( *in_, line_);
        line_counter_++;
        // check failure bits of the stream
        if (in_->bad()) xprintf(Err, "Can not read from stream, file: '%s', line: '%d'\n", f_name_.c_str(), line_num());
        boost::trim( line_ );
        // if the comment pattern is set and the line starts with it, treat the line as empty
        if (comment_pattern_.size() && 0==line_.compare(0, comment_pattern_.size(), comment_pattern_) ) line_ = "";
    }
    DBGMSG("Line: '%s'\n", line_.c_str());

    // allow only the eof state after getline; fail means no further line is available
    if ( ! in_->fail() ) {
        set_tokenizer();
        return true;
    }
    return false;
}
// Token accessor (signature assumed): returns the current token and reports
// an error when the position is already past the end of the line.
const std::string & Tokenizer::operator *() const
{
    if ( eol() ) xprintf(UsrErr, "Missing token, file: '%s', line: '%d', position: '%d'.\n", f_name_.c_str(), line_num(), position_);
    return *tok_;
}
void Tokenizer::set_tokenizer()
{
    line_tokenizer_.assign(line_);
    tok_ = line_tokenizer_.begin();
    position_ = 0;
    // skip leading separators (empty tokens)
    while (! eol() && (*tok_).size()==0 ) {position_++; ++tok_;}
}
string Tokenizer::position_msg() const {
    stringstream ss;
    ss << "token: " << pos() << ", line: " << line_num() << ", in file '" << f_name() << "'";
    return ss.str();
}
Tokenizer::~Tokenizer() {
    // delete the stream only if we own it (this also closes the file)
    if (own_stream_ != NULL) delete own_stream_;
}
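
/*
 * Minimal usage sketch (illustrative only, not part of the original source;
 * the file name, section markers and comment pattern are made-up examples).
 * Kept as a comment so the translation unit stays valid.
 *
 *   std::ifstream in("input.txt");
 *   Tokenizer tok(in);
 *   tok.set_comment_pattern("#");                    // skip lines starting with '#'
 *   if ( tok.skip_to("$Section", "$EndSection") ) {  // position at the section start
 *       while ( tok.next_line(false) ) {
 *           // consume the tokens of the current line here, until tok.eol()
 *       }
 *   }
 */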