clean up usage of boost::tokenizer

This commit is contained in:
Dane Springmeyer 2011-10-17 11:18:44 -07:00
parent c15892ff04
commit f5b08ed821

View file

@@ -54,7 +54,6 @@ csv_datasource::csv_datasource(parameters const& params, bool bind)
 general:
 - refactor parser into generic class
 - tests
-- clean up double usage of Tokenizer types
 alternate large file pipeline:
 - stat file, detect > 15 MB
 - build up csv line-by-line iterator
@@ -192,7 +191,6 @@ void csv_datasource::parse_csv(T& stream,
 }
 typedef boost::escaped_list_separator<char> escape_type;
-typedef boost::char_separator<char> separator_type;
 std::string esc = boost::trim_copy(escape);
 if (esc.empty()) esc = "\\";
@@ -217,8 +215,7 @@ void csv_datasource::parse_csv(T& stream,
 throw mapnik::datasource_exception(s.str());
 }
-typedef boost::tokenizer< separator_type > Tokenizer;
-typedef boost::tokenizer< escape_type > ETokenizer;
+typedef boost::tokenizer< escape_type > Tokenizer;
 int line_number(1);
 bool has_wkt_field = false;
@@ -230,9 +227,7 @@ void csv_datasource::parse_csv(T& stream,
 if (!manual_headers_.empty())
 {
-//escape_type grammer2(esc, ",", quo);
-separator_type sep(",");
-Tokenizer tok(manual_headers_, sep);
+Tokenizer tok(manual_headers_, grammer);
 Tokenizer::iterator beg = tok.begin();
 unsigned idx(0);
 for (; beg != tok.end(); ++beg)
@@ -264,8 +259,7 @@ void csv_datasource::parse_csv(T& stream,
 {
 try
 {
-separator_type sep(",","",boost::keep_empty_tokens);
-Tokenizer tok(csv_line, sep);
+Tokenizer tok(csv_line, grammer);
 Tokenizer::iterator beg = tok.begin();
 std::string val = boost::trim_copy(*beg);
@@ -358,8 +352,8 @@ void csv_datasource::parse_csv(T& stream,
 try
 {
-ETokenizer tok(csv_line, grammer);
-ETokenizer::iterator beg = tok.begin();
+Tokenizer tok(csv_line, grammer);
+Tokenizer::iterator beg = tok.begin();
 // early return for strict mode
 if (strict_)