use boost::iostreams::stream_offset for dbf offset and record_length_ to work around the >2 GB issue when filtering features - closes #595 (TODO - look into offset issues in shape indexes - #603)

This commit is contained in:
Dane Springmeyer 2010-09-14 16:21:25 +00:00
parent d84ac3ad5b
commit 73fe7fda4a
2 changed files with 5 additions and 5 deletions

View file

@ -91,7 +91,7 @@ void dbf_file::move_to(int index)
{
if (index>0 && index<=num_records_)
{
long pos=(num_fields_<<5)+34+(index-1)*(record_length_+1);
stream_offset pos=(num_fields_<<5)+34+(index-1)*(record_length_+1);
file_.seekg(pos,std::ios::beg);
file_.read(record_,record_length_);
}
@ -179,7 +179,7 @@ void dbf_file::read_header()
assert(num_fields_>0);
num_fields_=(num_fields_-33)/32;
skip(22);
int offset=0;
stream_offset offset=0;
char name[11];
memset(&name,0,11);
fields_.reserve(num_fields_);

View file

@ -36,6 +36,7 @@
using mapnik::transcoder;
using mapnik::Feature;
using namespace boost::iostreams;
struct field_descriptor
{
@ -44,17 +45,16 @@ struct field_descriptor
char type_;
int length_;
int dec_;
int offset_;
stream_offset offset_;
};
using namespace boost::iostreams;
class dbf_file
{
private:
int num_records_;
int num_fields_;
int record_length_;
stream_offset record_length_;
std::vector<field_descriptor> fields_;
#ifdef SHAPE_MEMORY_MAPPED_FILE
stream<mapped_file_source> file_;