Use the tempfile module to construct temporary paths (which should be compatible with Windows)
This commit is contained in:
parent
a842565915
commit
2037cb0d46
2 changed files with 7 additions and 5 deletions
|
@ -5,6 +5,7 @@ import os
|
|||
from nose.tools import *
|
||||
from utilities import execution_path
|
||||
from utilities import Todo
|
||||
import tempfile
|
||||
|
||||
import mapnik, pickle
|
||||
|
||||
|
@ -365,11 +366,13 @@ def test_map_init_from_string():
|
|||
eq_(m.base, './')
|
||||
mapnik.load_map_from_string(m, map_string, False, "") # this "" will have no effect
|
||||
eq_(m.base, './')
|
||||
|
||||
tmp_dir = tempfile.gettempdir()
|
||||
try:
|
||||
mapnik.load_map_from_string(m, map_string, False, "/tmp")
|
||||
mapnik.load_map_from_string(m, map_string, False, tmp_dir)
|
||||
except RuntimeError:
|
||||
pass # runtime error expected because shapefile path should be wrong and datasource will throw
|
||||
eq_(m.base, '/tmp') # /tmp will be set despite the exception because load_map mostly worked
|
||||
eq_(m.base, tmp_dir) # tmp_dir will be set despite the exception because load_map mostly worked
|
||||
m.base = 'foo'
|
||||
mapnik.load_map_from_string(m, map_string, True, ".")
|
||||
eq_(m.base, '.')
|
||||
|
|
|
@ -2,10 +2,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from nose.tools import *
|
||||
|
||||
import tempfile
|
||||
import os, mapnik
|
||||
from nose.tools import *
|
||||
|
||||
from utilities import execution_path
|
||||
from utilities import Todo
|
||||
|
||||
|
@ -216,7 +215,7 @@ def test_render_points():
|
|||
p = mapnik.Projection(projs[projdescr])
|
||||
m.zoom_to_box(p.forward(mapnik.Box2d(ul_lonlat,lr_lonlat)))
|
||||
# Render to SVG so that it can be checked how many points are there with string comparison
|
||||
svg_file = '/tmp/%s.svg'
|
||||
svg_file = os.path.join(tempfile.gettempdir(),'%s.svg')
|
||||
mapnik.render_to_file(m, svg_file)
|
||||
num_points_present = len(places_ds.all_features())
|
||||
svg = open(svg_file,'r').read()
|
||||
|
|
Loading…
Add table
Reference in a new issue