author     Stan Seibert <stan@mtrr.org>    2011-08-10 12:40:45 -0400
committer  Stan Seibert <stan@mtrr.org>    2011-08-10 12:40:45 -0400
commit     ed5b635e31464585aebae8f462c1369224c2681d (patch)
tree       6353aed9f8ed8b0af7d28a9faaa6c1d2ce9ab05c
parent     01d5527db39520ca548518ed1194f8b863a4f077 (diff)
Using gzip compression level 1, the cached BVH at 10 bits is 7x smaller. This adds
10 seconds to the cache load time if the BVH file is already in the memory cache; otherwise, when the BVH file must be read fresh from disk, loading is faster.
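
The change below routes the pickled BVH cache through gzip.GzipFile instead of plain file handles. As a rough sketch of the approach (not taken from the commit; the cache path and array shapes are made-up placeholders), writing and reading a pickled dict of numpy arrays at compresslevel=1 looks roughly like this:

    # Illustrative sketch only -- cache_path and the arrays are placeholders.
    import gzip
    import cPickle as pickle
    import numpy as np

    cache_path = '/tmp/bvh_cache.pkl.gz'   # hypothetical location

    data = {'lower_bounds': np.zeros((100000, 3), dtype=np.float32),
            'upper_bounds': np.ones((100000, 3), dtype=np.float32)}

    # Write: compresslevel=1 favors speed over compression ratio, so the
    # pickle still shrinks several-fold without compression dominating the
    # cache write time.
    f = gzip.GzipFile(cache_path, 'wb', compresslevel=1)
    pickle.dump(data, f, -1)    # -1 = highest available pickle protocol
    f.close()

    # Read: GzipFile decompresses transparently, so the loading side only
    # changes its open call.
    f = gzip.GzipFile(cache_path, 'rb')
    data = pickle.load(f)
    f.close()
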
-rw-r--r--  geometry.py  19
1 file changed, 11 insertions, 8 deletions
diff --git a/geometry.py b/geometry.py
index 579e01d..35da2ac 100644
--- a/geometry.py
+++ b/geometry.py
@@ -5,6 +5,7 @@ from itertoolset import *
 from tools import timeit
 from hashlib import md5
 import cPickle as pickle
+import gzip
 
 # all material/surface properties are interpolated at these
 # wavelengths when they are sent to the gpu
@@ -285,7 +286,7 @@ class Geometry(object):
         cache_path = os.path.join(cache_dir, cache_file)
 
         try:
-            f = open(cache_path, 'rb')
+            f = gzip.GzipFile(cache_path, 'rb')
         except IOError:
             pass
         else:
@@ -357,13 +358,15 @@ class Geometry(object):
             if unique_zvalues.size == 1:
                 break
 
+        print >>sys.stderr, 'Writing BVH to cache directory...'
+
         if not os.path.exists(cache_dir):
             os.makedirs(cache_dir)
 
-        f = open(cache_path, 'wb')
-        data = {}
-        for key in ['material1_index', 'material2_index', 'surface_index', 'colors', 'solid_id', 'lower_bounds', 'upper_bounds', 'node_map', 'node_map_end', 'layers', 'first_node']:
-            data[key] = getattr(self, key)
-        data['reorder'] = reorder
-        pickle.dump(data, f, -1)
-        f.close()
+        with gzip.GzipFile(cache_path, 'wb', compresslevel=1) as f:
+            data = {}
+            for key in ['material1_index', 'material2_index', 'surface_index', 'colors', 'solid_id', 'lower_bounds', 'upper_bounds', 'node_map', 'node_map_end', 'layers', 'first_node']:
+                data[key] = getattr(self, key)
+            data['reorder'] = reorder
+            pickle.dump(data, f, -1)
+
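
A portability note on the new write path, not from the commit itself: gzip.GzipFile only gained support for the with statement in Python 2.7. A Python 2.6-compatible equivalent (a sketch; cache_path and data are placeholder names) wraps the file in contextlib.closing:

    # Sketch of a Python 2.6-compatible version of the with-block above;
    # cache_path and data are placeholders, not the real chroma attributes.
    import gzip
    import cPickle as pickle
    from contextlib import closing

    cache_path = '/tmp/bvh_cache.pkl.gz'   # hypothetical location
    data = {'reorder': None}               # placeholder payload

    with closing(gzip.GzipFile(cache_path, 'wb', compresslevel=1)) as f:
        pickle.dump(data, f, -1)
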