# -*- coding: utf-8 -*-
''' Nose test generators

Need function load / save / roundtrip tests
'''
import os
from collections import OrderedDict
from os.path import join as pjoin, dirname
from glob import glob
from io import BytesIO
import re
from tempfile import mkdtemp
import warnings
import shutil
import gzip

from numpy.testing import (assert_array_equal, assert_array_almost_equal,
                           assert_equal, assert_, assert_warns)
import pytest
from pytest import raises as assert_raises

import numpy as np
from numpy import array
import scipy.sparse as SP

import scipy.io
from scipy.io.matlab import MatlabOpaque, MatlabFunction, MatlabObject
import scipy.io.matlab._byteordercodes as boc
from scipy.io.matlab._miobase import (
    matdims, MatWriteError, MatReadError, matfile_version)
from scipy.io.matlab._mio import mat_reader_factory, loadmat, savemat, whosmat
from scipy.io.matlab._mio5 import (
    MatFile5Writer, MatFile5Reader, varmats_from_mat, to_writeable,
    EmptyStructMarker)
import scipy.io.matlab._mio5_params as mio5p

test_data_path = pjoin(dirname(__file__), 'data')
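# The .mat files in the data directory were saved from MATLAB; their names
# encode the originating MATLAB version and platform
# (e.g. testdouble_7.1_GLNX86.mat).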


def mlarr(*args, **kwargs):
    """Convenience function to return matlab-compatible 2-D array."""
    arr = np.array(*args, **kwargs)
    arr.shape = matdims(arr)
    return arr


# Define cases to test
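# Each case maps a MATLAB variable name to its MATLAB class ('classes') and to
# the array that loadmat is expected to return for it ('expected').
# case_table4 holds types representable in the MAT4 format; case_table5 holds
# the MAT5 cases (cells, structs, objects, logicals, unicode, ...).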
theta = np.pi/4*np.arange(9, dtype=float).reshape(1, 9)
case_table4 = [
    {'name': 'double',
     'classes': {'testdouble': 'double'},
     'expected': {'testdouble': theta}
     }]
case_table4.append(
    {'name': 'string',
     'classes': {'teststring': 'char'},
     'expected': {'teststring':
                  array(['"Do nine men interpret?" "Nine men," I nod.'])}
     })
case_table4.append(
    {'name': 'complex',
     'classes': {'testcomplex': 'double'},
     'expected': {'testcomplex': np.cos(theta) + 1j*np.sin(theta)}
     })
A = np.zeros((3, 5))
A[0] = list(range(1, 6))
A[:, 0] = list(range(1, 4))
case_table4.append(
    {'name': 'matrix',
     'classes': {'testmatrix': 'double'},
     'expected': {'testmatrix': A},
     })
case_table4.append(
    {'name': 'sparse',
     'classes': {'testsparse': 'sparse'},
     'expected': {'testsparse': SP.coo_matrix(A)},
     })
B = A.astype(complex)
B[0, 0] += 1j
case_table4.append(
    {'name': 'sparsecomplex',
     'classes': {'testsparsecomplex': 'sparse'},
     'expected': {'testsparsecomplex': SP.coo_matrix(B)},
     })
case_table4.append(
    {'name': 'multi',
     'classes': {'theta': 'double', 'a': 'double'},
     'expected': {'theta': theta, 'a': A},
     })
case_table4.append(
    {'name': 'minus',
     'classes': {'testminus': 'double'},
     'expected': {'testminus': mlarr(-1)},
     })
case_table4.append(
    {'name': 'onechar',
     'classes': {'testonechar': 'char'},
     'expected': {'testonechar': array(['r'])},
     })
# Cell arrays stored as object arrays
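# (loadmat represents a MATLAB cell array as a NumPy object array whose
# elements are themselves matlab-compatible 2-D arrays.)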
CA = mlarr((  # tuple for object array creation
    [],
    mlarr([1]),
    mlarr([[1, 2]]),
    mlarr([[1, 2, 3]])), dtype=object).reshape(1, -1)
CA[0, 0] = array(
    ['This cell contains this string and 3 arrays of increasing length'])
case_table5 = [
    {'name': 'cell',
     'classes': {'testcell': 'cell'},
     'expected': {'testcell': CA}}]
CAE = mlarr((  # tuple for object array creation
    mlarr(1),
    mlarr(2),
    mlarr([]),
    mlarr([]),
    mlarr(3)), dtype=object).reshape(1, -1)
objarr = np.empty((1, 1), dtype=object)
objarr[0, 0] = mlarr(1)
case_table5.append(
    {'name': 'scalarcell',
     'classes': {'testscalarcell': 'cell'},
     'expected': {'testscalarcell': objarr}
     })
case_table5.append(
    {'name': 'emptycell',
     'classes': {'testemptycell': 'cell'},
     'expected': {'testemptycell': CAE}})
case_table5.append(
    {'name': 'stringarray',
     'classes': {'teststringarray': 'char'},
     'expected': {'teststringarray': array(
         ['one  ', 'two  ', 'three'])},  # padded to equal length, as in MATLAB
     })
case_table5.append(
    {'name': '3dmatrix',
     'classes': {'test3dmatrix': 'double'},
     'expected': {
         'test3dmatrix': np.transpose(np.reshape(list(range(1, 25)), (4, 3, 2)))}
     })
st_sub_arr = array([np.sqrt(2), np.exp(1), np.pi]).reshape(1, 3)
dtype = [(n, object) for n in ['stringfield', 'doublefield', 'complexfield']]
st1 = np.zeros((1, 1), dtype)
st1['stringfield'][0, 0] = array(['Rats live on no evil star.'])
st1['doublefield'][0, 0] = st_sub_arr
st1['complexfield'][0, 0] = st_sub_arr * (1 + 1j)
case_table5.append(
    {'name': 'struct',
     'classes': {'teststruct': 'struct'},
     'expected': {'teststruct': st1}
     })
CN = np.zeros((1, 2), dtype=object)
CN[0, 0] = mlarr(1)
CN[0, 1] = np.zeros((1, 3), dtype=object)
CN[0, 1][0, 0] = mlarr(2, dtype=np.uint8)
CN[0, 1][0, 1] = mlarr([[3]], dtype=np.uint8)
CN[0, 1][0, 2] = np.zeros((1, 2), dtype=object)
CN[0, 1][0, 2][0, 0] = mlarr(4, dtype=np.uint8)
CN[0, 1][0, 2][0, 1] = mlarr(5, dtype=np.uint8)
case_table5.append(
    {'name': 'cellnest',
     'classes': {'testcellnest': 'cell'},
     'expected': {'testcellnest': CN},
     })
st2 = np.empty((1, 1), dtype=[(n, object) for n in ['one', 'two']])
st2[0, 0]['one'] = mlarr(1)
st2[0, 0]['two'] = np.empty((1, 1), dtype=[('three', object)])
st2[0, 0]['two'][0, 0]['three'] = array(['number 3'])
case_table5.append(
    {'name': 'structnest',
     'classes': {'teststructnest': 'struct'},
     'expected': {'teststructnest': st2}
     })
a = np.empty((1, 2), dtype=[(n, object) for n in ['one', 'two']])
a[0, 0]['one'] = mlarr(1)
a[0, 0]['two'] = mlarr(2)
a[0, 1]['one'] = array(['number 1'])
a[0, 1]['two'] = array(['number 2'])
case_table5.append(
    {'name': 'structarr',
     'classes': {'teststructarr': 'struct'},
     'expected': {'teststructarr': a}
     })
ODT = np.dtype([(n, object) for n in
                ['expr', 'inputExpr', 'args',
                 'isEmpty', 'numArgs', 'version']])
MO = MatlabObject(np.zeros((1, 1), dtype=ODT), 'inline')
m0 = MO[0, 0]
m0['expr'] = array(['x'])
m0['inputExpr'] = array([' x = INLINE_INPUTS_{1};'])
m0['args'] = array(['x'])
m0['isEmpty'] = mlarr(0)
m0['numArgs'] = mlarr(1)
m0['version'] = mlarr(1)
case_table5.append(
    {'name': 'object',
     'classes': {'testobject': 'object'},
     'expected': {'testobject': MO}
     })
fp_u_str = open(pjoin(test_data_path, 'japanese_utf8.txt'), 'rb')
u_str = fp_u_str.read().decode('utf-8')
fp_u_str.close()
case_table5.append(
    {'name': 'unicode',
     'classes': {'testunicode': 'char'},
     'expected': {'testunicode': array([u_str])}
     })
case_table5.append(
    {'name': 'sparse',
     'classes': {'testsparse': 'sparse'},
     'expected': {'testsparse': SP.coo_matrix(A)},
     })
case_table5.append(
    {'name': 'sparsecomplex',
     'classes': {'testsparsecomplex': 'sparse'},
     'expected': {'testsparsecomplex': SP.coo_matrix(B)},
     })
case_table5.append(
    {'name': 'bool',
     'classes': {'testbools': 'logical'},
     'expected': {'testbools':
                  array([[True], [False]])},
     })
case_table5_rt = case_table5[:]
# Inline functions can't be concatenated in matlab, so RT only
case_table5_rt.append(
    {'name': 'objectarray',
     'classes': {'testobjectarray': 'object'},
     'expected': {'testobjectarray': np.repeat(MO, 2).reshape(1, 2)}})


def types_compatible(var1, var2):
    """Check if types are same or compatible.

    0-D numpy scalars are compatible with bare python scalars.
    """
    type1 = type(var1)
    type2 = type(var2)
    if type1 is type2:
        return True
    if type1 is np.ndarray and var1.shape == ():
        return type(var1.item()) is type2
    if type2 is np.ndarray and var2.shape == ():
        return type(var2.item()) is type1
    return False


def _check_level(label, expected, actual):
    """ Check one level of a potentially nested array """
    if SP.issparse(expected):  # allow different types of sparse matrices
        assert_(SP.issparse(actual))
        assert_array_almost_equal(actual.toarray(),
                                  expected.toarray(),
                                  err_msg=label,
                                  decimal=5)
        return
    # Check types are as expected
    assert_(types_compatible(expected, actual),
            "Expected type %s, got %s at %s" %
            (type(expected), type(actual), label))
    # A field in a record array may not be an ndarray
    # A scalar from a record array will be type np.void
    if not isinstance(expected,
                      (np.void, np.ndarray, MatlabObject)):
        assert_equal(expected, actual)
        return
    # This is an ndarray-like thing
    assert_(expected.shape == actual.shape,
            msg='Expected shape %s, got %s at %s' % (expected.shape,
                                                     actual.shape,
                                                     label))
    ex_dtype = expected.dtype
    if ex_dtype.hasobject:  # array of objects
        if isinstance(expected, MatlabObject):
            assert_equal(expected.classname, actual.classname)
        for i, ev in enumerate(expected):
            level_label = "%s, [%d], " % (label, i)
            _check_level(level_label, ev, actual[i])
        return
    if ex_dtype.fields:  # probably recarray
        for fn in ex_dtype.fields:
            level_label = "%s, field %s, " % (label, fn)
            _check_level(level_label,
                         expected[fn], actual[fn])
        return
    if ex_dtype.type in (str,  # string or bool
                         np.unicode_,
                         np.bool_):
        assert_equal(actual, expected, err_msg=label)
        return
    # Something numeric
    assert_array_almost_equal(actual, expected, err_msg=label, decimal=5)


def _load_check_case(name, files, case):
    for file_name in files:
        matdict = loadmat(file_name, struct_as_record=True)
        label = "test %s; file %s" % (name, file_name)
        for k, expected in case.items():
            k_label = "%s, variable %s" % (label, k)
            assert_(k in matdict, "Missing key at %s" % k_label)
            _check_level(k_label, expected, matdict[k])


def _whos_check_case(name, files, case, classes):
    for file_name in files:
        label = "test %s; file %s" % (name, file_name)
        whos = whosmat(file_name)
        expected_whos = [
            (k, expected.shape, classes[k]) for k, expected in case.items()]
        whos.sort()
        expected_whos.sort()
        assert_equal(whos, expected_whos,
                     "%s: %r != %r" % (label, whos, expected_whos)
                     )


# Round trip tests
def _rt_check_case(name, expected, format):
    mat_stream = BytesIO()
    savemat(mat_stream, expected, format=format)
    mat_stream.seek(0)
    _load_check_case(name, [mat_stream], expected)


# generator for tests
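# ``filt`` is a glob pattern used to find the saved MATLAB files for each
# case; filt=None skips the file lookup (used by the in-memory round-trip
# tests below).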
def _cases(version, filt='test%(name)s_*.mat'):
    if version == '4':
        cases = case_table4
    elif version == '5':
        cases = case_table5
    else:
        assert version == '5_rt'
        cases = case_table5_rt
    for case in cases:
        name = case['name']
        expected = case['expected']
        if filt is None:
            files = None
        else:
            use_filt = pjoin(test_data_path, filt % dict(name=name))
            files = glob(use_filt)
            assert len(files) > 0, \
                "No files for test %s using filter %s" % (name, filt)
        classes = case['classes']
        yield name, files, expected, classes


@pytest.mark.parametrize('version', ('4', '5'))
def test_load(version):
    for case in _cases(version):
        _load_check_case(*case[:3])


@pytest.mark.parametrize('version', ('4', '5'))
def test_whos(version):
    for case in _cases(version):
        _whos_check_case(*case)


# generator for round trip tests
@pytest.mark.parametrize('version, fmts', [
    ('4', ['4', '5']),
    ('5_rt', ['5']),
])
def test_round_trip(version, fmts):
    for case in _cases(version, filt=None):
        for fmt in fmts:
            _rt_check_case(case[0], case[2], fmt)


def test_gzip_simple():
    xdense = np.zeros((20, 20))
    xdense[2, 3] = 2.3
    xdense[4, 5] = 4.5
    x = SP.csc_matrix(xdense)
    name = 'gzip_test'
    expected = {'x': x}
    format = '4'
    tmpdir = mkdtemp()
    try:
        fname = pjoin(tmpdir, name)
        mat_stream = gzip.open(fname, mode='wb')
        savemat(mat_stream, expected, format=format)
        mat_stream.close()
        mat_stream = gzip.open(fname, mode='rb')
        actual = loadmat(mat_stream, struct_as_record=True)
        mat_stream.close()
    finally:
        shutil.rmtree(tmpdir)
    assert_array_almost_equal(actual['x'].toarray(),
                              expected['x'].toarray(),
                              err_msg=repr(actual))


def test_multiple_open():
    # Ticket #1039, on Windows: check that files are not left open
    tmpdir = mkdtemp()
    try:
        x = dict(x=np.zeros((2, 2)))
        fname = pjoin(tmpdir, "a.mat")
        # Check that file is not left open
        savemat(fname, x)
        os.unlink(fname)
        savemat(fname, x)
        loadmat(fname)
        os.unlink(fname)
        # Check that stream is left open
        f = open(fname, 'wb')
        savemat(f, x)
        f.seek(0)
        f.close()
        f = open(fname, 'rb')
        loadmat(f)
        f.seek(0)
        f.close()
    finally:
        shutil.rmtree(tmpdir)


def test_mat73():
    # Check any hdf5 files raise an error
    filenames = glob(
        pjoin(test_data_path, 'testhdf5*.mat'))
    assert_(len(filenames) > 0)
    for filename in filenames:
        fp = open(filename, 'rb')
        assert_raises(NotImplementedError,
                      loadmat,
                      fp,
                      struct_as_record=True)
        fp.close()


def test_warnings():
    # This test is an echo of the previous behavior, which was to raise a
    # warning if the user triggered a search for mat files on the Python
    # system path. We can remove the test in the next version after upcoming
    # (0.13).
    fname = pjoin(test_data_path, 'testdouble_7.1_GLNX86.mat')
    with warnings.catch_warnings():
        warnings.simplefilter('error')
        # This should not generate a warning
        loadmat(fname, struct_as_record=True)
        # This neither
        loadmat(fname, struct_as_record=False)


def test_regression_653():
    # Saving a dictionary with only invalid keys used to raise an error.
    # Now we save this as an empty struct in matlab space.
    sio = BytesIO()
    savemat(sio, {'d': {1: 2}}, format='5')
    back = loadmat(sio)['d']
    # Check we got an empty struct equivalent
    assert_equal(back.shape, (1, 1))
    assert_equal(back.dtype, np.dtype(object))
    assert_(back[0, 0] is None)


def test_structname_len():
    # Test limit for length of field names in structs
    lim = 31
    fldname = 'a' * lim
    st1 = np.zeros((1, 1), dtype=[(fldname, object)])
    savemat(BytesIO(), {'longstruct': st1}, format='5')
    fldname = 'a' * (lim + 1)
    st1 = np.zeros((1, 1), dtype=[(fldname, object)])
    assert_raises(ValueError, savemat, BytesIO(),
                  {'longstruct': st1}, format='5')


def test_4_and_long_field_names_incompatible():
    # Long field names option not supported in 4
    my_struct = np.zeros((1, 1), dtype=[('my_fieldname', object)])
    assert_raises(ValueError, savemat, BytesIO(),
                  {'my_struct': my_struct}, format='4', long_field_names=True)


def test_long_field_names():
    # Test limit for length of field names in structs
    lim = 63
    fldname = 'a' * lim
    st1 = np.zeros((1, 1), dtype=[(fldname, object)])
    savemat(BytesIO(), {'longstruct': st1}, format='5', long_field_names=True)
    fldname = 'a' * (lim + 1)
    st1 = np.zeros((1, 1), dtype=[(fldname, object)])
    assert_raises(ValueError, savemat, BytesIO(),
                  {'longstruct': st1}, format='5', long_field_names=True)


def test_long_field_names_in_struct():
    # Regression test - long_field_names was erased if you passed a struct
    # within a struct
    lim = 63
    fldname = 'a' * lim
    cell = np.ndarray((1, 2), dtype=object)
    st1 = np.zeros((1, 1), dtype=[(fldname, object)])
    cell[0, 0] = st1
    cell[0, 1] = st1
    savemat(BytesIO(), {'longstruct': cell}, format='5', long_field_names=True)
    #
    # Check to make sure it fails with long field names off
    #
    assert_raises(ValueError, savemat, BytesIO(),
                  {'longstruct': cell}, format='5', long_field_names=False)


def test_cell_with_one_thing_in_it():
    # Regression test - make a cell array that's 1 x 2 and put two
    # strings in it. It works. Make a cell array that's 1 x 1 and put
    # a string in it. It should work but, in the old days, it didn't.
    cells = np.ndarray((1, 2), dtype=object)
    cells[0, 0] = 'Hello'
    cells[0, 1] = 'World'
    savemat(BytesIO(), {'x': cells}, format='5')
    cells = np.ndarray((1, 1), dtype=object)
    cells[0, 0] = 'Hello, world'
    savemat(BytesIO(), {'x': cells}, format='5')


def test_writer_properties():
    # Tests getting, setting of properties of matrix writer
    mfw = MatFile5Writer(BytesIO())
    assert_equal(mfw.global_vars, [])
    mfw.global_vars = ['avar']
    assert_equal(mfw.global_vars, ['avar'])
    assert_equal(mfw.unicode_strings, False)
    mfw.unicode_strings = True
    assert_equal(mfw.unicode_strings, True)
    assert_equal(mfw.long_field_names, False)
    mfw.long_field_names = True
    assert_equal(mfw.long_field_names, True)


def test_use_small_element():
    # Test whether we're using small data element or not
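    # (In the MAT5 format, data of 4 bytes or fewer can be packed into the
    # 8-byte tag itself, the "small data element", so a 4-character variable
    # name takes noticeably less space than a 5-character one.)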
    sio = BytesIO()
    wtr = MatFile5Writer(sio)
    # First check size for no sde for name
    arr = np.zeros(10)
    wtr.put_variables({'aaaaa': arr})
    w_sz = len(sio.getvalue())
    # Check small name results in largish difference in size
    sio.truncate(0)
    sio.seek(0)
    wtr.put_variables({'aaaa': arr})
    assert_(w_sz - len(sio.getvalue()) > 4)
    # Whereas increasing name size makes less difference
    sio.truncate(0)
    sio.seek(0)
    wtr.put_variables({'aaaaaa': arr})
    assert_(len(sio.getvalue()) - w_sz < 4)


def test_save_dict():
    # Test that both dict and OrderedDict can be saved (as recarray),
    # loaded as matstruct, and preserve order
    ab_exp = np.array([[(1, 2)]], dtype=[('a', object), ('b', object)])
    for dict_type in (dict, OrderedDict):
        # Initialize with tuples to keep order
        d = dict_type([('a', 1), ('b', 2)])
        stream = BytesIO()
        savemat(stream, {'dict': d})
        stream.seek(0)
        vals = loadmat(stream)['dict']
        assert_equal(vals.dtype.names, ('a', 'b'))
        assert_array_equal(vals, ab_exp)


def test_1d_shape():
    # New 5 behavior is 1D -> row vector
    arr = np.arange(5)
    for format in ('4', '5'):
        # The default oned_as is 'row', giving a row vector
        stream = BytesIO()
        savemat(stream, {'oned': arr}, format=format)
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (1, 5))
        # can be explicitly 'column' for oned_as
        stream = BytesIO()
        savemat(stream, {'oned': arr},
                format=format,
                oned_as='column')
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (5, 1))
        # but different from 'row'
        stream = BytesIO()
        savemat(stream, {'oned': arr},
                format=format,
                oned_as='row')
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (1, 5))


def test_compression():
    arr = np.zeros(100).reshape((5, 20))
    arr[2, 10] = 1
    stream = BytesIO()
    savemat(stream, {'arr': arr})
    raw_len = len(stream.getvalue())
    vals = loadmat(stream)
    assert_array_equal(vals['arr'], arr)
    stream = BytesIO()
    savemat(stream, {'arr': arr}, do_compression=True)
    compressed_len = len(stream.getvalue())
    vals = loadmat(stream)
    assert_array_equal(vals['arr'], arr)
    assert_(raw_len > compressed_len)
    # Concatenate, test later
    arr2 = arr.copy()
    arr2[0, 0] = 1
    stream = BytesIO()
    savemat(stream, {'arr': arr, 'arr2': arr2}, do_compression=False)
    vals = loadmat(stream)
    assert_array_equal(vals['arr2'], arr2)
    stream = BytesIO()
    savemat(stream, {'arr': arr, 'arr2': arr2}, do_compression=True)
    vals = loadmat(stream)
    assert_array_equal(vals['arr2'], arr2)


def test_single_object():
    stream = BytesIO()
    savemat(stream, {'A': np.array(1, dtype=object)})


def test_skip_variable():
    # Test skipping over the first of two variables in a MAT file
    # using mat_reader_factory and put_variables to read them in.
    #
    # This is a regression test of a problem that's caused by
    # using the compressed file reader seek instead of the raw file
    # I/O seek when skipping over a compressed chunk.
    #
    # The problem arises when the chunk is large: this file has
    # a 256x256 array of random (uncompressible) doubles.
    #
    filename = pjoin(test_data_path, 'test_skip_variable.mat')
    #
    # Prove that it loads with loadmat
    #
    d = loadmat(filename, struct_as_record=True)
    assert_('first' in d)
    assert_('second' in d)
    #
    # Make the factory
    #
    factory, file_opened = mat_reader_factory(filename, struct_as_record=True)
    #
    # This is where the factory breaks with an error in MatMatrixGetter.to_next
    #
    d = factory.get_variables('second')
    assert_('second' in d)
    factory.mat_stream.close()


def test_empty_struct():
    # ticket 885
    filename = pjoin(test_data_path, 'test_empty_struct.mat')
    # before ticket fix, this would crash with ValueError, empty data
    # type
    d = loadmat(filename, struct_as_record=True)
    a = d['a']
    assert_equal(a.shape, (1, 1))
    assert_equal(a.dtype, np.dtype(object))
    assert_(a[0, 0] is None)
    stream = BytesIO()
    arr = np.array((), dtype='U')
    # before ticket fix, this used to give data type not understood
    savemat(stream, {'arr': arr})
    d = loadmat(stream)
    a2 = d['arr']
    assert_array_equal(a2, arr)


def test_save_empty_dict():
    # saving empty dict also gives empty struct
    stream = BytesIO()
    savemat(stream, {'arr': {}})
    d = loadmat(stream)
    a = d['arr']
    assert_equal(a.shape, (1, 1))
    assert_equal(a.dtype, np.dtype(object))
    assert_(a[0, 0] is None)


def assert_any_equal(output, alternatives):
    """ Assert `output` is equal to at least one element in `alternatives`
    """
    one_equal = False
    for expected in alternatives:
        if np.all(output == expected):
            one_equal = True
            break
    assert_(one_equal)


def test_to_writeable():
    # Test to_writeable function
    res = to_writeable(np.array([1]))  # pass through ndarrays
    assert_equal(res.shape, (1,))
    assert_array_equal(res, 1)
    # Dict fields can be written in any order
    expected1 = np.array([(1, 2)], dtype=[('a', '|O8'), ('b', '|O8')])
    expected2 = np.array([(2, 1)], dtype=[('b', '|O8'), ('a', '|O8')])
    alternatives = (expected1, expected2)
    assert_any_equal(to_writeable({'a': 1, 'b': 2}), alternatives)
    # Fields with underscores discarded
    assert_any_equal(to_writeable({'a': 1, 'b': 2, '_c': 3}), alternatives)
    # Not-string fields discarded
    assert_any_equal(to_writeable({'a': 1, 'b': 2, 100: 3}), alternatives)
    # String fields that are not valid Python identifiers discarded
    assert_any_equal(to_writeable({'a': 1, 'b': 2, '99': 3}), alternatives)

    # Object with field names is equivalent
    class klass:
        pass

    c = klass
    c.a = 1
    c.b = 2
    assert_any_equal(to_writeable(c), alternatives)
    # empty list and tuple go to empty array
    res = to_writeable([])
    assert_equal(res.shape, (0,))
    assert_equal(res.dtype.type, np.float64)
    res = to_writeable(())
    assert_equal(res.shape, (0,))
    assert_equal(res.dtype.type, np.float64)
    # None -> None
    assert_(to_writeable(None) is None)
    # String to strings
    assert_equal(to_writeable('a string').dtype.type, np.str_)
    # Python scalars convert to NumPy scalars
    res = to_writeable(1)
    assert_equal(res.shape, ())
    assert_equal(res.dtype.type, np.array(1).dtype.type)
    assert_array_equal(res, 1)
    # Empty dict returns EmptyStructMarker
    assert_(to_writeable({}) is EmptyStructMarker)
    # Object does not have (even empty) __dict__
    assert_(to_writeable(object()) is None)

    # Custom object does have empty __dict__, returns EmptyStructMarker
    class C:
        pass

    assert_(to_writeable(c()) is EmptyStructMarker)
    # dict keys with legal characters are convertible
    res = to_writeable({'a': 1})['a']
    assert_equal(res.shape, (1,))
    assert_equal(res.dtype.type, np.object_)
    # Only fields with illegal characters, falls back to EmptyStruct
    assert_(to_writeable({'1': 1}) is EmptyStructMarker)
    assert_(to_writeable({'_a': 1}) is EmptyStructMarker)
    # Unless there are valid fields, in which case structured array
    assert_equal(to_writeable({'1': 1, 'f': 2}),
                 np.array([(2,)], dtype=[('f', '|O8')]))


def test_recarray():
    # check roundtrip of structured array
    dt = [('f1', 'f8'),
          ('f2', 'S10')]
    arr = np.zeros((2,), dtype=dt)
    arr[0]['f1'] = 0.5
    arr[0]['f2'] = 'python'
    arr[1]['f1'] = 99
    arr[1]['f2'] = 'not perl'
    stream = BytesIO()
    savemat(stream, {'arr': arr})
    d = loadmat(stream, struct_as_record=False)
    a20 = d['arr'][0, 0]
    assert_equal(a20.f1, 0.5)
    assert_equal(a20.f2, 'python')
    d = loadmat(stream, struct_as_record=True)
    a20 = d['arr'][0, 0]
    assert_equal(a20['f1'], 0.5)
    assert_equal(a20['f2'], 'python')
    # structs always come back as object types
    assert_equal(a20.dtype, np.dtype([('f1', 'O'),
                                      ('f2', 'O')]))
    a21 = d['arr'].flat[1]
    assert_equal(a21['f1'], 99)
    assert_equal(a21['f2'], 'not perl')


def test_save_object():
    class C:
        pass

    c = C()
    c.field1 = 1
    c.field2 = 'a string'
    stream = BytesIO()
    savemat(stream, {'c': c})
    d = loadmat(stream, struct_as_record=False)
    c2 = d['c'][0, 0]
    assert_equal(c2.field1, 1)
    assert_equal(c2.field2, 'a string')
    d = loadmat(stream, struct_as_record=True)
    c2 = d['c'][0, 0]
    assert_equal(c2['field1'], 1)
    assert_equal(c2['field2'], 'a string')


def test_read_opts():
    # tests if read is seeing option sets, at initialization and after
    # initialization
    arr = np.arange(6).reshape(1, 6)
    stream = BytesIO()
    savemat(stream, {'a': arr})
    rdr = MatFile5Reader(stream)
    back_dict = rdr.get_variables()
    rarr = back_dict['a']
    assert_array_equal(rarr, arr)
    rdr = MatFile5Reader(stream, squeeze_me=True)
    assert_array_equal(rdr.get_variables()['a'], arr.reshape((6,)))
    rdr.squeeze_me = False
    assert_array_equal(rarr, arr)
    rdr = MatFile5Reader(stream, byte_order=boc.native_code)
    assert_array_equal(rdr.get_variables()['a'], arr)
    # inverted byte code leads to error on read because of swapped
    # header etc.
    rdr = MatFile5Reader(stream, byte_order=boc.swapped_code)
    assert_raises(Exception, rdr.get_variables)
    rdr.byte_order = boc.native_code
    assert_array_equal(rdr.get_variables()['a'], arr)
    arr = np.array(['a string'])
    stream.truncate(0)
    stream.seek(0)
    savemat(stream, {'a': arr})
    rdr = MatFile5Reader(stream)
    assert_array_equal(rdr.get_variables()['a'], arr)
    rdr = MatFile5Reader(stream, chars_as_strings=False)
    carr = np.atleast_2d(np.array(list(arr.item()), dtype='U1'))
    assert_array_equal(rdr.get_variables()['a'], carr)
    rdr.chars_as_strings = True
    assert_array_equal(rdr.get_variables()['a'], arr)


def test_empty_string():
    # make sure reading empty string does not raise error
    estring_fname = pjoin(test_data_path, 'single_empty_string.mat')
    fp = open(estring_fname, 'rb')
    rdr = MatFile5Reader(fp)
    d = rdr.get_variables()
    fp.close()
    assert_array_equal(d['a'], np.array([], dtype='U1'))
    # Empty string round trip. Matlab cannot distinguish
    # between a string array that is empty, and a string array
    # containing a single empty string, because it stores strings as
    # arrays of char. There is no way of having an array of char that
    # is not empty, but contains an empty string.
    stream = BytesIO()
    savemat(stream, {'a': np.array([''])})
    rdr = MatFile5Reader(stream)
    d = rdr.get_variables()
    assert_array_equal(d['a'], np.array([], dtype='U1'))
    stream.truncate(0)
    stream.seek(0)
    savemat(stream, {'a': np.array([], dtype='U1')})
    rdr = MatFile5Reader(stream)
    d = rdr.get_variables()
    assert_array_equal(d['a'], np.array([], dtype='U1'))
    stream.close()


def test_corrupted_data():
    import zlib
    for exc, fname in [(ValueError, 'corrupted_zlib_data.mat'),
                       (zlib.error, 'corrupted_zlib_checksum.mat')]:
        with open(pjoin(test_data_path, fname), 'rb') as fp:
            rdr = MatFile5Reader(fp)
            assert_raises(exc, rdr.get_variables)


def test_corrupted_data_check_can_be_disabled():
    with open(pjoin(test_data_path, 'corrupted_zlib_data.mat'), 'rb') as fp:
        rdr = MatFile5Reader(fp, verify_compressed_data_integrity=False)
        rdr.get_variables()


def test_read_both_endian():
    # make sure big- and little-endian data is read correctly
    for fname in ('big_endian.mat', 'little_endian.mat'):
        fp = open(pjoin(test_data_path, fname), 'rb')
        rdr = MatFile5Reader(fp)
        d = rdr.get_variables()
        fp.close()
        assert_array_equal(d['strings'],
                           np.array([['hello'],
                                     ['world']], dtype=object))
        assert_array_equal(d['floats'],
                           np.array([[2., 3.],
                                     [3., 4.]], dtype=np.float32))


def test_write_opposite_endian():
    # We don't support writing opposite endian .mat files, but we need to
    # behave correctly if the user supplies an other-endian NumPy array to
    # write out.
    float_arr = np.array([[2., 3.],
                          [3., 4.]])
    int_arr = np.arange(6).reshape((2, 3))
    uni_arr = np.array(['hello', 'world'], dtype='U')
    stream = BytesIO()
    savemat(stream, {'floats': float_arr.byteswap().newbyteorder(),
                     'ints': int_arr.byteswap().newbyteorder(),
                     'uni_arr': uni_arr.byteswap().newbyteorder()})
    rdr = MatFile5Reader(stream)
    d = rdr.get_variables()
    assert_array_equal(d['floats'], float_arr)
    assert_array_equal(d['ints'], int_arr)
    assert_array_equal(d['uni_arr'], uni_arr)
    stream.close()


def test_logical_array():
    # The roundtrip test doesn't verify that we load the data up with the
    # correct (bool) dtype
    with open(pjoin(test_data_path, 'testbool_8_WIN64.mat'), 'rb') as fobj:
        rdr = MatFile5Reader(fobj, mat_dtype=True)
        d = rdr.get_variables()
        x = np.array([[True], [False]], dtype=np.bool_)
        assert_array_equal(d['testbools'], x)
        assert_equal(d['testbools'].dtype, x.dtype)


def test_logical_out_type():
    # Confirm that bool type written as uint8, uint8 class
    # See gh-4022
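    # (The array-flags subelement carries a separate "logical" bit, which is
    # why hdr.is_logical can be True even though the class is mxUINT8_CLASS.)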
    stream = BytesIO()
    barr = np.array([False, True, False])
    savemat(stream, {'barray': barr})
    stream.seek(0)
    reader = MatFile5Reader(stream)
    reader.initialize_read()
    reader.read_file_header()
    hdr, _ = reader.read_var_header()
    assert_equal(hdr.mclass, mio5p.mxUINT8_CLASS)
    assert_equal(hdr.is_logical, True)
    var = reader.read_var_array(hdr, False)
    assert_equal(var.dtype.type, np.uint8)


def test_roundtrip_zero_dimensions():
    stream = BytesIO()
    savemat(stream, {'d': np.empty((10, 0))})
    d = loadmat(stream)
    assert d['d'].shape == (10, 0)


def test_mat4_3d():
    # test behavior when writing 3-D arrays to matlab 4 files
    stream = BytesIO()
    arr = np.arange(24).reshape((2, 3, 4))
    assert_raises(ValueError, savemat, stream, {'a': arr}, True, '4')


def test_func_read():
    func_eg = pjoin(test_data_path, 'testfunc_7.4_GLNX86.mat')
    fp = open(func_eg, 'rb')
    rdr = MatFile5Reader(fp)
    d = rdr.get_variables()
    fp.close()
    assert isinstance(d['testfunc'], MatlabFunction)
    stream = BytesIO()
    wtr = MatFile5Writer(stream)
    assert_raises(MatWriteError, wtr.put_variables, d)


def test_mat_dtype():
    double_eg = pjoin(test_data_path, 'testmatrix_6.1_SOL2.mat')
    fp = open(double_eg, 'rb')
    rdr = MatFile5Reader(fp, mat_dtype=False)
    d = rdr.get_variables()
    fp.close()
    assert_equal(d['testmatrix'].dtype.kind, 'u')
    fp = open(double_eg, 'rb')
    rdr = MatFile5Reader(fp, mat_dtype=True)
    d = rdr.get_variables()
    fp.close()
    assert_equal(d['testmatrix'].dtype.kind, 'f')


def test_sparse_in_struct():
    # reproduces bug found by DC where Cython code was insisting on
    # ndarray return type, but getting sparse matrix
    st = {'sparsefield': SP.coo_matrix(np.eye(4))}
    stream = BytesIO()
    savemat(stream, {'a': st})
    d = loadmat(stream, struct_as_record=True)
    assert_array_equal(d['a'][0, 0]['sparsefield'].toarray(), np.eye(4))


def test_mat_struct_squeeze():
    stream = BytesIO()
    in_d = {'st': {'one': 1, 'two': 2}}
    savemat(stream, in_d)
    # no error without squeeze
    loadmat(stream, struct_as_record=False)
    # previous error was with squeeze, with mat_struct
    loadmat(stream, struct_as_record=False, squeeze_me=True)


def test_scalar_squeeze():
    stream = BytesIO()
    in_d = {'scalar': [[0.1]], 'string': 'my name', 'st': {'one': 1, 'two': 2}}
    savemat(stream, in_d)
    out_d = loadmat(stream, squeeze_me=True)
    assert_(isinstance(out_d['scalar'], float))
    assert_(isinstance(out_d['string'], str))
    assert_(isinstance(out_d['st'], np.ndarray))


def test_str_round():
    # from report by Angus McMorland on mailing list 3 May 2010
    stream = BytesIO()
    in_arr = np.array(['Hello', 'Foob'])
    out_arr = np.array(['Hello', 'Foob '])
    savemat(stream, dict(a=in_arr))
    res = loadmat(stream)
    # resulted in ['HloolFoa', 'elWrdobr']
    assert_array_equal(res['a'], out_arr)
    stream.truncate(0)
    stream.seek(0)
    # Make Fortran ordered version of string
    in_str = in_arr.tobytes(order='F')
    in_from_str = np.ndarray(shape=a.shape,
                             dtype=in_arr.dtype,
                             order='F',
                             buffer=in_str)
    savemat(stream, dict(a=in_from_str))
    assert_array_equal(res['a'], out_arr)
    # unicode save did lead to buffer too small error
    stream.truncate(0)
    stream.seek(0)
    in_arr_u = in_arr.astype('U')
    out_arr_u = out_arr.astype('U')
    savemat(stream, {'a': in_arr_u})
    res = loadmat(stream)
    assert_array_equal(res['a'], out_arr_u)


def test_fieldnames():
    # Check that field names are as expected
    stream = BytesIO()
    savemat(stream, {'a': {'a': 1, 'b': 2}})
    res = loadmat(stream)
    field_names = res['a'].dtype.names
    assert_equal(set(field_names), set(('a', 'b')))


def test_loadmat_varnames():
    # Test that we can get just one variable from a mat file using loadmat
    mat5_sys_names = ['__globals__',
                      '__header__',
                      '__version__']
    for eg_file, sys_v_names in (
            (pjoin(test_data_path, 'testmulti_4.2c_SOL2.mat'), []), (pjoin(
                test_data_path, 'testmulti_7.4_GLNX86.mat'), mat5_sys_names)):
        vars = loadmat(eg_file)
        assert_equal(set(vars.keys()), set(['a', 'theta'] + sys_v_names))
        vars = loadmat(eg_file, variable_names='a')
        assert_equal(set(vars.keys()), set(['a'] + sys_v_names))
        vars = loadmat(eg_file, variable_names=['a'])
        assert_equal(set(vars.keys()), set(['a'] + sys_v_names))
        vars = loadmat(eg_file, variable_names=['theta'])
        assert_equal(set(vars.keys()), set(['theta'] + sys_v_names))
        vars = loadmat(eg_file, variable_names=('theta',))
        assert_equal(set(vars.keys()), set(['theta'] + sys_v_names))
        vars = loadmat(eg_file, variable_names=[])
        assert_equal(set(vars.keys()), set(sys_v_names))
        vnames = ['theta']
        vars = loadmat(eg_file, variable_names=vnames)
        assert_equal(vnames, ['theta'])


def test_round_types():
    # Check that saving, loading preserves dtype in most cases
    arr = np.arange(10)
    stream = BytesIO()
    for dts in ('f8', 'f4', 'i8', 'i4', 'i2', 'i1',
                'u8', 'u4', 'u2', 'u1', 'c16', 'c8'):
        stream.truncate(0)
        stream.seek(0)  # needed for BytesIO in Python 3
        savemat(stream, {'arr': arr.astype(dts)})
        vars = loadmat(stream)
        assert_equal(np.dtype(dts), vars['arr'].dtype)


def test_varmats_from_mat():
    # Make a mat file with several variables, write it, read it back
    names_vars = (('arr', mlarr(np.arange(10))),
                  ('mystr', mlarr('a string')),
                  ('mynum', mlarr(10)))

    # Dict like thing to give variables in defined order
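    # (savemat only needs an object with an ``items`` method, so this stands
    # in for a dict while guaranteeing iteration order.)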
    class C:
        def items(self):
            return names_vars

    stream = BytesIO()
    savemat(stream, C())
    varmats = varmats_from_mat(stream)
    assert_equal(len(varmats), 3)
    for i in range(3):
        name, var_stream = varmats[i]
        exp_name, exp_res = names_vars[i]
        assert_equal(name, exp_name)
        res = loadmat(var_stream)
        assert_array_equal(res[name], exp_res)


def test_one_by_zero():
    # Test 1x0 chars get read correctly
    func_eg = pjoin(test_data_path, 'one_by_zero_char.mat')
    fp = open(func_eg, 'rb')
    rdr = MatFile5Reader(fp)
    d = rdr.get_variables()
    fp.close()
    assert_equal(d['var'].shape, (0,))


def test_load_mat4_le():
    # We were getting byte order wrong when reading little-endian float64
    # dense matrices on big-endian platforms
    mat4_fname = pjoin(test_data_path, 'test_mat4_le_floats.mat')
    vars = loadmat(mat4_fname)
    assert_array_equal(vars['a'], [[0.1, 1.2]])


def test_unicode_mat4():
    # Mat4 should save unicode as latin1
    bio = BytesIO()
    var = {'second_cat': 'Schrödinger'}
    savemat(bio, var, format='4')
    var_back = loadmat(bio)
    assert_equal(var_back['second_cat'], var['second_cat'])


def test_logical_sparse():
    # Test we can read logical sparse stored in mat file as bytes.
    # See https://github.com/scipy/scipy/issues/3539.
    # In some files saved by MATLAB, the sparse data elements (Real Part
    # Subelement in MATLAB speak) are stored with apparent type double
    # (miDOUBLE) but are in fact single bytes.
    filename = pjoin(test_data_path, 'logical_sparse.mat')
    # Before fix, this would crash with:
    # ValueError: indices and data should have the same size
    d = loadmat(filename, struct_as_record=True)
    log_sp = d['sp_log_5_4']
    assert_(isinstance(log_sp, SP.csc_matrix))
    assert_equal(log_sp.dtype.type, np.bool_)
    assert_array_equal(log_sp.toarray(),
                       [[True, True, True, False],
                        [False, False, True, False],
                        [False, False, True, False],
                        [False, False, False, False],
                        [False, False, False, False]])


def test_empty_sparse():
    # Can we read empty sparse matrices?
    sio = BytesIO()
    import scipy.sparse
    empty_sparse = scipy.sparse.csr_matrix([[0, 0], [0, 0]])
    savemat(sio, dict(x=empty_sparse))
    sio.seek(0)
    res = loadmat(sio)
    assert_array_equal(res['x'].shape, empty_sparse.shape)
    assert_array_equal(res['x'].toarray(), 0)
    # Do empty sparse matrices get written with max nnz 1?
    # See https://github.com/scipy/scipy/issues/4208
    sio.seek(0)
    reader = MatFile5Reader(sio)
    reader.initialize_read()
    reader.read_file_header()
    hdr, _ = reader.read_var_header()
    assert_equal(hdr.nzmax, 1)


def test_empty_mat_error():
    # Test we get a specific error for an empty mat file
    sio = BytesIO()
    assert_raises(MatReadError, loadmat, sio)


def test_miuint32_compromise():
    # Reader should accept miUINT32 for miINT32, but check signs
    # mat file with miUINT32 for miINT32, but OK values
    filename = pjoin(test_data_path, 'miuint32_for_miint32.mat')
    res = loadmat(filename)
    assert_equal(res['an_array'], np.arange(10)[None, :])
    # mat file with miUINT32 for miINT32, with negative value
    filename = pjoin(test_data_path, 'bad_miuint32.mat')
    with assert_raises(ValueError):
        loadmat(filename)


def test_miutf8_for_miint8_compromise():
    # Check reader accepts ascii as miUTF8 for array names
    filename = pjoin(test_data_path, 'miutf8_array_name.mat')
    res = loadmat(filename)
    assert_equal(res['array_name'], [[1]])
    # mat file with non-ascii utf8 name raises error
    filename = pjoin(test_data_path, 'bad_miutf8_array_name.mat')
    with assert_raises(ValueError):
        loadmat(filename)


def test_bad_utf8():
    # Check that reader reads bad UTF with 'replace' option
    filename = pjoin(test_data_path, 'broken_utf8.mat')
    res = loadmat(filename)
    assert_equal(res['bad_string'],
                 b'\x80 am broken'.decode('utf8', 'replace'))


def test_save_unicode_field(tmpdir):
    filename = os.path.join(str(tmpdir), 'test.mat')
    test_dict = {u'a': {u'b': 1, u'c': 'test_str'}}
    savemat(filename, test_dict)


def test_filenotfound():
    # Check the correct error is thrown
    assert_raises(OSError, loadmat, "NotExistentFile00.mat")
    assert_raises(OSError, loadmat, "NotExistentFile00")


def test_simplify_cells():
    # Test output when simplify_cells=True
    filename = pjoin(test_data_path, 'testsimplecell.mat')
    res1 = loadmat(filename, simplify_cells=True)
    res2 = loadmat(filename, simplify_cells=False)
    assert_(isinstance(res1["s"], dict))
    assert_(isinstance(res2["s"], np.ndarray))
    assert_array_equal(res1["s"]["mycell"], np.array(["a", "b", "c"]))
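

# matfile_version reports major version 0 for MAT4 files, 1 for MAT5-family
# files (v5/v6/v7), and 2 for the HDF5-based v7.3 files.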
@pytest.mark.parametrize('version, filt, regex', [
    (0, '_4*_*', None),
    (1, '_5*_*', None),
    (1, '_6*_*', None),
    (1, '_7*_*', '^((?!hdf5).)*$'),  # not containing hdf5
    (2, '_7*_*', '.*hdf5.*'),
    (1, '8*_*', None),
])
def test_matfile_version(version, filt, regex):
    use_filt = pjoin(test_data_path, 'test*%s.mat' % filt)
    files = glob(use_filt)
    if regex is not None:
        files = [file for file in files if re.match(regex, file) is not None]
    assert len(files) > 0, \
        "No files for version %s using filter %s" % (version, filt)
    for file in files:
        got_version = matfile_version(file)
        assert got_version[0] == version


def test_opaque():
    """Test that we can read a MatlabOpaque object."""
    data = loadmat(pjoin(test_data_path, 'parabola.mat'))
    assert isinstance(data['parabola'], MatlabFunction)
    assert isinstance(data['parabola'].item()[3].item()[3], MatlabOpaque)


def test_deprecation():
    """Test that access to previous attributes still works."""
    # This should be accessible immediately from scipy.io import
    with assert_warns(DeprecationWarning):
        scipy.io.matlab.mio5_params.MatlabOpaque  # noqa
    # These should be importable but warn as well
    with assert_warns(DeprecationWarning):
        from scipy.io.matlab.miobase import MatReadError  # noqa