test_spss.py

from pathlib import Path

import numpy as np
import pytest

import pandas as pd
import pandas._testing as tm

pyreadstat = pytest.importorskip("pyreadstat")
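
# `datapath` is assumed to be the shared pandas test fixture that resolves file
# names relative to pandas/tests; every test below reads a bundled SPSS .sav file.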


# TODO(CoW) - detection of chained assignment in cython
# https://github.com/pandas-dev/pandas/issues/51315
@pytest.mark.filterwarnings("ignore::pandas.errors.ChainedAssignmentError")
@pytest.mark.parametrize("path_klass", [lambda p: p, Path])
def test_spss_labelled_num(path_klass, datapath):
    # test file from the Haven project (https://haven.tidyverse.org/)
    fname = path_klass(datapath("io", "data", "spss", "labelled-num.sav"))

    df = pd.read_spss(fname, convert_categoricals=True)
    expected = pd.DataFrame({"VAR00002": "This is one"}, index=[0])
    expected["VAR00002"] = pd.Categorical(expected["VAR00002"])
    tm.assert_frame_equal(df, expected)

    df = pd.read_spss(fname, convert_categoricals=False)
    expected = pd.DataFrame({"VAR00002": 1.0}, index=[0])
    tm.assert_frame_equal(df, expected)


@pytest.mark.filterwarnings("ignore::pandas.errors.ChainedAssignmentError")
def test_spss_labelled_num_na(datapath):
    # test file from the Haven project (https://haven.tidyverse.org/)
    fname = datapath("io", "data", "spss", "labelled-num-na.sav")

    df = pd.read_spss(fname, convert_categoricals=True)
    expected = pd.DataFrame({"VAR00002": ["This is one", None]})
    expected["VAR00002"] = pd.Categorical(expected["VAR00002"])
    tm.assert_frame_equal(df, expected)

    df = pd.read_spss(fname, convert_categoricals=False)
    expected = pd.DataFrame({"VAR00002": [1.0, np.nan]})
    tm.assert_frame_equal(df, expected)


@pytest.mark.filterwarnings("ignore::pandas.errors.ChainedAssignmentError")
def test_spss_labelled_str(datapath):
    # test file from the Haven project (https://haven.tidyverse.org/)
    fname = datapath("io", "data", "spss", "labelled-str.sav")

    df = pd.read_spss(fname, convert_categoricals=True)
    expected = pd.DataFrame({"gender": ["Male", "Female"]})
    expected["gender"] = pd.Categorical(expected["gender"])
    tm.assert_frame_equal(df, expected)

    df = pd.read_spss(fname, convert_categoricals=False)
    expected = pd.DataFrame({"gender": ["M", "F"]})
    tm.assert_frame_equal(df, expected)
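

# The umlauts file exercises non-ASCII value labels; with convert_categoricals=True
# the labels are expected to come back correctly decoded as categories.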
@pytest.mark.filterwarnings("ignore::pandas.errors.ChainedAssignmentError")
def test_spss_umlauts(datapath):
    # test file from the Haven project (https://haven.tidyverse.org/)
    fname = datapath("io", "data", "spss", "umlauts.sav")

    df = pd.read_spss(fname, convert_categoricals=True)
    expected = pd.DataFrame(
        {"var1": ["the ä umlaut", "the ü umlaut", "the ä umlaut", "the ö umlaut"]}
    )
    expected["var1"] = pd.Categorical(expected["var1"])
    tm.assert_frame_equal(df, expected)

    df = pd.read_spss(fname, convert_categoricals=False)
    expected = pd.DataFrame({"var1": [1.0, 2.0, 1.0, 3.0]})
    tm.assert_frame_equal(df, expected)


def test_spss_usecols(datapath):
    # usecols must be list-like
    fname = datapath("io", "data", "spss", "labelled-num.sav")

    with pytest.raises(TypeError, match="usecols must be list-like."):
        pd.read_spss(fname, usecols="VAR00002")
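

# `dtype_backend` is assumed to be the fixture from the shared conftest,
# parametrized over "numpy_nullable" and "pyarrow"; it selects which nullable
# dtypes read_spss returns.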
def test_spss_umlauts_dtype_backend(datapath, dtype_backend):
    # test file from the Haven project (https://haven.tidyverse.org/)
    fname = datapath("io", "data", "spss", "umlauts.sav")

    df = pd.read_spss(fname, convert_categoricals=False, dtype_backend=dtype_backend)
    expected = pd.DataFrame({"var1": [1.0, 2.0, 1.0, 3.0]}, dtype="Int64")

    if dtype_backend == "pyarrow":
        pa = pytest.importorskip("pyarrow")

        from pandas.arrays import ArrowExtensionArray

        expected = pd.DataFrame(
            {
                col: ArrowExtensionArray(pa.array(expected[col], from_pandas=True))
                for col in expected.columns
            }
        )

    tm.assert_frame_equal(df, expected)
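

# An invalid dtype_backend should be rejected before any file is opened, so a
# dummy path is enough here.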
def test_invalid_dtype_backend():
    msg = (
        "dtype_backend numpy is invalid, only 'numpy_nullable' and "
        "'pyarrow' are allowed."
    )
    with pytest.raises(ValueError, match=msg):
        pd.read_spss("test", dtype_backend="numpy")