common.py 40 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253
  1. """Common IO api utilities"""
  2. from __future__ import annotations
  3. from abc import (
  4. ABC,
  5. abstractmethod,
  6. )
  7. import codecs
  8. from collections import defaultdict
  9. import dataclasses
  10. import functools
  11. import gzip
  12. from io import (
  13. BufferedIOBase,
  14. BytesIO,
  15. RawIOBase,
  16. StringIO,
  17. TextIOBase,
  18. TextIOWrapper,
  19. )
  20. import mmap
  21. import os
  22. from pathlib import Path
  23. import re
  24. import tarfile
  25. from typing import (
  26. IO,
  27. Any,
  28. AnyStr,
  29. DefaultDict,
  30. Generic,
  31. Hashable,
  32. Literal,
  33. Mapping,
  34. Sequence,
  35. TypeVar,
  36. cast,
  37. overload,
  38. )
  39. from urllib.parse import (
  40. urljoin,
  41. urlparse as parse_url,
  42. uses_netloc,
  43. uses_params,
  44. uses_relative,
  45. )
  46. import warnings
  47. import zipfile
  48. from pandas._typing import (
  49. BaseBuffer,
  50. CompressionDict,
  51. CompressionOptions,
  52. FilePath,
  53. ReadBuffer,
  54. ReadCsvBuffer,
  55. StorageOptions,
  56. WriteBuffer,
  57. )
  58. from pandas.compat import get_lzma_file
  59. from pandas.compat._optional import import_optional_dependency
  60. from pandas.compat.compressors import BZ2File as _BZ2File
  61. from pandas.util._decorators import doc
  62. from pandas.util._exceptions import find_stack_level
  63. from pandas.core.dtypes.common import (
  64. is_bool,
  65. is_file_like,
  66. is_integer,
  67. is_list_like,
  68. )
  69. from pandas.core.indexes.api import MultiIndex
  70. from pandas.core.shared_docs import _shared_docs
# Schemes that urllib recognizes as URL protocols (e.g. "http", "ftp", "s3"
# is NOT here); the empty string is discarded so scheme-less strings are
# never treated as URLs.
_VALID_URLS = set(uses_relative + uses_netloc + uses_params)
_VALID_URLS.discard("")

# RFC 3986: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) followed by
# "://". (The second "+" inside the character class is redundant but harmless.)
_RFC_3986_PATTERN = re.compile(r"^[A-Za-z][A-Za-z0-9+\-+.]*://")

# TypeVar used so buffer-in -> buffer-out helpers preserve the exact type.
BaseBufferT = TypeVar("BaseBufferT", bound=BaseBuffer)
@dataclasses.dataclass
class IOArgs:
    """
    Return value of io/common.py:_get_filepath_or_buffer.
    """

    # Resolved path string, or an already-open buffer (e.g. from urlopen/fsspec).
    filepath_or_buffer: str | BaseBuffer
    encoding: str
    mode: str
    # Normalized compression mapping; always contains a "method" key.
    compression: CompressionDict
    # True when _get_filepath_or_buffer opened filepath_or_buffer itself and
    # the caller (get_handle) is responsible for closing it.
    should_close: bool = False
@dataclasses.dataclass
class IOHandles(Generic[AnyStr]):
    """
    Return value of io/common.py:get_handle

    Can be used as a context manager.

    This is used to easily close created buffers and to handle corner cases when
    TextIOWrapper is inserted.

    handle: The file handle to be used.
    created_handles: All file handles that are created by get_handle
    is_wrapped: Whether a TextIOWrapper needs to be detached.
    """

    # handle might not implement the IO-interface
    handle: IO[AnyStr]
    compression: CompressionDict
    created_handles: list[IO[bytes] | IO[str]] = dataclasses.field(default_factory=list)
    is_wrapped: bool = False

    def close(self) -> None:
        """
        Close all created buffers.

        Note: If a TextIOWrapper was inserted, it is flushed and detached to
        avoid closing the potentially user-created buffer.
        """
        if self.is_wrapped:
            assert isinstance(self.handle, TextIOWrapper)
            # flush + detach releases the underlying (user-owned) buffer
            # without closing it; the wrapper itself must then be dropped
            # from created_handles so the loop below does not close it.
            self.handle.flush()
            self.handle.detach()
            self.created_handles.remove(self.handle)
        for handle in self.created_handles:
            handle.close()
        # reset state so close() is idempotent
        self.created_handles = []
        self.is_wrapped = False

    def __enter__(self) -> IOHandles[AnyStr]:
        return self

    def __exit__(self, *args: Any) -> None:
        self.close()
  120. def is_url(url: object) -> bool:
  121. """
  122. Check to see if a URL has a valid protocol.
  123. Parameters
  124. ----------
  125. url : str or unicode
  126. Returns
  127. -------
  128. isurl : bool
  129. If `url` has a valid protocol return True otherwise False.
  130. """
  131. if not isinstance(url, str):
  132. return False
  133. return parse_url(url).scheme in _VALID_URLS
  134. @overload
  135. def _expand_user(filepath_or_buffer: str) -> str:
  136. ...
  137. @overload
  138. def _expand_user(filepath_or_buffer: BaseBufferT) -> BaseBufferT:
  139. ...
  140. def _expand_user(filepath_or_buffer: str | BaseBufferT) -> str | BaseBufferT:
  141. """
  142. Return the argument with an initial component of ~ or ~user
  143. replaced by that user's home directory.
  144. Parameters
  145. ----------
  146. filepath_or_buffer : object to be converted if possible
  147. Returns
  148. -------
  149. expanded_filepath_or_buffer : an expanded filepath or the
  150. input if not expandable
  151. """
  152. if isinstance(filepath_or_buffer, str):
  153. return os.path.expanduser(filepath_or_buffer)
  154. return filepath_or_buffer
  155. def validate_header_arg(header: object) -> None:
  156. if header is None:
  157. return
  158. if is_integer(header):
  159. header = cast(int, header)
  160. if header < 0:
  161. # GH 27779
  162. raise ValueError(
  163. "Passing negative integer to header is invalid. "
  164. "For no header, use header=None instead"
  165. )
  166. return
  167. if is_list_like(header, allow_sets=False):
  168. header = cast(Sequence, header)
  169. if not all(map(is_integer, header)):
  170. raise ValueError("header must be integer or list of integers")
  171. if any(i < 0 for i in header):
  172. raise ValueError("cannot specify multi-index header with negative integers")
  173. return
  174. if is_bool(header):
  175. raise TypeError(
  176. "Passing a bool to header is invalid. Use header=None for no header or "
  177. "header=int or list-like of ints to specify "
  178. "the row(s) making up the column names"
  179. )
  180. # GH 16338
  181. raise ValueError("header must be integer or list of integers")
  182. @overload
  183. def stringify_path(filepath_or_buffer: FilePath, convert_file_like: bool = ...) -> str:
  184. ...
  185. @overload
  186. def stringify_path(
  187. filepath_or_buffer: BaseBufferT, convert_file_like: bool = ...
  188. ) -> BaseBufferT:
  189. ...
  190. def stringify_path(
  191. filepath_or_buffer: FilePath | BaseBufferT,
  192. convert_file_like: bool = False,
  193. ) -> str | BaseBufferT:
  194. """
  195. Attempt to convert a path-like object to a string.
  196. Parameters
  197. ----------
  198. filepath_or_buffer : object to be converted
  199. Returns
  200. -------
  201. str_filepath_or_buffer : maybe a string version of the object
  202. Notes
  203. -----
  204. Objects supporting the fspath protocol (python 3.6+) are coerced
  205. according to its __fspath__ method.
  206. Any other object is passed through unchanged, which includes bytes,
  207. strings, buffers, or anything else that's not even path-like.
  208. """
  209. if not convert_file_like and is_file_like(filepath_or_buffer):
  210. # GH 38125: some fsspec objects implement os.PathLike but have already opened a
  211. # file. This prevents opening the file a second time. infer_compression calls
  212. # this function with convert_file_like=True to infer the compression.
  213. return cast(BaseBufferT, filepath_or_buffer)
  214. if isinstance(filepath_or_buffer, os.PathLike):
  215. filepath_or_buffer = filepath_or_buffer.__fspath__()
  216. return _expand_user(filepath_or_buffer)
  217. def urlopen(*args, **kwargs):
  218. """
  219. Lazy-import wrapper for stdlib urlopen, as that imports a big chunk of
  220. the stdlib.
  221. """
  222. import urllib.request
  223. return urllib.request.urlopen(*args, **kwargs)
  224. def is_fsspec_url(url: FilePath | BaseBuffer) -> bool:
  225. """
  226. Returns true if the given URL looks like
  227. something fsspec can handle
  228. """
  229. return (
  230. isinstance(url, str)
  231. and bool(_RFC_3986_PATTERN.match(url))
  232. and not url.startswith(("http://", "https://"))
  233. )
@doc(
    storage_options=_shared_docs["storage_options"],
    compression_options=_shared_docs["compression_options"] % "filepath_or_buffer",
)
def _get_filepath_or_buffer(
    filepath_or_buffer: FilePath | BaseBuffer,
    encoding: str = "utf-8",
    compression: CompressionOptions = None,
    mode: str = "r",
    storage_options: StorageOptions = None,
) -> IOArgs:
    """
    If the filepath_or_buffer is a url, translate and return the buffer.
    Otherwise passthrough.

    Parameters
    ----------
    filepath_or_buffer : a url, filepath (str, py.path.local or pathlib.Path),
                         or buffer
    {compression_options}

        .. versionchanged:: 1.4.0 Zstandard support.

    encoding : the encoding to use to decode bytes, default is 'utf-8'
    mode : str, optional

    {storage_options}

        .. versionadded:: 1.2.0

    ..versionchange:: 1.2.0

      Returns the dataclass IOArgs.
    """
    filepath_or_buffer = stringify_path(filepath_or_buffer)

    # handle compression dict
    compression_method, compression = get_compression_method(compression)
    compression_method = infer_compression(filepath_or_buffer, compression_method)

    # GH21227 internal compression is not used for non-binary handles.
    if compression_method and hasattr(filepath_or_buffer, "write") and "b" not in mode:
        warnings.warn(
            "compression has no effect when passing a non-binary object as input.",
            RuntimeWarning,
            stacklevel=find_stack_level(),
        )
        compression_method = None

    compression = dict(compression, method=compression_method)

    # bz2 and xz do not write the byte order mark for utf-16 and utf-32
    # print a warning when writing such files
    if (
        "w" in mode
        and compression_method in ["bz2", "xz"]
        and encoding in ["utf-16", "utf-32"]
    ):
        warnings.warn(
            f"{compression} will not write the byte order mark for {encoding}",
            UnicodeWarning,
            stacklevel=find_stack_level(),
        )

    # Use binary mode when converting path-like objects to file-like objects (fsspec)
    # except when text mode is explicitly requested. The original mode is returned if
    # fsspec is not used.
    fsspec_mode = mode
    if "t" not in fsspec_mode and "b" not in fsspec_mode:
        fsspec_mode += "b"

    # --- plain http(s)/ftp URLs: fetched eagerly via urllib into a BytesIO ---
    if isinstance(filepath_or_buffer, str) and is_url(filepath_or_buffer):
        # TODO: fsspec can also handle HTTP via requests, but leaving this
        # unchanged. using fsspec appears to break the ability to infer if the
        # server responded with gzipped data
        storage_options = storage_options or {}

        # waiting until now for importing to match intended lazy logic of
        # urlopen function defined elsewhere in this module
        import urllib.request

        # assuming storage_options is to be interpreted as headers
        req_info = urllib.request.Request(filepath_or_buffer, headers=storage_options)
        with urlopen(req_info) as req:
            content_encoding = req.headers.get("Content-Encoding", None)
            if content_encoding == "gzip":
                # Override compression based on Content-Encoding header
                compression = {"method": "gzip"}
            reader = BytesIO(req.read())
        return IOArgs(
            filepath_or_buffer=reader,
            encoding=encoding,
            compression=compression,
            should_close=True,
            mode=fsspec_mode,
        )

    # --- fsspec URLs (s3://, gcs://, ...): opened lazily via fsspec ---
    if is_fsspec_url(filepath_or_buffer):
        assert isinstance(
            filepath_or_buffer, str
        )  # just to appease mypy for this branch
        # two special-case s3-like protocols; these have special meaning in Hadoop,
        # but are equivalent to just "s3" from fsspec's point of view
        # cc #11071
        if filepath_or_buffer.startswith("s3a://"):
            filepath_or_buffer = filepath_or_buffer.replace("s3a://", "s3://")
        if filepath_or_buffer.startswith("s3n://"):
            filepath_or_buffer = filepath_or_buffer.replace("s3n://", "s3://")
        fsspec = import_optional_dependency("fsspec")

        # If botocore is installed we fallback to reading with anon=True
        # to allow reads from public buckets
        err_types_to_retry_with_anon: list[Any] = []
        try:
            import_optional_dependency("botocore")
            from botocore.exceptions import (
                ClientError,
                NoCredentialsError,
            )

            err_types_to_retry_with_anon = [
                ClientError,
                NoCredentialsError,
                PermissionError,
            ]
        except ImportError:
            pass

        try:
            file_obj = fsspec.open(
                filepath_or_buffer, mode=fsspec_mode, **(storage_options or {})
            ).open()
        # GH 34626 Reads from Public Buckets without Credentials needs anon=True
        except tuple(err_types_to_retry_with_anon):
            if storage_options is None:
                storage_options = {"anon": True}
            else:
                # don't mutate user input.
                storage_options = dict(storage_options)
                storage_options["anon"] = True
            file_obj = fsspec.open(
                filepath_or_buffer, mode=fsspec_mode, **(storage_options or {})
            ).open()

        return IOArgs(
            filepath_or_buffer=file_obj,
            encoding=encoding,
            compression=compression,
            should_close=True,
            mode=fsspec_mode,
        )
    elif storage_options:
        # storage_options only make sense for URLs/fsspec targets
        raise ValueError(
            "storage_options passed with file object or non-fsspec file path"
        )

    # --- local paths and raw byte containers: passed through unchanged ---
    if isinstance(filepath_or_buffer, (str, bytes, mmap.mmap)):
        return IOArgs(
            filepath_or_buffer=_expand_user(filepath_or_buffer),
            encoding=encoding,
            compression=compression,
            should_close=False,
            mode=mode,
        )

    # is_file_like requires (read | write) & __iter__ but __iter__ is only
    # needed for read_csv(engine=python)
    if not (
        hasattr(filepath_or_buffer, "read") or hasattr(filepath_or_buffer, "write")
    ):
        msg = f"Invalid file path or buffer object type: {type(filepath_or_buffer)}"
        raise ValueError(msg)

    return IOArgs(
        filepath_or_buffer=filepath_or_buffer,
        encoding=encoding,
        compression=compression,
        should_close=False,
        mode=mode,
    )
  391. def file_path_to_url(path: str) -> str:
  392. """
  393. converts an absolute native path to a FILE URL.
  394. Parameters
  395. ----------
  396. path : a path in native format
  397. Returns
  398. -------
  399. a valid FILE URL
  400. """
  401. # lazify expensive import (~30ms)
  402. from urllib.request import pathname2url
  403. return urljoin("file:", pathname2url(path))
# Map filename extensions to compression-method names. Compound extensions
# (".tar.gz" etc.) are included explicitly so that tarballs resolve to the
# "tar" handler rather than to the inner codec.
extension_to_compression = {
    ".tar": "tar",
    ".tar.gz": "tar",
    ".tar.bz2": "tar",
    ".tar.xz": "tar",
    ".gz": "gzip",
    ".bz2": "bz2",
    ".zip": "zip",
    ".xz": "xz",
    ".zst": "zstd",
}
# The set of method names accepted by infer_compression/get_handle.
_supported_compressions = set(extension_to_compression.values())
  416. def get_compression_method(
  417. compression: CompressionOptions,
  418. ) -> tuple[str | None, CompressionDict]:
  419. """
  420. Simplifies a compression argument to a compression method string and
  421. a mapping containing additional arguments.
  422. Parameters
  423. ----------
  424. compression : str or mapping
  425. If string, specifies the compression method. If mapping, value at key
  426. 'method' specifies compression method.
  427. Returns
  428. -------
  429. tuple of ({compression method}, Optional[str]
  430. {compression arguments}, Dict[str, Any])
  431. Raises
  432. ------
  433. ValueError on mapping missing 'method' key
  434. """
  435. compression_method: str | None
  436. if isinstance(compression, Mapping):
  437. compression_args = dict(compression)
  438. try:
  439. compression_method = compression_args.pop("method")
  440. except KeyError as err:
  441. raise ValueError("If mapping, compression must have key 'method'") from err
  442. else:
  443. compression_args = {}
  444. compression_method = compression
  445. return compression_method, compression_args
  446. @doc(compression_options=_shared_docs["compression_options"] % "filepath_or_buffer")
  447. def infer_compression(
  448. filepath_or_buffer: FilePath | BaseBuffer, compression: str | None
  449. ) -> str | None:
  450. """
  451. Get the compression method for filepath_or_buffer. If compression='infer',
  452. the inferred compression method is returned. Otherwise, the input
  453. compression method is returned unchanged, unless it's invalid, in which
  454. case an error is raised.
  455. Parameters
  456. ----------
  457. filepath_or_buffer : str or file handle
  458. File path or object.
  459. {compression_options}
  460. .. versionchanged:: 1.4.0 Zstandard support.
  461. Returns
  462. -------
  463. string or None
  464. Raises
  465. ------
  466. ValueError on invalid compression specified.
  467. """
  468. if compression is None:
  469. return None
  470. # Infer compression
  471. if compression == "infer":
  472. # Convert all path types (e.g. pathlib.Path) to strings
  473. filepath_or_buffer = stringify_path(filepath_or_buffer, convert_file_like=True)
  474. if not isinstance(filepath_or_buffer, str):
  475. # Cannot infer compression of a buffer, assume no compression
  476. return None
  477. # Infer compression from the filename/URL extension
  478. for extension, compression in extension_to_compression.items():
  479. if filepath_or_buffer.lower().endswith(extension):
  480. return compression
  481. return None
  482. # Compression has been specified. Check that it's valid
  483. if compression in _supported_compressions:
  484. return compression
  485. valid = ["infer", None] + sorted(_supported_compressions)
  486. msg = (
  487. f"Unrecognized compression type: {compression}\n"
  488. f"Valid compression types are {valid}"
  489. )
  490. raise ValueError(msg)
  491. def check_parent_directory(path: Path | str) -> None:
  492. """
  493. Check if parent directory of a file exists, raise OSError if it does not
  494. Parameters
  495. ----------
  496. path: Path or str
  497. Path to check parent directory of
  498. """
  499. parent = Path(path).parent
  500. if not parent.is_dir():
  501. raise OSError(rf"Cannot save file into a non-existent directory: '{parent}'")
# The three @overload stubs below narrow get_handle's return type by the
# ``is_text`` flag: is_text=False -> IOHandles[bytes]; is_text=True (the
# default) -> IOHandles[str]; an arbitrary bool -> the union of both.
@overload
def get_handle(
    path_or_buf: FilePath | BaseBuffer,
    mode: str,
    *,
    encoding: str | None = ...,
    compression: CompressionOptions = ...,
    memory_map: bool = ...,
    is_text: Literal[False],
    errors: str | None = ...,
    storage_options: StorageOptions = ...,
) -> IOHandles[bytes]:
    ...


@overload
def get_handle(
    path_or_buf: FilePath | BaseBuffer,
    mode: str,
    *,
    encoding: str | None = ...,
    compression: CompressionOptions = ...,
    memory_map: bool = ...,
    is_text: Literal[True] = ...,
    errors: str | None = ...,
    storage_options: StorageOptions = ...,
) -> IOHandles[str]:
    ...


@overload
def get_handle(
    path_or_buf: FilePath | BaseBuffer,
    mode: str,
    *,
    encoding: str | None = ...,
    compression: CompressionOptions = ...,
    memory_map: bool = ...,
    is_text: bool = ...,
    errors: str | None = ...,
    storage_options: StorageOptions = ...,
) -> IOHandles[str] | IOHandles[bytes]:
    ...
@doc(compression_options=_shared_docs["compression_options"] % "path_or_buf")
def get_handle(
    path_or_buf: FilePath | BaseBuffer,
    mode: str,
    *,
    encoding: str | None = None,
    compression: CompressionOptions = None,
    memory_map: bool = False,
    is_text: bool = True,
    errors: str | None = None,
    storage_options: StorageOptions = None,
) -> IOHandles[str] | IOHandles[bytes]:
    """
    Get file handle for given path/buffer and mode.

    Parameters
    ----------
    path_or_buf : str or file handle
        File path or object.
    mode : str
        Mode to open path_or_buf with.
    encoding : str or None
        Encoding to use.
    {compression_options}

        .. versionchanged:: 1.0.0
           May now be a dict with key 'method' as compression mode
           and other keys as compression options if compression
           mode is 'zip'.

        .. versionchanged:: 1.1.0
           Passing compression options as keys in dict is now
           supported for compression modes 'gzip', 'bz2', 'zstd' and 'zip'.

        .. versionchanged:: 1.4.0 Zstandard support.

    memory_map : bool, default False
        See parsers._parser_params for more information. Only used by read_csv.
    is_text : bool, default True
        Whether the type of the content passed to the file/buffer is string or
        bytes. This is not the same as `"b" not in mode`. If a string content is
        passed to a binary file/buffer, a wrapper is inserted.
    errors : str, default 'strict'
        Specifies how encoding and decoding errors are to be handled.
        See the errors argument for :func:`open` for a full list
        of options.
    storage_options: StorageOptions = None
        Passed to _get_filepath_or_buffer

    .. versionchanged:: 1.2.0

    Returns the dataclass IOHandles
    """
    # Windows does not default to utf-8. Set to utf-8 for a consistent behavior
    encoding = encoding or "utf-8"

    errors = errors or "strict"

    # read_csv does not know whether the buffer is opened in binary/text mode
    if _is_binary_mode(path_or_buf, mode) and "b" not in mode:
        mode += "b"

    # validate encoding and errors
    codecs.lookup(encoding)
    if isinstance(errors, str):
        codecs.lookup_error(errors)

    # open URLs
    ioargs = _get_filepath_or_buffer(
        path_or_buf,
        encoding=encoding,
        compression=compression,
        mode=mode,
        storage_options=storage_options,
    )

    handle = ioargs.filepath_or_buffer
    handles: list[BaseBuffer]

    # memory mapping needs to be the first step
    # only used for read_csv
    handle, memory_map, handles = _maybe_memory_map(handle, memory_map)

    is_path = isinstance(handle, str)
    compression_args = dict(ioargs.compression)
    compression = compression_args.pop("method")

    # Only for write methods
    if "r" not in mode and is_path:
        check_parent_directory(str(handle))

    if compression:
        if compression != "zstd":
            # compression libraries do not like an explicit text-mode
            ioargs.mode = ioargs.mode.replace("t", "")
        elif compression == "zstd" and "b" not in ioargs.mode:
            # python-zstandard defaults to text mode, but we always expect
            # compression libraries to use binary mode.
            ioargs.mode += "b"

        # GZ Compression
        if compression == "gzip":
            if isinstance(handle, str):
                # error: Incompatible types in assignment (expression has type
                # "GzipFile", variable has type "Union[str, BaseBuffer]")
                handle = gzip.GzipFile(  # type: ignore[assignment]
                    filename=handle,
                    mode=ioargs.mode,
                    **compression_args,
                )
            else:
                handle = gzip.GzipFile(
                    # No overload variant of "GzipFile" matches argument types
                    # "Union[str, BaseBuffer]", "str", "Dict[str, Any]"
                    fileobj=handle,  # type: ignore[call-overload]
                    mode=ioargs.mode,
                    **compression_args,
                )

        # BZ Compression
        elif compression == "bz2":
            # Overload of "BZ2File" to handle pickle protocol 5
            # "Union[str, BaseBuffer]", "str", "Dict[str, Any]"
            handle = _BZ2File(  # type: ignore[call-overload]
                handle,
                mode=ioargs.mode,
                **compression_args,
            )

        # ZIP Compression
        elif compression == "zip":
            # error: Argument 1 to "_BytesZipFile" has incompatible type
            # "Union[str, BaseBuffer]"; expected "Union[Union[str, PathLike[str]],
            # ReadBuffer[bytes], WriteBuffer[bytes]]"
            handle = _BytesZipFile(
                handle, ioargs.mode, **compression_args  # type: ignore[arg-type]
            )
            if handle.buffer.mode == "r":
                # reading: exactly one member file must exist in the archive
                handles.append(handle)
                zip_names = handle.buffer.namelist()
                if len(zip_names) == 1:
                    handle = handle.buffer.open(zip_names.pop())
                elif not zip_names:
                    raise ValueError(f"Zero files found in ZIP file {path_or_buf}")
                else:
                    raise ValueError(
                        "Multiple files found in ZIP file. "
                        f"Only one file per ZIP: {zip_names}"
                    )

        # TAR Encoding
        elif compression == "tar":
            compression_args.setdefault("mode", ioargs.mode)
            if isinstance(handle, str):
                handle = _BytesTarFile(name=handle, **compression_args)
            else:
                # error: Argument "fileobj" to "_BytesTarFile" has incompatible
                # type "BaseBuffer"; expected "Union[ReadBuffer[bytes],
                # WriteBuffer[bytes], None]"
                handle = _BytesTarFile(
                    fileobj=handle, **compression_args  # type: ignore[arg-type]
                )
            assert isinstance(handle, _BytesTarFile)
            if "r" in handle.buffer.mode:
                # reading: exactly one member file must exist in the archive
                handles.append(handle)
                files = handle.buffer.getnames()
                if len(files) == 1:
                    file = handle.buffer.extractfile(files[0])
                    assert file is not None
                    handle = file
                elif not files:
                    raise ValueError(f"Zero files found in TAR archive {path_or_buf}")
                else:
                    raise ValueError(
                        "Multiple files found in TAR archive. "
                        f"Only one file per TAR archive: {files}"
                    )

        # XZ Compression
        elif compression == "xz":
            # error: Argument 1 to "LZMAFile" has incompatible type "Union[str,
            # BaseBuffer]"; expected "Optional[Union[Union[str, bytes, PathLike[str],
            # PathLike[bytes]], IO[bytes]]]"
            handle = get_lzma_file()(handle, ioargs.mode)  # type: ignore[arg-type]

        # Zstd Compression
        elif compression == "zstd":
            zstd = import_optional_dependency("zstandard")
            if "r" in ioargs.mode:
                open_args = {"dctx": zstd.ZstdDecompressor(**compression_args)}
            else:
                open_args = {"cctx": zstd.ZstdCompressor(**compression_args)}
            handle = zstd.open(
                handle,
                mode=ioargs.mode,
                **open_args,
            )

        # Unrecognized Compression
        else:
            msg = f"Unrecognized compression type: {compression}"
            raise ValueError(msg)

        assert not isinstance(handle, str)
        handles.append(handle)

    elif isinstance(handle, str):
        # Check whether the filename is to be opened in binary mode.
        # Binary mode does not support 'encoding' and 'newline'.
        if ioargs.encoding and "b" not in ioargs.mode:
            # Encoding
            handle = open(
                handle,
                ioargs.mode,
                encoding=ioargs.encoding,
                errors=errors,
                newline="",
            )
        else:
            # Binary mode
            handle = open(handle, ioargs.mode)
        handles.append(handle)

    # Convert BytesIO or file objects passed with an encoding
    is_wrapped = False
    if not is_text and ioargs.mode == "rb" and isinstance(handle, TextIOBase):
        # not added to handles as it does not open/buffer resources
        handle = _BytesIOWrapper(
            handle,
            encoding=ioargs.encoding,
        )
    elif is_text and (
        compression or memory_map or _is_binary_mode(handle, ioargs.mode)
    ):
        if (
            not hasattr(handle, "readable")
            or not hasattr(handle, "writable")
            or not hasattr(handle, "seekable")
        ):
            # TextIOWrapper requires these three methods; shim them in
            handle = _IOWrapper(handle)
        # error: Argument 1 to "TextIOWrapper" has incompatible type
        # "_IOWrapper"; expected "IO[bytes]"
        handle = TextIOWrapper(
            handle,  # type: ignore[arg-type]
            encoding=ioargs.encoding,
            errors=errors,
            newline="",
        )
        handles.append(handle)
        # only marked as wrapped when the caller provided a handle
        is_wrapped = not (
            isinstance(ioargs.filepath_or_buffer, str) or ioargs.should_close
        )

    if "r" in ioargs.mode and not hasattr(handle, "read"):
        raise TypeError(
            "Expected file path name or file-like object, "
            f"got {type(ioargs.filepath_or_buffer)} type"
        )

    handles.reverse()  # close the most recently added buffer first
    if ioargs.should_close:
        assert not isinstance(ioargs.filepath_or_buffer, str)
        handles.append(ioargs.filepath_or_buffer)

    return IOHandles(
        # error: Argument "handle" to "IOHandles" has incompatible type
        # "Union[TextIOWrapper, GzipFile, BaseBuffer, typing.IO[bytes],
        # typing.IO[Any]]"; expected "pandas._typing.IO[Any]"
        handle=handle,  # type: ignore[arg-type]
        # error: Argument "created_handles" to "IOHandles" has incompatible type
        # "List[BaseBuffer]"; expected "List[Union[IO[bytes], IO[str]]]"
        created_handles=handles,  # type: ignore[arg-type]
        is_wrapped=is_wrapped,
        compression=ioargs.compression,
    )
  788. # error: Definition of "__enter__" in base class "IOBase" is incompatible
  789. # with definition in base class "BinaryIO"
  790. class _BufferedWriter(BytesIO, ABC): # type: ignore[misc]
  791. """
  792. Some objects do not support multiple .write() calls (TarFile and ZipFile).
  793. This wrapper writes to the underlying buffer on close.
  794. """
  795. @abstractmethod
  796. def write_to_buffer(self) -> None:
  797. ...
  798. def close(self) -> None:
  799. if self.closed:
  800. # already closed
  801. return
  802. if self.getvalue():
  803. # write to buffer
  804. self.seek(0)
  805. # error: "_BufferedWriter" has no attribute "buffer"
  806. with self.buffer: # type: ignore[attr-defined]
  807. self.write_to_buffer()
  808. else:
  809. # error: "_BufferedWriter" has no attribute "buffer"
  810. self.buffer.close() # type: ignore[attr-defined]
  811. super().close()
  812. class _BytesTarFile(_BufferedWriter):
  813. def __init__(
  814. self,
  815. name: str | None = None,
  816. mode: Literal["r", "a", "w", "x"] = "r",
  817. fileobj: ReadBuffer[bytes] | WriteBuffer[bytes] | None = None,
  818. archive_name: str | None = None,
  819. **kwargs,
  820. ) -> None:
  821. super().__init__()
  822. self.archive_name = archive_name
  823. self.name = name
  824. # error: Argument "fileobj" to "open" of "TarFile" has incompatible
  825. # type "Union[ReadBuffer[bytes], WriteBuffer[bytes], None]"; expected
  826. # "Optional[IO[bytes]]"
  827. self.buffer = tarfile.TarFile.open(
  828. name=name,
  829. mode=self.extend_mode(mode),
  830. fileobj=fileobj, # type: ignore[arg-type]
  831. **kwargs,
  832. )
  833. def extend_mode(self, mode: str) -> str:
  834. mode = mode.replace("b", "")
  835. if mode != "w":
  836. return mode
  837. if self.name is not None:
  838. suffix = Path(self.name).suffix
  839. if suffix in (".gz", ".xz", ".bz2"):
  840. mode = f"{mode}:{suffix[1:]}"
  841. return mode
  842. def infer_filename(self) -> str | None:
  843. """
  844. If an explicit archive_name is not given, we still want the file inside the zip
  845. file not to be named something.tar, because that causes confusion (GH39465).
  846. """
  847. if self.name is None:
  848. return None
  849. filename = Path(self.name)
  850. if filename.suffix == ".tar":
  851. return filename.with_suffix("").name
  852. elif filename.suffix in (".tar.gz", ".tar.bz2", ".tar.xz"):
  853. return filename.with_suffix("").with_suffix("").name
  854. return filename.name
  855. def write_to_buffer(self) -> None:
  856. # TarFile needs a non-empty string
  857. archive_name = self.archive_name or self.infer_filename() or "tar"
  858. tarinfo = tarfile.TarInfo(name=archive_name)
  859. tarinfo.size = len(self.getvalue())
  860. self.buffer.addfile(tarinfo, self)
  861. class _BytesZipFile(_BufferedWriter):
  862. def __init__(
  863. self,
  864. file: FilePath | ReadBuffer[bytes] | WriteBuffer[bytes],
  865. mode: str,
  866. archive_name: str | None = None,
  867. **kwargs,
  868. ) -> None:
  869. super().__init__()
  870. mode = mode.replace("b", "")
  871. self.archive_name = archive_name
  872. kwargs.setdefault("compression", zipfile.ZIP_DEFLATED)
  873. # error: Argument 1 to "ZipFile" has incompatible type "Union[
  874. # Union[str, PathLike[str]], ReadBuffer[bytes], WriteBuffer[bytes]]";
  875. # expected "Union[Union[str, PathLike[str]], IO[bytes]]"
  876. self.buffer = zipfile.ZipFile(file, mode, **kwargs) # type: ignore[arg-type]
  877. def infer_filename(self) -> str | None:
  878. """
  879. If an explicit archive_name is not given, we still want the file inside the zip
  880. file not to be named something.zip, because that causes confusion (GH39465).
  881. """
  882. if isinstance(self.buffer.filename, (os.PathLike, str)):
  883. filename = Path(self.buffer.filename)
  884. if filename.suffix == ".zip":
  885. return filename.with_suffix("").name
  886. return filename.name
  887. return None
  888. def write_to_buffer(self) -> None:
  889. # ZipFile needs a non-empty string
  890. archive_name = self.archive_name or self.infer_filename() or "zip"
  891. self.buffer.writestr(archive_name, self.getvalue())
  892. class _IOWrapper:
  893. # TextIOWrapper is overly strict: it request that the buffer has seekable, readable,
  894. # and writable. If we have a read-only buffer, we shouldn't need writable and vice
  895. # versa. Some buffers, are seek/read/writ-able but they do not have the "-able"
  896. # methods, e.g., tempfile.SpooledTemporaryFile.
  897. # If a buffer does not have the above "-able" methods, we simple assume they are
  898. # seek/read/writ-able.
  899. def __init__(self, buffer: BaseBuffer) -> None:
  900. self.buffer = buffer
  901. def __getattr__(self, name: str):
  902. return getattr(self.buffer, name)
  903. def readable(self) -> bool:
  904. if hasattr(self.buffer, "readable"):
  905. return self.buffer.readable()
  906. return True
  907. def seekable(self) -> bool:
  908. if hasattr(self.buffer, "seekable"):
  909. return self.buffer.seekable()
  910. return True
  911. def writable(self) -> bool:
  912. if hasattr(self.buffer, "writable"):
  913. return self.buffer.writable()
  914. return True
  915. class _BytesIOWrapper:
  916. # Wrapper that wraps a StringIO buffer and reads bytes from it
  917. # Created for compat with pyarrow read_csv
  918. def __init__(self, buffer: StringIO | TextIOBase, encoding: str = "utf-8") -> None:
  919. self.buffer = buffer
  920. self.encoding = encoding
  921. # Because a character can be represented by more than 1 byte,
  922. # it is possible that reading will produce more bytes than n
  923. # We store the extra bytes in this overflow variable, and append the
  924. # overflow to the front of the bytestring the next time reading is performed
  925. self.overflow = b""
  926. def __getattr__(self, attr: str):
  927. return getattr(self.buffer, attr)
  928. def read(self, n: int | None = -1) -> bytes:
  929. assert self.buffer is not None
  930. bytestring = self.buffer.read(n).encode(self.encoding)
  931. # When n=-1/n greater than remaining bytes: Read entire file/rest of file
  932. combined_bytestring = self.overflow + bytestring
  933. if n is None or n < 0 or n >= len(combined_bytestring):
  934. self.overflow = b""
  935. return combined_bytestring
  936. else:
  937. to_return = combined_bytestring[:n]
  938. self.overflow = combined_bytestring[n:]
  939. return to_return
  940. def _maybe_memory_map(
  941. handle: str | BaseBuffer, memory_map: bool
  942. ) -> tuple[str | BaseBuffer, bool, list[BaseBuffer]]:
  943. """Try to memory map file/buffer."""
  944. handles: list[BaseBuffer] = []
  945. memory_map &= hasattr(handle, "fileno") or isinstance(handle, str)
  946. if not memory_map:
  947. return handle, memory_map, handles
  948. # mmap used by only read_csv
  949. handle = cast(ReadCsvBuffer, handle)
  950. # need to open the file first
  951. if isinstance(handle, str):
  952. handle = open(handle, "rb")
  953. handles.append(handle)
  954. try:
  955. # open mmap and adds *-able
  956. # error: Argument 1 to "_IOWrapper" has incompatible type "mmap";
  957. # expected "BaseBuffer"
  958. wrapped = _IOWrapper(
  959. mmap.mmap(
  960. handle.fileno(), 0, access=mmap.ACCESS_READ # type: ignore[arg-type]
  961. )
  962. )
  963. finally:
  964. for handle in reversed(handles):
  965. # error: "BaseBuffer" has no attribute "close"
  966. handle.close() # type: ignore[attr-defined]
  967. return wrapped, memory_map, [wrapped]
  968. def file_exists(filepath_or_buffer: FilePath | BaseBuffer) -> bool:
  969. """Test whether file exists."""
  970. exists = False
  971. filepath_or_buffer = stringify_path(filepath_or_buffer)
  972. if not isinstance(filepath_or_buffer, str):
  973. return exists
  974. try:
  975. exists = os.path.exists(filepath_or_buffer)
  976. # gh-5874: if the filepath is too long will raise here
  977. except (TypeError, ValueError):
  978. pass
  979. return exists
  980. def _is_binary_mode(handle: FilePath | BaseBuffer, mode: str) -> bool:
  981. """Whether the handle is opened in binary mode"""
  982. # specified by user
  983. if "t" in mode or "b" in mode:
  984. return "b" in mode
  985. # exceptions
  986. text_classes = (
  987. # classes that expect string but have 'b' in mode
  988. codecs.StreamWriter,
  989. codecs.StreamReader,
  990. codecs.StreamReaderWriter,
  991. )
  992. if issubclass(type(handle), text_classes):
  993. return False
  994. return isinstance(handle, _get_binary_io_classes()) or "b" in getattr(
  995. handle, "mode", mode
  996. )
  997. @functools.lru_cache
  998. def _get_binary_io_classes() -> tuple[type, ...]:
  999. """IO classes that that expect bytes"""
  1000. binary_classes: tuple[type, ...] = (BufferedIOBase, RawIOBase)
  1001. # python-zstandard doesn't use any of the builtin base classes; instead we
  1002. # have to use the `zstd.ZstdDecompressionReader` class for isinstance checks.
  1003. # Unfortunately `zstd.ZstdDecompressionReader` isn't exposed by python-zstandard
  1004. # so we have to get it from a `zstd.ZstdDecompressor` instance.
  1005. # See also https://github.com/indygreg/python-zstandard/pull/165.
  1006. zstd = import_optional_dependency("zstandard", errors="ignore")
  1007. if zstd is not None:
  1008. with zstd.ZstdDecompressor().stream_reader(b"") as reader:
  1009. binary_classes += (type(reader),)
  1010. return binary_classes
  1011. def is_potential_multi_index(
  1012. columns: Sequence[Hashable] | MultiIndex,
  1013. index_col: bool | Sequence[int] | None = None,
  1014. ) -> bool:
  1015. """
  1016. Check whether or not the `columns` parameter
  1017. could be converted into a MultiIndex.
  1018. Parameters
  1019. ----------
  1020. columns : array-like
  1021. Object which may or may not be convertible into a MultiIndex
  1022. index_col : None, bool or list, optional
  1023. Column or columns to use as the (possibly hierarchical) index
  1024. Returns
  1025. -------
  1026. bool : Whether or not columns could become a MultiIndex
  1027. """
  1028. if index_col is None or isinstance(index_col, bool):
  1029. index_col = []
  1030. return bool(
  1031. len(columns)
  1032. and not isinstance(columns, MultiIndex)
  1033. and all(isinstance(c, tuple) for c in columns if c not in list(index_col))
  1034. )
  1035. def dedup_names(
  1036. names: Sequence[Hashable], is_potential_multiindex: bool
  1037. ) -> Sequence[Hashable]:
  1038. """
  1039. Rename column names if duplicates exist.
  1040. Currently the renaming is done by appending a period and an autonumeric,
  1041. but a custom pattern may be supported in the future.
  1042. Examples
  1043. --------
  1044. >>> dedup_names(["x", "y", "x", "x"], is_potential_multiindex=False)
  1045. ['x', 'y', 'x.1', 'x.2']
  1046. """
  1047. names = list(names) # so we can index
  1048. counts: DefaultDict[Hashable, int] = defaultdict(int)
  1049. for i, col in enumerate(names):
  1050. cur_count = counts[col]
  1051. while cur_count > 0:
  1052. counts[col] = cur_count + 1
  1053. if is_potential_multiindex:
  1054. # for mypy
  1055. assert isinstance(col, tuple)
  1056. col = col[:-1] + (f"{col[-1]}.{cur_count}",)
  1057. else:
  1058. col = f"{col}.{cur_count}"
  1059. cur_count = counts[col]
  1060. names[i] = col
  1061. counts[col] = cur_count + 1
  1062. return names