Skip to content

Commit

Permalink
Make kwargs explicit in put, append (pandas-dev#29957)
Browse files Browse the repository at this point in the history
  • Loading branch information
jbrockmendel authored and proost committed Dec 19, 2019
1 parent 9dc55a4 commit f6f7c2f
Show file tree
Hide file tree
Showing 2 changed files with 97 additions and 14 deletions.
21 changes: 15 additions & 6 deletions pandas/core/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2413,9 +2413,13 @@ def to_hdf(
complib: Optional[str] = None,
append: bool_t = False,
format: Optional[str] = None,
index: bool_t = True,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
dropna: Optional[bool_t] = None,
data_columns: Optional[List[str]] = None,
errors: str = "strict",
encoding: str = "UTF-8",
**kwargs,
):
"""
Write the contained data to an HDF5 file using HDFStore.
Expand Down Expand Up @@ -2472,15 +2476,16 @@ def to_hdf(
See the errors argument for :func:`open` for a full list
of options.
encoding : str, default "UTF-8"
min_itemsize : dict or int, optional
Map column names to minimum string sizes for columns.
nan_rep : Any, optional
How to represent null values as a string.
Not allowed with append=True.
data_columns : list of columns or True, optional
List of columns to create as indexed data columns for on-disk
queries, or True to use all columns. By default only the axes
of the object are indexed. See :ref:`io.hdf5-query-data-columns`.
Applicable only to format='table'.
fletcher32 : bool, default False
If applying compression use the fletcher32 checksum.
dropna : bool, default False
If True, rows whose values are all NaN will not be written to the store.
See Also
--------
Expand Down Expand Up @@ -2531,9 +2536,13 @@ def to_hdf(
complib=complib,
append=append,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
dropna=dropna,
data_columns=data_columns,
errors=errors,
encoding=encoding,
**kwargs,
)

def to_msgpack(self, path_or_buf=None, encoding="utf-8", **kwargs):
Expand Down
90 changes: 82 additions & 8 deletions pandas/io/pytables.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,19 +260,41 @@ def to_hdf(
complib: Optional[str] = None,
append: bool = False,
format: Optional[str] = None,
index: bool = True,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
dropna: Optional[bool] = None,
data_columns: Optional[List[str]] = None,
errors: str = "strict",
encoding: str = "UTF-8",
**kwargs,
):
""" store this object, close it if we opened it """

if append:
f = lambda store: store.append(
key, value, format=format, errors=errors, encoding=encoding, **kwargs
key,
value,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
dropna=dropna,
data_columns=data_columns,
errors=errors,
encoding=encoding,
)
else:
# NB: dropna is not passed to `put`
f = lambda store: store.put(
key, value, format=format, errors=errors, encoding=encoding, **kwargs
key,
value,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
data_columns=data_columns,
errors=errors,
encoding=encoding,
)

path_or_buf = _stringify_path(path_or_buf)
Expand Down Expand Up @@ -984,7 +1006,21 @@ def func(_start, _stop, _where):

return it.get_result(coordinates=True)

def put(self, key: str, value: FrameOrSeries, format=None, append=False, **kwargs):
def put(
self,
key: str,
value: FrameOrSeries,
format=None,
index=True,
append=False,
complib=None,
complevel: Optional[int] = None,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
data_columns: Optional[List[str]] = None,
encoding=None,
errors: str = "strict",
):
"""
Store object in HDFStore.
Expand Down Expand Up @@ -1014,7 +1050,20 @@ def put(self, key: str, value: FrameOrSeries, format=None, append=False, **kwarg
if format is None:
format = get_option("io.hdf.default_format") or "fixed"
format = self._validate_format(format)
self._write_to_group(key, value, format=format, append=append, **kwargs)
self._write_to_group(
key,
value,
format=format,
index=index,
append=append,
complib=complib,
complevel=complevel,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
data_columns=data_columns,
encoding=encoding,
errors=errors,
)

def remove(self, key: str, where=None, start=None, stop=None):
"""
Expand Down Expand Up @@ -1075,10 +1124,20 @@ def append(
key: str,
value: FrameOrSeries,
format=None,
axes=None,
index=True,
append=True,
complib=None,
complevel: Optional[int] = None,
columns=None,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
chunksize=None,
expectedrows=None,
dropna: Optional[bool] = None,
**kwargs,
data_columns: Optional[List[str]] = None,
encoding=None,
errors: str = "strict",
):
"""
Append to Table in file. Node must already exist and be Table
Expand Down Expand Up @@ -1125,7 +1184,22 @@ def append(
format = get_option("io.hdf.default_format") or "table"
format = self._validate_format(format)
self._write_to_group(
key, value, format=format, append=append, dropna=dropna, **kwargs
key,
value,
format=format,
axes=axes,
index=index,
append=append,
complib=complib,
complevel=complevel,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
chunksize=chunksize,
expectedrows=expectedrows,
dropna=dropna,
data_columns=data_columns,
encoding=encoding,
errors=errors,
)

def append_to_multiple(
Expand Down Expand Up @@ -1586,7 +1660,7 @@ def _write_to_group(
complib=None,
complevel: Optional[int] = None,
fletcher32=None,
min_itemsize=None,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
chunksize=None,
expectedrows=None,
dropna=False,
Expand Down

0 comments on commit f6f7c2f

Please sign in to comment.