Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions pandas-stubs/core/reshape/concat.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ from collections.abc import (
from typing import (
Literal,
Never,
TypeVar,
overload,
)

Expand All @@ -23,6 +24,8 @@ from pandas._typing import (
HashableT4,
)

# TypeVar bound to DataFrame so that concat() overloads can return the same
# concrete DataFrame subclass they were given (e.g. a user-defined subclass
# with _constructor overridden), instead of widening to plain DataFrame.
DataFrameCompatType = TypeVar("DataFrameCompatType", bound=DataFrame)

@overload
def concat(
objs: Iterable[None] | Mapping[HashableT1, None],
Expand Down Expand Up @@ -63,6 +66,19 @@ def concat( # type: ignore[overload-overlap]
sort: bool = False,
) -> Series: ...
# Overload for an iterable of a single DataFrame subclass (or None entries):
# the bound TypeVar makes the return type the same subclass that was passed
# in, rather than the base DataFrame.  NOTE(review): at runtime pandas only
# preserves the subclass via _constructor — this overload assumes callers
# follow that convention; confirm against the accompanying test.
@overload
def concat(
    objs: Iterable[DataFrameCompatType | None],
    *,
    axis: Axis = 0,
    join: Literal["inner", "outer"] = "outer",
    ignore_index: bool = False,
    keys: Iterable[HashableT2] | None = None,
    levels: Sequence[list[HashableT3] | tuple[HashableT3, ...]] | None = None,
    names: list[HashableT4] | None = None,
    verify_integrity: bool = False,
    sort: bool = False,
) -> DataFrameCompatType: ...
@overload
def concat(
objs: Iterable[NDFrame | None] | Mapping[HashableT1, NDFrame | None],
*,
Expand Down
15 changes: 15 additions & 0 deletions tests/test_pandas.py
Original file line number Diff line number Diff line change
Expand Up @@ -2544,3 +2544,18 @@ def test_argmin_and_argmax_return() -> None:
i1 = df.a.abs().argmax()
check(assert_type(i, np.int64), np.int64)
check(assert_type(i1, np.int64), np.int64)


def test_frame_subclass_concat() -> None:
    """Concatenating a DataFrame subclass preserves the subclass type (GH1396)."""

    class ChildDataFrame(pd.DataFrame):
        # Overriding _constructor is how pandas propagates the subclass
        # through operations such as concat.
        @property
        def _constructor(self) -> type[ChildDataFrame]:
            return ChildDataFrame

    first = ChildDataFrame(data={"a": [0]})
    second = ChildDataFrame(data={"a": [1]})

    combined = pd.concat([first, second], ignore_index=True)
    check(assert_type(combined, ChildDataFrame), ChildDataFrame)
Loading