Merge remote-tracking branch 'upstream/main' into pycapsule-import
MarcoGorelli committed Oct 17, 2024
2 parents 2ac16c9 + 3a167ab commit fd8eea1
Showing 8 changed files with 53 additions and 65 deletions.
4 changes: 0 additions & 4 deletions .github/workflows/extremes.yml
@@ -59,8 +59,6 @@ jobs:
run: uv pip freeze
- name: Run pytest
run: pytest tests --cov=narwhals --cov=tests --cov-fail-under=50 --runslow
- name: Run doctests
run: pytest narwhals --doctest-modules

not_so_old_versions:
strategy:
@@ -88,8 +86,6 @@ jobs:
run: uv pip freeze
- name: Run pytest
run: pytest tests --cov=narwhals --cov=tests --cov-fail-under=50 --runslow
- name: Run doctests
run: pytest narwhals --doctest-modules

nightlies:
strategy:
6 changes: 1 addition & 5 deletions .github/workflows/pytest.yml
@@ -30,9 +30,6 @@ jobs:
run: uv pip freeze
- name: Run pytest
run: pytest tests --cov=narwhals --cov=tests --cov-fail-under=85
- name: Run doctests
if: startsWith(matrix.os, 'windows') != true
run: pytest narwhals --doctest-modules

pytest-windows:
strategy:
@@ -60,8 +57,6 @@ jobs:
run: uv pip freeze
- name: Run pytest
run: pytest tests --cov=narwhals --cov=tests --runslow --cov-fail-under=95
- name: Run doctests
run: pytest narwhals --doctest-modules

pytest-coverage:
strategy:
@@ -95,4 +90,5 @@ jobs:
- name: Run pytest
run: pytest tests --cov=narwhals --cov=tests --cov-fail-under=100 --runslow
- name: Run doctests
if: matrix.python-version == '3.12'
run: pytest narwhals --doctest-modules
42 changes: 21 additions & 21 deletions narwhals/dataframe.py
@@ -546,12 +546,12 @@ def write_csv(self, file: str | Path | BytesIO | None = None) -> Any:
We can pass any supported library such as pandas, Polars or PyArrow to `func`:
>>> func(df_pd) # doctest: +SKIP
'foo,bar,ham\n1,6.0,a\n2,7.0,b\n3,8.0,c\n'
>>> func(df_pl) # doctest: +SKIP
>>> func(df_pd)
'foo,bar,ham\n1,6.0,a\n2,7.0,b\n3,8.0,c\n'
>>> func(df_pa) # doctest: +SKIP
>>> func(df_pl)
'foo,bar,ham\n1,6.0,a\n2,7.0,b\n3,8.0,c\n'
>>> func(df_pa)
'"foo","bar","ham"\n1,6,"a"\n2,7,"b"\n3,8,"c"\n'
If we had passed a file name to `write_csv`, it would have been
written to that file.
@@ -582,9 +582,9 @@ def write_parquet(self, file: str | Path | BytesIO) -> Any:
We can then pass either pandas, Polars or PyArrow to `func`:
>>> func(df_pd) # doctest:+SKIP
>>> func(df_pl) # doctest:+SKIP
>>> func(df_pa) # doctest:+SKIP
>>> func(df_pd)
>>> func(df_pl)
>>> func(df_pa)
"""
self._compliant_frame.write_parquet(file)

@@ -1116,12 +1116,12 @@ def schema(self) -> Schema:
You can pass either pandas or Polars to `func`:
>>> df_pd_schema = func(df_pd)
>>> df_pd_schema # doctest:+SKIP
Schema({'foo': Int64, 'bar': Float64, 'ham', String})
>>> df_pd_schema
Schema({'foo': Int64, 'bar': Float64, 'ham': String})
>>> df_pl_schema = func(df_pl)
>>> df_pl_schema # doctest:+SKIP
Schema({'foo': Int64, 'bar': Float64, 'ham', String})
>>> df_pl_schema
Schema({'foo': Int64, 'bar': Float64, 'ham': String})
"""
return super().schema

@@ -1150,12 +1150,12 @@ def collect_schema(self: Self) -> Schema:
You can pass either pandas or Polars to `func`:
>>> df_pd_schema = func(df_pd)
>>> df_pd_schema # doctest:+SKIP
Schema({'foo': Int64, 'bar': Float64, 'ham', String})
>>> df_pd_schema
Schema({'foo': Int64, 'bar': Float64, 'ham': String})
>>> df_pl_schema = func(df_pl)
>>> df_pl_schema # doctest:+SKIP
Schema({'foo': Int64, 'bar': Float64, 'ham', String})
>>> df_pl_schema
Schema({'foo': Int64, 'bar': Float64, 'ham': String})
"""
return super().collect_schema()

@@ -2478,8 +2478,8 @@ def item(self: Self, row: int | None = None, column: int | str | None = None) ->
We can then pass either pandas or Polars to `func`:
>>> func(df_pd, 1, 1), func(df_pd, 2, "b") # doctest:+SKIP
(5, 6)
>>> func(df_pd, 1, 1), func(df_pd, 2, "b")
(np.int64(5), np.int64(6))
>>> func(df_pl, 1, 1), func(df_pl, 2, "b")
(5, 6)
@@ -2581,7 +2581,7 @@ def to_arrow(self: Self) -> pa.Table:
... def func(df):
... return df.to_arrow()
>>> func(df_pd) # doctest:+SKIP
>>> func(df_pd)
pyarrow.Table
foo: int64
bar: string
@@ -3010,7 +3010,7 @@ def schema(self) -> Schema:
... }
... )
>>> lf = nw.from_native(lf_pl)
>>> lf.schema # doctest:+SKIP
>>> lf.schema # doctest: +SKIP
Schema({'foo': Int64, 'bar': Float64, 'ham', String})
"""
return super().schema
@@ -3030,8 +3030,8 @@ def collect_schema(self: Self) -> Schema:
... }
... )
>>> lf = nw.from_native(lf_pl)
>>> lf.collect_schema() # doctest:+SKIP
Schema({'foo': Int64, 'bar': Float64, 'ham', String})
>>> lf.collect_schema()
Schema({'foo': Int64, 'bar': Float64, 'ham': String})
"""
return super().collect_schema()

16 changes: 7 additions & 9 deletions narwhals/expr.py
@@ -1262,12 +1262,12 @@ def sample(
We can then pass either pandas or Polars to `func`:
>>> func(df_pd) # doctest:+SKIP
>>> func(df_pd) # doctest: +SKIP
a
2 3
0 1
2 3
>>> func(df_pl) # doctest:+SKIP
>>> func(df_pl) # doctest: +SKIP
shape: (3, 1)
┌─────┐
│ a │
@@ -2662,9 +2662,7 @@ def date(self) -> Expr:
>>> from datetime import datetime
>>> import narwhals as nw
>>> data = {"a": [datetime(2012, 1, 7, 10, 20), datetime(2023, 3, 10, 11, 32)]}
>>> df_pd = pd.DataFrame(data).convert_dtypes(
... dtype_backend="pyarrow"
... ) # doctest:+SKIP
>>> df_pd = pd.DataFrame(data).convert_dtypes(dtype_backend="pyarrow")
>>> df_pl = pl.DataFrame(data)
We define a library agnostic function:
@@ -2675,7 +2673,7 @@ def date(self) -> Expr:
We can then pass either pandas or Polars to `func`:
>>> func(df_pd) # doctest:+SKIP
>>> func(df_pd)
a
0 2012-01-07
1 2023-03-10
@@ -3546,7 +3544,7 @@ def replace_time_zone(self, time_zone: str | None) -> Expr:
│ 2024-01-01 00:00:00 +0545 │
│ 2024-01-02 00:00:00 +0545 │
└──────────────────────────────┘
>>> func(df_pa) # doctest:+SKIP
>>> func(df_pa)
pyarrow.Table
a: timestamp[us, tz=Asia/Kathmandu]
----
@@ -3601,7 +3599,7 @@ def convert_time_zone(self, time_zone: str) -> Expr:
│ 2024-01-01 05:45:00 +0545 │
│ 2024-01-02 05:45:00 +0545 │
└──────────────────────────────┘
>>> func(df_pa) # doctest:+SKIP
>>> func(df_pa)
pyarrow.Table
a: timestamp[us, tz=Asia/Kathmandu]
----
@@ -3898,7 +3896,7 @@ def nth(*indices: int | Sequence[int]) -> Expr:
a
0 2
1 4
>>> func(df_pl) # doctest: +SKIP
>>> func(df_pl)
shape: (2, 1)
┌─────┐
│ a │
2 changes: 1 addition & 1 deletion narwhals/functions.py
@@ -570,7 +570,7 @@ def show_versions() -> None:
Examples:
>>> from narwhals import show_versions
>>> show_versions() # doctest:+SKIP
>>> show_versions() # doctest: +SKIP
"""

sys_info = _get_sys_info()
2 changes: 1 addition & 1 deletion narwhals/schema.py
@@ -35,7 +35,7 @@ class Schema(BaseSchema):
>>> import narwhals as nw
>>> schema = nw.Schema({"foo": nw.Int8(), "bar": nw.String()})
>>> schema # doctest:+SKIP
>>> schema
Schema({'foo': Int8, 'bar': String})
Access the data type associated with a specific column name.
42 changes: 20 additions & 22 deletions narwhals/series.py
@@ -512,7 +512,7 @@ def mean(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.float64(2.0)
>>> func(s_pl)
2.0
@@ -539,7 +539,7 @@ def count(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.int64(3)
>>> func(s_pl)
3
@@ -570,7 +570,7 @@ def any(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.True_
>>> func(s_pl)
True
@@ -597,7 +597,7 @@ def all(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.False_
>>> func(s_pl)
False
@@ -625,7 +625,7 @@ def min(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.int64(1)
>>> func(s_pl)
1
@@ -652,7 +652,7 @@ def max(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.int64(3)
>>> func(s_pl)
3
@@ -679,7 +679,7 @@ def sum(self) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.int64(6)
>>> func(s_pl)
6
@@ -710,7 +710,7 @@ def std(self, *, ddof: int = 1) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
np.float64(1.0)
>>> func(s_pl)
1.0
@@ -1175,13 +1175,13 @@ def sample(
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd) # doctest: +SKIP
a
2 3
1 2
3 4
3 4
>>> func(s_pl) # doctest:+SKIP
>>> func(s_pl) # doctest: +SKIP
shape: (4,)
Series: '' [i64]
[
@@ -1867,8 +1867,8 @@ def null_count(self: Self) -> int:
... return s.null_count()
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
1
>>> func(s_pd)
np.int64(1)
>>> func(s_pl)
2
"""
@@ -2082,8 +2082,8 @@ def quantile(
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest: +SKIP
[5, 12, 24, 37, 44]
>>> func(s_pd)
[np.int64(5), np.int64(12), np.int64(24), np.int64(37), np.int64(44)]
>>> func(s_pl) # doctest: +NORMALIZE_WHITESPACE
[5.0, 12.0, 25.0, 37.0, 44.0]
@@ -2166,8 +2166,8 @@ def item(self: Self, index: int | None = None) -> Any:
We can then pass either pandas or Polars to `func`:
>>> func(pl.Series("a", [1]), None), func(pd.Series([1]), None) # doctest:+SKIP
(1, 1)
>>> func(pl.Series("a", [1]), None), func(pd.Series([1]), None)
(1, np.int64(1))
>>> func(pl.Series("a", [9, 8, 7]), -1), func(pl.Series([9, 8, 7]), -2)
(7, 8)
@@ -3175,9 +3175,7 @@ def date(self) -> Series:
>>> from datetime import datetime
>>> import narwhals as nw
>>> dates = [datetime(2012, 1, 7, 10, 20), datetime(2023, 3, 10, 11, 32)]
>>> s_pd = pd.Series(dates).convert_dtypes(
... dtype_backend="pyarrow"
... ) # doctest:+SKIP
>>> s_pd = pd.Series(dates).convert_dtypes(dtype_backend="pyarrow")
>>> s_pl = pl.Series(dates)
We define a library agnostic function:
@@ -3188,7 +3186,7 @@ def date(self) -> Series:
We can then pass either pandas or Polars to `func`:
>>> func(s_pd) # doctest:+SKIP
>>> func(s_pd)
0 2012-01-07
1 2023-03-10
dtype: date32[day][pyarrow]
@@ -3935,7 +3933,7 @@ def replace_time_zone(self, time_zone: str | None) -> Series:
2024-01-01 00:00:00 +0545
2024-01-02 00:00:00 +0545
]
>>> func(s_pa) # doctest: +SKIP
>>> func(s_pa)
<pyarrow.lib.ChunkedArray object at ...>
[
[
@@ -3988,7 +3986,7 @@ def convert_time_zone(self, time_zone: str) -> Series:
2024-01-01 05:45:00 +0545
2024-01-02 05:45:00 +0545
]
>>> func(s_pa) # doctest: +SKIP
>>> func(s_pa)
<pyarrow.lib.ChunkedArray object at ...>
[
[
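The updated expected outputs in this diff (np.int64(6), np.float64(2.0), np.True_, and so on) match the NumPy 2.x scalar repr, which spells out the scalar type instead of printing a bare value. A minimal sketch of that behaviour, not part of this commit and assuming pandas with its default NumPy (>= 2.0) backend:

>>> import pandas as pd
>>> s = pd.Series([1, 2, 3])
>>> s.sum()  # pandas reductions return NumPy scalars; NumPy >= 2.0 reprs include the type
np.int64(6)
>>> s.mean()
np.float64(2.0)
>>> (s > 0).all()
np.True_

Under NumPy 1.x the same calls print plain 6, 2.0 and True, so the exact expected output depends on the installed NumPy version.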