Skip to content

Commit a2044f4

Browse files
committed
Improve the detection of large integers
1 parent d92fbab commit a2044f4

File tree

5 files changed

+53
-5
lines changed

5 files changed

+53
-5
lines changed

docs/changelog.md

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,13 @@
11
ITables ChangeLog
22
=================
33

4+
2.1.3 (2024-06-22)
5+
------------------
6+
7+
**Fixed**
8+
- We have improved the detection of large integers in the context of Polars DataFrames ([#291](https://github.com/mwouts/itables/issues/291))
9+
10+
411
2.1.2 (2024-06-19)
512
------------------
613

@@ -9,7 +16,7 @@ ITables ChangeLog
916
an automatic horizontal scrolling in Jupyter, Jupyter Book and also Streamlit if the table is too wide ([#282](https://github.com/mwouts/itables/pull/282)).
1017

1118
**Fixed**
12-
- The dependencies of the streamlit components have been updated to fix a vulnerability in `ws` ([Alert #1](https://github.com/mwouts/itables/security/dependabot/1))
19+
- The dependencies of the streamlit components have been updated to fix a vulnerability in `ws` ([Alert 1](https://github.com/mwouts/itables/security/dependabot/1))
1320

1421

1522
2.1.1 (2024-06-08)

src/itables/datatables_format.py

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,15 @@ def datatables_rows(df, count=None, warn_on_unexpected_types=False, pure_json=Fa
129129
def n_suffix_for_bigints(js, pure_json=False):
130130
def n_suffix(matchobj):
131131
if pure_json:
132-
return '"' + matchobj.group(1) + '"' + matchobj.group(2)
133-
return 'BigInt("' + matchobj.group(1) + '")' + matchobj.group(2)
132+
return matchobj.group(1) + '"' + matchobj.group(2) + '"' + matchobj.group(3)
133+
return (
134+
matchobj.group(1)
135+
+ 'BigInt("'
136+
+ matchobj.group(2)
137+
+ '")'
138+
+ matchobj.group(3)
139+
)
134140

135-
return re.sub(r"(-?\d{16,})(,|])", n_suffix, js)
141+
big_int_re = re.compile(r"^([\[\s]+)(-?\d{16,})(\]*)$")
142+
parts = js.split(",")
143+
return ",".join(re.sub(big_int_re, n_suffix, part) for part in parts)

src/itables/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
"""ITables' version number"""
22

3-
__version__ = "2.1.2"
3+
__version__ = "2.1.3"

tests/test_datatables_format.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,3 +154,19 @@ def test_encode_max_int(large):
154154
def test_encode_not_max_int(large):
155155
large //= 10
156156
assert n_suffix_for_bigints(json.dumps([large])) == "[{}]".format(large)
157+
158+
159+
def test_encode_mixed_contents():
160+
# Make sure that the bigint escape works for mixed content # 291
161+
df = pd.DataFrame(
162+
{
163+
"bigint": [1666767918216000000],
164+
"int": [1699300000000],
165+
"float": [0.9510565400123596],
166+
"neg": [-0.30901700258255005],
167+
}
168+
)
169+
assert (
170+
datatables_rows(df)
171+
== '[[BigInt("1666767918216000000"), 1699300000000, 0.951057, -0.309017]]'
172+
)

tests/test_polars.py

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import pytest
22

33
from itables import to_html_datatable
4+
from itables.javascript import datatables_rows
45
from itables.sample_dfs import get_dict_of_test_dfs, get_dict_of_test_series
56

67
try:
@@ -21,3 +22,19 @@ def test_show_polars_series(name, x, use_to_html):
2122
)
2223
def test_show_polars_df(name, df, use_to_html):
2324
to_html_datatable(df, use_to_html)
25+
26+
27+
def test_encode_mixed_contents():
28+
# Make sure that the bigint escape works for mixed content # 291
29+
df = polars.DataFrame(
30+
{
31+
"bigint": [1666767918216000000],
32+
"int": [1699300000000],
33+
"float": [0.9510565400123596],
34+
"neg": [-0.30901700258255005],
35+
}
36+
)
37+
assert (
38+
datatables_rows(df)
39+
== '[[BigInt("1666767918216000000"), 1699300000000, 0.9510565400123596, -0.30901700258255005]]'
40+
)

0 commit comments

Comments (0)