@@ -4882,7 +4882,16 @@ def to_tf(
4882 4882 >>> import ray
4883 4883 >>> ds = ray.data.read_csv("s3://anonymous@air-example-data/iris.csv")
4884 4884 >>> ds
4885      - Dataset(num_rows=?, schema=...)
     4885 + Dataset(
     4886 +     num_rows=?,
     4887 +     schema={
     4888 +         sepal length (cm): double,
     4889 +         sepal width (cm): double,
     4890 +         petal length (cm): double,
     4891 +         petal width (cm): double,
     4892 +         target: int64
     4893 +     }
     4894 + )
4886 4895
4887 4896 If your model accepts a single tensor as input, specify a single feature column.
4888 4897
@@ -4904,7 +4913,16 @@ def to_tf(
4904 4913 >>> ds = preprocessor.transform(ds)
4905 4914 >>> ds
4906 4915 Concatenator
4907      - +- Dataset(num_rows=?, schema=...)
     4916 + +- Dataset(
     4917 +        num_rows=?,
     4918 +        schema={
     4919 +            sepal length (cm): double,
     4920 +            sepal width (cm): double,
     4921 +            petal length (cm): double,
     4922 +            petal width (cm): double,
     4923 +            target: int64
     4924 +        }
     4925 +    )
4908 4926 >>> ds.to_tf("features", "target")
4909 4927 <_OptionsDataset element_spec=(TensorSpec(shape=(None, 4), dtype=tf.float64, name='features'), TensorSpec(shape=(None,), dtype=tf.int64, name='target'))>
4910 4928
@@ -5609,7 +5627,16 @@ def serialize_lineage(self) -> bytes:
5609 5627
5610 5628 .. testoutput::
5611 5629
5612      - Dataset(num_rows=?, schema=...)
     5630 + Dataset(
     5631 +     num_rows=?,
     5632 +     schema={
     5633 +         sepal length (cm): double,
     5634 +         sepal width (cm): double,
     5635 +         petal length (cm): double,
     5636 +         petal width (cm): double,
     5637 +         target: int64
     5638 +     }
     5639 + )
5613 5640
5614 5641
5615 5642 Returns:
@@ -5682,7 +5709,16 @@ def deserialize_lineage(serialized_ds: bytes) -> "Dataset":
5682 5709
5683 5710 .. testoutput::
5684 5711
5685      - Dataset(num_rows=?, schema=...)
     5712 + Dataset(
     5713 +     num_rows=?,
     5714 +     schema={
     5715 +         sepal length (cm): double,
     5716 +         sepal width (cm): double,
     5717 +         petal length (cm): double,
     5718 +         petal width (cm): double,
     5719 +         target: int64
     5720 +     }
     5721 + )
5686 5722
5687 5723 Args:
5688 5724 serialized_ds: The serialized Dataset that we wish to deserialize.
0 commit comments