Commit e58a6e0

carsonwang and kira-lin authored
Prepare 0.4.1 Release (#193)
* upgrade ray dependency to 1.8.0 (#192)
* upgrade ray dependency
* fix
* require 1.8.0
* Prepare for 0.4.1

Co-authored-by: Zhi Lin <zl1nn@outlook.com>
1 parent 7655efd commit e58a6e0

5 files changed (+8 -9 lines changed)

core/pom.xml

Lines changed: 2 additions & 2 deletions

@@ -6,7 +6,7 @@
 
   <groupId>com.intel</groupId>
   <artifactId>raydp</artifactId>
-  <version>0.4.0</version>
+  <version>0.4.1</version>
 
   <name>raydp</name>
 
@@ -21,7 +21,7 @@
     <fasterxml.jackson.version>2.12.2</fasterxml.jackson.version>
     <jersey.version>2.31</jersey.version>
     <scala.binary.version>2.12</scala.binary.version>
-    <ray.version>1.7.0</ray.version>
+    <ray.version>1.8.0</ray.version>
   </properties>
 
   <repositories>

core/src/main/scala/org/apache/spark/sql/raydp/ObjectStoreWriter.scala

Lines changed: 1 addition & 1 deletion

@@ -64,7 +64,7 @@ class ObjectStoreWriter(@transient val df: DataFrame) extends Serializable {
     val objectRefImpl = RayDPUtils.convert(objectRef)
     val objectId = objectRefImpl.getId
     val runtime = Ray.internal.asInstanceOf[RayRuntimeInternal]
-    val addressInfo = runtime.getObjectStore.promoteAndGetOwnershipInfo(objectId)
+    val addressInfo = runtime.getObjectStore.getOwnershipInfo(objectId)
     RecordBatch(addressInfo, objectId.getBytes, numRecords)
   }

python/raydp/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -17,6 +17,6 @@
 
 from raydp.context import init_spark, stop_spark
 
-__version__ = "0.4.0"
+__version__ = "0.4.1"
 
 __all__ = ["init_spark", "stop_spark"]

python/raydp/spark/dataset.py

Lines changed: 2 additions & 3 deletions

@@ -27,8 +27,7 @@
 from pyspark.sql.types import StructType
 from pyspark.sql.pandas.types import from_arrow_type
 import ray
-from ray.data import from_arrow
-from ray.data import Dataset
+from ray.data import Dataset, from_arrow_refs
 from ray.types import ObjectRef
 import ray.util.data as ml_dataset
 import ray.util.iter as parallel_it
@@ -466,7 +465,7 @@ def spark_dataframe_to_ray_dataset(df: sql.DataFrame,
         elif parallelism > num_part:
             df = df.repartition(parallelism)
     blocks, _ = _save_spark_df_to_object_store(df, False)
-    return from_arrow(blocks)
+    return from_arrow_refs(blocks)
 
 @ray.remote
 class RayDPConversionHelper():
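
To show the code path this hunk changes end to end, here is a hedged usage sketch: a Spark DataFrame created through RayDP is handed to spark_dataframe_to_ray_dataset, which now builds the Ray Dataset with ray.data.from_arrow_refs. The init_spark parameter names and the toy data are assumptions, not taken from this commit.

    # Sketch only, not part of the commit. Parameter names for init_spark and
    # the toy DataFrame are assumptions; spark_dataframe_to_ray_dataset and
    # stop_spark come from the files shown in this diff.
    import ray
    import raydp
    from raydp.spark.dataset import spark_dataframe_to_ray_dataset

    ray.init()
    spark = raydp.init_spark(app_name="raydp-0.4.1-example",  # assumed signature
                             num_executors=1,
                             executor_cores=1,
                             executor_memory="1G")

    df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "value"])

    # Saves the partitions to the Ray object store, then builds the Dataset
    # with ray.data.from_arrow_refs (the call changed in the hunk above).
    ds = spark_dataframe_to_ray_dataset(df)
    print(ds.count())

    raydp.stop_spark()
    ray.shutdown()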

python/setup.py

Lines changed: 2 additions & 2 deletions

@@ -29,7 +29,7 @@
 if package_name == 'raydp_nightly':
     VERSION = datetime.today().strftime("%Y.%m.%d.dev0")
 else:
-    VERSION = "0.4.0"
+    VERSION = "0.4.1"
 
 ROOT_DIR = os.path.dirname(__file__)
 
@@ -95,7 +95,7 @@ def run(self):
     "pandas >= 1.1.4",
     "psutil",
     "pyarrow >= 0.10",
-    "ray >= 1.7.0",
+    "ray >= 1.8.0",
     "pyspark >= 3.2.0",
     "netifaces"
 ]
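
Because the commit raises the ray floor from 1.7.0 to 1.8.0, a short, hedged check of an existing environment against the new constraint may be useful before upgrading; the packaging dependency used below is an assumption, not part of this commit.

    # Sketch only: check an existing environment against the new floor.
    # Using packaging.version here is an assumption, not part of this commit.
    import ray
    from packaging.version import Version

    if Version(ray.__version__) < Version("1.8.0"):
        raise RuntimeError(
            f"raydp 0.4.1 requires ray >= 1.8.0, found {ray.__version__}")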

0 commit comments
