@@ -32,6 +32,7 @@ __all__ = [
3232 "ColumnAlreadyExistsError" ,
3333 "ColumnDoesNotExistError" ,
3434 "InvalidColumnValueError" ,
35+ "PushError" ,
3536 "GcsStorageProviderFailed" ,
3637 "History" ,
3738 "InvalidType" ,
@@ -380,6 +381,9 @@ class TagExistsError(Exception):
 class CannotTagUncommittedDatasetError(Exception):
     pass
 
+class PushError(Exception):
+    pass
+
 class Tags:
     """
     Provides access to the tags within a dataset.
@@ -1155,7 +1159,7 @@ class Dataset(DatasetView):
11551159 """Restores dataset from a pickled state.
11561160
11571161 Args:
1158- state (dict): The pickled state used to restore the dataset.
1162+ arg0 (dict): The pickled state used to restore the dataset.
11591163 """
11601164
11611165 def add_column (
@@ -1331,6 +1335,57 @@ class Dataset(DatasetView):
         Asynchronously reverts any in-progress changes to the dataset you have made. Does not revert any changes that have been committed.
         """
 
+    def push(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> None:
+        """
+        Pushes any new history from this dataset to the dataset at the given URL.
+
+        Similar to [deeplake.Dataset.pull][] but in the other direction.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+    def push_async(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> FutureVoid:
+        """
+        Asynchronously pushes any new history from this dataset to the dataset at the given URL.
+
+        Similar to [deeplake.Dataset.pull_async][] but in the other direction.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
+    def pull(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> None:
+        """
+        Pulls any new history from the dataset at the given URL into this dataset.
+
+        Similar to [deeplake.Dataset.push][] but in the other direction.
+
+        Parameters:
+            url: The URL of the source dataset to pull from
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+    def pull_async(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> FutureVoid:
+        """
+        Asynchronously pulls any new history from the dataset at the given URL into this dataset.
+
+        Similar to [deeplake.Dataset.push_async][] but in the other direction.
+
+        Parameters:
+            url: The URL of the source dataset to pull from
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
+
     @property
     def history(self) -> History:
         """
@@ -1406,7 +1461,7 @@ class ReadOnlyDataset(DatasetView):
     @property
     def tags(self) -> TagsView:
         """
-        The collection of [deeplake.TagsView][] within the dataset
+        The collection of [deeplake.TagView][]s within the dataset
         """
         ...
 
@@ -1474,6 +1529,27 @@ class ReadOnlyDataset(DatasetView):
14741529 """
14751530 ...
14761531
+    def push(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> None:
+        """
+        Pushes any history from this dataset to the dataset at the given URL.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+    def push_async(self, url: str, creds: dict[str, str] | None = None, token: str | None = None) -> FutureVoid:
+        """
+        Asynchronously pushes any history from this dataset to the dataset at the given URL.
+
+        Parameters:
+            url: The URL of the destination dataset
+            creds: Optional credentials needed to connect to the dataset
+            token: Optional deeplake token
+        """
+        ...
+
     def __getstate__(self) -> tuple:
         """Returns a dict that can be pickled and used to restore this dataset.
 
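
As a sketch of the read-only variant: since pushing only reads committed history from the local dataset, a read-only handle can still replicate itself elsewhere. `open_read_only` and the URLs below are assumptions for illustration, not part of this change.

```python
import deeplake

# Assumed helper for opening without write access, plus hypothetical URLs.
ro = deeplake.open_read_only("s3://my-bucket/source")
future = ro.push_async("al://my-org/backup")  # returns a FutureVoid
future.wait()                                 # assumes FutureVoid has a blocking wait()
```
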
@@ -1803,7 +1879,7 @@ def create(url: str, creds: dict[str, str] | None = None, token: str | None = No
             - To use credentials managed in your Activeloop organization, use the key 'creds_key': 'managed_key_name'. This requires the org_id dataset argument to be set.
             - If nothing is given, credentials are fetched from the environment variables. This is also the case when creds is not passed for cloud datasets.
         token (str, optional): Activeloop token, used for fetching credentials to the dataset at path if it is a Deep Lake dataset. This is optional, tokens are normally autogenerated.
-        schema (dict): The initial schema to use for the dataset. See `deeplake.schema` such as [deeplake.schemas.TextDocuments][] for common starting schemas.
+        schema (dict): The initial schema to use for the dataset. See `deeplake.schema` such as [deeplake.schemas.TextEmbeddings][] for common starting schemas.
 
         Examples:
             >>> import deeplake
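
For reference, a minimal sketch of creating a dataset with the corrected schema helper; the destination URL and the embedding-size argument are assumptions, and the exact `TextEmbeddings` signature may differ.

```python
import deeplake

# Hypothetical destination URL; embedding size 768 is an arbitrary example value.
ds = deeplake.create(
    "al://my-org/docs",
    schema=deeplake.schemas.TextEmbeddings(768),
)
ds.commit("initial schema")
```
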