diff --git a/doc/DaphneLib/APIRef.md b/doc/DaphneLib/APIRef.md index 14fb144b0..647ffa76e 100644 --- a/doc/DaphneLib/APIRef.md +++ b/doc/DaphneLib/APIRef.md @@ -29,6 +29,10 @@ However, as the methods largely map to DaphneDSL built-in functions, you can fin ### `DaphneContext` API Reference +**Importing data from Python lists:** + +- **`from_python`**`(mat: [], shared_memory=True, verbose=False, return_shape=False) -> Matrix` + **Importing data from other Python libraries:** - **`from_numpy`**`(mat: np.array, shared_memory=True, verbose=False, return_shape=False) -> Matrix` diff --git a/doc/DaphneLib/Overview.md b/doc/DaphneLib/Overview.md index a3428458a..6fc6f6c68 100644 --- a/doc/DaphneLib/Overview.md +++ b/doc/DaphneLib/Overview.md @@ -239,13 +239,65 @@ A comprehensive list of these methods can be found in the [DaphneLib API referen The data transfer from DaphneLib back to Python happens during the call to `compute()`. If the result of the computation in DAPHNE is a matrix, `compute()` returns a `numpy.ndarray` (or optionally a `tensorflow.Tensor` or `torch.Tensor`); if the result is a frame, it returns a `pandas.DataFrame`; and if the result is a scalar, it returns a plain Python scalar. -So far, DaphneLib can exchange data with numpy, pandas, TensorFlow, and PyTorch. +So far, DaphneLib can exchange data with numpy, pandas, TensorFlow, PyTorch, and plain Python lists. By default, the data transfer is via shared memory (and in many cases zero-copy). Numpy and pandas are *required* dependencies for DaphneLib, so they should anyway be installed. TensorFlow and PyTorch are *optional* for DaphneLib; if these libraries are not installed, DaphneLib cannot exchange data with them, but all remaining features still work. In case you run DAPHNE inside the [`daphne-dev` container](/doc/GettingStarted.md), please note that TensorFlow and PyTorch are *not* included in the `daphne-dev` container due to their large footprint. 
Please follow the [instructions](/doc/development/InstallPythonLibsInContainer.md) on installing Python libraries in the `daphne-dev` container if you need them. +### Data Exchange with Plain Python Lists + +*Example:* + +```python +from daphne.context.daphne_context import DaphneContext + +dc = DaphneContext() + +# Create a python list. +a = [10, 20, 30, 40, 50, 60] + +# Transfer data to DaphneLib (lazily evaluated). +X = dc.from_python(a) + +print("How DAPHNE sees the data:") +X.print().compute() + +# Add 100 to each value in X. +X = X + 100.0 + +# Compute in DAPHNE, transfer result back to Python. +print("\nResult of adding 100 to each value, back in Python:") +print(X.compute()) +``` + +*Run by:* +```shell +python3 scripts/examples/daphnelib/data-exchange-python.py +``` + +*Output:* +```text +How DAPHNE sees the data: +DenseMatrix(6x1, int64_t) +10 +20 +30 +40 +50 +60 + +Result of adding 100 to each value, back in Python: +[[110.] + [120.] + [130.] + [140.] + [150.] + [160.]] +``` + + ### Data Exchange with numpy *Example:* diff --git a/scripts/examples/daphnelib/data-exchange-python.py b/scripts/examples/daphnelib/data-exchange-python.py new file mode 100644 index 000000000..136e18416 --- /dev/null +++ b/scripts/examples/daphnelib/data-exchange-python.py @@ -0,0 +1,33 @@ +# Copyright 2025 The DAPHNE Consortium +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from daphne.context.daphne_context import DaphneContext + +dc = DaphneContext() + +# Create a python list. +a = [10, 20, 30, 40, 50, 60] + +# Transfer data to DaphneLib (lazily evaluated). +X = dc.from_python(a) + +print("How DAPHNE sees the data:") +X.print().compute() + +# Add 100 to each value in X. +X = X + 100.0 + +# Compute in DAPHNE, transfer result back to Python. +print("\nResult of adding 100 to each value, back in Python:") +print(X.compute()) \ No newline at end of file diff --git a/src/api/python/daphne/context/daphne_context.py b/src/api/python/daphne/context/daphne_context.py index bdc1da66f..82c46ecad 100644 --- a/src/api/python/daphne/context/daphne_context.py +++ b/src/api/python/daphne/context/daphne_context.py @@ -32,7 +32,7 @@ from daphne.operator.nodes.do_while_loop import DoWhileLoop from daphne.operator.nodes.multi_return import MultiReturn from daphne.operator.operation_node import OperationNode -from daphne.utils.consts import VALID_INPUT_TYPES, VALID_COMPUTED_TYPES, TMP_PATH, F64, F32, SI64, SI32, SI8, UI64, UI32, UI8 +from daphne.utils.consts import VALID_INPUT_TYPES, VALID_COMPUTED_TYPES, TMP_PATH, F64, F32, SI64, SI32, SI8, UI64, UI32, UI8, STR import numpy as np import pandas as pd @@ -69,6 +69,55 @@ def readFrame(self, file: str) -> Frame: """ unnamed_params = ['\"'+file+'\"'] return Frame(self, 'readFrame', unnamed_params) + + def from_python(self, mat: [], shared_memory=True, verbose=False, return_shape=False): + """Generates a `DAGNode` representing a matrix with data given by a Python `list`. + :param mat: The Python list. + :param shared_memory: Whether to use shared memory data transfer (True) or not (False). + :param verbose: Whether to print timing information (True) or not (False). + :param return_shape: Whether to return the original shape of the input array. + :return: The data from Python as a Matrix. 
+ """ + + original_mat_length = len(mat) + original_mat_dim2_length = None + original_mat_dim3_length = None + + # check if mat has one, two or more dimensions + if isinstance(mat[0], list): + original_list_dim2_length = len(mat[0]) + if isinstance(mat[0][0], list): + original_list_dim3_length = len(mat[0][0]) + + if verbose: + start_time = time.time() + + # Check if the python list is 2d or higher dimensional. + if original_mat_dim2_length is not None and original_mat_dim3_length is None: + # If 2d, handle as a matrix, convert to numpy array. + mat = np.array(mat) + # Using the existing from_numpy method for 2d arrays. + matrix = self.from_numpy(mat, shared_memory, verbose) + else: + # If higher dimensional, reshape to 2d and handle as a matrix. + # Store the original numpy representation. + original_mat = np.array(mat) + # Reshape to 2d using numpy's zero copy reshape. + reshaped_mat = original_mat.reshape((original_mat_length, -1)) + + if verbose: + # Check if the original and reshaped lists share memory. + shares_memory = np.shares_memory(mat, reshaped_mat) + print(f"from_python(): original and reshaped lists share memory: {shares_memory}") + + # Use the existing from_numpy method for the reshaped 2D array + matrix = self.from_numpy(mat=reshaped_mat, shared_memory=shared_memory, verbose=verbose) + + if verbose: + print(f"from_python(): total Python-side execution time: {(time.time() - start_time):.10f} seconds") + + # Return the matrix, and the original shape if return_shape is set to True. + return (matrix, (original_mat_length, original_mat_dim2_length, original_mat_dim3_length)) if return_shape else matrix def from_numpy(self, mat: np.array, shared_memory=True, verbose=False, return_shape=False): """Generates a `DAGNode` representing a matrix with data given by a numpy `array`. 
@@ -88,6 +137,7 @@ def from_numpy(self, mat: np.array, shared_memory=True, verbose=False, return_sh if mat.ndim == 1: rows = mat.shape[0] cols = 1 + mat = mat.reshape(-1, 1) elif mat.ndim >= 2: if mat.ndim > 2: mat = mat.reshape((original_shape[0], -1)) @@ -121,6 +171,8 @@ def from_numpy(self, mat: np.array, shared_memory=True, verbose=False, return_sh vtc = UI32 elif d_type == np.uint64: vtc = UI64 + elif mat.dtype.kind in {'U', 'S', 'O'}: + raise RuntimeError("transfering a numpy array of strings to DAPHNE via shared memory is not supported yet") else: # TODO Raise an error here? print("unsupported numpy dtype") @@ -216,6 +268,8 @@ def from_pandas(self, df: pd.DataFrame, shared_memory=True, verbose=False, keepI vtc = UI32 elif d_type == np.uint64: vtc = UI64 + elif mat.dtype.kind in {'U', 'S', 'O'}: + vtc = STR else: raise TypeError(f'Unsupported numpy dtype in column "{column}" ({idx})') @@ -251,7 +305,7 @@ def from_pandas(self, df: pd.DataFrame, shared_memory=True, verbose=False, keepI # This feature is only available if TensorFlow is available. 
if isinstance(tf, ImportError): - def from_tensorflow(self, tensor , shared_memory=True, verbose=False, return_shape=False): + def from_tensorflow(self, tensor, shared_memory=True, verbose=False, return_shape=False): raise tf else: def from_tensorflow(self, tensor: tf.Tensor, shared_memory=True, verbose=False, return_shape=False): diff --git a/src/api/python/daphne/operator/nodes/frame.py b/src/api/python/daphne/operator/nodes/frame.py index 6d4cf76e4..bdb309938 100644 --- a/src/api/python/daphne/operator/nodes/frame.py +++ b/src/api/python/daphne/operator/nodes/frame.py @@ -73,7 +73,7 @@ def code_line(self, var_name: str, unnamed_input_vars: Sequence[str], named_inpu "numCols": self._pd_dataframe.shape[1], "schema": [ { - "label": self._pd_dataframe.columns[i], + "label": str(self._pd_dataframe.columns[i]), "valueType": self.getDType(self._pd_dataframe.dtypes.iloc[i]) } for i in range(self._pd_dataframe.shape[1]) @@ -94,6 +94,8 @@ def getDType(self, d_type): return "si64" elif d_type == "float64": return "f64" + elif d_type.kind in {'U', 'S', 'O'}: + return "str" else: print("Error") diff --git a/src/api/python/daphne/operator/nodes/matrix.py b/src/api/python/daphne/operator/nodes/matrix.py index 1f9b79f00..ad193e8ae 100644 --- a/src/api/python/daphne/operator/nodes/matrix.py +++ b/src/api/python/daphne/operator/nodes/matrix.py @@ -48,13 +48,13 @@ def __init__(self, daphne_context: 'DaphneContext', operation:str, unnamed_input local_data: np.array = None, brackets:bool = False, left_brackets: bool = False, copy: bool = False, consumer_list: List['OperationNode'] = None)->'Matrix': self.__copy = copy - is_python_local_data = False if local_data is not None: - self._np_array = local_data is_python_local_data = True else: self._np_array = None + is_python_local_data = False + super().__init__(daphne_context, operation, unnamed_input_nodes, named_input_nodes, OutputType.MATRIX,is_python_local_data, brackets, left_brackets, consumer_list) def code_line(self, var_name: 
str, unnamed_input_vars: Sequence[str], @@ -66,7 +66,15 @@ def code_line(self, var_name: str, unnamed_input_vars: Sequence[str], if self._is_numpy() and self.operation == "readMatrix": with open(TMP_PATH+"/"+var_name+".csv", "wb") as f: - np.savetxt(f, self._np_array, delimiter=",") + if self._np_array.dtype in [np.float32, np.float64]: + fmt = "%.18e" + elif self._np_array.dtype in [np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64]: + fmt = "%i" + elif self._np_array.dtype.kind in {'U', 'S', 'O'}: + fmt = "%s" + else: + raise RuntimeError("unsupport numpy dtype") + np.savetxt(f, self._np_array, delimiter=",", fmt=fmt) with open(TMP_PATH+"/"+var_name+".csv.meta", "w") as f: json.dump( { @@ -79,24 +87,28 @@ def code_line(self, var_name: str, unnamed_input_vars: Sequence[str], return code_line def getDType(self, d_type): - if d_type == np.dtype('f4'): + if d_type == np.dtype('float32'): return "f32" - elif d_type == np.dtype('f8'): + elif d_type == np.dtype('float64'): return "f64" - elif d_type == np.dtype('si2'): - return "si8" - elif d_type == np.dtype('si4'): + elif d_type == np.dtype('int16'): + return "si16" + elif d_type == np.dtype('int32'): return "si32" - elif d_type == np.dtype('si8'): + elif d_type == np.dtype('int64'): return "si64" - elif d_type == np.dtype('ui2'): - return "ui8" - elif d_type == np.dtype('ui4'): - return "ui8" - elif d_type == np.dtype('ui8'): + elif d_type == np.dtype('uint8'): return "ui8" + elif d_type == np.dtype('uint16'): + return "ui16" + elif d_type == np.dtype('uint32'): + return "ui32" + elif d_type == np.dtype('uint64'): + return "ui64" + elif d_type.kind in {'U', 'S', 'O'}: + return "str" else: - print("Error") + raise RuntimeError("unsupported numpy dtype") def _is_numpy(self) -> bool: return self._np_array is not None diff --git a/src/api/python/daphne/operator/operation_node.py b/src/api/python/daphne/operator/operation_node.py index 5b5e0a854..331585ed6 100644 --- 
a/src/api/python/daphne/operator/operation_node.py +++ b/src/api/python/daphne/operator/operation_node.py @@ -91,7 +91,7 @@ def update_node_in_input_list(self, new_node, current_node): current_index = self._unnamed_input_nodes.index(current_node) self._unnamed_input_nodes[current_index] = new_node - def compute(self, type="shared memory", verbose=False, asTensorFlow=False, asPyTorch=False, shape=None, useIndexColumn=False): + def compute(self, type="shared memory", verbose=False, asTensorFlow=False, asPyTorch=False, shape=None, useIndexColumn=False) -> Union[np.array, pd.DataFrame, 'tf.Tensor', 'torch.Tensor', float]: """ Compute function for processing the Daphne Object or operation node and returning the results. The function builds a DaphneDSL script from the node and its context, executes it, and processes the results @@ -194,7 +194,16 @@ def compute(self, type="shared memory", verbose=False, asTensorFlow=False, asPyT ) self.clear_tmp() elif self._output_type == OutputType.MATRIX and type=="files": - arr = np.genfromtxt(result, delimiter=',') + # Ensure string data is handled correctly + arr = np.genfromtxt(result, delimiter=',', dtype=None, encoding='utf-8') + meta_file_name = result + ".meta" + if os.path.exists(meta_file_name): + with open(meta_file_name, "r") as meta_file: + meta_data = json.load(meta_file) + if meta_data.get("valueType") == "str": + arr = arr.astype(str) + else: + print(f"metadata file not found: {meta_file_name}") self.clear_tmp() return arr elif self._output_type == OutputType.SCALAR: diff --git a/src/api/python/daphne/script_building/script.py b/src/api/python/daphne/script_building/script.py index f1d5d01b0..18bea36c5 100644 --- a/src/api/python/daphne/script_building/script.py +++ b/src/api/python/daphne/script_building/script.py @@ -90,9 +90,12 @@ def clear(self, dag_root:DAGNode): def execute(self): temp_out_path = os.path.join(TMP_PATH, "tmpdaphne.daphne") - temp_out_file = open(temp_out_path, "w") - 
temp_out_file.writelines(self.daphnedsl_script) - temp_out_file.close() + with open(temp_out_path, "w") as temp_out_file: + temp_out_file.writelines(self.daphnedsl_script) + + # Check if the file exists + if not os.path.exists(temp_out_path): + raise RuntimeError(f"file '{temp_out_path}' does not exist") #os.environ['OPENBLAS_NUM_THREADS'] = '1' res = DaphneLib.daphne(ctypes.c_char_p(str.encode(PROTOTYPE_PATH)), ctypes.c_char_p(str.encode(temp_out_path))) diff --git a/src/api/python/daphne/utils/consts.py b/src/api/python/daphne/utils/consts.py index c1a7a7c9f..97358ae09 100644 --- a/src/api/python/daphne/utils/consts.py +++ b/src/api/python/daphne/utils/consts.py @@ -30,7 +30,7 @@ from daphne.operator.nodes.frame import Frame from daphne.operator.nodes.scalar import Scalar -VALID_INPUT_TYPES = Union['DAGNode', str, int, float, bool] +VALID_INPUT_TYPES = Union['DAGNode', str, int, float, bool, object] # These are the operator symbols used in DaphneDSL (not in Python). BINARY_OPERATIONS = ['+', '-', '/', '*', '^', '%', '<', '<=', '>', '>=', '==', '!=', '@', '&&', '||'] VALID_ARITHMETIC_TYPES = Union['DAGNode', int, float] @@ -55,3 +55,4 @@ UI64 = 5 F32 = 6 F64 = 7 +STR = 8 diff --git a/test/api/python/DaphneLibTest.cpp b/test/api/python/DaphneLibTest.cpp index 95179f171..e3107f9f1 100644 --- a/test/api/python/DaphneLibTest.cpp +++ b/test/api/python/DaphneLibTest.cpp @@ -66,6 +66,65 @@ const std::string dirPath = "test/api/python/"; compareDaphneLibToStr(str, prefix + ".py"); \ } +MAKE_TEST_CASE("data_transfer_numpy_array_float64_1d") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_1d_vector") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_1d_shared_memory") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_2d") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_2d_shared_memory") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_large_ones") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_large_ones_shared_memory") 
+MAKE_TEST_CASE("data_transfer_numpy_array_float64_large_zeros") +MAKE_TEST_CASE("data_transfer_numpy_array_float64_large_zeros_shared_memory") +MAKE_TEST_CASE("data_transfer_numpy_array_int64_1d") +MAKE_TEST_CASE("data_transfer_numpy_array_int64_1d_vector") +MAKE_TEST_CASE("data_transfer_numpy_array_string_1d") +MAKE_TEST_CASE("data_transfer_numpy_array_string_1d_vector") +MAKE_TEST_CASE("data_transfer_numpy_array_string_2d") + +MAKE_TEST_CASE("data_transfer_python_list_float64_1d") +MAKE_TEST_CASE("data_transfer_python_list_float64_1d_shared_memory") +MAKE_TEST_CASE("data_transfer_python_list_float64_2d") +MAKE_TEST_CASE("data_transfer_python_list_float64_2d_shared_memory") +MAKE_TEST_CASE("data_transfer_python_list_float64_large_ones") +MAKE_TEST_CASE("data_transfer_python_list_float64_large_ones_shared_memory") +MAKE_TEST_CASE("data_transfer_python_list_float64_large_zeros") +MAKE_TEST_CASE("data_transfer_python_list_float64_large_zeros_shared_memory") +MAKE_TEST_CASE("data_transfer_python_list_int64_1d") +MAKE_TEST_CASE("data_transfer_python_list_string_1d") +MAKE_TEST_CASE("data_transfer_python_list_string_2d") + +MAKE_TEST_CASE("data_transfer_pandas_series_float64") +MAKE_TEST_CASE("data_transfer_pandas_series_float64_shared_memory") +MAKE_TEST_CASE("data_transfer_pandas_series_float64_large_ones") +MAKE_TEST_CASE("data_transfer_pandas_series_float64_large_ones_shared_memory") +MAKE_TEST_CASE("data_transfer_pandas_series_float64_large_zeros") +MAKE_TEST_CASE("data_transfer_pandas_series_float64_large_zeros_shared_memory") +MAKE_TEST_CASE("data_transfer_pandas_series_int64") +MAKE_TEST_CASE("data_transfer_pandas_series_int64_shared_memory") +MAKE_TEST_CASE("data_transfer_pandas_series_string") + +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_1d", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_1d_shared_memory", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_2d", 
"DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_2d_shared_memory", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_large_ones", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_large_ones_shared_memory", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_large_zeros", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_float64_large_zeros_shared_memory", "DAPHNE_DEP_AVAIL_PYTORCH") +MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_tensor_int64_1d", "DAPHNE_DEP_AVAIL_PYTORCH") + +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_1d", "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_1d_shared_memory", "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_2d", "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_2d_shared_memory", "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_large_ones", "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_large_ones_shared_memory", + "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_large_zeros", "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory", + "DAPHNE_DEP_AVAIL_TENSFORFLOW") +MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_tensor_int64_1d", "DAPHNE_DEP_AVAIL_TENSFORFLOW") + MAKE_TEST_CASE("data_transfer_numpy_1") MAKE_TEST_CASE("data_transfer_numpy_2") MAKE_TEST_CASE("data_transfer_numpy_3") @@ -76,6 +135,7 @@ MAKE_TEST_CASE("data_transfer_pandas_4_sparse_dataframe") MAKE_TEST_CASE("data_transfer_pandas_5_categorical_dataframe") MAKE_TEST_CASE_ENVVAR("data_transfer_pytorch_1", 
"DAPHNE_DEP_AVAIL_PYTORCH") MAKE_TEST_CASE_ENVVAR("data_transfer_tensorflow_1", "DAPHNE_DEP_AVAIL_TENSFORFLOW") + MAKE_TEST_CASE("frame_innerJoin") MAKE_TEST_CASE("frame_setColLabels") MAKE_TEST_CASE("frame_setColLabelsPrefix") diff --git a/test/api/python/data_transfer_numpy_array_float64_1d.daphne b/test/api/python/data_transfer_numpy_array_float64_1d.daphne new file mode 100644 index 000000000..405fe9fdf --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_1d.daphne @@ -0,0 +1,2 @@ +m1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_1d.py b/test/api/python/data_transfer_numpy_array_float64_1d.py new file mode 100644 index 000000000..bb89eaa0c --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_1d.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=np.float64).reshape(-1, 1) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_1d_shared_memory.daphne b/test/api/python/data_transfer_numpy_array_float64_1d_shared_memory.daphne new file mode 100644 index 000000000..405fe9fdf --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_1d_shared_memory.daphne @@ -0,0 +1,2 @@ +m1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_1d_shared_memory.py b/test/api/python/data_transfer_numpy_array_float64_1d_shared_memory.py new file mode 100644 index 000000000..180d8f214 --- /dev/null +++ 
b/test/api/python/data_transfer_numpy_array_float64_1d_shared_memory.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via shared memory. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=np.float64).reshape(-1, 1) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_1d_vector.daphne b/test/api/python/data_transfer_numpy_array_float64_1d_vector.daphne new file mode 100644 index 000000000..6f4935e33 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_1d_vector.daphne @@ -0,0 +1,3 @@ +m1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); + +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_1d_vector.py b/test/api/python/data_transfer_numpy_array_float64_1d_vector.py new file mode 100644 index 000000000..7724db35c --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_1d_vector.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=np.float64) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_2d.daphne b/test/api/python/data_transfer_numpy_array_float64_2d.daphne new file mode 100644 index 000000000..43bb76e67 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_2d.daphne @@ -0,0 +1,2 @@ +m1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_2d.py b/test/api/python/data_transfer_numpy_array_float64_2d.py new file mode 100644 index 000000000..6b08d7232 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_2d.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array([[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]], dtype=np.float64) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_2d_shared_memory.daphne b/test/api/python/data_transfer_numpy_array_float64_2d_shared_memory.daphne new file mode 100644 index 000000000..43bb76e67 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_2d_shared_memory.daphne @@ -0,0 +1,2 @@ +m1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_2d_shared_memory.py b/test/api/python/data_transfer_numpy_array_float64_2d_shared_memory.py new file mode 100644 index 000000000..f646831fc --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_2d_shared_memory.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via shared memory. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array([[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]], dtype=np.float64) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_ones.daphne b/test/api/python/data_transfer_numpy_array_float64_large_ones.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_ones.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_ones.py b/test/api/python/data_transfer_numpy_array_float64_large_ones.py new file mode 100644 index 000000000..6207d1051 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_ones.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = np.ones((1000, 1000), dtype=np.float64) + +dctx = DaphneContext() + +dctx.from_numpy(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_ones_shared_memory.daphne b/test/api/python/data_transfer_numpy_array_float64_large_ones_shared_memory.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_ones_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_ones_shared_memory.py b/test/api/python/data_transfer_numpy_array_float64_large_ones_shared_memory.py new file mode 100644 index 000000000..c6dbb25fd --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_ones_shared_memory.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via shared memory. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = np.ones((1000, 1000), dtype=np.float64) + +dctx = DaphneContext() + +dctx.from_numpy(m, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_zeros.daphne b/test/api/python/data_transfer_numpy_array_float64_large_zeros.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_zeros.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_zeros.py b/test/api/python/data_transfer_numpy_array_float64_large_zeros.py new file mode 100644 index 000000000..9d2655ed6 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_zeros.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = np.zeros((1000, 1000), dtype=np.float64) + +dctx = DaphneContext() + +dctx.from_numpy(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_zeros_shared_memory.daphne b/test/api/python/data_transfer_numpy_array_float64_large_zeros_shared_memory.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_zeros_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_float64_large_zeros_shared_memory.py b/test/api/python/data_transfer_numpy_array_float64_large_zeros_shared_memory.py new file mode 100644 index 000000000..eb96da375 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_float64_large_zeros_shared_memory.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE 
and back, via shared memory. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = np.zeros((1000, 1000), dtype=np.float64) + +dctx = DaphneContext() + +dctx.from_numpy(m, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_int64_1d.daphne b/test/api/python/data_transfer_numpy_array_int64_1d.daphne new file mode 100644 index 000000000..8b32302ee --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_int64_1d.daphne @@ -0,0 +1,2 @@ +m = as.si64([0, 1, -1, 12, -12, 1000, -1000]); +print(m); diff --git a/test/api/python/data_transfer_numpy_array_int64_1d.py b/test/api/python/data_transfer_numpy_array_int64_1d.py new file mode 100644 index 000000000..726470fde --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_int64_1d.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = np.array([0, 1, -1, 12, -12, 1000, -1000], dtype=np.int64).reshape(-1, 1) + +dctx = DaphneContext() + +dctx.from_numpy(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_int64_1d_vector.daphne b/test/api/python/data_transfer_numpy_array_int64_1d_vector.daphne new file mode 100644 index 000000000..cc4b857b7 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_int64_1d_vector.daphne @@ -0,0 +1,2 @@ +m = as.si64([0, 1, -1, 12, -12, 1000, -1000]); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_int64_1d_vector.py b/test/api/python/data_transfer_numpy_array_int64_1d_vector.py new file mode 100644 index 000000000..b80105710 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_int64_1d_vector.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = np.array([0, 1, -1, 12, -12, 1000, -1000], dtype=np.int64) + +dctx = DaphneContext() + +dctx.from_numpy(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_string_1d.daphne b/test/api/python/data_transfer_numpy_array_string_1d.daphne new file mode 100644 index 000000000..0b23d2658 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_string_1d.daphne @@ -0,0 +1,3 @@ +m1 = reshape(["apple","banana","cherry"], 3, 1); + +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_string_1d.py b/test/api/python/data_transfer_numpy_array_string_1d.py new file mode 100644 index 000000000..f65ae171d --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_string_1d.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array(["apple", "banana", "cherry"], dtype=str).reshape(-1, 1) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=False).print().compute()) diff --git a/test/api/python/data_transfer_numpy_array_string_1d_vector.daphne b/test/api/python/data_transfer_numpy_array_string_1d_vector.daphne new file mode 100644 index 000000000..af2ab260d --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_string_1d_vector.daphne @@ -0,0 +1,3 @@ +m1 = ["apple","banana","cherry"]; + +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_string_1d_vector.py b/test/api/python/data_transfer_numpy_array_string_1d_vector.py new file mode 100644 index 000000000..dfc9a4986 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_string_1d_vector.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array(["apple", "banana", "cherry"], dtype=str) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=False).print().compute()) diff --git a/test/api/python/data_transfer_numpy_array_string_2d.daphne b/test/api/python/data_transfer_numpy_array_string_2d.daphne new file mode 100644 index 000000000..1a312e698 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_string_2d.daphne @@ -0,0 +1,3 @@ +m1 = reshape(["apple","banana","cherry","fig"] , 2, 2); + +print(m1); \ No newline at end of file diff --git a/test/api/python/data_transfer_numpy_array_string_2d.py b/test/api/python/data_transfer_numpy_array_string_2d.py new file mode 100644 index 000000000..d12cc9019 --- /dev/null +++ b/test/api/python/data_transfer_numpy_array_string_2d.py @@ -0,0 +1,10 @@ +# Data transfer from numpy to DAPHNE and back, via files. + +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m1 = np.array([["apple", "banana"], ["cherry", "fig"]], dtype=str) + +dctx = DaphneContext() + +(dctx.from_numpy(m1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_1.daphne b/test/api/python/data_transfer_pandas_1.daphne index 9c7772d41..cba30a4f4 100644 --- a/test/api/python/data_transfer_pandas_1.daphne +++ b/test/api/python/data_transfer_pandas_1.daphne @@ -14,5 +14,5 @@ * limitations under the License. 
*/ -df = createFrame([1, 2, 3], [-1.1, -2.2, -3.3], "abc", "def"); +df = createFrame([1, 2, 3], [-1.1, -2.2, -3.3], ["red", "green", "blue"], "abc", "def", "ghi"); print(df); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_1.py b/test/api/python/data_transfer_pandas_1.py index 0f70bf6f7..c1c803c74 100644 --- a/test/api/python/data_transfer_pandas_1.py +++ b/test/api/python/data_transfer_pandas_1.py @@ -20,7 +20,7 @@ import pandas as pd from daphne.context.daphne_context import DaphneContext -df = pd.DataFrame({"abc": [1, 2, 3], "def": [-1.1, -2.2, -3.3]}) +df = pd.DataFrame({"abc": [1, 2, 3], "def": [-1.1, -2.2, -3.3], "ghi": ["red", "green", "blue"]}) dctx = DaphneContext() diff --git a/test/api/python/data_transfer_pandas_series_float64.daphne b/test/api/python/data_transfer_pandas_series_float64.daphne new file mode 100644 index 000000000..db5f793d3 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64.daphne @@ -0,0 +1,2 @@ +s = createFrame([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf], "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64.py b/test/api/python/data_transfer_pandas_series_float64.py new file mode 100644 index 000000000..3b1e08e05 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via files. 
+ +import numpy as np +import pandas as pd +from daphne.context.daphne_context import DaphneContext + +s = pd.Series([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf]) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=False).print().compute(type="files") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_ones.daphne b/test/api/python/data_transfer_pandas_series_float64_large_ones.daphne new file mode 100644 index 000000000..27c6dac2f --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_ones.daphne @@ -0,0 +1,2 @@ +s = createFrame(fill(1.0, 100000, 1), "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_ones.py b/test/api/python/data_transfer_pandas_series_float64_large_ones.py new file mode 100644 index 000000000..556d192c0 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_ones.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via files. 
+ +import pandas as pd +import numpy as np +from daphne.context.daphne_context import DaphneContext + +s = pd.Series(np.ones(100000)) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=False).print().compute(type="files") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_ones_shared_memory.daphne b/test/api/python/data_transfer_pandas_series_float64_large_ones_shared_memory.daphne new file mode 100644 index 000000000..27c6dac2f --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_ones_shared_memory.daphne @@ -0,0 +1,2 @@ +s = createFrame(fill(1.0, 100000, 1), "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_ones_shared_memory.py b/test/api/python/data_transfer_pandas_series_float64_large_ones_shared_memory.py new file mode 100644 index 000000000..b98c275cd --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_ones_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via shared memory. 
+ +import pandas as pd +import numpy as np +from daphne.context.daphne_context import DaphneContext + +s = pd.Series(np.ones(100000)) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=True).print().compute(type="shared memory") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_zeros.daphne b/test/api/python/data_transfer_pandas_series_float64_large_zeros.daphne new file mode 100644 index 000000000..ee3fa8838 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_zeros.daphne @@ -0,0 +1,2 @@ +s = createFrame(fill(0.0, 100000, 1), "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_zeros.py b/test/api/python/data_transfer_pandas_series_float64_large_zeros.py new file mode 100644 index 000000000..7c7abfa1c --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_zeros.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via files. 
+ +import pandas as pd +import numpy as np +from daphne.context.daphne_context import DaphneContext + +s = pd.Series(np.zeros(100000)) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=False).print().compute(type="files") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_zeros_shared_memory.daphne b/test/api/python/data_transfer_pandas_series_float64_large_zeros_shared_memory.daphne new file mode 100644 index 000000000..ee3fa8838 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_zeros_shared_memory.daphne @@ -0,0 +1,2 @@ +s = createFrame(fill(0.0, 100000, 1), "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_large_zeros_shared_memory.py b/test/api/python/data_transfer_pandas_series_float64_large_zeros_shared_memory.py new file mode 100644 index 000000000..1cca71c9a --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_large_zeros_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via shared memory. 
+ +import pandas as pd +import numpy as np +from daphne.context.daphne_context import DaphneContext + +s = pd.Series(np.zeros(100000)) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=True).print().compute(type="shared memory") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_shared_memory.daphne b/test/api/python/data_transfer_pandas_series_float64_shared_memory.daphne new file mode 100644 index 000000000..db5f793d3 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_shared_memory.daphne @@ -0,0 +1,2 @@ +s = createFrame([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf], "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_float64_shared_memory.py b/test/api/python/data_transfer_pandas_series_float64_shared_memory.py new file mode 100644 index 000000000..1a6193b60 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_float64_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via shared memory. 
+ +import numpy as np +import pandas as pd +from daphne.context.daphne_context import DaphneContext + +s = pd.Series([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf]) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=True).print().compute(type="shared memory") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_int64.daphne b/test/api/python/data_transfer_pandas_series_int64.daphne new file mode 100644 index 000000000..1f5ae3625 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_int64.daphne @@ -0,0 +1,2 @@ +s = createFrame([0, 1, -1, 12, -12, 1000, -1000], "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_int64.py b/test/api/python/data_transfer_pandas_series_int64.py new file mode 100644 index 000000000..8510dc3a4 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_int64.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via files. 
+ +import numpy as np +import pandas as pd +from daphne.context.daphne_context import DaphneContext + +s = pd.Series([0, 1, -1, 12, -12, 1000, -1000]) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=False).print().compute(type="files") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_int64_shared_memory.daphne b/test/api/python/data_transfer_pandas_series_int64_shared_memory.daphne new file mode 100644 index 000000000..1f5ae3625 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_int64_shared_memory.daphne @@ -0,0 +1,2 @@ +s = createFrame([0, 1, -1, 12, -12, 1000, -1000], "0"); +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_int64_shared_memory.py b/test/api/python/data_transfer_pandas_series_int64_shared_memory.py new file mode 100644 index 000000000..fc74fbcad --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_int64_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via shared memory. 
+ +import numpy as np +import pandas as pd +from daphne.context.daphne_context import DaphneContext + +s = pd.Series([0, 1, -1, 12, -12, 1000, -1000]) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=True).print().compute(type="shared memory") \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_string.daphne b/test/api/python/data_transfer_pandas_series_string.daphne new file mode 100644 index 000000000..c0b9608d6 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_string.daphne @@ -0,0 +1,2 @@ +s = {"0": ["apple", "banana", "cherry"]}; +print(s); \ No newline at end of file diff --git a/test/api/python/data_transfer_pandas_series_string.py b/test/api/python/data_transfer_pandas_series_string.py new file mode 100644 index 000000000..360f98935 --- /dev/null +++ b/test/api/python/data_transfer_pandas_series_string.py @@ -0,0 +1,11 @@ +# Data transfer from pandas to DAPHNE and back, via files. + +import pandas as pd +import numpy as np +from daphne.context.daphne_context import DaphneContext + +s = pd.Series(["apple", "banana", "cherry"], dtype=str) + +dctx = DaphneContext() + +dctx.from_pandas(s, shared_memory=False).print().compute(type="files") \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_1d.daphne b/test/api/python/data_transfer_python_list_float64_1d.daphne new file mode 100644 index 000000000..04a439a1a --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_1d.daphne @@ -0,0 +1,2 @@ +m = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_1d.py b/test/api/python/data_transfer_python_list_float64_1d.py new file mode 100644 index 000000000..126d7f9fd --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_1d.py @@ -0,0 +1,10 @@ +# Data transfer from python lists to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = [np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf] + +dctx = DaphneContext() + +(dctx.from_python(m, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_1d_shared_memory.daphne b/test/api/python/data_transfer_python_list_float64_1d_shared_memory.daphne new file mode 100644 index 000000000..04a439a1a --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_1d_shared_memory.daphne @@ -0,0 +1,2 @@ +m = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_1d_shared_memory.py b/test/api/python/data_transfer_python_list_float64_1d_shared_memory.py new file mode 100644 index 000000000..34e242d83 --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_1d_shared_memory.py @@ -0,0 +1,10 @@ +# Data transfer from python lists to DAPHNE and back, via shared memory. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = [np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf] + +dctx = DaphneContext() + +(dctx.from_python(m, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_2d.daphne b/test/api/python/data_transfer_python_list_float64_2d.daphne new file mode 100644 index 000000000..993a9b767 --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_2d.daphne @@ -0,0 +1,2 @@ +m = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_2d.py b/test/api/python/data_transfer_python_list_float64_2d.py new file mode 100644 index 000000000..4fe52fe42 --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_2d.py @@ -0,0 +1,10 @@ +# Data transfer from python lists to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = [[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]] + +dctx = DaphneContext() + +(dctx.from_python(m, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_2d_shared_memory.daphne b/test/api/python/data_transfer_python_list_float64_2d_shared_memory.daphne new file mode 100644 index 000000000..993a9b767 --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_2d_shared_memory.daphne @@ -0,0 +1,2 @@ +m = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_2d_shared_memory.py b/test/api/python/data_transfer_python_list_float64_2d_shared_memory.py new file mode 100644 index 000000000..97aae2720 --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_2d_shared_memory.py @@ -0,0 +1,10 @@ +# Data transfer from python lists to DAPHNE and back, via shared memory. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = [[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]] + +dctx = DaphneContext() + +(dctx.from_python(m, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_ones.daphne b/test/api/python/data_transfer_python_list_float64_large_ones.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_ones.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_ones.py b/test/api/python/data_transfer_python_list_float64_large_ones.py new file mode 100644 index 000000000..01ead975f --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_ones.py @@ -0,0 +1,14 @@ +# Data transfer from python lists to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = [] +for i in range(0, 1000): + m.append([]) + for j in range(0, 1000): + m[i].append(1.0) + +dctx = DaphneContext() + +dctx.from_python(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_ones_shared_memory.daphne b/test/api/python/data_transfer_python_list_float64_large_ones_shared_memory.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_ones_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_ones_shared_memory.py b/test/api/python/data_transfer_python_list_float64_large_ones_shared_memory.py new file mode 100644 index 000000000..7b3d8166d --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_ones_shared_memory.py @@ -0,0 +1,14 @@ +# Data transfer from python lists to DAPHNE and back, via shared memory. 
+ +from daphne.context.daphne_context import DaphneContext + +m = [] +for i in range(0, 1000): + m.append([]) + for j in range(0, 1000): + m[i].append(1.0) + + +dctx = DaphneContext() + +dctx.from_python(m, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_zeros.daphne b/test/api/python/data_transfer_python_list_float64_large_zeros.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_zeros.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_zeros.py b/test/api/python/data_transfer_python_list_float64_large_zeros.py new file mode 100644 index 000000000..c0c941e9f --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_zeros.py @@ -0,0 +1,14 @@ +# Data transfer from python lists to DAPHNE and back, via files. 
+ +import numpy as np +from daphne.context.daphne_context import DaphneContext + +m = [] +for i in range(0, 1000): + m.append([]) + for j in range(0, 1000): + m[i].append(0.0) + +dctx = DaphneContext() + +dctx.from_python(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_zeros_shared_memory.daphne b/test/api/python/data_transfer_python_list_float64_large_zeros_shared_memory.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_zeros_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_float64_large_zeros_shared_memory.py b/test/api/python/data_transfer_python_list_float64_large_zeros_shared_memory.py new file mode 100644 index 000000000..885230f8a --- /dev/null +++ b/test/api/python/data_transfer_python_list_float64_large_zeros_shared_memory.py @@ -0,0 +1,13 @@ +# Data transfer from python lists to DAPHNE and back, via shared memory. 
+ +from daphne.context.daphne_context import DaphneContext + +m = [] +for i in range(0, 1000): + m.append([]) + for j in range(0, 1000): + m[i].append(0.0) + +dctx = DaphneContext() + +dctx.from_python(m, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_int64_1d.daphne b/test/api/python/data_transfer_python_list_int64_1d.daphne new file mode 100644 index 000000000..8b32302ee --- /dev/null +++ b/test/api/python/data_transfer_python_list_int64_1d.daphne @@ -0,0 +1,2 @@ +m = as.si64([0, 1, -1, 12, -12, 1000, -1000]); +print(m); diff --git a/test/api/python/data_transfer_python_list_int64_1d.py b/test/api/python/data_transfer_python_list_int64_1d.py new file mode 100644 index 000000000..d535b7060 --- /dev/null +++ b/test/api/python/data_transfer_python_list_int64_1d.py @@ -0,0 +1,9 @@ +# Data transfer from python lists to DAPHNE and back, via files. + +from daphne.context.daphne_context import DaphneContext + +m = [0, 1, -1, 12, -12, 1000, -1000] + +dctx = DaphneContext() + +dctx.from_python(m, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_string_1d.daphne b/test/api/python/data_transfer_python_list_string_1d.daphne new file mode 100644 index 000000000..71d06922b --- /dev/null +++ b/test/api/python/data_transfer_python_list_string_1d.daphne @@ -0,0 +1,2 @@ +m = ["apple", "banana", "cherry"]; +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_string_1d.py b/test/api/python/data_transfer_python_list_string_1d.py new file mode 100644 index 000000000..a4ace9c7b --- /dev/null +++ b/test/api/python/data_transfer_python_list_string_1d.py @@ -0,0 +1,9 @@ +# Data transfer from python lists to DAPHNE and back, via files. 
+ +from daphne.context.daphne_context import DaphneContext + +m = ["apple", "banana", "cherry"] + +dctx = DaphneContext() + +(dctx.from_python(m, shared_memory=False).print().compute()) diff --git a/test/api/python/data_transfer_python_list_string_2d.daphne b/test/api/python/data_transfer_python_list_string_2d.daphne new file mode 100644 index 000000000..a1bc45bcd --- /dev/null +++ b/test/api/python/data_transfer_python_list_string_2d.daphne @@ -0,0 +1,2 @@ +m = ["apple", "banana", "cherry", "fig"](2, 2); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_python_list_string_2d.py b/test/api/python/data_transfer_python_list_string_2d.py new file mode 100644 index 000000000..90f1c68bc --- /dev/null +++ b/test/api/python/data_transfer_python_list_string_2d.py @@ -0,0 +1,9 @@ +# Data transfer from python lists to DAPHNE and back, via files. + +from daphne.context.daphne_context import DaphneContext + +m = [["apple", "banana"], ["cherry", "fig"]] + +dctx = DaphneContext() + +(dctx.from_python(m, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_1d.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_1d.daphne new file mode 100644 index 000000000..0650870e8 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_1d.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_1d.py b/test/api/python/data_transfer_pytorch_tensor_float64_1d.py new file mode 100644 index 000000000..2533776a9 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_1d.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via files. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t1 = torch.from_numpy(np.array([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=np.float64).reshape(-1, 1)) + +dctx = DaphneContext() + +(dctx.from_pytorch(t1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_1d_shared_memory.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_1d_shared_memory.daphne new file mode 100644 index 000000000..0650870e8 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_1d_shared_memory.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_1d_shared_memory.py b/test/api/python/data_transfer_pytorch_tensor_float64_1d_shared_memory.py new file mode 100644 index 000000000..9e2534f46 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_1d_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via shared memory. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t1 = torch.from_numpy(np.array([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=np.float64).reshape(-1, 1)) + +dctx = DaphneContext() + +(dctx.from_pytorch(t1, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_2d.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_2d.daphne new file mode 100644 index 000000000..e2a27d516 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_2d.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_2d.py b/test/api/python/data_transfer_pytorch_tensor_float64_2d.py new file mode 100644 index 000000000..b11156063 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_2d.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via files. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t1 = torch.from_numpy(np.array([[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]], dtype=np.float64)) + +dctx = DaphneContext() + +(dctx.from_pytorch(t1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_2d_shared_memory.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_2d_shared_memory.daphne new file mode 100644 index 000000000..e2a27d516 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_2d_shared_memory.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_2d_shared_memory.py b/test/api/python/data_transfer_pytorch_tensor_float64_2d_shared_memory.py new file mode 100644 index 000000000..ac7a63e86 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_2d_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via shared memory. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t1 = torch.from_numpy(np.array([[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]], dtype=np.float64)) + +dctx = DaphneContext() + +(dctx.from_pytorch(t1, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_ones.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_ones.py b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones.py new file mode 100644 index 000000000..619389b84 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via files. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t = torch.from_numpy(np.ones((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_pytorch(t, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_ones_shared_memory.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones_shared_memory.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_ones_shared_memory.py b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones_shared_memory.py new file mode 100644 index 000000000..bd4a35aba --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_ones_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via shared memory. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t = torch.from_numpy(np.ones((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_pytorch(t, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros.py b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros.py new file mode 100644 index 000000000..259048cdf --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via files. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t = torch.from_numpy(np.zeros((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_pytorch(t, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros_shared_memory.daphne b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros_shared_memory.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros_shared_memory.py b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros_shared_memory.py new file mode 100644 index 000000000..35ddfb100 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_float64_large_zeros_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via shared memory. 
+ +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t = torch.from_numpy(np.zeros((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_pytorch(t, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_pytorch_tensor_int64_1d.daphne b/test/api/python/data_transfer_pytorch_tensor_int64_1d.daphne new file mode 100644 index 000000000..2603ece78 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_int64_1d.daphne @@ -0,0 +1,2 @@ +t = as.si64([0, 1, -1, 12, -12, 1000, -1000]); +print(t); diff --git a/test/api/python/data_transfer_pytorch_tensor_int64_1d.py b/test/api/python/data_transfer_pytorch_tensor_int64_1d.py new file mode 100644 index 000000000..01e1b52d3 --- /dev/null +++ b/test/api/python/data_transfer_pytorch_tensor_int64_1d.py @@ -0,0 +1,11 @@ +# Data transfer from pytorch to DAPHNE and back, via files. + +import numpy as np +import torch +from daphne.context.daphne_context import DaphneContext + +t = torch.from_numpy(np.array([0, 1, -1, 12, -12, 1000, -1000], dtype=np.int64).reshape(-1, 1)) + +dctx = DaphneContext() + +dctx.from_pytorch(t, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_1d.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_1d.daphne new file mode 100644 index 000000000..0650870e8 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_1d.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_1d.py b/test/api/python/data_transfer_tensorflow_tensor_float64_1d.py new file mode 100644 index 000000000..df21b8df8 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_1d.py @@ -0,0 +1,11 @@ +# Data transfer from 
tensorflow to DAPHNE and back, via files. + +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t1 = tf.constant([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=tf.float64) + +dctx = DaphneContext() + +(dctx.from_tensorflow(t1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_1d_shared_memory.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_1d_shared_memory.daphne new file mode 100644 index 000000000..0650870e8 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_1d_shared_memory.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf]); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_1d_shared_memory.py b/test/api/python/data_transfer_tensorflow_tensor_float64_1d_shared_memory.py new file mode 100644 index 000000000..8247eb807 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_1d_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via shared memory. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t1 = tf.constant([np.nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, np.inf, -np.inf], dtype=tf.float64) + +dctx = DaphneContext() + +(dctx.from_tensorflow(t1, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_2d.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_2d.daphne new file mode 100644 index 000000000..e2a27d516 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_2d.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_2d.py b/test/api/python/data_transfer_tensorflow_tensor_float64_2d.py new file mode 100644 index 000000000..0d46a41a8 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_2d.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via files. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t1 = tf.constant([[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]], dtype=tf.float64) + +dctx = DaphneContext() + +(dctx.from_tensorflow(t1, shared_memory=False).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_2d_shared_memory.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_2d_shared_memory.daphne new file mode 100644 index 000000000..e2a27d516 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_2d_shared_memory.daphne @@ -0,0 +1,2 @@ +t1 = as.f64([nan, 0.0, 1.0, -1.0, 12.3, -12.3, 2e-10, -2e-10, 2e10, -2e10, inf, -inf](6, 2)); +print(t1); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_2d_shared_memory.py b/test/api/python/data_transfer_tensorflow_tensor_float64_2d_shared_memory.py new file mode 100644 index 000000000..9eefc78dc --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_2d_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via shared memory. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t1 = tf.constant([[np.nan, 0.0], [1.0, -1.0], [12.3, -12.3], [2e-10, -2e-10], [2e10, -2e10], [np.inf, -np.inf]], dtype=tf.float64) + +dctx = DaphneContext() + +(dctx.from_tensorflow(t1, shared_memory=True).print().compute()) \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones.py b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones.py new file mode 100644 index 000000000..f3acc3368 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via files. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t = tf.constant(np.ones((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_tensorflow(t, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones_shared_memory.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones_shared_memory.daphne new file mode 100644 index 000000000..a48d5380d --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(1.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones_shared_memory.py b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones_shared_memory.py new file mode 100644 index 000000000..fa3b38b50 --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_ones_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via shared memory. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t = tf.constant(np.ones((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_tensorflow(t, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros.py b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros.py new file mode 100644 index 000000000..096220e3b --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via files. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t = tf.constant(np.zeros((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_tensorflow(t, shared_memory=False).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory.daphne b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory.daphne new file mode 100644 index 000000000..83542850f --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory.daphne @@ -0,0 +1,2 @@ +m = fill(0.0, 1000, 1000); +print(m); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory.py b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory.py new file mode 100644 index 000000000..efbda575a --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_float64_large_zeros_shared_memory.py @@ -0,0 +1,11 @@ +# Data transfer from tensorflow to DAPHNE and back, via shared memory. 
+ +import numpy as np +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t = tf.constant(np.zeros((1000, 1000), dtype=np.float64)) + +dctx = DaphneContext() + +dctx.from_tensorflow(t, shared_memory=True).print().compute() \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_int64_1d.daphne b/test/api/python/data_transfer_tensorflow_tensor_int64_1d.daphne new file mode 100644 index 000000000..1d1a6d23a --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_int64_1d.daphne @@ -0,0 +1,2 @@ +t = as.si64([0, 1, -1, 12, -12, 1000, -1000]); +print(t); \ No newline at end of file diff --git a/test/api/python/data_transfer_tensorflow_tensor_int64_1d.py b/test/api/python/data_transfer_tensorflow_tensor_int64_1d.py new file mode 100644 index 000000000..edc24aeaa --- /dev/null +++ b/test/api/python/data_transfer_tensorflow_tensor_int64_1d.py @@ -0,0 +1,10 @@ +# Data transfer from tensorflow to DAPHNE and back, via files. + +import tensorflow as tf +from daphne.context.daphne_context import DaphneContext + +t = tf.constant([0, 1, -1, 12, -12, 1000, -1000], dtype=tf.int64) + +dctx = DaphneContext() + +dctx.from_tensorflow(t, shared_memory=False).print().compute() \ No newline at end of file