@@ -30,8 +30,7 @@
 from tensorflow.compat.v2 import data
 from tensorflow.python.data.ops.dataset_ops import flat_structure
 from tensorflow.python.data.util import structure as structure_lib
-from tensorflow_io import _load_library
-arrow_ops = _load_library('_arrow_ops.so')
+from tensorflow_io.core.python.ops import core_ops
 
 if hasattr(tf, "nest"):
   from tensorflow import nest # pylint: disable=ungrouped-imports
@@ -183,7 +182,7 @@ def __init__(self,
         "auto" (size to number of records in Arrow record batch)
     """
     super(ArrowDataset, self).__init__(
-        partial(arrow_ops.arrow_dataset, serialized_batches),
+        partial(core_ops.arrow_dataset, serialized_batches),
         columns,
         output_types,
         output_shapes,
@@ -316,7 +315,7 @@ def __init__(self,
         dtype=dtypes.string,
         name="filenames")
     super(ArrowFeatherDataset, self).__init__(
-        partial(arrow_ops.arrow_feather_dataset, filenames),
+        partial(core_ops.arrow_feather_dataset, filenames),
         columns,
         output_types,
         output_shapes,
@@ -401,7 +400,7 @@ def __init__(self,
         dtype=dtypes.string,
         name="endpoints")
     super(ArrowStreamDataset, self).__init__(
-        partial(arrow_ops.arrow_stream_dataset, endpoints),
+        partial(core_ops.arrow_stream_dataset, endpoints),
         columns,
         output_types,
         output_shapes,
@@ -600,7 +599,7 @@ def list_feather_columns(filename, **kwargs):
   if not tf.executing_eagerly():
     raise NotImplementedError("list_feather_columns only support eager mode")
   memory = kwargs.get("memory", "")
-  columns, dtypes_, shapes = arrow_ops.list_feather_columns(
+  columns, dtypes_, shapes = core_ops.list_feather_columns(
       filename, memory=memory)
   entries = zip(tf.unstack(columns), tf.unstack(dtypes_), tf.unstack(shapes))
   return dict([(column.numpy().decode(), tf.TensorSpec(
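For reference, a minimal sketch of exercising the renamed op module after this change, based only on the call sites visible in the hunks above; the Feather file path is a hypothetical placeholder:

    import tensorflow as tf
    from tensorflow_io.core.python.ops import core_ops

    # list_feather_columns requires eager execution (see the guard in the
    # last hunk); it returns column names, dtypes, and shapes as tensors.
    columns, dtypes_, shapes = core_ops.list_feather_columns(
        "example.feather", memory="")  # hypothetical file path
    for column, dtype, shape in zip(
        tf.unstack(columns), tf.unstack(dtypes_), tf.unstack(shapes)):
      print(column.numpy().decode(), dtype, shape)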