For the sake of completeness, here's my implementation of the `_load` method:
def _load(self) -> tf.data.Dataset:
    """Download the remote CSV files to a local temporary directory and
    build an unbatched ``tf.data.Dataset`` from them.

    The temporary directory is kept alive on ``self._tmp_data_dir`` because
    ``make_csv_dataset`` reads the files lazily, after this method returns.

    Returns:
        The dataset produced by ``tf.data.experimental.make_csv_dataset``,
        unbatched into individual records.
    """
    logger = logging.getLogger('TensorFlowCSVDataSet')
    load_path = get_filepath_str(self._get_load_path(), self._protocol)
    # Lazy %-style args: the (potentially expensive) remote fs.ls() listing
    # is only evaluated when INFO logging is enabled. The original code also
    # logged the same listing twice; log it once.
    logger.info('remote path: %s', load_path)
    logger.info('remote path contents: %s', self._fs.ls(load_path))

    # Held on self so the directory is not cleaned up before the lazily-read
    # dataset has consumed the files.
    self._tmp_data_dir = tempfile.TemporaryDirectory(prefix=self._tmp_prefix)
    logger.info('local path: %s', self._tmp_data_dir.name)

    # BUG FIX: the original fetched '*.parquet' while make_csv_dataset below
    # globs '*.csv', so the local pattern could never match the downloaded
    # files. Fetch the CSV files instead (this is a CSV data set).
    self._fs.get(load_path + '/*.csv',
                 self._tmp_data_dir.name + '/',
                 recursive=True)
    logger.info('local path contents: %s',
                list(Path(self._tmp_data_dir.name).iterdir()))

    ds = tf.data.experimental.make_csv_dataset(
        file_pattern=f'{self._tmp_data_dir.name}/*.csv',
        **self._load_args)
    # BUG FIX: the original ended with the typo `return d`s`.
    return ds.unbatch()