Failure to load array data from files opened with s3fs
Closed this issue · 1 comments
braingram commented
Tested against the current asdf main branch with a mock S3 server, the following script fails:
import asdf
import boto3
from moto.server import ThreadedMotoServer
import numpy
import s3fs

# Endpoint for the local mock S3 server (no real AWS credentials needed).
ip = "127.0.0.1"
port = 3000
endpoint_url = f"http://{ip}:{port}"

if __name__ == '__main__':
    # Start an in-process moto S3 server and create a test bucket.
    # Reuse the ip/port constants above so the endpoint stays consistent.
    server = ThreadedMotoServer(ip_address=ip, port=port, verbose=False)
    server.start()
    s3 = boto3.client('s3', endpoint_url=endpoint_url)
    bucket_name = 'test'
    s3.create_bucket(Bucket=bucket_name)
    base_url = f"s3://{bucket_name}"
    fs = s3fs.S3FileSystem(endpoint_url=endpoint_url)

    # Write an ASDF file containing a small array to the mock bucket.
    arr = numpy.arange(42)
    af = asdf.AsdfFile({'arr': arr})
    url = f"{base_url}/test.asdf"
    with fs.open(url, mode="wb") as f:
        af.write_to(f)

    # Read it back through s3fs; accessing the array data triggers the
    # reported failure (np.fromfile needs a real fileno, which s3fs
    # file objects do not provide).
    with fs.open(url, mode="rb") as f:
        with asdf.open(f, copy_arrays=True) as af:
            print(af['arr'][0])
with the following traceback:
File ".../asdf/asdf/generic_io.py", line 822, in read_into_array
return np.fromfile(self._fd, dtype=np.uint8, count=size)
io.UnsupportedOperation: fileno