diff --git a/doc/bugs/Can__39__t_get_content_from_S3_with_s3-aws_library/comment_9_769de1e47221dfb6c810665e3704bbb2._comment b/doc/bugs/Can__39__t_get_content_from_S3_with_s3-aws_library/comment_9_769de1e47221dfb6c810665e3704bbb2._comment new file mode 100644 index 0000000000..788ccf7ed5 --- /dev/null +++ b/doc/bugs/Can__39__t_get_content_from_S3_with_s3-aws_library/comment_9_769de1e47221dfb6c810665e3704bbb2._comment @@ -0,0 +1,10 @@ +[[!comment format=mdwn + username="https://www.google.com/accounts/o8/id?id=AItOawnSenxKyE_2Z6Wb-EBMO8FciyRywjx1ZiQ" + nickname="Walter" + subject="comment 9" + date="2015-04-23T19:07:56Z" + content=""" +Is it possible to do a fast ``fsck`` on an S3 remote? Because I don't want to download all the files again, it would be nice to just have the option to check if it exists. + +I get a ``failed to download file from remote`` error when I try it. +"""]]