Restore Connector Task Fails with "Not an Avro data file" and No Data Sourced

I deployed a Backup (S3 sink) connector for an Avro topic, which successfully wrote the topic data as Avro files to S3.
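
For context, the sink side amounts to something like the sketch below. Bucket, prefix, topic and schema-registry names are placeholders, and Lenses may set additional properties that I have left out, so treat this as an approximation rather than my exact config:

name=s3-backup-sink
connector.class=io.lenses.streamreactor.connect.aws.s3.sink.S3SinkConnector
tasks.max=3
topics=my-avro-topic
# Write the topic out as Avro data files under my-backup-bucket/backups
connect.s3.kcql=INSERT INTO my-backup-bucket:backups SELECT * FROM my-avro-topic STOREAS `AVRO`
key.converter=io.confluent.connect.avro.AvroConverter
key.converter.schema.registry.url=http://schema-registry:8081
value.converter=io.confluent.connect.avro.AvroConverter
value.converter.schema.registry.url=http://schema-registry:8081
# AWS credential/region properties omitted here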

I created a new topic with the same number of partitions and set the key and value deserialization types to Avro. Then I attempted to Restore the topic from my S3 bucket.
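
The Restore (S3 source) connector is configured roughly as follows; again, names are placeholders and I may be paraphrasing the exact properties Lenses generates:

name=s3-restore-source
connector.class=io.lenses.streamreactor.connect.aws.s3.source.S3SourceConnector
tasks.max=3
# Read the Avro data files back from my-backup-bucket/backups into the new topic
connect.s3.kcql=INSERT INTO my-avro-topic-restored SELECT * FROM my-backup-bucket:backups STOREAS `AVRO`
key.converter=io.confluent.connect.avro.AvroConverter
key.converter.schema.registry.url=http://schema-registry:8081
value.converter=io.confluent.connect.avro.AvroConverter
value.converter.schema.registry.url=http://schema-registry:8081
# AWS credential/region properties omitted here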

Task-1 of the Restore connector always fails with the error: java.io.IOException: Not an Avro data file.
If I instead restore to a topic with a single partition, then the single connector task fails with the same error.

Lenses version: 5.3.0
Restore/Source Connector version: 5.0.0

Connector logs:

java.io.IOException: Not an Avro data file.
	at lshaded.apache.avro.file.DataFileStream.initialize(DataFileStream.java:108)
	at lshaded.apache.avro.file.DataFileStream.<init>(DataFileStream.java:90)
	at io.lenses.streamreactor.connect.aws.s3.formats.reader.AvroStreamReader.<init>(AvroStreamReader.scala:32)
	at io.lenses.streamreactor.connect.aws.s3.config.AvroFormatSelection$.toStreamReader(FormatSelection.scala:140)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$11(ResultReader.scala:99)
	at scala.util.Either.map(Either.scala:382)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$9(ResultReader.scala:85)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$7(ResultReader.scala:81)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$5(ResultReader.scala:80)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$3(ResultReader.scala:79)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$1(ResultReader.scala:78)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$9(ReaderManager.scala:55)
	at delay @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$7(ReaderManager.scala:53)
	at flatMap @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$7(ReaderManager.scala:52)
	at map @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$4(ReaderManager.scala:49)
	at flatMap @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$2(ReaderManager.scala:48)
	at delay @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.closeAndLog(ReaderManager.scala:104)
	at flatMap @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$1(ReaderManager.scala:45)
	at getAndSet @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.fromNexFile$1(ReaderManager.scala:44)
	at flatMap @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.fromNexFile$1(ReaderManager.scala:44)
	at map @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$14(ReaderManager.scala:75)
	at get @ io.lenses.streamreactor.connect.aws.s3.source.state.S3SourceState$.$anonfun$make$11(S3SourceBuilder.scala:73)
	at flatMap @ io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.acc$1(ReaderManager.scala:74)
	at traverse @ io.lenses.streamreactor.connect.aws.s3.source.distribution.PartitionSearcher.find(PartitionSearcher.scala:40)
	at traverse @ io.lenses.streamreactor.connect.aws.s3.source.distribution.PartitionSearcher.find(PartitionSearcher.scala:40)
	at traverse @ io.lenses.streamreactor.connect.aws.s3.source.distribution.PartitionSearcher.find(PartitionSearcher.scala:40)
	at map @ io.lenses.streamreactor.connect.aws.s3.source.state.S3SourceTaskState.$anonfun$poll$1(S3SourceTaskState.scala:33)
	at map @ io.lenses.streamreactor.connect.aws.s3.source.state.S3SourceTaskState.$anonfun$poll$1(S3SourceTaskState.scala:33)
Caused by: java.io.EOFException
	at lshaded.apache.avro.io.BinaryDecoder$InputStreamByteSource.readRaw(BinaryDecoder.java:883)
	at lshaded.apache.avro.io.BinaryDecoder.doReadBytes(BinaryDecoder.java:405)
	at lshaded.apache.avro.io.BinaryDecoder.readFixed(BinaryDecoder.java:361)
	at lshaded.apache.avro.io.Decoder.readFixed(Decoder.java:159)
	at lshaded.apache.avro.file.DataFileStream.initialize(DataFileStream.java:106)
	at lshaded.apache.avro.file.DataFileStream.<init>(DataFileStream.java:90)
	at io.lenses.streamreactor.connect.aws.s3.formats.reader.AvroStreamReader.<init>(AvroStreamReader.scala:32)
	at io.lenses.streamreactor.connect.aws.s3.config.AvroFormatSelection$.toStreamReader(FormatSelection.scala:140)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$11(ResultReader.scala:99)
	at scala.util.Either.map(Either.scala:382)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$9(ResultReader.scala:85)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$7(ResultReader.scala:81)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$5(ResultReader.scala:80)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$3(ResultReader.scala:79)
	at scala.util.Either.flatMap(Either.scala:352)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ResultReader$.$anonfun$create$1(ResultReader.scala:78)
	at io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager.$anonfun$poll$9(ReaderManager.scala:55)
	at cats.effect.IOFiber.runLoop(IOFiber.scala:403)
	at cats.effect.IOFiber.execR(IOFiber.scala:1324)
	at cats.effect.IOFiber.run(IOFiber.scala:118)
	at cats.effect.unsafe.WorkerThread.run(WorkerThread.scala:555)