hail
hail copied to clipboard
test_king_small fails after reinitializing JVM
To replicate, replace the contents of test_king.py::test_king_small
with:
@fails_local_backend()
def test_king_small():
hl.init(idempotent=True) # Should be no error
hl.stop()
hl.init(idempotent=True) # Should be no error
hl.init(hl.spark_context(), idempotent=True) # Should be no error
plink_path = resource('balding-nichols-1024-variants-4-samples-3-populations')
mt = hl.import_plink(bed=f'{plink_path}.bed',
bim=f'{plink_path}.bim',
fam=f'{plink_path}.fam')
kinship = hl.king(mt.GT)
assert_c_king_same_as_hail_king(
resource('balding-nichols-1024-variants-4-samples-3-populations.kin0'),
kinship)
Stack trace:
E hail.utils.java.FatalError: IndexOutOfBoundsException: 0
E
E Java stack trace:
E org.apache.spark.SparkException: Job aborted due to stage failure: Task 7 in stage 18.0 failed 1 times, most recent failure: Lost task 7.0 in stage 18.0 (TID 34, localhost, executor driver): java.lang.IndexOutOfBoundsException: 0
E at scala.collection.immutable.NumericRange.apply(NumericRange.scala:112)
E at is.hail.linalg.BlockMatrixReadRowBlockedRDD$$anonfun$compute$9.apply(BlockMatrix.scala:2131)
E at is.hail.linalg.BlockMatrixReadRowBlockedRDD$$anonfun$compute$9.apply(BlockMatrix.scala:2127)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18$$anonfun$apply$19.apply(ContextRDD.scala:259)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18$$anonfun$apply$19.apply(ContextRDD.scala:259)
E at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:197)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
E at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
E at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
E at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:327)
E at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
E at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
E at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
E at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
E at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
E at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
E at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
E at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:197)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
E at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
E at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
E at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:327)
E at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
E at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
E at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
E at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
E at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
E at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
E at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
E at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:197)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
E at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
E at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
E at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:327)
E at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
E at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
E at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
E at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
E at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
E at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
E at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
E at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
E at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
E at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
E at is.hail.rvd.RVD$$anonfun$34.apply(RVD.scala:1220)
E at is.hail.rvd.RVD$$anonfun$34.apply(RVD.scala:1219)
E at is.hail.sparkextras.ContextRDD$$anonfun$crunJobWithIndex$1.apply(ContextRDD.scala:242)
E at is.hail.sparkextras.ContextRDD$$anonfun$crunJobWithIndex$1.apply(ContextRDD.scala:240)
E at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
E at org.apache.spark.scheduler.Task.run(Task.scala:121)
E at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:403)
E at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
E at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:409)
E at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
E at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
E at java.lang.Thread.run(Thread.java:748)
This issue was randomly assigned to Patrick.
This is still an issue.