Unsupported week(s) calendar duration. #2145

Open
@piotrszul

Description

'week(s)' is a valid FHIRPath/UCUM duration unit, but it is not supported because the implementation is based strictly on the calendar units supported by Java.
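
A possible direction for a fix: java.time supports weeks directly via ChronoUnit.WEEKS, so the keyword could either be mapped to that unit or normalised to 7 days before the existing mapping. The sketch below is illustrative only; it is not Pathling's actual CalendarDurationUtils code, and the class, map and method names are hypothetical.

```java
import java.time.LocalDate;
import java.time.temporal.ChronoUnit;
import java.util.Map;

public class WeekAwareDurationMapping {

  // Hypothetical mapping from FHIRPath calendar duration keywords to java.time units.
  // java.time handles WEEKS natively, so no special-casing is needed beyond the map entry.
  private static final Map<String, ChronoUnit> CALENDAR_UNITS = Map.ofEntries(
      Map.entry("year", ChronoUnit.YEARS), Map.entry("years", ChronoUnit.YEARS),
      Map.entry("month", ChronoUnit.MONTHS), Map.entry("months", ChronoUnit.MONTHS),
      Map.entry("week", ChronoUnit.WEEKS), Map.entry("weeks", ChronoUnit.WEEKS),
      Map.entry("day", ChronoUnit.DAYS), Map.entry("days", ChronoUnit.DAYS));

  static ChronoUnit toTemporalUnit(final String fhirPathUnit) {
    final ChronoUnit unit = CALENDAR_UNITS.get(fhirPathUnit);
    if (unit == null) {
      throw new IllegalArgumentException("Unsupported calendar duration unit: " + fhirPathUnit);
    }
    return unit;
  }

  public static void main(String[] args) {
    // @2016-02-28 + 1 week => 2016-03-06 (2016 is a leap year)
    System.out.println(LocalDate.parse("2016-02-28").plus(1, toTemporalUnit("week")));
  }
}
```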

Details
Expected: true but got: null ==> expected: <true> but was: <null>

1 week = 7 days [null]
7 days = 1 week [** testQuantity5]
6 days < 1 week [** testQuantity7]
8 days > 1 week [** testQuantity8]
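
The quantity comparison failures above presumably stem from the same missing unit mapping; since UCUM defines 1 wk = 7 d exactly, week quantities could be normalised to days before comparison. A minimal sketch, with hypothetical names (this is not Pathling's comparison code):

```java
import java.math.BigDecimal;

public final class WeekComparisonSketch {

  // Hypothetical normalisation: express calendar durations in days so that
  // 'week'/'weeks' quantities can be compared with 'day'/'days' quantities.
  // UCUM defines 1 wk = 7 d exactly.
  static BigDecimal toDays(final BigDecimal value, final String unit) {
    switch (unit) {
      case "day":
      case "days":
        return value;
      case "week":
      case "weeks":
        return value.multiply(BigDecimal.valueOf(7));
      default:
        throw new IllegalArgumentException("Unsupported unit: " + unit);
    }
  }

  public static void main(String[] args) {
    // 7 days = 1 week, 6 days < 1 week, 8 days > 1 week
    System.out.println(toDays(BigDecimal.valueOf(7), "days")
        .compareTo(toDays(BigDecimal.ONE, "week")) == 0);  // true
    System.out.println(toDays(BigDecimal.valueOf(6), "days")
        .compareTo(toDays(BigDecimal.ONE, "week")) < 0);   // true
    System.out.println(toDays(BigDecimal.valueOf(8), "days")
        .compareTo(toDays(BigDecimal.ONE, "week")) > 0);   // true
  }
}
```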

org.apache.spark.SparkException: Job aborted due to stage failure: ?? (executor driver): org.apache.spark.SparkException: [FAILED_EXECUTE_UDF] Failed to execute user defined function (date_add_duration (UDFRegistration$$Lambda$1739/0x0000007800a055f8): (string, struct<id:void,value:decimal(32,6),value_scale:int,comparator:void,unit:void,system:string,code:string,_value_canonicalized:void,_code_canonicalized:void,_fid:void>) => string).
    at org.apache.spark.sql.errors.QueryExecutionErrors$.failedExecuteUserDefinedFunctionError(QueryExecutionErrors.scala:198)
    at org.apache.spark.sql.errors.QueryExecutionErrors.failedExecuteUserDefinedFunctionError(QueryExecutionErrors.scala)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenEvaluatorFactory$WholeStageCodegenPartitionEvaluator$$anon$1.hasNext(WholeStageCodegenEvaluatorFactory.scala:43)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:388)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:893)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:893)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:367)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:331)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)
    at org.apache.spark.scheduler.Task.run(Task.scala:141)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    at java.base/java.lang.Thread.run(Thread.java:842)
Caused by: au.csiro.pathling.errors.InvalidUserInputError: Unsupported calendar duration unit: week
    at au.csiro.pathling.utilities.Preconditions.checkUserInput(Preconditions.java:108)
    at au.csiro.pathling.fhirpath.CalendarDurationUtils.ucumToCalendarTemporalUnit(CalendarDurationUtils.java:57)
    at au.csiro.pathling.fhirpath.CalendarDurationUtils.getTemporalUnit(CalendarDurationUtils.java:77)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.performArithmetic(TemporalArithmeticFunction.java:62)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.performAddition(TemporalArithmeticFunction.java:47)
    at au.csiro.pathling.sql.dates.date.DateAddDurationFunction.lambda$getOperationFunction$0(DateAddDurationFunction.java:37)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.call(TemporalArithmeticFunction.java:93)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.call(TemporalArithmeticFunction.java:39)
    at org.apache.spark.sql.UDFRegistration.$anonfun$register$354(UDFRegistration.scala:767)
    ... 20 more
Driver stacktrace:

@2016-02-28 + 1 week [null]

org.apache.spark.SparkException: Job aborted due to stage failure: ?? (executor driver): org.apache.spark.SparkException: [FAILED_EXECUTE_UDF] Failed to execute user defined function (date_add_duration (UDFRegistration$$Lambda$1739/0x0000007800a055f8): (string, struct<id:void,value:decimal(32,6),value_scale:int,comparator:void,unit:void,system:string,code:string,_value_canonicalized:void,_code_canonicalized:void,_fid:void>) => string).
    at org.apache.spark.sql.errors.QueryExecutionErrors$.failedExecuteUserDefinedFunctionError(QueryExecutionErrors.scala:198)
    at org.apache.spark.sql.errors.QueryExecutionErrors.failedExecuteUserDefinedFunctionError(QueryExecutionErrors.scala)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenEvaluatorFactory$WholeStageCodegenPartitionEvaluator$$anon$1.hasNext(WholeStageCodegenEvaluatorFactory.scala:43)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:388)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:893)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:893)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:367)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:331)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)
    at org.apache.spark.scheduler.Task.run(Task.scala:141)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    at java.base/java.lang.Thread.run(Thread.java:842)
Caused by: au.csiro.pathling.errors.InvalidUserInputError: Unsupported calendar duration unit: weeks
    at au.csiro.pathling.utilities.Preconditions.checkUserInput(Preconditions.java:108)
    at au.csiro.pathling.fhirpath.CalendarDurationUtils.ucumToCalendarTemporalUnit(CalendarDurationUtils.java:57)
    at au.csiro.pathling.fhirpath.CalendarDurationUtils.getTemporalUnit(CalendarDurationUtils.java:77)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.performArithmetic(TemporalArithmeticFunction.java:62)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.performAddition(TemporalArithmeticFunction.java:47)
    at au.csiro.pathling.sql.dates.date.DateAddDurationFunction.lambda$getOperationFunction$0(DateAddDurationFunction.java:37)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.call(TemporalArithmeticFunction.java:93)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.call(TemporalArithmeticFunction.java:39)
    at org.apache.spark.sql.UDFRegistration.$anonfun$register$354(UDFRegistration.scala:767)
    ... 20 more
Driver stacktrace:

@2016-02-28 + 1.5 weeks [null]

org.apache.spark.SparkException: Job aborted due to stage failure: ?? (executor driver): org.apache.spark.SparkException: [FAILED_EXECUTE_UDF] Failed to execute user defined function (date_subtract_duration (UDFRegistration$$Lambda$1739/0x0000007800a055f8): (string, struct<id:void,value:decimal(32,6),value_scale:int,comparator:void,unit:void,system:string,code:string,_value_canonicalized:void,_code_canonicalized:void,_fid:void>) => string).
    at org.apache.spark.sql.errors.QueryExecutionErrors$.failedExecuteUserDefinedFunctionError(QueryExecutionErrors.scala:198)
    at org.apache.spark.sql.errors.QueryExecutionErrors.failedExecuteUserDefinedFunctionError(QueryExecutionErrors.scala)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenEvaluatorFactory$WholeStageCodegenPartitionEvaluator$$anon$1.hasNext(WholeStageCodegenEvaluatorFactory.scala:43)
    at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:388)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:893)
    at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:893)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:367)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:331)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
    at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)
    at org.apache.spark.scheduler.Task.run(Task.scala:141)
    at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
    at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
    at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    at java.base/java.lang.Thread.run(Thread.java:842)
Caused by: au.csiro.pathling.errors.InvalidUserInputError: Unsupported calendar duration unit: weeks
    at au.csiro.pathling.utilities.Preconditions.checkUserInput(Preconditions.java:108)
    at au.csiro.pathling.fhirpath.CalendarDurationUtils.ucumToCalendarTemporalUnit(CalendarDurationUtils.java:57)
    at au.csiro.pathling.fhirpath.CalendarDurationUtils.getTemporalUnit(CalendarDurationUtils.java:77)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.performArithmetic(TemporalArithmeticFunction.java:62)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.performSubtraction(TemporalArithmeticFunction.java:53)
    at au.csiro.pathling.sql.dates.date.DateSubtractDurationFunction.lambda$getOperationFunction$0(DateSubtractDurationFunction.java:37)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.call(TemporalArithmeticFunction.java:93)
    at au.csiro.pathling.sql.dates.TemporalArithmeticFunction.call(TemporalArithmeticFunction.java:39)
    at org.apache.spark.sql.UDFRegistration.$anonfun$register$354(UDFRegistration.scala:767)
    ... 20 more
Driver stacktrace:

@2016-02-28 - 1.5 weeks [null]

Labels

bug (Something isn't working), fhirpath (Related to fhirpath reference implementation)

Status

Done