janino-compiler/janino

Operand stack underflow

snuyanzin opened this issue · 3 comments

Currently we are in the process of updating from Calcite 1.27.0 (Janino 3.0.11) to Calcite 1.28.0 (Janino 3.1.7),
and some tests (passing on 3.0.11) started to fail with the trace below.
The source is generated and looks like the following; if it makes sense, I could try to minimize it.

// NOTE(review): auto-generated Flink operator source, pasted verbatim as the
// reproduction for the Janino 3.1.7 "Operand stack underflow" failure reported
// in this issue. The exact statement layout is what triggers the compiler bug,
// so the code tokens below must not be reformatted or restructured.
/* 1 */
/* 2 */ public class SourceConversion$36
        extends org.apache.flink.table.runtime.operators.TableStreamOperator
        /* 3 */ implements org.apache.flink.streaming.api.operators.OneInputStreamOperator {
    /* 4 *//* 5 */ private final Object[] references;
    // Converter from external Row values to internal RowData; taken from references[0] in the ctor.
    /* 6 */ private transient org.apache.flink.table.data.conversion.RowRowConverter converter$32;
    // Reused 3-field output row, repopulated on every processElement call.
    /* 7 */ org.apache.flink.table.data.GenericRowData out =
            new org.apache.flink.table.data.GenericRowData(3);
    // Reused StreamRecord wrapper for emitting `out` downstream.
    /* 8 */ private final org.apache.flink.streaming.runtime.streamrecord.StreamRecord outElement =
            new org.apache.flink.streaming.runtime.streamrecord.StreamRecord(null);
    /* 9 */
    // Stores the reference array, extracts the row converter, and wires up the
    // task/config/output plus the processing-time service (when supported).
    /* 10 */ public SourceConversion$36(
            /* 11 */ Object[] references,
            /* 12 */ org.apache.flink.streaming.runtime.tasks.StreamTask task,
            /* 13 */ org.apache.flink.streaming.api.graph.StreamConfig config,
            /* 14 */ org.apache.flink.streaming.api.operators.Output output,
            /* 15 */ org.apache.flink.streaming.runtime.tasks.ProcessingTimeService
                    processingTimeService)
            throws Exception {
        /* 16 */ this.references = references;
        /* 17 */ converter$32 =
                (((org.apache.flink.table.data.conversion.RowRowConverter) references[0]));
        /* 18 */ this.setup(task, config, output);
        /* 19 */ if (this
                instanceof org.apache.flink.streaming.api.operators.AbstractStreamOperator) {
            /* 20 */ ((org.apache.flink.streaming.api.operators.AbstractStreamOperator) this)
                    /* 21 */ .setProcessingTimeService(processingTimeService);
            /* 22 */ }
        /* 23 */ }
    /* 24 */
    /* 25 */ @Override
    // Opens the converter with the user-code classloader after the superclass is opened.
    /* 26 */ public void open() throws Exception {
        /* 27 */ super.open();
        /* 28 */
        /* 29 */ converter$32.open(getRuntimeContext().getUserCodeClassLoader());
        /* 30 */
        /* 31 */ }
    /* 32 */
    /* 33 */ @Override
    // Converts the incoming external Row to internal RowData, reads three fields
    // (int at index 0, long at index 1, string at index 2) with per-field null
    // checks, copies them into `out`, and emits the wrapped record.
    // This is the method the Janino 3.1.7 compiler fails on (see trace below).
    /* 34 */ public void processElement(
            org.apache.flink.streaming.runtime.streamrecord.StreamRecord element) throws Exception {
        /* 35 */ org.apache.flink.table.data.RowData in1 =
                (org.apache.flink.table.data.RowData)
                        (org.apache.flink.table.data.RowData)
                                converter$32.toInternalOrNull(
                                        (org.apache.flink.types.Row) element.getValue());
        /* 36 */
        /* 37 */ int field$33;
        /* 38 */ boolean isNull$33;
        /* 39 */ long field$34;
        /* 40 */ boolean isNull$34;
        /* 41 */ org.apache.flink.table.data.binary.BinaryStringData field$35;
        /* 42 */ boolean isNull$35;
        // Field 2 (string): default to EMPTY_UTF8 when null.
        /* 43 */ isNull$35 = in1.isNullAt(2);
        /* 44 */ field$35 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
        /* 45 */ if (!isNull$35) {
            /* 46 */ field$35 =
                    ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(2));
            /* 47 */ }
        // Field 0 (int): default to -1 when null.
        /* 48 */ isNull$33 = in1.isNullAt(0);
        /* 49 */ field$33 = -1;
        /* 50 */ if (!isNull$33) {
            /* 51 */ field$33 = in1.getInt(0);
            /* 52 */ }
        // Field 1 (long): default to -1L when null.
        /* 53 */ isNull$34 = in1.isNullAt(1);
        /* 54 */ field$34 = -1L;
        /* 55 */ if (!isNull$34) {
            /* 56 */ field$34 = in1.getLong(1);
            /* 57 */ }
        /* 58 */
        /* 59 */
        /* 60 */
        /* 61 */
        /* 62 */
        /* 63 */
        // Write the three extracted fields into the reused output row, nulls preserved.
        /* 64 */ if (isNull$33) {
            /* 65 */ out.setField(0, null);
            /* 66 */ } else {
            /* 67 */ out.setField(0, field$33);
            /* 68 */ }
        /* 69 */
        /* 70 */
        /* 71 */
        /* 72 */ if (isNull$34) {
            /* 73 */ out.setField(1, null);
            /* 74 */ } else {
            /* 75 */ out.setField(1, field$34);
            /* 76 */ }
        /* 77 */
        /* 78 */
        /* 79 */
        /* 80 */ if (isNull$35) {
            /* 81 */ out.setField(2, null);
            /* 82 */ } else {
            /* 83 */ out.setField(2, field$35);
            /* 84 */ }
        /* 85 */
        /* 86 */
        // Emit the populated row wrapped in the reused StreamRecord.
        /* 87 */ output.collect(outElement.replace(out));
        /* 88 */
        /* 89 */
        /* 90 */ }
    /* 91 */
    /* 92 */
    /* 93 */
    /* 94 */ @Override
    /* 95 */ public void finish() throws Exception {
        /* 96 */
        /* 97 */ super.finish();
        /* 98 */ }
    /* 99 */
    /* 100 */ @Override
    /* 101 */ public void close() throws Exception {
        /* 102 */ super.close();
        /* 103 */
        /* 104 */ }
    /* 105 */
    /* 106 */
    /* 107 */ }
/* 108 */

and Janino fails with:

Caused by: org.codehaus.commons.compiler.InternalCompilerException: Compiling "SourceConversion$36" in Line 2, Column 14: Line 34, Column 21: Compiling "processElement(org.apache.flink.streaming.runtime.streamrecord.StreamRecord element)": Operand stack underflow
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:369)
	at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:231)
	at org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:333)
	at org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:330)
	at org.codehaus.janino.Java$CompilationUnit.accept(Java.java:367)
	at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:330)
	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:245)
	at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:473)
	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:223)
	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:209)
	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:82)
	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
	at org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:104)
	... 25 more
Caused by: org.codehaus.commons.compiler.InternalCompilerException: Line 34, Column 21: Compiling "processElement(org.apache.flink.streaming.runtime.streamrecord.StreamRecord element)": Operand stack underflow
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3222)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1379)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1352)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:800)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:412)
	at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:231)
	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:391)
	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:386)
	at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1692)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:386)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:359)
	... 37 more
Caused by: org.codehaus.commons.compiler.InternalCompilerException: Operand stack underflow
	at org.codehaus.janino.StackMap.peekOperand(StackMap.java:94)
	at org.codehaus.janino.CodeContext.popOperand(CodeContext.java:1341)
	at org.codehaus.janino.CodeContext.popOperand(CodeContext.java:1357)
	at org.codehaus.janino.CodeContext.popIntOperand(CodeContext.java:1423)
	at org.codehaus.janino.UnitCompiler.ifxx(UnitCompiler.java:12098)
	at org.codehaus.janino.UnitCompiler.compileBoolean2(UnitCompiler.java:4209)
	at org.codehaus.janino.UnitCompiler.access$6200(UnitCompiler.java:231)
	at org.codehaus.janino.UnitCompiler$13$1.visitAmbiguousName(UnitCompiler.java:4151)
	at org.codehaus.janino.UnitCompiler$13$1.visitAmbiguousName(UnitCompiler.java:4148)
	at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:4574)
	at org.codehaus.janino.UnitCompiler$13.visitLvalue(UnitCompiler.java:4148)
	at org.codehaus.janino.UnitCompiler$13.visitLvalue(UnitCompiler.java:4144)
	at org.codehaus.janino.Java$Lvalue.accept(Java.java:4498)
	at org.codehaus.janino.UnitCompiler.compileBoolean(UnitCompiler.java:4144)
	at org.codehaus.janino.UnitCompiler.compileBoolean2(UnitCompiler.java:4219)
	at org.codehaus.janino.UnitCompiler.access$6400(UnitCompiler.java:231)
	at org.codehaus.janino.UnitCompiler$13.visitUnaryOperation(UnitCompiler.java:4165)
	at org.codehaus.janino.UnitCompiler$13.visitUnaryOperation(UnitCompiler.java:4144)
	at org.codehaus.janino.Java$UnaryOperation.accept(Java.java:5088)
	at org.codehaus.janino.UnitCompiler.compileBoolean(UnitCompiler.java:4144)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2521)
	at org.codehaus.janino.UnitCompiler.access$1900(UnitCompiler.java:231)
	at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1531)
	at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1523)
	at org.codehaus.janino.Java$IfStatement.accept(Java.java:3274)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1523)
	at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1607)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3531)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3218)
	... 47 more

The current version of Janino is 3.1.8, which has improved stack trace logic, so I would get more error information. Could you please update to 3.1.8 and start over?

Also, lots of things have changed in Janino 3.1.8, so it absolutely makes sense to not debug the older version, but upgrade first.

If the problem persists, then please minimize the test case; that would help a ton.

Closing this since it looks fixed after the changes within #187.

My pleasure. :-)