Merged
7 changes: 2 additions & 5 deletions .github/workflows/ci.yml
@@ -34,11 +34,8 @@ jobs:
strategy:
matrix:
# Java versions to run unit tests
java: [ '8', '11', '17' ]
java: [ '11', '17', '21' ]
profile: ['default-hadoop']
include:
- java: '8'
profile: 'hadoop-2'
fail-fast: false
steps:
- name: Checkout
@@ -82,7 +79,7 @@ jobs:
uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: '8'
java-version: '11'
cache: 'maven'
# Caches built protobuf library
- name: Cache protobufs
exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassCompilerSelector.java
@@ -17,10 +17,6 @@
*/
package org.apache.drill.exec.compile;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.compile.ClassTransformer.ClassNames;
@@ -34,6 +30,12 @@
import org.apache.drill.exec.server.options.TypeValidators.LongValidator;
import org.apache.drill.exec.server.options.TypeValidators.StringValidator;
import org.codehaus.commons.compiler.CompileException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

/**
* Selects between the two supported Java compilers: Janino and
@@ -65,7 +67,7 @@
*/

public class ClassCompilerSelector {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClassCompilerSelector.class);
private static final Logger logger = LoggerFactory.getLogger(ClassCompilerSelector.class);

public enum CompilerPolicy {
DEFAULT, JDK, JANINO;
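Two things change in ClassCompilerSelector: the imports are regrouped, and the logger declaration now uses the imported `org.slf4j.Logger` form instead of fully qualified names. The class javadoc (truncated by the collapsed diff) describes choosing between the Janino and JDK compilers; the sketch below is a generic illustration of that kind of string-to-policy lookup, reusing the `CompilerPolicy` constants from the diff. The `resolve` helper and its fallback to `DEFAULT` are assumptions for illustration, not Drill's actual selection logic.

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Generic illustration of an option-driven compiler policy; not Drill's implementation.
class CompilerPolicySketch {
  private static final Logger logger = LoggerFactory.getLogger(CompilerPolicySketch.class);

  enum CompilerPolicy { DEFAULT, JDK, JANINO }

  /** Resolves a policy from a configured string, falling back to DEFAULT when unrecognized. */
  static CompilerPolicy resolve(String configured) {
    try {
      return CompilerPolicy.valueOf(configured.trim().toUpperCase());
    } catch (IllegalArgumentException | NullPointerException e) {
      logger.warn("Unrecognized compiler policy '{}', using DEFAULT", configured);
      return CompilerPolicy.DEFAULT;
    }
  }
}
```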
exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
@@ -17,11 +17,11 @@
*/
package org.apache.drill.exec.compile;

import java.io.IOException;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.util.DrillFileUtils;
@@ -35,11 +35,10 @@
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.tree.ClassNode;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;

/**
* Compiles generated code, merges the resulting class with the
@@ -52,7 +51,7 @@
public class ClassTransformer {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClassTransformer.class);

private static final int MAX_SCALAR_REPLACE_CODE_SIZE = 2*1024*1024; // 2meg
private static final int MAX_SCALAR_REPLACE_CODE_SIZE = 2 * 1024 * 1024; // 2meg

private final ByteCodeLoader byteCodeLoader = new ByteCodeLoader();
private final DrillConfig config;
@@ -72,7 +71,7 @@ public enum ScalarReplacementOption {
* @throws IllegalArgumentException if the string doesn't match any of the enum values
*/
public static ScalarReplacementOption fromString(final String s) {
switch(s) {
switch (s) {
case "off":
return OFF;
case "try":
@@ -228,8 +227,11 @@ public Class<?> getImplementationClass(
final TemplateClassDefinition<?> templateDefinition,
final String entireClass,
final String materializedClassName) throws ClassTransformationException {
// unfortunately, this hasn't been set up at construction time, so we have to do it here
final ScalarReplacementOption scalarReplacementOption = ScalarReplacementOption.fromString(optionManager.getOption(ExecConstants.SCALAR_REPLACEMENT_VALIDATOR));
final ScalarReplacementOption scalarReplacementOption =
ScalarReplacementOption.fromString(optionManager.getOption(ExecConstants.SCALAR_REPLACEMENT_VALIDATOR));

// Track injected class names to avoid duplicates
Set<String> injectedClassNames = new java.util.HashSet<>();

try {
final long t1 = System.nanoTime();
@@ -251,53 +253,32 @@ public Class<?> getImplementationClass(
final Set<ClassSet> namesCompleted = Sets.newHashSet();
names.add(set);

while ( !names.isEmpty() ) {
while (!names.isEmpty()) {
final ClassSet nextSet = names.removeFirst();
if (namesCompleted.contains(nextSet)) {
continue;
}
final ClassNames nextPrecompiled = nextSet.precompiled;
final byte[] precompiledBytes = byteCodeLoader.getClassByteCodeFromPath(nextPrecompiled.clazz);
final ClassNames nextGenerated = nextSet.generated;
// keeps only classes that have not be merged
// keeps only classes that have not been merged
Pair<byte[], ClassNode> classNodePair = classesToMerge.remove(nextGenerated.slash);
final ClassNode generatedNode;
if (classNodePair != null) {
generatedNode = classNodePair.getValue();
} else {
generatedNode = null;
}
final ClassNode generatedNode = (classNodePair != null) ? classNodePair.getValue() : null;

/*
* TODO
* We're having a problem with some cases of scalar replacement, but we want to get
* the code in so it doesn't rot anymore.
*
* Here, we use the specified replacement option. The loop will allow us to retry if
* we're using TRY.
*/
MergedClassResult result = null;
boolean scalarReplace = scalarReplacementOption != ScalarReplacementOption.OFF && entireClass.length() < MAX_SCALAR_REPLACE_CODE_SIZE;
while(true) {
boolean scalarReplace = scalarReplacementOption != ScalarReplacementOption.OFF
&& entireClass.length() < MAX_SCALAR_REPLACE_CODE_SIZE;
while (true) {
try {
result = MergeAdapter.getMergedClass(nextSet, precompiledBytes, generatedNode, scalarReplace);
break;
} catch(RuntimeException e) {
// if we had a problem without using scalar replacement, then rethrow
} catch (RuntimeException e) {
if (!scalarReplace) {
throw e;
}

// if we did try to use scalar replacement, decide if we need to retry or not
if (scalarReplacementOption == ScalarReplacementOption.ON) {
// option is forced on, so this is a hard error
throw e;
}

/*
* We tried to use scalar replacement, with the option to fall back to not using it.
* Log this failure before trying again without scalar replacement.
*/
logger.info("scalar replacement failure (retrying)\n", e);
scalarReplace = false;
}
@@ -307,26 +288,38 @@ public Class<?> getImplementationClass(
s = s.replace(DrillFileUtils.SEPARATOR_CHAR, '.');
names.add(nextSet.getChild(s));
}
classLoader.injectByteCode(nextGenerated.dot, result.bytes);

// Only inject bytecode if not already injected
if (!injectedClassNames.contains(nextGenerated.dot)) {

Contributor: It's hard to tell from the diff what was broken here before. Was the namesCompleted Set not enough to keep track of classes that had already been merged?

Contributor (author): I'm not sure honestly. It seemed to break when I switched the Java version to 11 in the test suite.

(A minimal sketch of this duplicate-injection guard appears after this file's diff.)

classLoader.injectByteCode(nextGenerated.dot, result.bytes);
injectedClassNames.add(nextGenerated.dot);
}

namesCompleted.add(nextSet);
}

// adds byte code of the classes that have not been merged to make them accessible for outer class
for (Map.Entry<String, Pair<byte[], ClassNode>> clazz : classesToMerge.entrySet()) {
classLoader.injectByteCode(clazz.getKey().replace(DrillFileUtils.SEPARATOR_CHAR, '.'), clazz.getValue().getKey());
String classNameDot = clazz.getKey().replace(DrillFileUtils.SEPARATOR_CHAR, '.');
if (!injectedClassNames.contains(classNameDot)) {
classLoader.injectByteCode(classNameDot, clazz.getValue().getKey());
injectedClassNames.add(classNameDot);
}
}

Class<?> c = classLoader.findClass(set.generated.dot);
if (templateDefinition.getExternalInterface().isAssignableFrom(c)) {
logger.debug("Compiled and merged {}: bytecode size = {}, time = {} ms.",
c.getSimpleName(),
DrillStringUtils.readable(totalBytecodeSize),
(System.nanoTime() - t1 + 500_000) / 1_000_000);
c.getSimpleName(),
DrillStringUtils.readable(totalBytecodeSize),
(System.nanoTime() - t1 + 500_000) / 1_000_000);
return c;
}

throw new ClassTransformationException("The requested class did not implement the expected interface.");
} catch (CompileException | IOException | ClassNotFoundException e) {
throw new ClassTransformationException(String.format("Failure generating transformation classes for value: \n %s", entireClass), e);
throw new ClassTransformationException(
String.format("Failure generating transformation classes for value: \n %s", entireClass), e);
}
}
}
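
The inline review thread above leaves the exact root cause open. What is certain is that a `ClassLoader` which defines the same class name twice fails with `java.lang.LinkageError` (attempted duplicate class definition), so guarding every `injectByteCode` call behind a seen-set makes the injection idempotent regardless of which code path repeats a name. A minimal sketch of that guard pattern, with hypothetical names (`InjectionGuard`, `injectOnce`):

```java
import java.util.HashSet;
import java.util.Set;
import java.util.function.BiConsumer;

// Sketch only: isolates the seen-set guard around bytecode injection.
class InjectionGuard {
  private final Set<String> injectedClassNames = new HashSet<>();

  /** Forwards to the injector only the first time a class name is seen. */
  void injectOnce(String dotName, byte[] bytes, BiConsumer<String, byte[]> injector) {
    // Set.add returns false when the name was already injected, so it doubles
    // as both the membership check and the insertion.
    if (injectedClassNames.add(dotName)) {
      injector.accept(dotName, bytes);
    }
  }
}
```

In the PR itself the same effect is achieved inline, with `injectedClassNames.contains(...)` checks before each `classLoader.injectByteCode` call.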
exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
Expand Up @@ -17,32 +17,33 @@
*/
package org.apache.drill.exec.compile;

import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;

import com.google.common.collect.MapMaker;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.compile.ClassTransformer.ClassNames;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.server.options.OptionSet;
import org.codehaus.commons.compiler.CompileException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.MapMaker;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;

/**
* Per-compilation unit class loader that holds both caching and compilation
* steps. */

public class QueryClassLoader extends URLClassLoader {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueryClassLoader.class);
static final Logger logger = LoggerFactory.getLogger(QueryClassLoader.class);

private ClassCompilerSelector compilerSelector;
private final ClassCompilerSelector compilerSelector;

private AtomicLong index = new AtomicLong(0);

private ConcurrentMap<String, byte[]> customClasses = new MapMaker().concurrencyLevel(4).makeMap();
private final ConcurrentMap<String, byte[]> customClasses = new MapMaker().concurrencyLevel(4).makeMap();

public QueryClassLoader(DrillConfig config, OptionSet sessionOptions) {
super(new URL[0], Thread.currentThread().getContextClassLoader());
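QueryClassLoader extends URLClassLoader and, per its javadoc, is created per compilation unit. As orientation only, here is a stripped-down, hypothetical loader of the same shape that serves injected bytecode from an in-memory map; it is not Drill's implementation and omits the compiler-selector and option wiring shown above.

```java
import java.net.URL;
import java.net.URLClassLoader;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Illustrative per-unit class loader; names and error handling are assumptions.
class InjectingClassLoader extends URLClassLoader {
  private final ConcurrentMap<String, byte[]> customClasses = new ConcurrentHashMap<>();

  InjectingClassLoader() {
    // Same parent wiring as the constructor shown in the diff above.
    super(new URL[0], Thread.currentThread().getContextClassLoader());
  }

  /** Registers generated bytecode; rejects duplicates so a name is never defined twice. */
  void injectByteCode(String className, byte[] byteCode) {
    if (customClasses.putIfAbsent(className, byteCode) != null) {
      throw new IllegalStateException("Duplicate bytecode injection for " + className);
    }
  }

  @Override
  protected Class<?> findClass(String className) throws ClassNotFoundException {
    byte[] byteCode = customClasses.get(className);
    if (byteCode != null) {
      // defineClass throws LinkageError if this loader already defined the name.
      return defineClass(className, byteCode, 0, byteCode.length);
    }
    return super.findClass(className);
  }
}
```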
33 changes: 16 additions & 17 deletions exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -17,29 +17,26 @@
*/
package org.apache.drill;

import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import org.apache.calcite.jdbc.DynamicSchema;
import org.apache.drill.exec.alias.AliasRegistryProvider;
import org.apache.drill.exec.ops.ViewExpansionContext;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Resources;
import io.netty.buffer.DrillBuf;
import org.apache.calcite.jdbc.DynamicSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.LogicalPlanPersistence;
import org.apache.drill.common.scanner.ClassPathScanner;
import org.apache.drill.common.scanner.persistence.ScanResult;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.expr.holders.ValueHolder;
import org.apache.drill.exec.vector.ValueHolderHelper;
import org.apache.drill.test.TestTools;
import org.apache.drill.exec.ExecTest;
import org.apache.drill.exec.alias.AliasRegistryProvider;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.expr.holders.ValueHolder;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.ops.QueryContext;
import org.apache.drill.exec.ops.ViewExpansionContext;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.planner.sql.DrillOperatorTable;
@@ -55,16 +52,18 @@
import org.apache.drill.exec.store.StoragePluginRegistryImpl;
import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider;
import org.apache.drill.exec.testing.ExecutionControls;
import org.apache.drill.exec.vector.ValueHolderHelper;
import org.apache.drill.test.TestTools;
import org.junit.Rule;
import org.junit.rules.TestRule;
import org.mockito.ArgumentMatchers;

import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Resources;
import org.mockito.Matchers;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@@ -131,11 +130,11 @@ protected void testSqlPlan(String sqlCommands) throws Exception {
when(context.getManagedBuffer()).thenReturn(allocator.buffer(4));
when(context.getConstantValueHolder(eq("0.03"),
eq(TypeProtos.MinorType.VARDECIMAL),
Matchers.<Function<DrillBuf, ValueHolder>>any()))
ArgumentMatchers.<Function<DrillBuf, ValueHolder>>any()))
.thenReturn(ValueHolderHelper.getVarDecimalHolder(allocator.buffer(4), "0.03"));
when(context.getConstantValueHolder(eq("0.01"),
eq(TypeProtos.MinorType.VARDECIMAL),
Matchers.<Function<DrillBuf, ValueHolder>>any()))
ArgumentMatchers.<Function<DrillBuf, ValueHolder>>any()))
.thenReturn(ValueHolderHelper.getVarDecimalHolder(allocator.buffer(4), "0.01"));
when(context.getOption(anyString())).thenCallRealMethod();
when(context.getViewExpansionContext()).thenReturn(viewExpansionContext);
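PlanningBase migrates its matcher imports from `org.mockito.Matchers` to `org.mockito.ArgumentMatchers`; `Matchers` has been deprecated since Mockito 2 and removed in later releases, while `ArgumentMatchers` exposes the same `eq`/`any`/`anyString` matchers. A minimal stubbing sketch using the new imports — the `OptionSource` interface and the option key are hypothetical, introduced only for illustration:

```java
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Hypothetical interface used only to demonstrate the matcher imports; not a Drill type.
interface OptionSource {
  String get(String name, String fallback);
}

class ArgumentMatchersSketch {
  OptionSource stubbedOptions() {
    OptionSource options = mock(OptionSource.class);
    // Same eq()/anyString() semantics as the old Matchers class; only the import changed.
    when(options.get(eq("some.option"), anyString())).thenReturn("42");
    return options;
  }
}
```

The stubbing calls themselves are unchanged; only the static-import source moves.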
exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestClassTransformation.java
@@ -17,32 +17,34 @@
*/
package org.apache.drill.exec.compile;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.compile.bytecode.ValueHolderReplacementVisitor;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.exec.compile.ClassTransformer.ClassSet;
import org.apache.drill.exec.compile.bytecode.ValueHolderReplacementVisitor;
import org.apache.drill.exec.compile.sig.GeneratorMapping;
import org.apache.drill.exec.compile.sig.MappingSet;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.expr.ClassGenerator;
import org.apache.drill.exec.expr.CodeGenerator;
import org.apache.drill.exec.rpc.user.UserSession;
import org.apache.drill.exec.server.options.SessionOptionManager;
import org.apache.drill.test.BaseTestQuery;
import org.codehaus.commons.compiler.CompileException;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.tree.ClassNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class TestClassTransformation extends BaseTestQuery {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestClassTransformation.class);
private static final Logger logger = LoggerFactory.getLogger(TestClassTransformation.class);

private static final int ITERATION_COUNT = Integer.parseInt(System.getProperty("TestClassTransformation.iteration", "1"));
