diff --git a/NOTICE b/NOTICE index fb36a21373f4..71a02dd4c63f 100644 --- a/NOTICE +++ b/NOTICE @@ -77,3 +77,9 @@ This product contains a modified version of The Guava Authors's Closer class fro * https://github.com/google/guava * COMMIT TAG: * https://github.com/google/guava/blob/c462d69329709f72a17a64cb229d15e76e72199c + +This product contains code adapted from Apache Hadoop + * LICENSE: + * https://github.com/apache/hadoop/blob/trunk/LICENSE.txt (Apache License, Version 2.0) + * HOMEPAGE: + * http://hadoop.apache.org/ \ No newline at end of file diff --git a/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java b/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java new file mode 100644 index 000000000000..ef60239a4eb0 --- /dev/null +++ b/api/src/main/java/io/druid/guice/annotations/EscalatedGlobal.java @@ -0,0 +1,37 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.guice.annotations; + +import com.google.inject.BindingAnnotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + */ +@BindingAnnotation +@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +@PublicApi +public @interface EscalatedGlobal +{ +} diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java index a4321afaa58e..76fd7473aec5 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java @@ -39,6 +39,8 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.query.groupby.GroupByQuery; import io.druid.segment.QueryableIndex; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.planner.DruidPlanner; import io.druid.sql.calcite.planner.PlannerConfig; import io.druid.sql.calcite.planner.PlannerFactory; @@ -112,13 +114,15 @@ public void setup() throws Exception final PlannerConfig plannerConfig = new PlannerConfig(); this.walker = new SpecificSegmentsQuerySegmentWalker(conglomerate).add(dataSegment, index); - plannerFactory = new PlannerFactory( CalciteTests.createMockSchema(walker, plannerConfig), CalciteTests.createMockQueryLifecycleFactory(walker), CalciteTests.createOperatorTable(), CalciteTests.createExprMacroTable(), plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ); groupByQuery = GroupByQuery diff --git a/docs/content/configuration/auth.md b/docs/content/configuration/auth.md new file mode 100644 index 000000000000..d498d217f4fe --- /dev/null +++ b/docs/content/configuration/auth.md @@ -0,0 
+1,102 @@ +--- +layout: doc_page +--- + +# Authentication and Authorization + +|Property|Type|Description|Default|Required| +|--------|-----------|--------|--------|--------| +|`druid.auth.authenticationChain`|JSON List of Strings|List of Authenticator type names|["allowAll"]|no| +|`druid.auth.escalatedAuthenticator`|String|Type of the Authenticator that should be used for internal Druid communications. This Authenticator must be present in `druid.auth.authenticationChain`.|"allowAll"|no| +|`druid.auth.authorizers`|JSON List of Strings|List of Authorizer type names |["allowAll"]|no| + +## Enabling Authentication/Authorization + +## Authentication Chain +Authentication decisions are handled by a chain of Authenticator instances. A request will be checked by Authenticators in the sequence defined by the `druid.auth.authenticationChain`. + +Authenticator implementations are provided by extensions. + +For example, the following authentication chain definition enables the Kerberos and HTTP Basic authenticators, from the `druid-kerberos` and `druid-basic-security` core extensions, respectively: + +``` +druid.auth.authenticationChain=["kerberos", "basic"] +``` + +A request will pass through all Authenticators in the chain, until one of the Authenticators successfully authenticates the request or sends an HTTP error response. Authenticators later in the chain will be skipped after the first successful authentication or if the request is terminated with an error response. + +If no Authenticator in the chain successfully authenticated a request or sent an HTTP error response, an HTTP error response will be sent at the end of the chain. + +Druid includes a built-in Authenticator, used for the default unsecured configuration. + +### AllowAll Authenticator + +This built-in Authenticator authenticates all requests, and always directs them to an Authorizer named "allowAll". It is not intended to be used for anything other than the default unsecured configuration. 
+ +## Internal Authenticator +The `druid.auth.escalatedAuthenticator` property determines what authentication scheme should be used for internal Druid cluster communications (such as when a broker node communicates with historical nodes for query processing). + +The Authenticator chosen for this property must also be present in `druid.auth.authenticationChain`. + +## Authorizers +Authorization decisions are handled by an Authorizer. The `druid.auth.authorizers` property determines what Authorizer implementations will be active. + +There is one built-in Authorizer, "allowAll". Other implementations are provided by extensions. + +For example, the following authorizers definition enables the "basic" implementation from `druid-basic-security`: + +``` +druid.auth.authorizers=["basic"] +``` + + +Only a single Authorizer will authorize any given request. + +Druid includes one built-in Authorizer: + +### AllowAll Authorizer +The Authorizer with type name "allowAll" accepts all requests. + +## Default Unsecured Configuration + +When `druid.auth.authenticationChain` is left empty or unspecified, Druid will create an authentication chain with a single AllowAll Authenticator named "allowAll". + +When `druid.auth.authorizers` is left empty or unspecified, Druid will create a single AllowAll Authorizer named "allowAll". + +The default value of `druid.auth.escalatedAuthenticator` is "allowAll" to match the default unsecured Authenticator/Authorizer configurations. + +## Authenticator to Authorizer Routing + +When an Authenticator successfully authenticates a request, it must attach an AuthenticationResult to the request, containing information about the identity of the requester, as well as the name of the Authorizer that should authorize the authenticated request. + +An Authenticator implementation should provide some means through configuration to allow users to select what Authorizer(s) the Authenticator should route requests to. 
+ +## Internal System User + +Internal requests between Druid nodes (non-user initiated communications) need to have authentication credentials attached. + +These requests should be run as an "internal system user", an identity that represents the Druid cluster itself, with full access permissions. + +The details of how the internal system user is defined are left to Authorizer and Authenticator implementations. + +### Authorizer Internal System User Handling + +Authorizer implementations must recognize and authorize an identity for the "internal system user", with full access permissions. + +### Authenticator Internal System User Handling + +Authenticators must implement three methods related to the internal system user: + +```java + public HttpClient createEscalatedClient(HttpClient baseClient); + + public org.eclipse.jetty.client.HttpClient createEscalatedJettyClient(org.eclipse.jetty.client.HttpClient baseClient); + + public AuthenticationResult createEscalatedAuthenticationResult(); +``` + +`createEscalatedClient` returns a wrapped HttpClient that attaches the credentials of the "internal system user" to requests. + +`createEscalatedJettyClient` is similar to `createEscalatedClient`, except that it operates on a Jetty HttpClient. + +`createEscalatedAuthenticationResult` returns an AuthenticationResult containing the identity of the "internal system user". diff --git a/docs/content/development/extensions-core/druid-kerberos.md b/docs/content/development/extensions-core/druid-kerberos.md index 44e67cce2c7b..dc5a8fb5bb90 100644 --- a/docs/content/development/extensions-core/druid-kerberos.md +++ b/docs/content/development/extensions-core/druid-kerberos.md @@ -5,29 +5,46 @@ layout: doc_page # Druid-Kerberos Druid Extension to enable Authentication for Druid Nodes using Kerberos. 
-This extension adds AuthenticationFilter which is used to protect HTTP Endpoints using the simple and protected GSSAPI negotiation mechanism [SPNEGO](https://en.wikipedia.org/wiki/SPNEGO). +This extension adds an Authenticator which is used to protect HTTP Endpoints using the simple and protected GSSAPI negotiation mechanism [SPNEGO](https://en.wikipedia.org/wiki/SPNEGO). Make sure to [include](../../operations/including-extensions.html) `druid-kerberos` as an extension. ## Configuration +### Creating an Authenticator +``` +druid.auth.authenticatorChain=["MyKerberosAuthenticator"] + +druid.auth.authenticator.MyKerberosAuthenticator.type=kerberos +``` + +To use the Kerberos authenticator, add an authenticator with type `kerberos` to the authenticatorChain. The example above uses the name "MyKerberosAuthenticator" for the Authenticator. + +Configuration of the named authenticator is assigned through properties with the form: + +``` +druid.auth.authenticator.. +``` + +The configuration examples in the rest of this document will use "kerberos" as the name of the authenticator being configured. + +### Properties |Property|Possible Values|Description|Default|required| |--------|---------------|-----------|-------|--------| -|`druid.hadoop.security.kerberos.principal`|`druid@EXAMPLE.COM`| Principal user name, used for internal node communication|empty|Yes| -|`druid.hadoop.security.kerberos.keytab`|`/etc/security/keytabs/druid.keytab`|Path to keytab file used for internal node communication|empty|Yes| -|`druid.hadoop.security.spnego.principal`|`HTTP/_HOST@EXAMPLE.COM`| SPNego service principal used by druid nodes|empty|Yes| -|`druid.hadoop.security.spnego.keytab`|`/etc/security/keytabs/spnego.service.keytab`|SPNego service keytab used by druid nodes|empty|Yes| -|`druid.hadoop.security.spnego.authToLocal`|`RULE:[1:$1@$0](druid@EXAMPLE.COM)s/.*/druid DEFAULT`|It allows you to set a general rule for mapping principal names to local user names. 
It will be used if there is not an explicit mapping for the principal name that is being translated.|DEFAULT|No| -|`druid.hadoop.security.spnego.excludedPaths`|`['/status','/health']`| Array of HTTP paths which which does NOT need to be authenticated.|None|No| -|`druid.hadoop.security.spnego.cookieSignatureSecret`|`secretString`| Secret used to sign authentication cookies. It is advisable to explicitly set it, if you have multiple druid ndoes running on same machine with different ports as the Cookie Specification does not guarantee isolation by port.||No| +|`druid.auth.authenticator.kerberos.internalClientPrincipal`|`druid@EXAMPLE.COM`| Principal user name, used for internal node communication|empty|Yes| +|`druid.auth.authenticator.kerberos.internalClientKeytab`|`/etc/security/keytabs/druid.keytab`|Path to keytab file used for internal node communication|empty|Yes| +|`druid.auth.authenticator.kerberos.serverPrincipal`|`HTTP/_HOST@EXAMPLE.COM`| SPNego service principal used by druid nodes|empty|Yes| +|`druid.auth.authenticator.kerberos.serverKeytab`|`/etc/security/keytabs/spnego.service.keytab`|SPNego service keytab used by druid nodes|empty|Yes| +|`druid.auth.authenticator.kerberos.authToLocal`|`RULE:[1:$1@$0](druid@EXAMPLE.COM)s/.*/druid DEFAULT`|It allows you to set a general rule for mapping principal names to local user names. It will be used if there is not an explicit mapping for the principal name that is being translated.|DEFAULT|No| +|`druid.auth.authenticator.kerberos.excludedPaths`|`['/status','/health']`| Array of HTTP paths which do NOT need to be authenticated.|None|No| +|`druid.auth.authenticator.kerberos.cookieSignatureSecret`|`secretString`| Secret used to sign authentication cookies. 
It is advisable to explicitly set it, if you have multiple druid nodes running on same machine with different ports as the Cookie Specification does not guarantee isolation by port.||No| +|`druid.auth.authenticator.kerberos.authorizerName`|`MyBasicAuthorizer`| Authorizer that requests authenticated by this Authenticator should be directed to.||No| As a note, it is required that the SPNego principal in use by the druid nodes must start with HTTP (This specified by [RFC-4559](https://tools.ietf.org/html/rfc4559)) and must be of the form "HTTP/_HOST@REALM". The special string _HOST will be replaced automatically with the value of config `druid.host` ### Auth to Local Syntax - - -`druid.hadoop.security.spnego.authToLocal` allows you to set a general rules for mapping principal names to local user names. +`druid.auth.authenticator.kerberos.authToLocal` allows you to set general rules for mapping principal names to local user names. The syntax for mapping rules is `RULE:\[n:string](regexp)s/pattern/replacement/g`. The integer n indicates how many components the target principal should have. If this matches, then a string will be formed from string, substituting the realm of the principal for $0 and the n‘th component of the principal for $n. e.g. if the principal was druid/admin then `\[2:$2$1suffix]` would result in the string `admindruidsuffix`. If this string matches regexp, then the s//\[g] substitution command will be run over the string. The optional g will cause the substitution to be global over the string, instead of replacing only the first match in the string. If required, multiple rules can be be joined by newline character and specified as a String. 
diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java new file mode 100644 index 000000000000..50fe3b0f3a52 --- /dev/null +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosAuthenticationHandler.java @@ -0,0 +1,229 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.security.kerberos; + +import io.druid.java.util.common.logger.Logger; +import org.apache.commons.codec.binary.Base64; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.server.AuthenticationToken; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; +import org.apache.hadoop.security.authentication.util.KerberosName; +import org.apache.hadoop.security.authentication.util.KerberosUtil; +import org.ietf.jgss.GSSContext; +import org.ietf.jgss.GSSCredential; +import org.ietf.jgss.GSSManager; +import org.ietf.jgss.Oid; + +import javax.security.auth.Subject; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.File; +import java.io.IOException; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; +import java.util.regex.Pattern; + +public class DruidKerberosAuthenticationHandler extends KerberosAuthenticationHandler +{ + private static final Logger log = new Logger(DruidKerberosAuthenticationHandler.class); + + private String keytab; + private GSSManager gssManager; + private Subject serverSubject = new Subject(); + private List loginContexts = new ArrayList(); + + @Override + public void destroy() + { + keytab = null; + serverSubject = null; + for (LoginContext loginContext : loginContexts) { + try { + loginContext.logout(); + } + catch (LoginException ex) { + log.warn(ex, ex.getMessage()); + } + } + loginContexts.clear(); + } + + @Override + public void init(Properties config) throws ServletException + { + try { + String principal = config.getProperty(PRINCIPAL); + if (principal == null || 
principal.trim().length() == 0) { + throw new ServletException("Principal not defined in configuration"); + } + keytab = config.getProperty(KEYTAB, keytab); + if (keytab == null || keytab.trim().length() == 0) { + throw new ServletException("Keytab not defined in configuration"); + } + if (!new File(keytab).exists()) { + throw new ServletException("Keytab does not exist: " + keytab); + } + + // use all SPNEGO principals in the keytab if a principal isn't + // specifically configured + final String[] spnegoPrincipals; + if (principal.equals("*")) { + spnegoPrincipals = KerberosUtil.getPrincipalNames( + keytab, Pattern.compile("HTTP/.*")); + if (spnegoPrincipals.length == 0) { + throw new ServletException("Principals do not exist in the keytab"); + } + } else { + spnegoPrincipals = new String[]{principal}; + } + + String nameRules = config.getProperty(NAME_RULES, null); + if (nameRules != null) { + KerberosName.setRules(nameRules); + } + + for (String spnegoPrincipal : spnegoPrincipals) { + log.info("Login using keytab {}, for principal {}", + keytab, spnegoPrincipal + ); + final KerberosAuthenticator.DruidKerberosConfiguration kerberosConfiguration = + new KerberosAuthenticator.DruidKerberosConfiguration(keytab, spnegoPrincipal); + final LoginContext loginContext = + new LoginContext("", serverSubject, null, kerberosConfiguration); + try { + loginContext.login(); + } + catch (LoginException le) { + log.warn(le, "Failed to login as [{}]", spnegoPrincipal); + throw new AuthenticationException(le); + } + loginContexts.add(loginContext); + } + try { + gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction() + { + + @Override + public GSSManager run() throws Exception + { + return GSSManager.getInstance(); + } + }); + } + catch (PrivilegedActionException ex) { + throw ex.getException(); + } + } + catch (Exception ex) { + throw new ServletException(ex); + } + } + + @Override + public AuthenticationToken authenticate(HttpServletRequest request, final 
HttpServletResponse response) + throws IOException, AuthenticationException + { + AuthenticationToken token = null; + String authorization = request.getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION); + + if (authorization == null + || !authorization.startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) { + return null; + } else { + authorization = authorization.substring(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE + .length()).trim(); + final Base64 base64 = new Base64(0); + final byte[] clientToken = base64.decode(authorization); + final String serverName = request.getServerName(); + try { + token = Subject.doAs(serverSubject, new PrivilegedExceptionAction() + { + + @Override + public AuthenticationToken run() throws Exception + { + AuthenticationToken token = null; + GSSContext gssContext = null; + GSSCredential gssCreds = null; + try { + gssCreds = gssManager.createCredential( + gssManager.createName( + KerberosUtil.getServicePrincipal("HTTP", serverName), + KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL") + ), + GSSCredential.INDEFINITE_LIFETIME, + new Oid[]{ + KerberosUtil.getOidInstance("GSS_SPNEGO_MECH_OID"), + KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID") + }, + GSSCredential.ACCEPT_ONLY + ); + gssContext = gssManager.createContext(gssCreds); + byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length); + if (serverToken != null && serverToken.length > 0) { + String authenticate = base64.encodeToString(serverToken); + response.setHeader( + org.apache.hadoop.security.authentication.client.KerberosAuthenticator.WWW_AUTHENTICATE, + org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE + + " " + + authenticate + ); + } + if (!gssContext.isEstablished()) { + response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); + log.trace("SPNEGO in progress"); + } else { + String 
clientPrincipal = gssContext.getSrcName().toString(); + KerberosName kerberosName = new KerberosName(clientPrincipal); + String userName = kerberosName.getShortName(); + token = new AuthenticationToken(userName, clientPrincipal, getType()); + response.setStatus(HttpServletResponse.SC_OK); + log.trace("SPNEGO completed for principal [{}]", clientPrincipal); + } + } + finally { + if (gssContext != null) { + gssContext.dispose(); + } + if (gssCreds != null) { + gssCreds.dispose(); + } + } + return token; + } + }); + } + catch (PrivilegedActionException ex) { + if (ex.getException() instanceof IOException) { + throw (IOException) ex.getException(); + } else { + throw new AuthenticationException(ex.getException()); + } + } + } + return token; + } +} diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosModule.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosModule.java index 30d4df8b5ef9..e4058bc92d52 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosModule.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosModule.java @@ -20,19 +20,10 @@ package io.druid.security.kerberos; import com.fasterxml.jackson.databind.Module; +import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; -import com.google.inject.multibindings.Multibinder; -import com.metamx.http.client.HttpClient; -import io.druid.guice.JsonConfigProvider; -import io.druid.guice.LazySingleton; -import io.druid.guice.annotations.Client; -import io.druid.guice.annotations.Global; -import io.druid.guice.http.HttpClientModule; -import io.druid.guice.http.JettyHttpClientModule; import io.druid.initialization.DruidModule; -import io.druid.server.initialization.jetty.ServletFilterHolder; -import io.druid.server.router.Router; import java.util.List; @@ -45,32 
+36,14 @@ public class DruidKerberosModule implements DruidModule public List getJacksonModules() { return ImmutableList.of( + new SimpleModule("DruidKerberos").registerSubtypes( + KerberosAuthenticator.class + ) ); } @Override public void configure(Binder binder) { - JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", AuthenticationKerberosConfig.class); - JsonConfigProvider.bind(binder, "druid.hadoop.security.spnego", SpnegoFilterConfig.class); - - Multibinder.newSetBinder(binder, ServletFilterHolder.class) - .addBinding() - .to(SpnegoFilterHolder.class); - - binder.bind(HttpClient.class) - .annotatedWith(Global.class) - .toProvider(new KerberosHttpClientProvider(new HttpClientModule.HttpClientProvider(Global.class))) - .in(LazySingleton.class); - - binder.bind(HttpClient.class) - .annotatedWith(Client.class) - .toProvider(new KerberosHttpClientProvider(new HttpClientModule.HttpClientProvider(Client.class))) - .in(LazySingleton.class); - - binder.bind(org.eclipse.jetty.client.HttpClient.class) - .annotatedWith(Router.class) - .toProvider(new KerberosJettyHttpClientProvider(new JettyHttpClientModule.HttpClientProvider(Router.class))) - .in(LazySingleton.class); } } diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java index d171821051a1..1487e76d5857 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java @@ -73,7 +73,7 @@ public static String kerberosChallenge(String server) throws AuthenticationExcep // Create a GSSContext for authentication with the service. // We're passing client credentials as null since we want them to be read from the Subject. 
GSSContext gssContext = - manager.createContext(serverName.canonicalize(mechOid), mechOid, null, GSSContext.DEFAULT_LIFETIME); + manager.createContext(serverName.canonicalize(mechOid), mechOid, null, GSSContext.DEFAULT_LIFETIME); gssContext.requestMutualAuth(true); gssContext.requestCredDeleg(true); // Establish context @@ -91,24 +91,27 @@ public static String kerberosChallenge(String server) throws AuthenticationExcep } } - public static void authenticateIfRequired(AuthenticationKerberosConfig config) - throws IOException + public static void authenticateIfRequired(String internalClientPrincipal, String internalClientKeytab) + throws IOException { - String principal = config.getPrincipal(); - String keytab = config.getKeytab(); - if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(keytab)) { + if (!Strings.isNullOrEmpty(internalClientPrincipal) && !Strings.isNullOrEmpty(internalClientKeytab)) { Configuration conf = new Configuration(); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); UserGroupInformation.setConfiguration(conf); try { if (UserGroupInformation.getCurrentUser().hasKerberosCredentials() == false - || !UserGroupInformation.getCurrentUser().getUserName().equals(principal)) { - log.info("trying to authenticate user [%s] with keytab [%s]", principal, keytab); - UserGroupInformation.loginUserFromKeytab(principal, keytab); + || !UserGroupInformation.getCurrentUser().getUserName().equals(internalClientPrincipal)) { + log.info("trying to authenticate user [%s] with keytab [%s]", internalClientPrincipal, internalClientKeytab); + UserGroupInformation.loginUserFromKeytab(internalClientPrincipal, internalClientKeytab); } } catch (IOException e) { - throw new ISE(e, "Failed to authenticate user principal [%s] with keytab [%s]", principal, keytab); + throw new ISE( + e, + "Failed to authenticate user principal [%s] with keytab [%s]", + internalClientPrincipal, + internalClientKeytab + ); } } } diff --git 
a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java new file mode 100644 index 000000000000..6531cb4944d6 --- /dev/null +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosAuthenticator.java @@ -0,0 +1,692 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.security.kerberos; + +import com.fasterxml.jackson.annotation.JacksonInject; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; +import com.google.common.base.Throwables; +import com.metamx.http.client.HttpClient; +import io.druid.guice.annotations.Self; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; +import io.druid.server.DruidNode; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authenticator; +import org.apache.commons.codec.binary.Base64; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticatedURL; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.server.AuthenticationFilter; +import org.apache.hadoop.security.authentication.server.AuthenticationToken; +import org.apache.hadoop.security.authentication.util.KerberosUtil; +import org.apache.hadoop.security.authentication.util.Signer; +import org.apache.hadoop.security.authentication.util.SignerException; +import org.apache.hadoop.security.authentication.util.SignerSecretProvider; +import org.eclipse.jetty.client.api.Authentication; +import org.eclipse.jetty.client.api.ContentResponse; +import org.eclipse.jetty.client.api.Request; +import org.eclipse.jetty.util.Attributes; +import org.jboss.netty.handler.codec.http.HttpHeaders; +import sun.security.krb5.EncryptedData; +import sun.security.krb5.EncryptionKey; +import sun.security.krb5.internal.APReq; +import sun.security.krb5.internal.EncTicketPart; +import sun.security.krb5.internal.Krb5; +import sun.security.krb5.internal.Ticket; +import sun.security.krb5.internal.crypto.KeyUsage; +import 
sun.security.util.DerInputStream; +import sun.security.util.DerValue; + +import javax.security.auth.Subject; +import javax.security.auth.kerberos.KerberosKey; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.kerberos.KeyTab; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.Configuration; +import javax.security.auth.login.LoginContext; +import javax.servlet.DispatcherType; +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletContext; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletRequestWrapper; +import javax.servlet.http.HttpServletResponse; +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.security.Principal; +import java.security.PrivilegedExceptionAction; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Random; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@JsonTypeName("kerberos") +public class KerberosAuthenticator implements Authenticator +{ + private static final Logger log = new Logger(KerberosAuthenticator.class); + private static final Pattern HADOOP_AUTH_COOKIE_REGEX = Pattern.compile(".*p=(\\S+)&t=.*"); + public static final List DEFAULT_EXCLUDED_PATHS = Collections.emptyList(); + + private final DruidNode node; + private final String serverPrincipal; + private final String serverKeytab; + private final String internalClientPrincipal; + private final String internalClientKeytab; + private final String authToLocal; + private final List excludedPaths; + private final String 
cookieSignatureSecret; + private final String authorizerName; + private LoginContext loginContext; + + + @JsonCreator + public KerberosAuthenticator( + @JsonProperty("serverPrincipal") String serverPrincipal, + @JsonProperty("serverKeytab") String serverKeytab, + @JsonProperty("internalClientPrincipal") String internalClientPrincipal, + @JsonProperty("internalClientKeytab") String internalClientKeytab, + @JsonProperty("authToLocal") String authToLocal, + @JsonProperty("excludedPaths") List excludedPaths, + @JsonProperty("cookieSignatureSecret") String cookieSignatureSecret, + @JsonProperty("authorizerName") String authorizerName, + @JacksonInject @Self DruidNode node + ) + { + this.node = node; + this.serverPrincipal = serverPrincipal; + this.serverKeytab = serverKeytab; + this.internalClientPrincipal = internalClientPrincipal; + this.internalClientKeytab = internalClientKeytab; + this.authToLocal = authToLocal == null ? "DEFAULT" : authToLocal; + this.excludedPaths = excludedPaths == null ? DEFAULT_EXCLUDED_PATHS : excludedPaths; + this.cookieSignatureSecret = cookieSignatureSecret; + this.authorizerName = authorizerName; + } + + @Override + public Filter getFilter() + { + return new AuthenticationFilter() + { + private Signer mySigner; + + @Override + public void init(FilterConfig filterConfig) throws ServletException + { + ClassLoader prevLoader = Thread.currentThread().getContextClassLoader(); + try { + // AuthenticationHandler is created during Authenticationfilter.init using reflection with thread context class loader. + // In case of druid since the class is actually loaded as an extension and filter init is done in main thread. + // We need to set the classloader explicitly to extension class loader. + Thread.currentThread().setContextClassLoader(AuthenticationFilter.class.getClassLoader()); + super.init(filterConfig); + String configPrefix = filterConfig.getInitParameter(CONFIG_PREFIX); + configPrefix = (configPrefix != null) ? configPrefix + "." 
: ""; + Properties config = getConfiguration(configPrefix, filterConfig); + String signatureSecret = config.getProperty(configPrefix + SIGNATURE_SECRET); + if (signatureSecret == null) { + signatureSecret = Long.toString(new Random().nextLong()); + log.warn("'signature.secret' configuration not set, using a random value as secret"); + } + final byte[] secretBytes = StringUtils.toUtf8(signatureSecret); + SignerSecretProvider signerSecretProvider = new SignerSecretProvider() + { + @Override + public void init(Properties config, ServletContext servletContext, long tokenValidity) throws Exception + { + + } + + @Override + public byte[] getCurrentSecret() + { + return secretBytes; + } + + @Override + public byte[][] getAllSecrets() + { + return new byte[][]{secretBytes}; + } + }; + mySigner = new Signer(signerSecretProvider); + } + finally { + Thread.currentThread().setContextClassLoader(prevLoader); + } + } + + // Copied from hadoop-auth's AuthenticationFilter, to allow us to change error response handling in doFilterSuper + @Override + protected AuthenticationToken getToken(HttpServletRequest request) throws IOException, AuthenticationException + { + AuthenticationToken token = null; + String tokenStr = null; + Cookie[] cookies = request.getCookies(); + if (cookies != null) { + for (Cookie cookie : cookies) { + if (cookie.getName().equals(AuthenticatedURL.AUTH_COOKIE)) { + tokenStr = cookie.getValue(); + try { + tokenStr = mySigner.verifyAndExtract(tokenStr); + } + catch (SignerException ex) { + throw new AuthenticationException(ex); + } + break; + } + } + } + if (tokenStr != null) { + token = AuthenticationToken.parse(tokenStr); + if (!token.getType().equals(getAuthenticationHandler().getType())) { + throw new AuthenticationException("Invalid AuthenticationToken type"); + } + if (token.isExpired()) { + throw new AuthenticationException("AuthenticationToken expired"); + } + } + return token; + } + + @Override + public void doFilter( + ServletRequest request, 
ServletResponse response, FilterChain filterChain + ) throws IOException, ServletException + { + HttpServletRequest httpReq = (HttpServletRequest) request; + + // If there's already an auth result, then we have authenticated already, skip this. + if (request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) != null) { + filterChain.doFilter(request, response); + return; + } + + if (loginContext == null) { + initializeKerberosLogin(); + } + + String path = ((HttpServletRequest) request).getRequestURI(); + if (isExcluded(path)) { + filterChain.doFilter(request, response); + } else { + String clientPrincipal = null; + try { + Cookie[] cookies = httpReq.getCookies(); + if (cookies == null) { + clientPrincipal = getPrincipalFromRequestNew((HttpServletRequest) request); + } else { + clientPrincipal = null; + for (Cookie cookie : cookies) { + if ("hadoop.auth".equals(cookie.getName())) { + Matcher matcher = HADOOP_AUTH_COOKIE_REGEX.matcher(cookie.getValue()); + if (matcher.matches()) { + clientPrincipal = matcher.group(1); + break; + } + } + } + } + } + catch (Exception ex) { + clientPrincipal = null; + } + + if (clientPrincipal != null) { + request.setAttribute( + AuthConfig.DRUID_AUTHENTICATION_RESULT, + new AuthenticationResult(clientPrincipal, authorizerName) + ); + } + } + + doFilterSuper(request, response, filterChain); + } + + // Copied from hadoop-auth's AuthenticationFilter, to allow us to change error response handling + private void doFilterSuper(ServletRequest request, ServletResponse response, FilterChain filterChain) + throws IOException, ServletException + { + boolean unauthorizedResponse = true; + int errCode = HttpServletResponse.SC_UNAUTHORIZED; + AuthenticationException authenticationEx = null; + HttpServletRequest httpRequest = (HttpServletRequest) request; + HttpServletResponse httpResponse = (HttpServletResponse) response; + boolean isHttps = "https".equals(httpRequest.getScheme()); + try { + boolean newToken = false; + AuthenticationToken token; 
+ try { + token = getToken(httpRequest); + } + catch (AuthenticationException ex) { + log.warn("AuthenticationToken ignored: " + ex.getMessage()); + // will be sent back in a 401 unless filter authenticates + authenticationEx = ex; + token = null; + } + if (getAuthenticationHandler().managementOperation(token, httpRequest, httpResponse)) { + if (token == null) { + if (log.isDebugEnabled()) { + log.debug("Request [{%s}] triggering authentication", getRequestURL(httpRequest)); + } + token = getAuthenticationHandler().authenticate(httpRequest, httpResponse); + if (token != null && token.getExpires() != 0 && + token != AuthenticationToken.ANONYMOUS) { + token.setExpires(System.currentTimeMillis() + getValidity() * 1000); + } + newToken = true; + } + if (token != null) { + unauthorizedResponse = false; + if (log.isDebugEnabled()) { + log.debug("Request [{%s}] user [{%s}] authenticated", getRequestURL(httpRequest), token.getUserName()); + } + final AuthenticationToken authToken = token; + httpRequest = new HttpServletRequestWrapper(httpRequest) + { + + @Override + public String getAuthType() + { + return authToken.getType(); + } + + @Override + public String getRemoteUser() + { + return authToken.getUserName(); + } + + @Override + public Principal getUserPrincipal() + { + return (authToken != AuthenticationToken.ANONYMOUS) ? 
authToken : null; + } + }; + if (newToken && !token.isExpired() && token != AuthenticationToken.ANONYMOUS) { + String signedToken = mySigner.sign(token.toString()); + createAuthCookie(httpResponse, signedToken, getCookieDomain(), + getCookiePath(), token.getExpires(), isHttps + ); + } + doFilter(filterChain, httpRequest, httpResponse); + } + } else { + unauthorizedResponse = false; + } + } + catch (AuthenticationException ex) { + // exception from the filter itself is fatal + errCode = HttpServletResponse.SC_FORBIDDEN; + authenticationEx = ex; + if (log.isDebugEnabled()) { + log.debug("Authentication exception: " + ex.getMessage(), ex); + } else { + log.warn("Authentication exception: " + ex.getMessage()); + } + } + if (unauthorizedResponse) { + if (!httpResponse.isCommitted()) { + createAuthCookie(httpResponse, "", getCookieDomain(), + getCookiePath(), 0, isHttps + ); + // If response code is 401. Then WWW-Authenticate Header should be + // present.. reset to 403 if not found.. + if ((errCode == HttpServletResponse.SC_UNAUTHORIZED) + && (!httpResponse.containsHeader( + org.apache.hadoop.security.authentication.client.KerberosAuthenticator.WWW_AUTHENTICATE))) { + errCode = HttpServletResponse.SC_FORBIDDEN; + } + if (authenticationEx == null) { + // Don't send an error response here, unlike the base AuthenticationFilter implementation. + // This request did not use Kerberos auth. + // Instead, we will send an error response in PreResponseAuthorizationCheckFilter to allow + // other Authenticator implementations to check the request. + filterChain.doFilter(request, response); + } else { + // Do send an error response here, we attempted Kerberos authentication and failed. 
+ httpResponse.sendError(errCode, authenticationEx.getMessage()); + } + } + } + } + }; + } + + @Override + public Class getFilterClass() + { + return null; + } + + @Override + public Map getInitParameters() + { + Map params = new HashMap(); + try { + params.put( + "kerberos.principal", + SecurityUtil.getServerPrincipal(serverPrincipal, node.getHost()) + ); + params.put("kerberos.keytab", serverKeytab); + params.put(AuthenticationFilter.AUTH_TYPE, DruidKerberosAuthenticationHandler.class.getName()); + params.put("kerberos.name.rules", authToLocal); + if (cookieSignatureSecret != null) { + params.put("signature.secret", cookieSignatureSecret); + } + } + catch (IOException e) { + Throwables.propagate(e); + } + return params; + } + + @Override + public String getPath() + { + return "/*"; + } + + @Override + public EnumSet getDispatcherType() + { + return null; + } + + @Override + public String getAuthChallengeHeader() + { + return "Negotiate"; + } + + @Override + public AuthenticationResult authenticateJDBCContext(Map context) + { + throw new UnsupportedOperationException("JDBC Kerberos auth not supported yet"); + } + + @Override + public HttpClient createEscalatedClient(HttpClient baseClient) + { + return new KerberosHttpClient(baseClient, internalClientPrincipal, internalClientKeytab); + } + + @Override + public org.eclipse.jetty.client.HttpClient createEscalatedJettyClient(org.eclipse.jetty.client.HttpClient baseClient) + { + baseClient.getAuthenticationStore().addAuthentication(new Authentication() + { + @Override + public boolean matches(String type, URI uri, String realm) + { + return true; + } + + @Override + public Result authenticate( + final Request request, ContentResponse response, Authentication.HeaderInfo headerInfo, Attributes context + ) + { + return new Result() + { + @Override + public URI getURI() + { + return request.getURI(); + } + + @Override + public void apply(Request request) + { + try { + // No need to set cookies as they are handled by Jetty 
Http Client itself. + URI uri = request.getURI(); + if (DruidKerberosUtil.needToSendCredentials(baseClient.getCookieStore(), uri)) { + log.debug( + "No Auth Cookie found for URI[%s]. Existing Cookies[%s] Authenticating... ", + uri, + baseClient.getCookieStore().getCookies() + ); + final String host = request.getHost(); + DruidKerberosUtil.authenticateIfRequired(internalClientPrincipal, internalClientKeytab); + UserGroupInformation currentUser = UserGroupInformation.getCurrentUser(); + String challenge = currentUser.doAs(new PrivilegedExceptionAction() + { + @Override + public String run() throws Exception + { + return DruidKerberosUtil.kerberosChallenge(host); + } + }); + request.getHeaders().add(HttpHeaders.Names.AUTHORIZATION, "Negotiate " + challenge); + } else { + log.debug("Found Auth Cookie found for URI[%s].", uri); + } + } + catch (Throwable e) { + Throwables.propagate(e); + } + } + }; + } + }); + return baseClient; + } + + @Override + public AuthenticationResult createEscalatedAuthenticationResult() + { + return new AuthenticationResult(internalClientPrincipal, authorizerName); + } + + private boolean isExcluded(String path) + { + for (String excluded : excludedPaths) { + if (path.startsWith(excluded)) { + return true; + } + } + return false; + } + + + /** + * Kerberos context configuration for the JDK GSS library. Copied from hadoop-auth's KerberosAuthenticationHandler. + */ + public static class DruidKerberosConfiguration extends Configuration + { + private String keytab; + private String principal; + + public DruidKerberosConfiguration(String keytab, String principal) + { + this.keytab = keytab; + this.principal = principal; + } + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) + { + Map options = new HashMap(); + if (System.getProperty("java.vendor").contains("IBM")) { + options.put( + "useKeytab", + keytab.startsWith("file://") ? 
keytab : "file://" + keytab + ); + options.put("principal", principal); + options.put("credsType", "acceptor"); + } else { + options.put("keyTab", keytab); + options.put("principal", principal); + options.put("useKeyTab", "true"); + options.put("storeKey", "true"); + options.put("doNotPrompt", "true"); + options.put("useTicketCache", "true"); + options.put("renewTGT", "true"); + options.put("isInitiator", "false"); + } + options.put("refreshKrb5Config", "true"); + String ticketCache = System.getenv("KRB5CCNAME"); + if (ticketCache != null) { + if (System.getProperty("java.vendor").contains("IBM")) { + options.put("useDefaultCcache", "true"); + // The first value searched when "useDefaultCcache" is used. + System.setProperty("KRB5CCNAME", ticketCache); + options.put("renewTGT", "true"); + options.put("credsType", "both"); + } else { + options.put("ticketCache", ticketCache); + } + } + if (log.isDebugEnabled()) { + options.put("debug", "true"); + } + + return new AppConfigurationEntry[]{ + new AppConfigurationEntry( + KerberosUtil.getKrb5LoginModuleName(), + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, + options + ), + }; + } + } + + private String getPrincipalFromRequestNew(HttpServletRequest req) + { + String authorization = req.getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION); + if (authorization == null + || !authorization.startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) { + return null; + } else { + authorization = authorization.substring(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE + .length()).trim(); + final Base64 base64 = new Base64(0); + final byte[] clientToken = base64.decode(authorization); + try { + DerInputStream ticketStream = new DerInputStream(clientToken); + DerValue[] values = ticketStream.getSet(clientToken.length, true); + + // see this link for AP-REQ format: https://tools.ietf.org/html/rfc1510#section-5.5.1 
+ for (DerValue value : values) { + if (isValueAPReq(value)) { + APReq apReq = new APReq(value); + Ticket ticket = apReq.ticket; + EncryptedData encData = ticket.encPart; + int eType = encData.getEType(); + + // find the server's key + EncryptionKey finalKey = null; + Subject serverSubj = loginContext.getSubject(); + Set serverCreds = serverSubj.getPrivateCredentials(Object.class); + for (Object cred : serverCreds) { + if (cred instanceof KeyTab) { + KeyTab serverKeyTab = (KeyTab) cred; + KerberosPrincipal serverPrincipal = new KerberosPrincipal(this.serverPrincipal); + KerberosKey[] serverKeys = serverKeyTab.getKeys(serverPrincipal); + for (KerberosKey key : serverKeys) { + if (key.getKeyType() == eType) { + finalKey = new EncryptionKey(key.getKeyType(), key.getEncoded()); + break; + } + } + } + } + + if (finalKey == null) { + log.error("Could not find matching key from server creds."); + return null; + } + + // decrypt the ticket with the server's key + byte[] decryptedBytes = encData.decrypt(finalKey, KeyUsage.KU_TICKET); + decryptedBytes = encData.reset(decryptedBytes); + EncTicketPart decrypted = new EncTicketPart(decryptedBytes); + String clientPrincipal = decrypted.cname.toString(); + return clientPrincipal; + } + } + } + catch (Exception ex) { + Throwables.propagate(ex); + } + } + + return null; + } + + private boolean isValueAPReq(DerValue value) + { + return value.isConstructed((byte) Krb5.KRB_AP_REQ); + } + + private void initializeKerberosLogin() throws ServletException + { + String principal; + String keytab; + + try { + principal = SecurityUtil.getServerPrincipal(serverPrincipal, node.getHost()); + if (principal == null || principal.trim().length() == 0) { + throw new ServletException("Principal not defined in configuration"); + } + keytab = serverKeytab; + if (keytab == null || keytab.trim().length() == 0) { + throw new ServletException("Keytab not defined in configuration"); + } + if (!new File(keytab).exists()) { + throw new 
ServletException("Keytab does not exist: " + keytab); + } + + Set principals = new HashSet(); + principals.add(new KerberosPrincipal(principal)); + Subject subject = new Subject(false, principals, new HashSet(), new HashSet()); + + DruidKerberosConfiguration kerberosConfiguration = new DruidKerberosConfiguration(keytab, principal); + + log.info("Login using keytab " + keytab + ", for principal " + principal); + loginContext = new LoginContext("", subject, null, kerberosConfiguration); + loginContext.login(); + + log.info("Initialized, principal %s from keytab %s", principal, keytab); + } + catch (Exception ex) { + throw new ServletException(ex); + } + } +} diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java index 4930006b181d..979a10fcab5f 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClient.java @@ -47,15 +47,17 @@ public class KerberosHttpClient extends AbstractHttpClient private static final Logger log = new Logger(KerberosHttpClient.class); private final HttpClient delegate; - private final AuthenticationKerberosConfig config; private final CookieManager cookieManager; private final Executor exec = Execs.singleThreaded("test-%s"); + private final String internalClientPrincipal; + private final String internalClientKeytab; - public KerberosHttpClient(HttpClient delegate, AuthenticationKerberosConfig config) + public KerberosHttpClient(HttpClient delegate, String internalClientPrincipal, String internalClientKeytab) { this.delegate = delegate; - this.config = config; this.cookieManager = new CookieManager(); + this.internalClientPrincipal = internalClientPrincipal; + this.internalClientKeytab = internalClientKeytab; } @Override @@ -94,7 +96,7 @@ private void 
inner_go( uri, cookieManager.getCookieStore().getCookies() ); - DruidKerberosUtil.authenticateIfRequired(config); + DruidKerberosUtil.authenticateIfRequired(internalClientPrincipal, internalClientKeytab); UserGroupInformation currentUser = UserGroupInformation.getCurrentUser(); String challenge = currentUser.doAs(new PrivilegedExceptionAction() { diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClientProvider.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClientProvider.java deleted file mode 100644 index 6d4cb6234566..000000000000 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosHttpClientProvider.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.security.kerberos; - -import com.google.inject.Inject; -import com.google.inject.Injector; -import com.google.inject.Provider; -import com.metamx.http.client.HttpClient; -import io.druid.guice.http.AbstractHttpClientProvider; - -public class KerberosHttpClientProvider extends AbstractHttpClientProvider -{ - private final Provider delegateProvider; - private AuthenticationKerberosConfig config; - - public KerberosHttpClientProvider( - Provider delegateProvider - ) - { - this.delegateProvider = delegateProvider; - } - - @Inject - @Override - public void configure(Injector injector) - { - if (delegateProvider instanceof AbstractHttpClientProvider) { - ((AbstractHttpClientProvider) delegateProvider).configure(injector); - } - config = injector.getInstance(AuthenticationKerberosConfig.class); - } - - @Override - public HttpClient get() - { - return new KerberosHttpClient(delegateProvider.get(), config); - } -} diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosJettyHttpClientProvider.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosJettyHttpClientProvider.java deleted file mode 100644 index cb8575bf48f6..000000000000 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/KerberosJettyHttpClientProvider.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.security.kerberos; - - -import com.google.common.base.Throwables; -import com.google.inject.Inject; -import com.google.inject.Injector; -import com.google.inject.Provider; -import io.druid.guice.http.AbstractHttpClientProvider; -import io.druid.java.util.common.logger.Logger; -import org.apache.hadoop.security.UserGroupInformation; -import org.eclipse.jetty.client.HttpClient; -import org.eclipse.jetty.client.api.Authentication; -import org.eclipse.jetty.client.api.ContentResponse; -import org.eclipse.jetty.client.api.Request; -import org.eclipse.jetty.util.Attributes; -import org.jboss.netty.handler.codec.http.HttpHeaders; - -import java.net.URI; -import java.security.PrivilegedExceptionAction; - -public class KerberosJettyHttpClientProvider extends AbstractHttpClientProvider -{ - private static final Logger log = new Logger(KerberosJettyHttpClientProvider.class); - - private final Provider delegateProvider; - private AuthenticationKerberosConfig config; - - - public KerberosJettyHttpClientProvider( - Provider delegateProvider - ) - { - this.delegateProvider = delegateProvider; - } - - @Inject - @Override - public void configure(Injector injector) - { - if (delegateProvider instanceof AbstractHttpClientProvider) { - ((AbstractHttpClientProvider) delegateProvider).configure(injector); - } - config = injector.getInstance(AuthenticationKerberosConfig.class); - } - - - @Override - public HttpClient get() - { - final HttpClient httpClient = delegateProvider.get(); - httpClient.getAuthenticationStore().addAuthentication(new 
Authentication() - { - @Override - public boolean matches(String type, URI uri, String realm) - { - return true; - } - - @Override - public Result authenticate( - final Request request, ContentResponse response, Authentication.HeaderInfo headerInfo, Attributes context - ) - { - return new Result() - { - @Override - public URI getURI() - { - return request.getURI(); - } - - @Override - public void apply(Request request) - { - try { - // No need to set cookies as they are handled by Jetty Http Client itself. - URI uri = request.getURI(); - if (DruidKerberosUtil.needToSendCredentials(httpClient.getCookieStore(), uri)) { - log.debug( - "No Auth Cookie found for URI[%s]. Existing Cookies[%s] Authenticating... ", - uri, - httpClient.getCookieStore().getCookies() - ); - final String host = request.getHost(); - DruidKerberosUtil.authenticateIfRequired(config); - UserGroupInformation currentUser = UserGroupInformation.getCurrentUser(); - String challenge = currentUser.doAs(new PrivilegedExceptionAction() - { - @Override - public String run() throws Exception - { - return DruidKerberosUtil.kerberosChallenge(host); - } - }); - request.getHeaders().add(HttpHeaders.Names.AUTHORIZATION, "Negotiate " + challenge); - } else { - log.debug("Found Auth Cookie found for URI[%s].", uri); - } - } - catch (Throwable e) { - Throwables.propagate(e); - } - } - }; - } - }); - return httpClient; - } -} diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterConfig.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterConfig.java index 9ce8f6ab098f..4dd4f9b01f97 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterConfig.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterConfig.java @@ -47,11 +47,11 @@ public class SpnegoFilterConfig @JsonCreator public SpnegoFilterConfig( - @JsonProperty("principal") String principal, - 
@JsonProperty("keytab") String keytab, - @JsonProperty("authToLocal") String authToLocal, - @JsonProperty("excludedPaths") List excludedPaths, - @JsonProperty("cookieSignatureSecret") String cookieSignatureSecret + @JsonProperty("principal") String principal, + @JsonProperty("keytab") String keytab, + @JsonProperty("authToLocal") String authToLocal, + @JsonProperty("excludedPaths") List excludedPaths, + @JsonProperty("cookieSignatureSecret") String cookieSignatureSecret ) { this.principal = principal; diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterHolder.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterHolder.java deleted file mode 100644 index ab2a3b6d4f4b..000000000000 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/SpnegoFilterHolder.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.security.kerberos; - -import com.google.common.base.Throwables; -import com.google.inject.Inject; -import io.druid.guice.annotations.Self; -import io.druid.server.DruidNode; -import io.druid.server.initialization.jetty.ServletFilterHolder; -import org.apache.hadoop.security.SecurityUtil; -import org.apache.hadoop.security.authentication.server.AuthenticationFilter; - -import javax.servlet.DispatcherType; -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; -import java.io.IOException; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; - -public class SpnegoFilterHolder implements ServletFilterHolder -{ - private final SpnegoFilterConfig config; - private final DruidNode node; - - @Inject - public SpnegoFilterHolder(SpnegoFilterConfig config, @Self DruidNode node) - { - this.config = config; - this.node = node; - } - - @Override - public Filter getFilter() - { - return new AuthenticationFilter() - { - @Override - public void init(FilterConfig filterConfig) throws ServletException - { - ClassLoader prevLoader = Thread.currentThread().getContextClassLoader(); - try { - // AuthenticationHandler is created during Authenticationfilter.init using reflection with thread context class loader. - // In case of druid since the class is actually loaded as an extension and filter init is done in main thread. - // We need to set the classloader explicitly to extension class loader. 
- Thread.currentThread().setContextClassLoader(AuthenticationFilter.class.getClassLoader()); - super.init(filterConfig); - } - finally { - Thread.currentThread().setContextClassLoader(prevLoader); - } - } - - @Override - public void doFilter( - ServletRequest request, ServletResponse response, FilterChain filterChain - ) throws IOException, ServletException - { - String path = ((HttpServletRequest) request).getRequestURI(); - if (isExcluded(path)) { - filterChain.doFilter(request, response); - } - super.doFilter(request, response, filterChain); - } - }; - } - - private boolean isExcluded(String path) - { - for (String excluded : config.getExcludedPaths()) { - if (path.startsWith(excluded)) { - return true; - } - } - return false; - } - - @Override - public Class getFilterClass() - { - return null; - } - - @Override - public Map getInitParameters() - { - Map params = new HashMap(); - try { - params.put( - "kerberos.principal", - SecurityUtil.getServerPrincipal(config.getPrincipal(), node.getHost()) - ); - params.put("kerberos.keytab", config.getKeytab()); - params.put(AuthenticationFilter.AUTH_TYPE, "kerberos"); - params.put("kerberos.name.rules", config.getAuthToLocal()); - if (config.getCookieSignatureSecret() != null) { - params.put("signature.secret", config.getCookieSignatureSecret()); - } - } - catch (IOException e) { - Throwables.propagate(e); - } - return params; - } - - @Override - public String getPath() - { - return "/*"; - } - - @Override - public EnumSet getDispatcherType() - { - return null; - } -} diff --git a/extensions-core/druid-kerberos/src/test/java/io/druid/security/kerberos/AuthenticationKerberosConfigTest.java b/extensions-core/druid-kerberos/src/test/java/io/druid/security/kerberos/AuthenticationKerberosConfigTest.java deleted file mode 100644 index 235513034464..000000000000 --- a/extensions-core/druid-kerberos/src/test/java/io/druid/security/kerberos/AuthenticationKerberosConfigTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to 
Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.security.kerberos; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.inject.Binder; -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.Module; -import com.google.inject.Provides; -import io.druid.guice.ConfigModule; -import io.druid.guice.DruidGuiceExtensions; -import io.druid.guice.JsonConfigProvider; -import io.druid.guice.LazySingleton; -import io.druid.guice.PropertiesModule; -import io.druid.jackson.DefaultObjectMapper; -import org.junit.Assert; -import org.junit.Test; - -import java.util.Arrays; -import java.util.Properties; - -public class AuthenticationKerberosConfigTest -{ - @Test - public void testserde() - { - Injector injector = Guice.createInjector( - new Module() - { - @Override - public void configure(Binder binder) - { - binder.install(new PropertiesModule(Arrays.asList("test.runtime.properties"))); - binder.install(new ConfigModule()); - binder.install(new DruidGuiceExtensions()); - JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", AuthenticationKerberosConfig.class); - } - - @Provides - @LazySingleton - public ObjectMapper jsonMapper() - { - return new 
DefaultObjectMapper(); - } - } - ); - - Properties props = injector.getInstance(Properties.class); - AuthenticationKerberosConfig config = injector.getInstance(AuthenticationKerberosConfig.class); - - Assert.assertEquals(props.getProperty("druid.hadoop.security.kerberos.principal"), config.getPrincipal()); - Assert.assertEquals(props.getProperty("druid.hadoop.security.kerberos.keytab"), config.getKeytab()); - - - } -} diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 2fed81cabdeb..d69334037a37 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -44,6 +44,8 @@ import io.druid.segment.column.ValueType; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.virtual.ExpressionVirtualColumn; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.filtration.Filtration; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; @@ -127,12 +129,16 @@ public void setUp() throws Exception ImmutableSet.of(new QuantileSqlAggregator()), ImmutableSet.of() ); + plannerFactory = new PlannerFactory( druidSchema, CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, CalciteTests.createExprMacroTable(), plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ); } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java 
b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java index f81515ba18ea..08965c021644 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java @@ -78,6 +78,14 @@ import io.druid.segment.realtime.appenderator.TransactionalSegmentPublisher; import io.druid.segment.realtime.firehose.ChatHandler; import io.druid.segment.realtime.firehose.ChatHandlerProvider; +import io.druid.server.security.Access; +import io.druid.server.security.Action; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; +import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; +import io.druid.server.security.ResourceType; import io.druid.timeline.DataSegment; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -87,6 +95,7 @@ import org.apache.kafka.common.serialization.ByteArrayDeserializer; import org.joda.time.DateTime; +import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; @@ -94,6 +103,7 @@ import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.IOException; @@ -134,6 +144,7 @@ public enum Status private final InputRowParser parser; private final KafkaTuningConfig tuningConfig; private final KafkaIOConfig ioConfig; + private final AuthorizerMapper authorizerMapper; private final Optional chatHandlerProvider; private final Map endOffsets = new ConcurrentHashMap<>(); @@ -198,7 +209,8 @@ public KafkaIndexTask( @JsonProperty("tuningConfig") KafkaTuningConfig tuningConfig, 
@JsonProperty("ioConfig") KafkaIOConfig ioConfig, @JsonProperty("context") Map context, - @JacksonInject ChatHandlerProvider chatHandlerProvider + @JacksonInject ChatHandlerProvider chatHandlerProvider, + @JacksonInject AuthorizerMapper authorizerMapper ) { super( @@ -214,6 +226,7 @@ public KafkaIndexTask( this.tuningConfig = Preconditions.checkNotNull(tuningConfig, "tuningConfig"); this.ioConfig = Preconditions.checkNotNull(ioConfig, "ioConfig"); this.chatHandlerProvider = Optional.fromNullable(chatHandlerProvider); + this.authorizerMapper = authorizerMapper; this.endOffsets.putAll(ioConfig.getEndPartitions().getPartitionOffsetMap()); } @@ -627,8 +640,26 @@ public boolean canRestore() return true; } - @POST - @Path("/stop") + /** + * Authorizes action to be performed on this task's datasource + * + * @return authorization result + */ + private Access authorizationCheck(final HttpServletRequest req, Action action) + { + ResourceAction resourceAction = new ResourceAction( + new Resource(dataSchema.getDataSource(), ResourceType.DATASOURCE), + action + ); + + Access access = AuthorizationUtils.authorizeResourceAction(req, resourceAction, authorizerMapper); + if (!access.isAllowed()) { + throw new ForbiddenException(access.toString()); + } + + return access; + } + @Override public void stopGracefully() { @@ -694,9 +725,24 @@ public Sequence run(final QueryPlus queryPlus, final Map r }; } + @POST + @Path("/stop") + public Response stop(@Context final HttpServletRequest req) + { + authorizationCheck(req, Action.WRITE); + stopGracefully(); + return Response.status(Response.Status.OK).build(); + } + @GET @Path("/status") @Produces(MediaType.APPLICATION_JSON) + public Status getStatusHTTP(@Context final HttpServletRequest req) + { + authorizationCheck(req, Action.READ); + return status; + } + public Status getStatus() { return status; @@ -705,6 +751,12 @@ public Status getStatus() @GET @Path("/offsets/current") @Produces(MediaType.APPLICATION_JSON) + public Map 
getCurrentOffsets(@Context final HttpServletRequest req) + { + authorizationCheck(req, Action.READ); + return getCurrentOffsets(); + } + public Map getCurrentOffsets() { return nextOffsets; @@ -713,6 +765,12 @@ public Map getCurrentOffsets() @GET @Path("/offsets/end") @Produces(MediaType.APPLICATION_JSON) + public Map getEndOffsetsHTTP(@Context final HttpServletRequest req) + { + authorizationCheck(req, Action.READ); + return getEndOffsets(); + } + public Map getEndOffsets() { return endOffsets; @@ -722,9 +780,19 @@ public Map getEndOffsets() @Path("/offsets/end") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) + public Response setEndOffsetsHTTP( + Map offsets, + @QueryParam("resume") @DefaultValue("false") final boolean resume, + @Context final HttpServletRequest req + ) throws InterruptedException + { + authorizationCheck(req, Action.WRITE); + return setEndOffsets(offsets, resume); + } + public Response setEndOffsets( Map offsets, - @QueryParam("resume") @DefaultValue("false") final boolean resume + final boolean resume ) throws InterruptedException { if (offsets == null) { @@ -790,8 +858,16 @@ public Response setEndOffsets( @POST @Path("/pause") @Produces(MediaType.APPLICATION_JSON) - public Response pause(@QueryParam("timeout") @DefaultValue("0") final long timeout) - throws InterruptedException + public Response pauseHTTP( + @QueryParam("timeout") @DefaultValue("0") final long timeout, + @Context final HttpServletRequest req + ) throws InterruptedException + { + authorizationCheck(req, Action.WRITE); + return pause(timeout); + } + + public Response pause(final long timeout) throws InterruptedException { if (!(status == Status.PAUSED || status == Status.READING)) { return Response.status(Response.Status.BAD_REQUEST) @@ -840,6 +916,13 @@ public Response pause(@QueryParam("timeout") @DefaultValue("0") final long timeo @POST @Path("/resume") + public Response resumeHTTP(@Context final HttpServletRequest req) throws InterruptedException 
+ { + authorizationCheck(req, Action.WRITE); + resume(); + return Response.status(Response.Status.OK).build(); + } + public void resume() throws InterruptedException { pauseLock.lockInterruptibly(); @@ -863,8 +946,9 @@ public void resume() throws InterruptedException @GET @Path("/time/start") @Produces(MediaType.APPLICATION_JSON) - public DateTime getStartTime() + public DateTime getStartTime(@Context final HttpServletRequest req) { + authorizationCheck(req, Action.WRITE); return startTime; } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClientFactory.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClientFactory.java index ee602e50dbfc..869392642003 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClientFactory.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClientFactory.java @@ -22,7 +22,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; import com.metamx.http.client.HttpClient; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.guice.annotations.Json; import io.druid.indexing.common.TaskInfoProvider; import org.joda.time.Duration; @@ -33,7 +33,10 @@ public class KafkaIndexTaskClientFactory private ObjectMapper mapper; @Inject - public KafkaIndexTaskClientFactory(@Global HttpClient httpClient, @Json ObjectMapper mapper) + public KafkaIndexTaskClientFactory( + @EscalatedGlobal HttpClient httpClient, + @Json ObjectMapper mapper + ) { this.httpClient = httpClient; this.mapper = mapper; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index d3c225095ef4..f2065e1a4c80 100644 
--- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -1432,6 +1432,7 @@ private void createKafkaTasksForGroup(int groupId, int replicas) taskTuningConfig, kafkaIOConfig, spec.getContext(), + null, null ); diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorSpec.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorSpec.java index c476b05e1053..559428fc2e49 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorSpec.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorSpec.java @@ -67,7 +67,7 @@ public KafkaSupervisorSpec( @JacksonInject @Json ObjectMapper mapper, @JacksonInject ServiceEmitter emitter, @JacksonInject DruidMonitorSchedulerConfig monitorSchedulerConfig - ) + ) { this.dataSchema = Preconditions.checkNotNull(dataSchema, "dataSchema"); this.tuningConfig = tuningConfig != null diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index ec02dc372cae..453405ce08ee 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -1487,6 +1487,7 @@ private KafkaIndexTask createTask( tuningConfig, ioConfig, null, + null, null ); task.setPollRetryMs(POLL_RETRY_MS); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java 
b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index 9ed12b7cbff4..672f0c92bb32 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -1819,7 +1819,10 @@ private KafkaSupervisor getSupervisor( skipOffsetGaps ); - KafkaIndexTaskClientFactory taskClientFactory = new KafkaIndexTaskClientFactory(null, null) + KafkaIndexTaskClientFactory taskClientFactory = new KafkaIndexTaskClientFactory( + null, + null + ) { @Override public KafkaIndexTaskClient build( @@ -1920,6 +1923,7 @@ private KafkaIndexTask createKafkaIndexTask( false ), ImmutableMap.of(), + null, null ); } diff --git a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java index 9de19dbbd3e2..b2de27468d7d 100644 --- a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java +++ b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java @@ -24,6 +24,8 @@ import com.google.inject.Binder; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.EscalatedClient; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.guice.annotations.Global; import io.druid.initialization.DruidModule; import io.druid.server.router.Router; @@ -48,5 +50,7 @@ public void configure(Binder binder) binder.bind(SSLContext.class).annotatedWith(Global.class).toProvider(SSLContextProvider.class); binder.bind(SSLContext.class).annotatedWith(Client.class).toProvider(SSLContextProvider.class); binder.bind(SSLContext.class).annotatedWith(Router.class).toProvider(SSLContextProvider.class); + 
binder.bind(SSLContext.class).annotatedWith(EscalatedGlobal.class).toProvider(SSLContextProvider.class); + binder.bind(SSLContext.class).annotatedWith(EscalatedClient.class).toProvider(SSLContextProvider.class); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java index 3db22999a139..5e0b165c0cab 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerFactory.java @@ -24,7 +24,7 @@ import com.google.inject.Inject; import com.metamx.http.client.HttpClient; import io.druid.curator.cache.PathChildrenCacheFactory; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.indexing.overlord.autoscaling.NoopProvisioningStrategy; import io.druid.indexing.overlord.autoscaling.ProvisioningSchedulerConfig; import io.druid.indexing.overlord.autoscaling.ProvisioningStrategy; @@ -53,7 +53,7 @@ public RemoteTaskRunnerFactory( final RemoteTaskRunnerConfig remoteTaskRunnerConfig, final IndexerZkConfig zkPaths, final ObjectMapper jsonMapper, - @Global final HttpClient httpClient, + @EscalatedGlobal final HttpClient httpClient, final Supplier workerConfigRef, final ProvisioningSchedulerConfig provisioningSchedulerConfig, final ProvisioningStrategy provisioningStrategy diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java index f98df52c7432..d8ef4bc87019 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java @@ -22,10 +22,6 @@ import com.fasterxml.jackson.annotation.JsonValue; import 
com.google.common.base.Function; import com.google.common.base.Optional; -import com.google.common.base.Preconditions; -import com.google.common.base.Predicate; -import com.google.common.collect.Collections2; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; @@ -54,7 +50,6 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; -import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.EntryExistsException; @@ -62,9 +57,11 @@ import io.druid.server.http.security.StateResourceFilter; import io.druid.server.security.Access; import io.druid.server.security.Action; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; import io.druid.server.security.ResourceType; import io.druid.tasklogs.TaskLogStreamer; import io.druid.timeline.DataSegment; @@ -87,7 +84,6 @@ import javax.ws.rs.core.Response; import java.io.IOException; import java.util.Collection; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -105,7 +101,7 @@ public class OverlordResource private final TaskLogStreamer taskLogStreamer; private final JacksonConfigManager configManager; private final AuditManager auditManager; - private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; private AtomicReference workerConfigRef = null; @@ -116,7 +112,7 @@ public OverlordResource( TaskLogStreamer taskLogStreamer, JacksonConfigManager 
configManager, AuditManager auditManager, - AuthConfig authConfig + AuthorizerMapper authorizerMapper ) throws Exception { this.taskMaster = taskMaster; @@ -124,7 +120,7 @@ public OverlordResource( this.taskLogStreamer = taskLogStreamer; this.configManager = configManager; this.auditManager = auditManager; - this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; } @POST @@ -136,21 +132,20 @@ public Response taskPost( @Context final HttpServletRequest req ) { - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String dataSource = task.getDataSource(); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - Access authResult = authorizationInfo.isAuthorized( - new Resource(dataSource, ResourceType.DATASOURCE), - Action.WRITE - ); - if (!authResult.isAllowed()) { - return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build(); - } + final String dataSource = task.getDataSource(); + final ResourceAction resourceAction = new ResourceAction( + new Resource(dataSource, ResourceType.DATASOURCE), + Action.WRITE + ); + + Access authResult = AuthorizationUtils.authorizeResourceAction( + req, + resourceAction, + authorizerMapper + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } return asLeaderWith( @@ -371,36 +366,25 @@ public Collection apply(TaskRunner taskRunner) // off to the runner yet: final List allActiveTasks = taskStorageQueryAdapter.getActiveTasks(); final List activeTasks; - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final Map, Access> resourceAccessMap = new HashMap<>(); - final AuthorizationInfo authorizationInfo = - 
(AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - activeTasks = ImmutableList.copyOf( - Iterables.filter( - allActiveTasks, - new Predicate() - { - @Override - public boolean apply(Task input) - { - Resource resource = new Resource(input.getDataSource(), ResourceType.DATASOURCE); - Action action = Action.READ; - Pair key = new Pair<>(resource, action); - if (resourceAccessMap.containsKey(key)) { - return resourceAccessMap.get(key).isAllowed(); - } else { - Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs); - resourceAccessMap.put(key, access); - return access.isAllowed(); - } - } - } - ) - ); - } else { - activeTasks = allActiveTasks; - } + Function raGenerator = new Function() + { + @Override + public ResourceAction apply(Task input) + { + return new ResourceAction( + new Resource(input.getDataSource(), ResourceType.DATASOURCE), + Action.READ + ); + } + }; + + activeTasks = AuthorizationUtils.filterAuthorizedResources( + req, + allActiveTasks, + raGenerator, + authorizerMapper + ); + final Set runnersKnownTasks = Sets.newHashSet( Iterables.transform( taskRunner.getKnownTasks(), @@ -452,13 +436,7 @@ public Response getPendingTasks(@Context final HttpServletRequest req) @Override public Collection apply(TaskRunner taskRunner) { - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - return securedTaskRunnerWorkItem(taskRunner.getPendingTasks(), req); - } else { - return taskRunner.getPendingTasks(); - } - + return securedTaskRunnerWorkItem(taskRunner.getPendingTasks(), req); } } ); @@ -475,12 +453,7 @@ public Response getRunningTasks(@Context final HttpServletRequest req) @Override public Collection apply(TaskRunner taskRunner) { - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - return securedTaskRunnerWorkItem(taskRunner.getRunningTasks(), req); - } else { - return taskRunner.getRunningTasks(); - 
} + return securedTaskRunnerWorkItem(taskRunner.getRunningTasks(), req); } } ); @@ -492,44 +465,34 @@ public Collection apply(TaskRunner taskRunner) public Response getCompleteTasks(@Context final HttpServletRequest req) { final List recentlyFinishedTasks; - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final Map, Access> resourceAccessMap = new HashMap<>(); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - recentlyFinishedTasks = ImmutableList.copyOf( - Iterables.filter( - taskStorageQueryAdapter.getRecentlyFinishedTaskStatuses(), - new Predicate() - { - @Override - public boolean apply(TaskStatus input) - { - final String taskId = input.getId(); - final Optional optionalTask = taskStorageQueryAdapter.getTask(taskId); - if (!optionalTask.isPresent()) { - throw new WebApplicationException( - Response.serverError().entity( - StringUtils.format("No task information found for task with id: [%s]", taskId) - ).build() - ); - } - Resource resource = new Resource(optionalTask.get().getDataSource(), ResourceType.DATASOURCE); - Action action = Action.READ; - Pair key = new Pair<>(resource, action); - if (resourceAccessMap.containsKey(key)) { - return resourceAccessMap.get(key).isAllowed(); - } else { - Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs); - resourceAccessMap.put(key, access); - return access.isAllowed(); - } - } - } - ) - ); - } else { - recentlyFinishedTasks = taskStorageQueryAdapter.getRecentlyFinishedTaskStatuses(); - } + Function raGenerator = new Function() + { + @Override + public ResourceAction apply(TaskStatus input) + { + final String taskId = input.getId(); + final Optional optionalTask = taskStorageQueryAdapter.getTask(taskId); + if (!optionalTask.isPresent()) { + throw new WebApplicationException( + Response.serverError().entity( + StringUtils.format("No task information found for task with id: 
[%s]", taskId) + ).build() + ); + } + + return new ResourceAction( + new Resource(optionalTask.get().getDataSource(), ResourceType.DATASOURCE), + Action.READ + ); + } + }; + + recentlyFinishedTasks = AuthorizationUtils.filterAuthorizedResources( + req, + taskStorageQueryAdapter.getRecentlyFinishedTaskStatuses(), + raGenerator, + authorizerMapper + ); final List completeTasks = Lists.transform( recentlyFinishedTasks, @@ -685,37 +648,33 @@ private Collection securedTaskRunnerWorkItem( HttpServletRequest req ) { - final Map, Access> resourceAccessMap = new HashMap<>(); - final AuthorizationInfo authorizationInfo = - (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - return Collections2.filter( - collectionToFilter, - new Predicate() - { - @Override - public boolean apply(TaskRunnerWorkItem input) - { - final String taskId = input.getTaskId(); - final Optional optionalTask = taskStorageQueryAdapter.getTask(taskId); - if (!optionalTask.isPresent()) { - throw new WebApplicationException( - Response.serverError().entity( - StringUtils.format("No task information found for task with id: [%s]", taskId) - ).build() - ); - } - Resource resource = new Resource(optionalTask.get().getDataSource(), ResourceType.DATASOURCE); - Action action = Action.READ; - Pair key = new Pair<>(resource, action); - if (resourceAccessMap.containsKey(key)) { - return resourceAccessMap.get(key).isAllowed(); - } else { - Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs); - resourceAccessMap.put(key, access); - return access.isAllowed(); - } - } + Function raGenerator = new Function() + { + @Override + public ResourceAction apply(TaskRunnerWorkItem input) + { + final String taskId = input.getTaskId(); + final Optional optionalTask = taskStorageQueryAdapter.getTask(taskId); + if (!optionalTask.isPresent()) { + throw new WebApplicationException( + Response.serverError().entity( + StringUtils.format("No task information found for task with id: [%s]", taskId) + ).build() + 
); } + + return new ResourceAction( + new Resource(optionalTask.get().getDataSource(), ResourceType.DATASOURCE), + Action.READ + ); + } + }; + + return AuthorizationUtils.filterAuthorizedResources( + req, + collectionToFilter, + raGenerator, + authorizerMapper ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java index 6bab756ef33f..0af8490b3b15 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java @@ -19,6 +19,7 @@ package io.druid.indexing.overlord.http.security; +import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; @@ -31,10 +32,11 @@ import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.AbstractResourceFilter; import io.druid.server.security.Access; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; -import io.druid.server.security.Resource; -import io.druid.server.security.ResourceType; +import io.druid.server.security.Action; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; +import io.druid.server.security.ResourceAction; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.PathSegment; @@ -46,68 +48,63 @@ public class SupervisorResourceFilter extends AbstractResourceFilter private final SupervisorManager supervisorManager; @Inject - public SupervisorResourceFilter(AuthConfig authConfig, SupervisorManager supervisorManager) + public SupervisorResourceFilter( + AuthorizerMapper authorizerMapper, + SupervisorManager 
supervisorManager + ) { - super(authConfig); + super(authorizerMapper); this.supervisorManager = supervisorManager; } @Override public ContainerRequest filter(ContainerRequest request) { - if (getAuthConfig().isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String supervisorId = Preconditions.checkNotNull( - request.getPathSegments() - .get( - Iterables.indexOf( - request.getPathSegments(), - new Predicate() + final String supervisorId = Preconditions.checkNotNull( + request.getPathSegments() + .get( + Iterables.indexOf( + request.getPathSegments(), + new Predicate() + { + @Override + public boolean apply(PathSegment input) { - @Override - public boolean apply(PathSegment input) - { - return input.getPath().equals("supervisor"); - } + return input.getPath().equals("supervisor"); } - ) + 1 - ).getPath() + } + ) + 1 + ).getPath() + ); + + Optional supervisorSpecOptional = supervisorManager.getSupervisorSpec(supervisorId); + if (!supervisorSpecOptional.isPresent()) { + throw new WebApplicationException( + Response.status(Response.Status.BAD_REQUEST) + .entity(StringUtils.format("Cannot find any supervisor with id: [%s]", supervisorId)) + .build() ); + } - Optional supervisorSpecOptional = supervisorManager.getSupervisorSpec(supervisorId); - if (!supervisorSpecOptional.isPresent()) { - throw new WebApplicationException( - Response.status(Response.Status.BAD_REQUEST) - .entity(StringUtils.format("Cannot find any supervisor with id: [%s]", supervisorId)) - .build() - ); - } - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) getReq().getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); + final SupervisorSpec spec = supervisorSpecOptional.get(); + Preconditions.checkArgument( + spec.getDataSources() != null && spec.getDataSources().size() > 0, + "No dataSources found to perform 
authorization checks" + ); - final SupervisorSpec spec = supervisorSpecOptional.get(); - Preconditions.checkArgument( - spec.getDataSources() != null && spec.getDataSources().size() > 0, - "No dataSources found to perform authorization checks" - ); + Function resourceActionFunction = getAction(request) == Action.READ ? + AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR : + AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR; - for (String dataSource : spec.getDataSources()) { - Access authResult = authorizationInfo.isAuthorized( - new Resource(dataSource, ResourceType.DATASOURCE), - getAction(request) - ); - if (!authResult.isAllowed()) { - throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN) - .entity( - StringUtils.format("Access-Check-Result: %s", authResult.toString()) - ) - .build()); - } - } + Access authResult = AuthorizationUtils.authorizeAllResourceActions( + getReq(), + Iterables.transform(spec.getDataSources(), resourceActionFunction), + getAuthorizerMapper() + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } return request; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java index 310de74a782b..037720547a2f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java @@ -31,9 +31,11 @@ import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.AbstractResourceFilter; import io.druid.server.security.Access; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import 
io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; import io.druid.server.security.ResourceType; import javax.ws.rs.WebApplicationException; @@ -52,60 +54,58 @@ public class TaskResourceFilter extends AbstractResourceFilter private final TaskStorageQueryAdapter taskStorageQueryAdapter; @Inject - public TaskResourceFilter(TaskStorageQueryAdapter taskStorageQueryAdapter, AuthConfig authConfig) + public TaskResourceFilter( + TaskStorageQueryAdapter taskStorageQueryAdapter, + AuthorizerMapper authorizerMapper + ) { - super(authConfig); + super(authorizerMapper); this.taskStorageQueryAdapter = taskStorageQueryAdapter; } @Override public ContainerRequest filter(ContainerRequest request) { - if (getAuthConfig().isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String taskId = Preconditions.checkNotNull( - request.getPathSegments() - .get( - Iterables.indexOf( - request.getPathSegments(), - new Predicate() + final String taskId = Preconditions.checkNotNull( + request.getPathSegments() + .get( + Iterables.indexOf( + request.getPathSegments(), + new Predicate() + { + @Override + public boolean apply(PathSegment input) { - @Override - public boolean apply(PathSegment input) - { - return input.getPath().equals("task"); - } + return input.getPath().equals("task"); } - ) + 1 - ).getPath() + } + ) + 1 + ).getPath() + ); + + Optional taskOptional = taskStorageQueryAdapter.getTask(taskId); + if (!taskOptional.isPresent()) { + throw new WebApplicationException( + Response.status(Response.Status.BAD_REQUEST) + .entity(StringUtils.format("Cannot find any task with id: [%s]", taskId)) + .build() ); + } + final String dataSourceName = Preconditions.checkNotNull(taskOptional.get().getDataSource()); - Optional taskOptional = taskStorageQueryAdapter.getTask(taskId); - if (!taskOptional.isPresent()) { - throw new WebApplicationException( - Response.status(Response.Status.BAD_REQUEST) - 
.entity(StringUtils.format("Cannot find any task with id: [%s]", taskId)) - .build() - ); - } - final String dataSourceName = Preconditions.checkNotNull(taskOptional.get().getDataSource()); + final ResourceAction resourceAction = new ResourceAction( + new Resource(dataSourceName, ResourceType.DATASOURCE), + getAction(request) + ); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) getReq().getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - final Access authResult = authorizationInfo.isAuthorized( - new Resource(dataSourceName, ResourceType.DATASOURCE), - getAction(request) - ); - if (!authResult.isAllowed()) { - throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN) - .entity( - StringUtils.format("Access-Check-Result: %s", authResult.toString()) - ) - .build()); - } + final Access authResult = AuthorizationUtils.authorizeResourceAction( + getReq(), + resourceAction, + getAuthorizerMapper() + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } return request; diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java index 365003573a6c..ec3aad39bf06 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java @@ -25,6 +25,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.inject.Inject; @@ -33,11 +34,10 @@ import 
io.druid.indexing.overlord.http.security.SupervisorResourceFilter; import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; -import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; -import io.druid.server.security.Resource; -import io.druid.server.security.ResourceType; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; @@ -62,12 +62,18 @@ public class SupervisorResource { private final TaskMaster taskMaster; private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; @Inject - public SupervisorResource(TaskMaster taskMaster, AuthConfig authConfig) + public SupervisorResource( + TaskMaster taskMaster, + AuthConfig authConfig, + AuthorizerMapper authorizerMapper + ) { this.taskMaster = taskMaster; this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; } @POST @@ -81,18 +87,21 @@ public Response specPost(final SupervisorSpec spec, @Context final HttpServletRe @Override public Response apply(SupervisorManager manager) { - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - Access authResult = checkSupervisorAccess(authorizationInfo, spec); - if (!authResult.isAllowed()) { - return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build(); - } + Preconditions.checkArgument( + spec.getDataSources() != null && spec.getDataSources().size() > 0, + "No dataSources found to perform authorization checks" + ); 
+ + Access authResult = AuthorizationUtils.authorizeAllResourceActions( + req, + Iterables.transform(spec.getDataSources(), AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR), + authorizerMapper + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } + manager.createOrUpdateAndStartSupervisor(spec); return Response.ok(ImmutableMap.of("id", spec.getId())).build(); } @@ -100,25 +109,6 @@ public Response apply(SupervisorManager manager) ); } - private Access checkSupervisorAccess(final AuthorizationInfo authorizationInfo, final SupervisorSpec spec) - { - Preconditions.checkArgument( - spec.getDataSources() != null && spec.getDataSources().size() > 0, - "No dataSources found to perform authorization checks" - ); - Access result = new Access(true); - for (String dataSource : spec.getDataSources()) { - result = authorizationInfo.isAuthorized( - new Resource(dataSource, ResourceType.DATASOURCE), - Action.WRITE - ); - if (!result.isAllowed()) { - return result; - } - } - return result; - } - @GET @Produces(MediaType.APPLICATION_JSON) public Response specGetAll(@Context final HttpServletRequest req) @@ -130,32 +120,34 @@ public Response specGetAll(@Context final HttpServletRequest req) public Response apply(final SupervisorManager manager) { final Set supervisorIds; - if (authConfig.isEnabled()) { - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - supervisorIds = Sets.newHashSet( - Iterables.filter( - manager.getSupervisorIds(), - new Predicate() - { - @Override - public boolean apply(String id) - { - return manager.getSupervisorSpec(id).isPresent() && - checkSupervisorAccess( - authorizationInfo, - manager.getSupervisorSpec(id).get() - ).isAllowed(); - } - } - ) + supervisorIds = Sets.newHashSet(); + for (String supervisorId : 
manager.getSupervisorIds()) { + Optional supervisorSpecOptional = manager.getSupervisorSpec(supervisorId); + if (supervisorSpecOptional.isPresent()) { + Access accessResult = AuthorizationUtils.authorizeAllResourceActions( + req, + Iterables.transform( + supervisorSpecOptional.get().getDataSources(), + AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR + ), + authorizerMapper + ); + + if (accessResult.isAllowed()) { + supervisorIds.add(supervisorId); + } + } + } + + // If there were no supervisorIds, go ahead and authorize the request. + if (manager.getSupervisorIds().size() == 0) { + AuthorizationUtils.authorizeAllResourceActions( + req, + Lists.newArrayList(), + authorizerMapper ); - } else { - supervisorIds = manager.getSupervisorIds(); } + return Response.ok(supervisorIds).build(); } } @@ -248,30 +240,29 @@ public Response specGetAllHistory(@Context final HttpServletRequest req) public Response apply(final SupervisorManager manager) { final Map> supervisorHistory; - if (authConfig.isEnabled()) { - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - supervisorHistory = Maps.filterKeys( - manager.getSupervisorHistory(), - new Predicate() + supervisorHistory = Maps.filterKeys( + manager.getSupervisorHistory(), + new Predicate() + { + @Override + public boolean apply(String id) { - @Override - public boolean apply(String id) - { - return manager.getSupervisorSpec(id).isPresent() && - checkSupervisorAccess( - authorizationInfo, - manager.getSupervisorSpec(id).get() - ).isAllowed(); + Optional supervisorSpecOptional = manager.getSupervisorSpec(id); + if (!supervisorSpecOptional.isPresent()) { + return false; } + Access accessResult = AuthorizationUtils.authorizeAllResourceActions( + req, + Iterables.transform( + supervisorSpecOptional.get().getDataSources(), + 
AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR + ), + authorizerMapper + ); + return accessResult.isAllowed(); } - ); - } else { - supervisorHistory = manager.getSupervisorHistory(); - } + } + ); return Response.ok(supervisorHistory).build(); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java b/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java index 4b1070233709..013e91b479f9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java @@ -35,6 +35,8 @@ import io.druid.segment.column.ColumnConfig; import io.druid.segment.realtime.firehose.ChatHandlerProvider; import io.druid.segment.realtime.firehose.NoopChatHandlerProvider; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthorizerMapper; import java.util.List; import java.util.concurrent.TimeUnit; @@ -76,6 +78,8 @@ public int columnCacheSizeBytes() .addValue(IndexIO.class, indexIO) .addValue(ObjectMapper.class, jsonMapper) .addValue(ChatHandlerProvider.class, new NoopChatHandlerProvider()) + .addValue(AuthConfig.class, new AuthConfig()) + .addValue(AuthorizerMapper.class, null) ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java index 08b341668dd0..0de16715d2c9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordResourceTest.java @@ -39,13 +39,18 @@ import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authorizer; +import 
io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; @@ -60,6 +65,9 @@ public class OverlordResourceTest private HttpServletRequest req; private TaskRunner taskRunner; + @Rule + public ExpectedException expectedException = ExpectedException.none(); + @Before public void setUp() throws Exception { @@ -72,34 +80,50 @@ public void setUp() throws Exception Optional.of(taskRunner) ).anyTimes(); + AuthorizerMapper authMapper = new AuthorizerMapper(null) { + @Override + public Authorizer getAuthorizer(String name) + { + return new Authorizer() + { + @Override + public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action) + { + if (resource.getName().equals("allow")) { + return new Access(true); + } else { + return new Access(false); + } + } + + }; + } + }; + overlordResource = new OverlordResource( taskMaster, tsqa, null, null, null, - new AuthConfig(true) + authMapper ); } public void expectAuthorizationTokenCheck() { - EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN)).andReturn( - new AuthorizationInfo() - { - @Override - public Access isAuthorized( - Resource resource, Action action - ) - { - if (resource.getName().equals("allow")) { - return new Access(true); - } else { - return new Access(false); - } - } - } - ); + AuthenticationResult authenticationResult = new AuthenticationResult("druid", "druid"); + + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .anyTimes(); + + 
req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false); + EasyMock.expectLastCall().anyTimes(); + + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); } @Test @@ -230,12 +254,13 @@ public void testSecuredGetRunningTasks() @Test public void testSecuredTaskPost() { + expectedException.expect(ForbiddenException.class); + expectedException.expectMessage("Allowed:false, Message:"); expectAuthorizationTokenCheck(); EasyMock.replay(taskRunner, taskMaster, tsqa, req); Task task = NoopTask.create(); - Response response = overlordResource.taskPost(task, req); - Assert.assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus()); + overlordResource.taskPost(task, req); } @After diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java index 94b2fb1f2830..fd4f13455d55 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java @@ -60,6 +60,8 @@ import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthenticationResult; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.RetryOneTime; @@ -126,7 +128,13 @@ private void tearDownServerAndCurator() @Before public void setUp() throws Exception { - req = EasyMock.createStrictMock(HttpServletRequest.class); + req = EasyMock.createMock(HttpServletRequest.class); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new 
AuthenticationResult("druid", "druid") + ).anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); supervisorManager = EasyMock.createMock(SupervisorManager.class); taskLockbox = EasyMock.createStrictMock(TaskLockbox.class); taskLockbox.syncFromStorage(); @@ -145,7 +153,7 @@ public void setUp() throws Exception taskActionClientFactory = EasyMock.createStrictMock(TaskActionClientFactory.class); EasyMock.expect(taskActionClientFactory.create(EasyMock.anyObject())) .andReturn(null).anyTimes(); - EasyMock.replay(taskLockbox, taskActionClientFactory); + EasyMock.replay(taskLockbox, taskActionClientFactory, req); taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null)); runTaskCountDownLatches = new CountDownLatch[2]; @@ -203,6 +211,7 @@ public void testOverlordRun() throws Exception Thread.sleep(10); } Assert.assertEquals(taskMaster.getCurrentLeader(), druidNode.getHostAndPort()); + // Test Overlord resource stuff overlordResource = new OverlordResource( taskMaster, @@ -210,7 +219,7 @@ public void testOverlordRun() throws Exception null, null, null, - new AuthConfig() + AuthTestUtils.TEST_AUTHORIZER_MAPPER ); Response response = overlordResource.getLeader(); Assert.assertEquals(druidNode.getHostAndPort(), response.getEntity()); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/SecurityResourceFilterTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java similarity index 86% rename from indexing-service/src/test/java/io/druid/indexing/overlord/http/security/SecurityResourceFilterTest.java rename to indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java index 3eb38cf1f9ab..8ac21215f2e3 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/SecurityResourceFilterTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/http/security/OverlordSecurityResourceFilterTest.java @@ -35,6 +35,8 @@ import io.druid.indexing.worker.http.WorkerResource; import io.druid.server.http.security.AbstractResourceFilter; import io.druid.server.http.security.ResourceFilterTestHelper; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.ForbiddenException; import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; @@ -43,13 +45,11 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; import java.util.Collection; import java.util.List; @RunWith(Parameterized.class) -public class SecurityResourceFilterTest extends ResourceFilterTestHelper +public class OverlordSecurityResourceFilterTest extends ResourceFilterTestHelper { @Parameterized.Parameters(name = "{index}: requestPath={0}, requestMethod={1}, resourceFilter={2}") @@ -57,9 +57,19 @@ public static Collection data() { return ImmutableList.copyOf( Iterables.concat( - getRequestPaths(OverlordResource.class, ImmutableList.>of(TaskStorageQueryAdapter.class)), - getRequestPaths(WorkerResource.class), - getRequestPaths(SupervisorResource.class, ImmutableList.>of(SupervisorManager.class)) + getRequestPaths(OverlordResource.class, ImmutableList.>of( + TaskStorageQueryAdapter.class, + AuthorizerMapper.class + ) + ), + getRequestPaths(WorkerResource.class, ImmutableList.>of( + AuthorizerMapper.class + )), + getRequestPaths(SupervisorResource.class, ImmutableList.>of( + SupervisorManager.class, + AuthorizerMapper.class + ) + ) ) ); } @@ -75,7 +85,7 @@ public static Collection data() private TaskStorageQueryAdapter tsqa; private SupervisorManager supervisorManager; - public SecurityResourceFilterTest( + public OverlordSecurityResourceFilterTest( String requestPath, String requestMethod, ResourceFilter resourceFilter, @@ -139,23 +149,22 @@ public void 
testResourcesFilteringAccess() // As request object is a strict mock the ordering of expected calls matters // therefore adding the expectation below again as getEntity is called before getMethod EasyMock.expect(request.getMethod()).andReturn(requestMethod).anyTimes(); - EasyMock.replay(req, request, authorizationInfo); + EasyMock.replay(req, request, authorizerMapper); resourceFilter.getRequestFilter().filter(request); Assert.assertTrue(((AbstractResourceFilter) resourceFilter.getRequestFilter()).isApplicable(requestPath)); } - @Test(expected = WebApplicationException.class) + @Test(expected = ForbiddenException.class) public void testDatasourcesResourcesFilteringNoAccess() { setUpMockExpectations(requestPath, false, requestMethod); EasyMock.expect(request.getEntity(Task.class)).andReturn(noopTask).anyTimes(); - EasyMock.replay(req, request, authorizationInfo); + EasyMock.replay(req, request, authorizerMapper); Assert.assertTrue(((AbstractResourceFilter) resourceFilter.getRequestFilter()).isApplicable(requestPath)); try { resourceFilter.getRequestFilter().filter(request); } - catch (WebApplicationException e) { - Assert.assertEquals(Response.Status.FORBIDDEN.getStatusCode(), e.getResponse().getStatus()); + catch (ForbiddenException e) { throw e; } } @@ -165,14 +174,14 @@ public void testDatasourcesResourcesFilteringBadPath() { final String badRequestPath = requestPath.replaceAll("\\w+", "droid"); EasyMock.expect(request.getPath()).andReturn(badRequestPath).anyTimes(); - EasyMock.replay(req, request, authorizationInfo); + EasyMock.replay(req, request, authorizerMapper); Assert.assertFalse(((AbstractResourceFilter) resourceFilter.getRequestFilter()).isApplicable(badRequestPath)); } @After public void tearDown() { - EasyMock.verify(req, request, authorizationInfo); + EasyMock.verify(req, request, authorizerMapper); if (tsqa != null) { EasyMock.verify(tsqa); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java 
b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java index f4b96b3416cb..74476d9ff517 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java @@ -23,11 +23,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.TaskMaster; import io.druid.java.util.common.DateTimes; import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthenticationResult; import org.easymock.Capture; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; @@ -61,16 +64,29 @@ public class SupervisorResourceTest extends EasyMockSupport @Before public void setUp() throws Exception { - supervisorResource = new SupervisorResource(taskMaster, new AuthConfig()); + supervisorResource = new SupervisorResource(taskMaster, new AuthConfig(), AuthTestUtils.TEST_AUTHORIZER_MAPPER); } @Test public void testSpecPost() throws Exception { - SupervisorSpec spec = new TestSupervisorSpec("my-id", null); + SupervisorSpec spec = new TestSupervisorSpec("my-id", null) { + + @Override + public List getDataSources() + { + return Lists.newArrayList("datasource1"); + } + }; EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)); EasyMock.expect(supervisorManager.createOrUpdateAndStartSupervisor(spec)).andReturn(true); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).atLeastOnce(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", 
"druid") + ).atLeastOnce(); + request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); replayAll(); Response response = supervisorResource.specPost(spec, request); @@ -93,9 +109,33 @@ public void testSpecPost() throws Exception public void testSpecGetAll() throws Exception { Set supervisorIds = ImmutableSet.of("id1", "id2"); + SupervisorSpec spec1 = new TestSupervisorSpec("id1", null) { + + @Override + public List getDataSources() + { + return Lists.newArrayList("datasource1"); + } + }; + SupervisorSpec spec2 = new TestSupervisorSpec("id2", null) { + + @Override + public List getDataSources() + { + return Lists.newArrayList("datasource2"); + } + }; EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)); - EasyMock.expect(supervisorManager.getSupervisorIds()).andReturn(supervisorIds); + EasyMock.expect(supervisorManager.getSupervisorIds()).andReturn(supervisorIds).atLeastOnce(); + EasyMock.expect(supervisorManager.getSupervisorSpec("id1")).andReturn(Optional.of(spec1)); + EasyMock.expect(supervisorManager.getSupervisorSpec("id2")).andReturn(Optional.of(spec2)); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).atLeastOnce(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); replayAll(); Response response = supervisorResource.specGetAll(request); @@ -221,6 +261,30 @@ public void testSpecGetAllHistory() throws Exception EasyMock.expect(taskMaster.getSupervisorManager()).andReturn(Optional.of(supervisorManager)).times(2); EasyMock.expect(supervisorManager.getSupervisorHistory()).andReturn(history); + SupervisorSpec spec1 = new TestSupervisorSpec("id1", null) { + + @Override + public List getDataSources() + { + return 
Lists.newArrayList("datasource1"); + } + }; + SupervisorSpec spec2 = new TestSupervisorSpec("id2", null) { + + @Override + public List getDataSources() + { + return Lists.newArrayList("datasource2"); + } + }; + EasyMock.expect(supervisorManager.getSupervisorSpec("id1")).andReturn(Optional.of(spec1)).atLeastOnce(); + EasyMock.expect(supervisorManager.getSupervisorSpec("id2")).andReturn(Optional.of(spec2)).atLeastOnce(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).atLeastOnce(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); replayAll(); Response response = supervisorResource.specGetAllHistory(request); diff --git a/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java b/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java index 14b48da5f7d5..3d210c5e8204 100644 --- a/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java +++ b/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java @@ -34,7 +34,7 @@ import io.druid.curator.CuratorConfig; import io.druid.guice.JsonConfigProvider; import io.druid.guice.ManageLifecycle; -import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Self; import io.druid.server.DruidNode; import io.druid.server.initialization.ServerConfig; @@ -67,7 +67,7 @@ public void configure(Binder binder) public HttpClient getHttpClient( IntegrationTestingConfig config, Lifecycle lifecycle, - @Client HttpClient delegate + @EscalatedClient HttpClient delegate ) throws Exception { diff --git a/processing/src/main/java/io/druid/query/QueryInterruptedException.java b/processing/src/main/java/io/druid/query/QueryInterruptedException.java 
index 29ea3f0ac63a..71478b6b30a2 100644 --- a/processing/src/main/java/io/druid/query/QueryInterruptedException.java +++ b/processing/src/main/java/io/druid/query/QueryInterruptedException.java @@ -46,6 +46,7 @@ public class QueryInterruptedException extends RuntimeException public static final String QUERY_TIMEOUT = "Query timeout"; public static final String QUERY_CANCELLED = "Query cancelled"; public static final String RESOURCE_LIMIT_EXCEEDED = "Resource limit exceeded"; + public static final String UNAUTHORIZED = "Unauthorized request."; public static final String UNKNOWN_EXCEPTION = "Unknown exception"; private final String errorCode; diff --git a/server/src/main/java/io/druid/client/BrokerServerView.java b/server/src/main/java/io/druid/client/BrokerServerView.java index 15af669be9cb..fe0e2d36567d 100644 --- a/server/src/main/java/io/druid/client/BrokerServerView.java +++ b/server/src/main/java/io/druid/client/BrokerServerView.java @@ -31,7 +31,7 @@ import io.druid.client.selector.ServerSelector; import io.druid.client.selector.TierSelectorStrategy; import io.druid.concurrent.Execs; -import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.Pair; import io.druid.java.util.common.logger.Logger; @@ -82,7 +82,7 @@ public BrokerServerView( QueryToolChestWarehouse warehouse, QueryWatcher queryWatcher, @Smile ObjectMapper smileMapper, - @Client HttpClient httpClient, + @EscalatedClient HttpClient httpClient, FilteredServerInventoryView baseView, TierSelectorStrategy tierSelectorStrategy, ServiceEmitter emitter, diff --git a/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java index b779ea9b2e49..54108b951547 100644 --- a/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java +++ 
b/server/src/main/java/io/druid/client/FilteredHttpServerInventoryViewProvider.java @@ -24,7 +24,7 @@ import com.google.common.base.Predicates; import com.metamx.http.client.HttpClient; import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; @@ -38,7 +38,7 @@ public class FilteredHttpServerInventoryViewProvider implements FilteredServerIn { @JacksonInject @NotNull - @Client + @EscalatedClient HttpClient httpClient = null; @JacksonInject diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java index 14d53d097040..d48b664a46f5 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java @@ -43,7 +43,7 @@ import io.druid.discovery.DiscoveryDruidNode; import io.druid.discovery.DruidNodeDiscovery; import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; @@ -118,7 +118,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer @Inject public HttpServerInventoryView( final @Smile ObjectMapper smileMapper, - final @Global HttpClient httpClient, + final @EscalatedGlobal HttpClient httpClient, final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, final Predicate> defaultFilter, final HttpServerInventoryViewConfig config diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java b/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java index 22ba09fff04c..bfcb3b262384 100644 --- 
a/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryViewProvider.java @@ -24,7 +24,7 @@ import com.google.common.base.Predicates; import com.metamx.http.client.HttpClient; import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.EscalatedClient; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.Pair; import io.druid.server.coordination.DruidServerMetadata; @@ -38,7 +38,7 @@ public class HttpServerInventoryViewProvider implements ServerInventoryViewProvi { @JacksonInject @NotNull - @Client + @EscalatedClient HttpClient httpClient = null; @JacksonInject diff --git a/server/src/main/java/io/druid/guice/CoordinatorDiscoveryModule.java b/server/src/main/java/io/druid/guice/CoordinatorDiscoveryModule.java index 3aa9a91c0333..6af0f2d02250 100644 --- a/server/src/main/java/io/druid/guice/CoordinatorDiscoveryModule.java +++ b/server/src/main/java/io/druid/guice/CoordinatorDiscoveryModule.java @@ -29,7 +29,7 @@ import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.discovery.DruidLeaderClient; import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; /** */ @@ -56,7 +56,7 @@ public ServerDiscoverySelector getServiceProvider( @Coordinator @ManageLifecycle public DruidLeaderClient getLeaderHttpClient( - @Global HttpClient httpClient, + @EscalatedGlobal HttpClient httpClient, DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, @Coordinator ServerDiscoverySelector serverDiscoverySelector ) diff --git a/server/src/main/java/io/druid/guice/IndexingServiceDiscoveryModule.java b/server/src/main/java/io/druid/guice/IndexingServiceDiscoveryModule.java index 32737351cafd..05da8f301bb4 100644 --- a/server/src/main/java/io/druid/guice/IndexingServiceDiscoveryModule.java +++ 
b/server/src/main/java/io/druid/guice/IndexingServiceDiscoveryModule.java @@ -29,7 +29,7 @@ import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.discovery.DruidLeaderClient; import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; /** */ @@ -56,7 +56,7 @@ public ServerDiscoverySelector getServiceProvider( @IndexingService @ManageLifecycle public DruidLeaderClient getLeaderHttpClient( - @Global HttpClient httpClient, + @EscalatedGlobal HttpClient httpClient, DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, @IndexingService ServerDiscoverySelector serverDiscoverySelector ) diff --git a/server/src/main/java/io/druid/guice/annotations/EscalatedClient.java b/server/src/main/java/io/druid/guice/annotations/EscalatedClient.java new file mode 100644 index 000000000000..161ad51fc495 --- /dev/null +++ b/server/src/main/java/io/druid/guice/annotations/EscalatedClient.java @@ -0,0 +1,36 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.guice.annotations; + +import com.google.inject.BindingAnnotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + */ +@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) +@BindingAnnotation +public @interface EscalatedClient +{ +} diff --git a/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java b/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java index d1b771db403d..8d1e209ca727 100644 --- a/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java +++ b/server/src/main/java/io/druid/guice/http/AbstractHttpClientProvider.java @@ -90,7 +90,6 @@ public Provider> getConfigProvider() { return injector.getProvider(configKey); } - public Provider getLifecycleProvider() { return injector.getProvider(Lifecycle.class); diff --git a/server/src/main/java/io/druid/guice/http/HttpClientModule.java b/server/src/main/java/io/druid/guice/http/HttpClientModule.java index c8dc0915ac8f..848bb9b3eb8f 100644 --- a/server/src/main/java/io/druid/guice/http/HttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/HttpClientModule.java @@ -19,17 +19,24 @@ package io.druid.guice.http; +import com.google.common.collect.Sets; import com.google.inject.Binder; +import com.google.inject.Inject; import com.google.inject.Module; import com.metamx.http.client.HttpClient; import com.metamx.http.client.HttpClientConfig; import com.metamx.http.client.HttpClientInit; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; +import io.druid.guice.annotations.EscalatedClient; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.guice.annotations.Global; import io.druid.java.util.common.StringUtils; +import io.druid.server.security.Authenticator; +import 
io.druid.server.security.AuthenticatorMapper; import java.lang.annotation.Annotation; +import java.util.Set; /** */ @@ -40,9 +47,19 @@ public static HttpClientModule global() return new HttpClientModule("druid.global.http", Global.class); } + public static HttpClientModule escalatedGlobal() + { + return new HttpClientModule("druid.global.http", EscalatedGlobal.class); + } + + private static Set> ESCALATING_ANNOTATIONS = Sets.newHashSet( + EscalatedGlobal.class, EscalatedClient.class + ); + private final String propertyPrefix; private Annotation annotation = null; private Class annotationClazz = null; + private boolean isEscalated = false; public HttpClientModule(String propertyPrefix) { @@ -53,6 +70,8 @@ public HttpClientModule(String propertyPrefix, Class annot { this.propertyPrefix = propertyPrefix; this.annotationClazz = annotation; + + isEscalated = ESCALATING_ANNOTATIONS.contains(annotationClazz); } public HttpClientModule(String propertyPrefix, Annotation annotation) @@ -68,36 +87,48 @@ public void configure(Binder binder) JsonConfigProvider.bind(binder, propertyPrefix, DruidHttpClientConfig.class, annotation); binder.bind(HttpClient.class) .annotatedWith(annotation) - .toProvider(new HttpClientProvider(annotation)) + .toProvider(new HttpClientProvider(annotation, isEscalated)) .in(LazySingleton.class); } else if (annotationClazz != null) { JsonConfigProvider.bind(binder, propertyPrefix, DruidHttpClientConfig.class, annotationClazz); binder.bind(HttpClient.class) .annotatedWith(annotationClazz) - .toProvider(new HttpClientProvider(annotationClazz)) + .toProvider(new HttpClientProvider(annotationClazz, isEscalated)) .in(LazySingleton.class); } else { JsonConfigProvider.bind(binder, propertyPrefix, DruidHttpClientConfig.class); binder.bind(HttpClient.class) - .toProvider(new HttpClientProvider()) + .toProvider(new HttpClientProvider(isEscalated)) .in(LazySingleton.class); } } public static class HttpClientProvider extends AbstractHttpClientProvider { - public 
HttpClientProvider() + private boolean isEscalated; + private Authenticator escalatingAuthenticator; + + public HttpClientProvider(boolean isEscalated) { + this.isEscalated = isEscalated; } - public HttpClientProvider(Annotation annotation) + public HttpClientProvider(Annotation annotation, boolean isEscalated) { super(annotation); + this.isEscalated = isEscalated; } - public HttpClientProvider(Class annotationClazz) + public HttpClientProvider(Class annotationClazz, boolean isEscalated) { super(annotationClazz); + this.isEscalated = isEscalated; + } + + @Inject + public void inject(AuthenticatorMapper authenticatorMapper) + { + this.escalatingAuthenticator = authenticatorMapper.getEscalatingAuthenticator(); } @Override @@ -118,7 +149,16 @@ public HttpClient get() builder.withSslContext(getSslContextBinding().getProvider().get()); } - return HttpClientInit.createClient(builder.build(), LifecycleUtils.asMmxLifecycle(getLifecycleProvider().get())); + HttpClient client = HttpClientInit.createClient( + builder.build(), + LifecycleUtils.asMmxLifecycle(getLifecycleProvider().get()) + ); + + if (isEscalated) { + return escalatingAuthenticator.createEscalatedClient(client); + } else { + return client; + } } } } diff --git a/server/src/main/java/io/druid/guice/security/AuthenticatorModule.java b/server/src/main/java/io/druid/guice/security/AuthenticatorModule.java new file mode 100644 index 000000000000..7c09284ef52f --- /dev/null +++ b/server/src/main/java/io/druid/guice/security/AuthenticatorModule.java @@ -0,0 +1,51 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.guice.security; + +import com.google.inject.Binder; +import com.google.inject.Key; +import com.google.inject.Module; +import com.google.inject.Provides; +import com.google.inject.multibindings.MapBinder; +import com.google.inject.name.Named; +import io.druid.guice.LazySingleton; +import io.druid.guice.PolyBind; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AllowAllAuthenticator; + +public class AuthenticatorModule implements Module +{ + @Override + public void configure(Binder binder) + { + final MapBinder authenticatorMapBinder = PolyBind.optionBinder( + binder, + Key.get(Authenticator.class) + ); + authenticatorMapBinder.addBinding("allowAll").to(AllowAllAuthenticator.class).in(LazySingleton.class); + } + + @Provides + @Named("allowAll") + public Authenticator getAuthenticator() + { + return new AllowAllAuthenticator(); + } +} diff --git a/server/src/main/java/io/druid/guice/security/AuthorizerModule.java b/server/src/main/java/io/druid/guice/security/AuthorizerModule.java new file mode 100644 index 000000000000..a39a3dc09462 --- /dev/null +++ b/server/src/main/java/io/druid/guice/security/AuthorizerModule.java @@ -0,0 +1,51 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.guice.security; + +import com.google.inject.Binder; +import com.google.inject.Key; +import com.google.inject.Module; +import com.google.inject.Provides; +import com.google.inject.multibindings.MapBinder; +import com.google.inject.name.Named; +import io.druid.guice.LazySingleton; +import io.druid.guice.PolyBind; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AllowAllAuthorizer; + +public class AuthorizerModule implements Module +{ + @Override + public void configure(Binder binder) + { + final MapBinder authorizerMapBinder = PolyBind.optionBinder( + binder, + Key.get(Authorizer.class) + ); + authorizerMapBinder.addBinding("allowAll").to(AllowAllAuthorizer.class).in(LazySingleton.class); + } + + @Provides + @Named("allowAll") + public Authorizer getAuthorizer() + { + return new AllowAllAuthorizer(); + } +} diff --git a/server/src/main/java/io/druid/initialization/Initialization.java b/server/src/main/java/io/druid/initialization/Initialization.java index 67e22c8e299f..a6ab2a0d155c 100644 --- a/server/src/main/java/io/druid/initialization/Initialization.java +++ b/server/src/main/java/io/druid/initialization/Initialization.java @@ -53,13 +53,19 @@ import io.druid.guice.StartupLoggingModule; import io.druid.guice.StorageNodeModule; import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.EscalatedClient; import 
io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.guice.http.HttpClientModule; +import io.druid.guice.security.AuthenticatorModule; +import io.druid.guice.security.AuthorizerModule; import io.druid.guice.security.DruidAuthModule; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.storage.derby.DerbyMetadataStorageDruidModule; +import io.druid.server.initialization.AuthenticatorHttpClientWrapperModule; +import io.druid.server.initialization.AuthenticatorMapperModule; +import io.druid.server.initialization.AuthorizerMapperModule; import io.druid.server.emitter.EmitterModule; import io.druid.server.initialization.jetty.JettyServerModule; import io.druid.server.metrics.MetricsModule; @@ -347,7 +353,9 @@ public static Injector makeInjectorWithModules(final Injector baseInjector, Iter new LifecycleModule(), EmitterModule.class, HttpClientModule.global(), + HttpClientModule.escalatedGlobal(), new HttpClientModule("druid.broker.http", Client.class), + new HttpClientModule("druid.broker.http", EscalatedClient.class), new CuratorModule(), new AnnouncerModule(), new AWSModule(), @@ -368,6 +376,11 @@ public static Injector makeInjectorWithModules(final Injector baseInjector, Iter new FirehoseModule(), new ParsersModule(), new JavaScriptModule(), + new AuthenticatorModule(), + new AuthenticatorMapperModule(), + new AuthenticatorHttpClientWrapperModule(), + new AuthorizerModule(), + new AuthorizerMapperModule(), new StartupLoggingModule() ); diff --git a/server/src/main/java/io/druid/query/lookup/LookupModule.java b/server/src/main/java/io/druid/query/lookup/LookupModule.java index 151c65fbbedc..c8fdeaf68c0d 100644 --- a/server/src/main/java/io/druid/query/lookup/LookupModule.java +++ b/server/src/main/java/io/druid/query/lookup/LookupModule.java @@ -33,6 +33,7 @@ import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Provides; +import 
com.sun.jersey.spi.container.ResourceFilters; import io.druid.common.utils.ServletResourceUtils; import io.druid.curator.announcement.Announcer; import io.druid.guice.ExpressionModule; @@ -50,6 +51,7 @@ import io.druid.query.expression.LookupExprMacro; import io.druid.server.DruidNode; import io.druid.server.http.HostAndPortWithScheme; +import io.druid.server.http.security.ConfigResourceFilter; import io.druid.server.initialization.ZkPathsConfig; import io.druid.server.initialization.jetty.JettyBindings; import io.druid.server.listener.announcer.ListenerResourceAnnouncer; @@ -114,6 +116,7 @@ public LookupNodeService getLookupNodeService(LookupListeningAnnouncerConfig loo } @Path(ListenerResource.BASE_PATH + "/" + LookupCoordinatorManager.LOOKUP_LISTEN_ANNOUNCE_KEY) +@ResourceFilters(ConfigResourceFilter.class) class LookupListeningResource extends ListenerResource { private static final Logger LOG = new Logger(LookupListeningResource.class); diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index 8b280dd4ec41..80cd4165f650 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -44,6 +44,13 @@ import io.druid.java.util.common.DateTimes; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; +import io.druid.server.security.Access; +import io.druid.server.security.Action; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; +import io.druid.server.security.ResourceType; import org.joda.time.DateTime; import javax.servlet.http.HttpServletRequest; @@ -83,6 +90,7 @@ public 
class EventReceiverFirehoseFactory implements FirehoseFactory rows) throws InterruptedException @Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE}) @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE}) public Response shutdown( - @QueryParam("shutoffTime") final String shutoffTime + @QueryParam("shutoffTime") final String shutoffTime, + @Context final HttpServletRequest req ) { + Access accessResult = AuthorizationUtils.authorizeResourceAction( + req, + new ResourceAction( + new Resource("STATE", ResourceType.STATE), + Action.WRITE + ), + authorizerMapper + ); + if (!accessResult.isAllowed()) { + return Response.status(403).build(); + } + try { DateTime shutoffAt = shutoffTime == null ? DateTimes.nowUtc() : DateTimes.of(shutoffTime); log.info("Setting Firehose shutoffTime to %s", shutoffTime); diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 849dfd82e686..39e16044ffb9 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -42,6 +42,9 @@ import io.druid.server.metrics.QueryCountStatsProvider; import io.druid.server.router.QueryHostFinder; import io.druid.server.router.Router; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.api.Request; import org.eclipse.jetty.client.api.Response; @@ -108,6 +111,7 @@ private static void handleException(HttpServletResponse response, ObjectMapper o private final ServiceEmitter emitter; private final RequestLogger requestLogger; private final GenericQueryMetricsFactory queryMetricsFactory; + private final Authenticator escalatingAuthenticator; private HttpClient broadcastClient; @@ -121,7 
+125,8 @@ public AsyncQueryForwardingServlet( @Router DruidHttpClientConfig httpClientConfig, ServiceEmitter emitter, RequestLogger requestLogger, - GenericQueryMetricsFactory queryMetricsFactory + GenericQueryMetricsFactory queryMetricsFactory, + AuthenticatorMapper authenticatorMapper ) { this.warehouse = warehouse; @@ -133,6 +138,7 @@ public AsyncQueryForwardingServlet( this.emitter = emitter; this.requestLogger = requestLogger; this.queryMetricsFactory = queryMetricsFactory; + this.escalatingAuthenticator = authenticatorMapper.getEscalatingAuthenticator(); } @Override @@ -142,7 +148,7 @@ public void init() throws ServletException // Note that httpClientProvider is setup to return same HttpClient instance on each get() so // it is same http client as that is used by parent ProxyServlet. - broadcastClient = httpClientProvider.get(); + broadcastClient = newHttpClient(); try { broadcastClient.start(); } @@ -266,6 +272,12 @@ protected void sendProxyRequest( } } + // Since we can't see the request object on the remote side, we can't check whether the remote side actually + // performed an authorization check here, so always set this to true for the proxy servlet. + // If the remote node failed to perform an authorization check, PreResponseAuthorizationCheckFilter + // will log that on the remote node. 
+ clientRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + super.sendProxyRequest( clientRequest, proxyResponse, @@ -317,7 +329,7 @@ protected static URI makeURI(String scheme, String host, String requestURI, Stri @Override protected HttpClient newHttpClient() { - return httpClientProvider.get(); + return escalatingAuthenticator.createEscalatedJettyClient(httpClientProvider.get()); } @Override diff --git a/server/src/main/java/io/druid/server/BrokerQueryResource.java b/server/src/main/java/io/druid/server/BrokerQueryResource.java index 402e59f4356c..a55ef4bd7511 100644 --- a/server/src/main/java/io/druid/server/BrokerQueryResource.java +++ b/server/src/main/java/io/druid/server/BrokerQueryResource.java @@ -27,9 +27,11 @@ import io.druid.client.TimelineServerView; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.query.GenericQueryMetricsFactory; import io.druid.query.Query; import io.druid.server.http.security.StateResourceFilter; import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthorizerMapper; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; @@ -58,6 +60,8 @@ public BrokerQueryResource( @Smile ObjectMapper smileMapper, QueryManager queryManager, AuthConfig authConfig, + AuthorizerMapper authorizerMapper, + GenericQueryMetricsFactory queryMetricsFactory, TimelineServerView brokerServerView ) { @@ -66,7 +70,9 @@ public BrokerQueryResource( jsonMapper, smileMapper, queryManager, - authConfig + authConfig, + authorizerMapper, + queryMetricsFactory ); this.brokerServerView = brokerServerView; } diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index 32e678ea86ad..ff46da0be0de 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -19,8 +19,6 @@ package io.druid.server; 
-import com.google.common.base.Predicate; -import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; @@ -37,18 +35,14 @@ import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; import io.druid.java.util.common.JodaUtils; -import io.druid.java.util.common.Pair; import io.druid.java.util.common.logger.Logger; import io.druid.query.LocatedSegmentDescriptor; import io.druid.query.TableDataSource; import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.server.http.security.DatasourceResourceFilter; -import io.druid.server.security.Access; -import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; -import io.druid.server.security.Resource; -import io.druid.server.security.ResourceType; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.TimelineLookup; import io.druid.timeline.TimelineObjectHolder; @@ -68,7 +62,6 @@ import java.io.IOException; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -88,13 +81,15 @@ public class ClientInfoResource private TimelineServerView timelineServerView; private SegmentMetadataQueryConfig segmentMetadataQueryConfig; private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; @Inject public ClientInfoResource( FilteredServerInventoryView serverInventoryView, TimelineServerView timelineServerView, SegmentMetadataQueryConfig segmentMetadataQueryConfig, - AuthConfig authConfig + AuthConfig authConfig, + AuthorizerMapper authorizerMapper ) { this.serverInventoryView = serverInventoryView; @@ -102,6 +97,7 @@ public ClientInfoResource( 
this.segmentMetadataQueryConfig = (segmentMetadataQueryConfig == null) ? new SegmentMetadataQueryConfig() : segmentMetadataQueryConfig; this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; } private Map> getSegmentsForDatasources() @@ -123,33 +119,12 @@ private Map> getSegmentsForDatasources() @Produces(MediaType.APPLICATION_JSON) public Iterable getDataSources(@Context final HttpServletRequest request) { - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final Map, Access> resourceAccessMap = new HashMap<>(); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) request.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - return Collections2.filter( - getSegmentsForDatasources().keySet(), - new Predicate() - { - @Override - public boolean apply(String input) - { - Resource resource = new Resource(input, ResourceType.DATASOURCE); - Action action = Action.READ; - Pair key = new Pair<>(resource, action); - if (resourceAccessMap.containsKey(key)) { - return resourceAccessMap.get(key).isAllowed(); - } else { - Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs); - resourceAccessMap.put(key, access); - return access.isAllowed(); - } - } - } - ); - } else { - return getSegmentsForDatasources().keySet(); - } + return AuthorizationUtils.filterAuthorizedResources( + request, + getSegmentsForDatasources().keySet(), + AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR, + authorizerMapper + ); } @GET diff --git a/server/src/main/java/io/druid/server/QueryLifecycle.java b/server/src/main/java/io/druid/server/QueryLifecycle.java index 4d6be4d39f40..8785397bb3c8 100644 --- a/server/src/main/java/io/druid/server/QueryLifecycle.java +++ b/server/src/main/java/io/druid/server/QueryLifecycle.java @@ -20,6 +20,7 @@ package io.druid.server; import com.google.common.base.Strings; +import com.google.common.collect.Iterables; import com.metamx.emitter.service.ServiceEmitter; import 
io.druid.client.DirectDruidClient; import io.druid.java.util.common.DateTimes; @@ -40,13 +41,12 @@ import io.druid.server.initialization.ServerConfig; import io.druid.server.log.RequestLogger; import io.druid.server.security.Access; -import io.druid.server.security.Action; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; -import io.druid.server.security.Resource; -import io.druid.server.security.ResourceType; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; import javax.annotation.Nullable; +import javax.servlet.http.HttpServletRequest; import java.util.LinkedHashMap; import java.util.Map; import java.util.UUID; @@ -58,7 +58,7 @@ * *
 * <ol>
 * <li>Initialization ({@link #initialize(Query)})</li>
- * <li>Authorization ({@link #authorize(AuthorizationInfo)}</li>
+ * <li>Authorization ({@link #authorize(HttpServletRequest)}</li>
 * <li>Execution ({@link #execute()}</li>
 * <li>Logging ({@link #emitLogsAndMetrics(Throwable, String, long)}</li>
 * </ol>
@@ -75,7 +75,7 @@ public class QueryLifecycle private final ServiceEmitter emitter; private final RequestLogger requestLogger; private final ServerConfig serverConfig; - private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; private final long startMs; private final long startNs; @@ -90,7 +90,7 @@ public QueryLifecycle( final ServiceEmitter emitter, final RequestLogger requestLogger, final ServerConfig serverConfig, - final AuthConfig authConfig, + final AuthorizerMapper authorizerMapper, final long startMs, final long startNs ) @@ -101,7 +101,7 @@ public QueryLifecycle( this.emitter = emitter; this.requestLogger = requestLogger; this.serverConfig = serverConfig; - this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; this.startMs = startMs; this.startNs = startNs; } @@ -111,17 +111,16 @@ public QueryLifecycle( * is unauthorized, an IllegalStateException will be thrown. Logs and metrics are emitted when the Sequence is * either fully iterated or throws an exception. * - * @param query the query - * @param authorizationInfo authorization info from the request; or null if none is present. This must be non-null - * if security is enabled, or the request will be considered unauthorized. 
- * @param remoteAddress remote address, for logging; or null if unknown + * @param query the query + * @param authenticationResult authentication result indicating identity of the requester + * @param remoteAddress remote address, for logging; or null if unknown * * @return results */ @SuppressWarnings("unchecked") public Sequence runSimple( final Query query, - @Nullable final AuthorizationInfo authorizationInfo, + final AuthenticationResult authenticationResult, @Nullable final String remoteAddress ) { @@ -130,7 +129,7 @@ public Sequence runSimple( final Sequence results; try { - final Access access = authorize(authorizationInfo); + final Access access = authorize(authenticationResult); if (!access.isAllowed()) { throw new ISE("Unauthorized"); } @@ -183,41 +182,66 @@ public void initialize(final Query baseQuery) /** * Authorize the query. Will return an Access object denoting whether the query is authorized or not. * - * @param authorizationInfo authorization info from the request; or null if none is present. This must be non-null - * if security is enabled, or the request will be considered unauthorized. 
+ * @param authenticationResult authentication result indicating the identity of the requester * * @return authorization result * - * @throws IllegalStateException if security is enabled and authorizationInfo is null - */ - public Access authorize(@Nullable final AuthorizationInfo authorizationInfo) + * */ + public Access authorize( + final AuthenticationResult authenticationResult + ) { transition(State.INITIALIZED, State.AUTHORIZING); + Access authResult = AuthorizationUtils.authorizeAllResourceActions( + authenticationResult, + Iterables.transform( + queryPlus.getQuery().getDataSource().getNames(), + AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR + ), + authorizerMapper + ); - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - if (authorizationInfo != null) { - for (String dataSource : queryPlus.getQuery().getDataSource().getNames()) { - Access authResult = authorizationInfo.isAuthorized( - new Resource(dataSource, ResourceType.DATASOURCE), - Action.READ - ); - if (!authResult.isAllowed()) { - // Not authorized; go straight to Jail, do not pass Go. - transition(State.AUTHORIZING, State.DONE); - return authResult; - } - } + if (!authResult.isAllowed()) { + // Not authorized; go straight to Jail, do not pass Go. + transition(State.AUTHORIZING, State.DONE); + } else { + transition(State.AUTHORIZING, State.AUTHORIZED); + } + return authResult; + } - transition(State.AUTHORIZING, State.AUTHORIZED); - return new Access(true); - } else { - throw new ISE("WTF?! Security is enabled but no authorization info found in the request"); - } + /** + * Authorize the query. Will return an Access object denoting whether the query is authorized or not. + * + * @param token authentication token from the request + * @param namespace namespace of the authentication token + * @param req HTTP request object of the request. If provided, the auth-related fields in the HTTP request + * will be automatically set. 
+ * + * @return authorization result + * + * */ + public Access authorize( + @Nullable HttpServletRequest req + ) + { + transition(State.INITIALIZED, State.AUTHORIZING); + Access authResult = AuthorizationUtils.authorizeAllResourceActions( + req, + Iterables.transform( + queryPlus.getQuery().getDataSource().getNames(), + AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR + ), + authorizerMapper + ); + + if (!authResult.isAllowed()) { + // Not authorized; go straight to Jail, do not pass Go. + transition(State.AUTHORIZING, State.DONE); } else { transition(State.AUTHORIZING, State.AUTHORIZED); - return new Access(true); } + return authResult; } /** diff --git a/server/src/main/java/io/druid/server/QueryLifecycleFactory.java b/server/src/main/java/io/druid/server/QueryLifecycleFactory.java index a6fbc3cca719..745d23bb5c9e 100644 --- a/server/src/main/java/io/druid/server/QueryLifecycleFactory.java +++ b/server/src/main/java/io/druid/server/QueryLifecycleFactory.java @@ -28,6 +28,7 @@ import io.druid.server.initialization.ServerConfig; import io.druid.server.log.RequestLogger; import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthorizerMapper; @LazySingleton public class QueryLifecycleFactory @@ -38,7 +39,7 @@ public class QueryLifecycleFactory private final ServiceEmitter emitter; private final RequestLogger requestLogger; private final ServerConfig serverConfig; - private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; @Inject public QueryLifecycleFactory( @@ -48,7 +49,8 @@ public QueryLifecycleFactory( final ServiceEmitter emitter, final RequestLogger requestLogger, final ServerConfig serverConfig, - final AuthConfig authConfig + final AuthConfig authConfig, + final AuthorizerMapper authorizerMapper ) { this.warehouse = warehouse; @@ -57,7 +59,7 @@ public QueryLifecycleFactory( this.emitter = emitter; this.requestLogger = requestLogger; this.serverConfig = serverConfig; - this.authConfig = authConfig; + 
this.authorizerMapper = authorizerMapper; } public QueryLifecycle factorize() @@ -69,7 +71,7 @@ public QueryLifecycle factorize() emitter, requestLogger, serverConfig, - authConfig, + authorizerMapper, System.currentTimeMillis(), System.nanoTime() ); diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index 84caf56ee70f..bdf458dcb65d 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -24,9 +24,10 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.datatype.joda.ser.DateTimeSerializer; import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; -import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; import com.google.common.io.CountingOutputStream; import com.google.inject.Inject; import com.metamx.emitter.EmittingLogger; @@ -38,16 +39,16 @@ import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Yielder; import io.druid.java.util.common.guava.Yielders; +import io.druid.query.GenericQueryMetricsFactory; import io.druid.query.Query; import io.druid.query.QueryContexts; import io.druid.query.QueryInterruptedException; import io.druid.server.metrics.QueryCountStatsProvider; import io.druid.server.security.Access; -import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; -import io.druid.server.security.Resource; -import io.druid.server.security.ResourceType; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import org.joda.time.DateTime; import javax.servlet.http.HttpServletRequest; @@ -92,6 +93,9 @@ 
public class QueryResource implements QueryCountStatsProvider protected final ObjectMapper serializeDateTimeAsLongSmileMapper; protected final QueryManager queryManager; protected final AuthConfig authConfig; + protected final AuthorizerMapper authorizerMapper; + + private final GenericQueryMetricsFactory queryMetricsFactory; private final AtomicLong successfulQueryCount = new AtomicLong(); private final AtomicLong failedQueryCount = new AtomicLong(); private final AtomicLong interruptedQueryCount = new AtomicLong(); @@ -102,7 +106,9 @@ public QueryResource( @Json ObjectMapper jsonMapper, @Smile ObjectMapper smileMapper, QueryManager queryManager, - AuthConfig authConfig + AuthConfig authConfig, + AuthorizerMapper authorizerMapper, + GenericQueryMetricsFactory queryMetricsFactory ) { this.queryLifecycleFactory = queryLifecycleFactory; @@ -112,6 +118,8 @@ public QueryResource( this.serializeDateTimeAsLongSmileMapper = serializeDataTimeAsLong(smileMapper); this.queryManager = queryManager; this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; + this.queryMetricsFactory = queryMetricsFactory; } @DELETE @@ -122,28 +130,22 @@ public Response getServer(@PathParam("id") String queryId, @Context final HttpSe if (log.isDebugEnabled()) { log.debug("Received cancel request for query [%s]", queryId); } - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - Set datasources = queryManager.getQueryDatasources(queryId); - if (datasources == null) { - log.warn("QueryId [%s] not registered with QueryManager, cannot cancel", queryId); - } else { - for (String dataSource : datasources) { - Access authResult = authorizationInfo.isAuthorized( - new 
Resource(dataSource, ResourceType.DATASOURCE), - Action.WRITE - ); - if (!authResult.isAllowed()) { - return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build(); - } - } - } + Set datasources = queryManager.getQueryDatasources(queryId); + if (datasources == null) { + log.warn("QueryId [%s] not registered with QueryManager, cannot cancel", queryId); + datasources = Sets.newTreeSet(); + } + + Access authResult = AuthorizationUtils.authorizeAllResourceActions( + req, + Iterables.transform(datasources, AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR), + authorizerMapper + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } + queryManager.cancelQuery(queryId); return Response.status(Response.Status.ACCEPTED).build(); } @@ -154,7 +156,7 @@ public Response getServer(@PathParam("id") String queryId, @Context final HttpSe public Response doPost( final InputStream in, @QueryParam("pretty") final String pretty, - @Context final HttpServletRequest req // used to get request content-type, remote address and AuthorizationInfo + @Context final HttpServletRequest req // used to get request content-type, remote address and auth-related headers ) throws IOException { final QueryLifecycle queryLifecycle = queryLifecycleFactory.factorize(); @@ -174,9 +176,9 @@ public Response doPost( log.debug("Got query [%s]", query); } - final Access authResult = queryLifecycle.authorize((AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN)); + final Access authResult = queryLifecycle.authorize(req); if (!authResult.isAllowed()) { - return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build(); + throw new ForbiddenException(authResult.toString()); } final QueryLifecycle.QueryResponse queryResponse = queryLifecycle.execute(); @@ -270,6 +272,11 @@ public void write(OutputStream outputStream) throws IOException, WebApplicationE queryLifecycle.emitLogsAndMetrics(e, 
req.getRemoteAddr(), -1); return context.gotError(e); } + catch (ForbiddenException e) { + // don't do anything for an authorization failure, ForbiddenExceptionMapper will catch this later and + // send an error response if this is thrown. + throw e; + } catch (Exception e) { failedQueryCount.incrementAndGet(); queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1); diff --git a/server/src/main/java/io/druid/server/http/DatasourcesResource.java b/server/src/main/java/io/druid/server/http/DatasourcesResource.java index 27f6b5a239df..e473ca2b0082 100644 --- a/server/src/main/java/io/druid/server/http/DatasourcesResource.java +++ b/server/src/main/java/io/druid/server/http/DatasourcesResource.java @@ -44,7 +44,8 @@ import io.druid.query.TableDataSource; import io.druid.server.http.security.DatasourceResourceFilter; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.AuthorizerMapper; import io.druid.timeline.DataSegment; import io.druid.timeline.TimelineLookup; import io.druid.timeline.TimelineObjectHolder; @@ -82,19 +83,22 @@ public class DatasourcesResource private final MetadataSegmentManager databaseSegmentManager; private final IndexingServiceClient indexingServiceClient; private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; @Inject public DatasourcesResource( CoordinatorServerView serverInventoryView, MetadataSegmentManager databaseSegmentManager, @Nullable IndexingServiceClient indexingServiceClient, - AuthConfig authConfig + AuthConfig authConfig, + AuthorizerMapper authorizerMapper ) { this.serverInventoryView = serverInventoryView; this.databaseSegmentManager = databaseSegmentManager; this.indexingServiceClient = indexingServiceClient; this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; } @GET @@ -106,12 +110,11 @@ public Response getQueryableDataSources( ) { 
Response.ResponseBuilder builder = Response.ok(); - final Set datasources = authConfig.isEnabled() ? - InventoryViewUtils.getSecuredDataSources( - serverInventoryView, - (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN) - ) : - InventoryViewUtils.getDataSources(serverInventoryView); + final Set datasources = InventoryViewUtils.getSecuredDataSources( + serverInventoryView, + authorizerMapper, + (AuthenticationResult) req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) + ); if (full != null) { return builder.entity(datasources).build(); @@ -246,6 +249,7 @@ public Response deleteDataSource( @DELETE @Path("/{dataSourceName}/intervals/{interval}") + @ResourceFilters(DatasourceResourceFilter.class) @Produces(MediaType.APPLICATION_JSON) public Response deleteDataSourceSpecificInterval( @PathParam("dataSourceName") final String dataSourceName, diff --git a/server/src/main/java/io/druid/server/http/IntervalsResource.java b/server/src/main/java/io/druid/server/http/IntervalsResource.java index 66478a116e51..4a90237e6716 100644 --- a/server/src/main/java/io/druid/server/http/IntervalsResource.java +++ b/server/src/main/java/io/druid/server/http/IntervalsResource.java @@ -27,7 +27,8 @@ import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.guava.Comparators; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.AuthorizerMapper; import io.druid.timeline.DataSegment; import org.joda.time.Interval; @@ -51,15 +52,18 @@ public class IntervalsResource { private final InventoryView serverInventoryView; private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; @Inject public IntervalsResource( InventoryView serverInventoryView, - AuthConfig authConfig + AuthConfig authConfig, + AuthorizerMapper authorizerMapper ) { this.serverInventoryView = serverInventoryView; this.authConfig = 
authConfig; + this.authorizerMapper = authorizerMapper; } @GET @@ -67,12 +71,11 @@ public IntervalsResource( public Response getIntervals(@Context final HttpServletRequest req) { final Comparator comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd()); - final Set datasources = authConfig.isEnabled() ? - InventoryViewUtils.getSecuredDataSources( - serverInventoryView, - (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN) - ) : - InventoryViewUtils.getDataSources(serverInventoryView); + final Set datasources = InventoryViewUtils.getSecuredDataSources( + serverInventoryView, + authorizerMapper, + (AuthenticationResult) req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) + ); final Map>> retVal = Maps.newTreeMap(comparator); for (DruidDataSource dataSource : datasources) { @@ -100,12 +103,11 @@ public Response getSpecificIntervals( ) { final Interval theInterval = Intervals.of(interval.replace("_", "/")); - final Set datasources = authConfig.isEnabled() ? 
- InventoryViewUtils.getSecuredDataSources( - serverInventoryView, - (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN) - ) : - InventoryViewUtils.getDataSources(serverInventoryView); + final Set datasources = InventoryViewUtils.getSecuredDataSources( + serverInventoryView, + authorizerMapper, + (AuthenticationResult) req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) + ); final Comparator comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd()); diff --git a/server/src/main/java/io/druid/server/http/InventoryViewUtils.java b/server/src/main/java/io/druid/server/http/InventoryViewUtils.java index 521b175f5916..e6db453e6c31 100644 --- a/server/src/main/java/io/druid/server/http/InventoryViewUtils.java +++ b/server/src/main/java/io/druid/server/http/InventoryViewUtils.java @@ -33,7 +33,9 @@ import io.druid.java.util.common.Pair; import io.druid.server.security.Access; import io.druid.server.security.Action; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.Resource; import io.druid.server.security.ResourceType; @@ -80,11 +82,17 @@ public Iterable apply(DruidServer input) public static Set getSecuredDataSources( InventoryView inventoryView, - final AuthorizationInfo authorizationInfo + final AuthorizerMapper authorizerMapper, + final AuthenticationResult authenticationResult ) { - if (authorizationInfo == null) { - throw new ISE("Invalid to call a secured method with null AuthorizationInfo!!"); + if (authorizerMapper == null) { + throw new ISE("No authorization mapper found"); + } + + final Authorizer authorizer = authorizerMapper.getAuthorizer(authenticationResult.getAuthorizerName()); + if (authorizer == null) { + throw new ISE("Invalid to call a secured method with null Authorizer!!"); } else { final Map, Access> resourceAccessMap = new 
HashMap<>(); return ImmutableSet.copyOf( @@ -101,7 +109,7 @@ public boolean apply(DruidDataSource input) if (resourceAccessMap.containsKey(key)) { return resourceAccessMap.get(key).isAllowed(); } else { - Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs); + Access access = authorizer.authorize(authenticationResult, key.lhs, key.rhs); resourceAccessMap.put(key, access); return access.isAllowed(); } diff --git a/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java b/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java index 10da6deacde8..3f5ea82c9e91 100644 --- a/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java +++ b/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java @@ -28,6 +28,7 @@ import com.google.common.base.Strings; import com.google.common.net.HostAndPort; import com.google.inject.Inject; +import com.sun.jersey.spi.container.ResourceFilters; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.common.utils.ServletResourceUtils; @@ -37,6 +38,7 @@ import io.druid.java.util.common.RE; import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.LookupsState; +import io.druid.server.http.security.ConfigResourceFilter; import io.druid.server.lookup.cache.LookupCoordinatorManager; import io.druid.server.lookup.cache.LookupExtractorFactoryMapContainer; @@ -67,6 +69,7 @@ * Contains information about lookups exposed through the coordinator */ @Path("/druid/coordinator/v1/lookups") +@ResourceFilters(ConfigResourceFilter.class) public class LookupCoordinatorResource { private static final Logger LOG = new Logger(LookupCoordinatorResource.class); diff --git a/server/src/main/java/io/druid/server/http/MetadataResource.java b/server/src/main/java/io/druid/server/http/MetadataResource.java index fb60b2a0bf68..fc1fadd11ac1 100644 --- a/server/src/main/java/io/druid/server/http/MetadataResource.java +++ 
b/server/src/main/java/io/druid/server/http/MetadataResource.java @@ -22,22 +22,17 @@ import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.client.DruidDataSource; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; -import io.druid.java.util.common.Pair; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.http.security.DatasourceResourceFilter; -import io.druid.server.security.Access; -import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; -import io.druid.server.security.Resource; -import io.druid.server.security.ResourceType; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; import io.druid.timeline.DataSegment; import org.joda.time.Interval; @@ -52,9 +47,7 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.IOException; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Set; /** @@ -65,17 +58,20 @@ public class MetadataResource private final MetadataSegmentManager metadataSegmentManager; private final IndexerMetadataStorageCoordinator metadataStorageCoordinator; private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; @Inject public MetadataResource( MetadataSegmentManager metadataSegmentManager, IndexerMetadataStorageCoordinator metadataStorageCoordinator, - AuthConfig authConfig + AuthConfig authConfig, + AuthorizerMapper authorizerMapper ) { this.metadataSegmentManager = metadataSegmentManager; this.metadataStorageCoordinator = metadataStorageCoordinator; this.authConfig = authConfig; 
+ this.authorizerMapper = authorizerMapper; } @GET @@ -106,37 +102,14 @@ public String apply(DruidDataSource input) ); } - final Set dataSourceNamesPostAuth; + List datasourceNamesList = AuthorizationUtils.filterAuthorizedResources( + req, + dataSourceNamesPreAuth, + AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR, + authorizerMapper + ); - if (authConfig.isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final Map, Access> resourceAccessMap = new HashMap<>(); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - dataSourceNamesPostAuth = ImmutableSet.copyOf( - Sets.filter( - dataSourceNamesPreAuth, - new Predicate() - { - @Override - public boolean apply(String input) - { - Resource resource = new Resource(input, ResourceType.DATASOURCE); - Action action = Action.READ; - Pair key = new Pair<>(resource, action); - if (resourceAccessMap.containsKey(key)) { - return resourceAccessMap.get(key).isAllowed(); - } else { - Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs); - resourceAccessMap.put(key, access); - return access.isAllowed(); - } - } - } - ) - ); - } else { - dataSourceNamesPostAuth = dataSourceNamesPreAuth; - } + final Set dataSourceNamesPostAuth = Sets.newTreeSet(datasourceNamesList); // Cannot do both includeDisabled and full, let includeDisabled take priority // Always use dataSourceNamesPostAuth to determine the set of returned dataSources diff --git a/server/src/main/java/io/druid/server/http/security/AbstractResourceFilter.java b/server/src/main/java/io/druid/server/http/security/AbstractResourceFilter.java index a8a1fb4cb4e1..406917fd5795 100644 --- a/server/src/main/java/io/druid/server/http/security/AbstractResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/AbstractResourceFilter.java @@ -25,7 +25,8 @@ import com.sun.jersey.spi.container.ContainerResponseFilter; import 
com.sun.jersey.spi.container.ResourceFilter; import io.druid.server.security.Action; -import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthorizerMapper; + import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Context; @@ -35,12 +36,14 @@ public abstract class AbstractResourceFilter implements ResourceFilter, Containe @Context private HttpServletRequest req; - private final AuthConfig authConfig; + private AuthorizerMapper authorizerMapper; @Inject - public AbstractResourceFilter(AuthConfig authConfig) + public AbstractResourceFilter( + AuthorizerMapper authorizerMapper + ) { - this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; } @Override @@ -60,9 +63,14 @@ public HttpServletRequest getReq() return req; } - public AuthConfig getAuthConfig() + public AuthorizerMapper getAuthorizerMapper() + { + return authorizerMapper; + } + + public void setAuthorizerMapper(AuthorizerMapper authorizerMapper) { - return authConfig; + this.authorizerMapper = authorizerMapper; } public AbstractResourceFilter setReq(HttpServletRequest req) diff --git a/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java b/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java index 926df47f07b7..da50a0b0f5be 100644 --- a/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java @@ -19,19 +19,16 @@ package io.druid.server.http.security; -import com.google.common.base.Preconditions; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; -import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import 
io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; import io.druid.server.security.ResourceType; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; - /** * Use this ResourceFilter at end points where Druid Cluster configuration is read or written * Here are some example paths where this filter is used - @@ -44,35 +41,31 @@ public class ConfigResourceFilter extends AbstractResourceFilter { @Inject - public ConfigResourceFilter(AuthConfig authConfig) + public ConfigResourceFilter( + AuthorizerMapper authorizerMapper + ) { - super(authConfig); + super(authorizerMapper); } @Override public ContainerRequest filter(ContainerRequest request) { - if (getAuthConfig().isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String resourceName = "CONFIG"; - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) getReq().getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); + final ResourceAction resourceAction = new ResourceAction( + new Resource("CONFIG", ResourceType.CONFIG), + getAction(request) + ); + + final Access authResult = AuthorizationUtils.authorizeResourceAction( + getReq(), + resourceAction, + getAuthorizerMapper() + ); - final Access authResult = authorizationInfo.isAuthorized( - new Resource(resourceName, ResourceType.CONFIG), - getAction(request) - ); - if (!authResult.isAllowed()) { - throw new WebApplicationException( - Response.status(Response.Status.FORBIDDEN) - .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) - .build() - ); - } + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } + return request; } diff --git a/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java 
b/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java index 2f2b2152b427..9a489e6058a4 100644 --- a/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java @@ -25,16 +25,15 @@ import com.google.common.collect.Iterables; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; -import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; import io.druid.server.security.ResourceType; -import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.PathSegment; -import javax.ws.rs.core.Response; import java.util.List; /** @@ -47,52 +46,54 @@ public class DatasourceResourceFilter extends AbstractResourceFilter { @Inject - public DatasourceResourceFilter(AuthConfig authConfig) + public DatasourceResourceFilter( + AuthorizerMapper authorizerMapper + ) { - super(authConfig); + super(authorizerMapper); } @Override public ContainerRequest filter(ContainerRequest request) { - if (getAuthConfig().isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String dataSourceName = request.getPathSegments() - .get( - Iterables.indexOf( - request.getPathSegments(), - new Predicate() - { - @Override - public boolean apply(PathSegment input) - { - return input.getPath().equals("datasources"); - } - } - ) + 1 - ).getPath(); - Preconditions.checkNotNull(dataSourceName); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) getReq().getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - 
Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - final Access authResult = authorizationInfo.isAuthorized( - new Resource(dataSourceName, ResourceType.DATASOURCE), - getAction(request) - ); - if (!authResult.isAllowed()) { - throw new WebApplicationException( - Response.status(Response.Status.FORBIDDEN) - .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) - .build() - ); - } + final ResourceAction resourceAction = new ResourceAction( + new Resource(getRequestDatasourceName(request), ResourceType.DATASOURCE), + getAction(request) + ); + + final Access authResult = AuthorizationUtils.authorizeResourceAction( + getReq(), + resourceAction, + getAuthorizerMapper() + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } return request; } + private String getRequestDatasourceName(ContainerRequest request) + { + final String dataSourceName = request.getPathSegments() + .get( + Iterables.indexOf( + request.getPathSegments(), + new Predicate() + { + @Override + public boolean apply(PathSegment input) + { + return input.getPath().equals("datasources"); + } + } + ) + 1 + ).getPath(); + Preconditions.checkNotNull(dataSourceName); + return dataSourceName; + } + @Override public boolean isApplicable(String requestPath) { diff --git a/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java b/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java index 15ca22188112..7a6223c3063e 100644 --- a/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java @@ -25,16 +25,15 @@ import com.google.common.collect.Iterables; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; -import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; -import 
io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; import io.druid.server.security.ResourceType; -import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.PathSegment; -import javax.ws.rs.core.Response; import java.util.List; @@ -47,47 +46,45 @@ public class RulesResourceFilter extends AbstractResourceFilter { @Inject - public RulesResourceFilter(AuthConfig authConfig) + public RulesResourceFilter( + AuthorizerMapper authorizerMapper + ) { - super(authConfig); + super(authorizerMapper); } @Override public ContainerRequest filter(ContainerRequest request) { - if (getAuthConfig().isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String dataSourceName = request.getPathSegments() - .get( - Iterables.indexOf( - request.getPathSegments(), - new Predicate() + final String dataSourceName = request.getPathSegments() + .get( + Iterables.indexOf( + request.getPathSegments(), + new Predicate() + { + @Override + public boolean apply(PathSegment input) { - @Override - public boolean apply(PathSegment input) - { - return input.getPath().equals("rules"); - } + return input.getPath().equals("rules"); } - ) + 1 - ).getPath(); - Preconditions.checkNotNull(dataSourceName); - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) getReq().getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); - final Access authResult = authorizationInfo.isAuthorized( - new Resource(dataSourceName, ResourceType.DATASOURCE), - getAction(request) - ); - if (!authResult.isAllowed()) { - throw new WebApplicationException( - 
Response.status(Response.Status.FORBIDDEN) - .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) - .build() - ); - } + } + ) + 1 + ).getPath(); + Preconditions.checkNotNull(dataSourceName); + + final ResourceAction resourceAction = new ResourceAction( + new Resource(dataSourceName, ResourceType.DATASOURCE), + getAction(request) + ); + + final Access authResult = AuthorizationUtils.authorizeResourceAction( + getReq(), + resourceAction, + getAuthorizerMapper() + ); + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } return request; diff --git a/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java b/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java index cec7ecd21c2c..f3f8a16343a7 100644 --- a/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java @@ -19,19 +19,16 @@ package io.druid.server.http.security; -import com.google.common.base.Preconditions; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; -import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; -import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; +import io.druid.server.security.ResourceAction; import io.druid.server.security.ResourceType; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; - /** * Use this ResourceFilter at end points where Druid Cluster State is read or written * Here are some example paths where this filter is used - @@ -50,34 +47,29 @@ public class StateResourceFilter extends AbstractResourceFilter { @Inject - public 
StateResourceFilter(AuthConfig authConfig) + public StateResourceFilter( + AuthorizerMapper authorizerMapper + ) { - super(authConfig); + super(authorizerMapper); } @Override public ContainerRequest filter(ContainerRequest request) { - if (getAuthConfig().isEnabled()) { - // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424 - final String resourceName = "STATE"; - final AuthorizationInfo authorizationInfo = (AuthorizationInfo) getReq().getAttribute(AuthConfig.DRUID_AUTH_TOKEN); - Preconditions.checkNotNull( - authorizationInfo, - "Security is enabled but no authorization info found in the request" - ); + final ResourceAction resourceAction = new ResourceAction( + new Resource("STATE", ResourceType.STATE), + getAction(request) + ); + + final Access authResult = AuthorizationUtils.authorizeResourceAction( + getReq(), + resourceAction, + getAuthorizerMapper() + ); - final Access authResult = authorizationInfo.isAuthorized( - new Resource(resourceName, ResourceType.STATE), - getAction(request) - ); - if (!authResult.isAllowed()) { - throw new WebApplicationException( - Response.status(Response.Status.FORBIDDEN) - .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) - .build() - ); - } + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); } return request; diff --git a/server/src/main/java/io/druid/server/initialization/AuthenticatorHttpClientWrapperModule.java b/server/src/main/java/io/druid/server/initialization/AuthenticatorHttpClientWrapperModule.java new file mode 100644 index 000000000000..78f88fe8eb91 --- /dev/null +++ b/server/src/main/java/io/druid/server/initialization/AuthenticatorHttpClientWrapperModule.java @@ -0,0 +1,69 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.initialization; + +import com.fasterxml.jackson.databind.Module; +import com.google.inject.Binder; +import com.google.inject.Inject; +import com.google.inject.Provider; +import io.druid.guice.LazySingleton; +import io.druid.initialization.DruidModule; +import io.druid.java.util.common.logger.Logger; +import io.druid.server.security.AuthenticatorHttpClientWrapper; +import io.druid.server.security.AuthenticatorMapper; + +import java.util.Collections; +import java.util.List; + +public class AuthenticatorHttpClientWrapperModule implements DruidModule +{ + private static Logger log = new Logger(AuthenticatorHttpClientWrapperModule.class); + + @Override + public void configure(Binder binder) + { + binder.bind(AuthenticatorHttpClientWrapper.class) + .toProvider(new AuthenticatorHttpClientWrapperProvider()) + .in(LazySingleton.class); + } + + @Override + public List getJacksonModules() + { + return Collections.EMPTY_LIST; + } + + private static class AuthenticatorHttpClientWrapperProvider implements Provider + { + private AuthenticatorHttpClientWrapper wrapper; + + @Inject + public void inject(AuthenticatorMapper authenticatorMapper) + { + this.wrapper = new AuthenticatorHttpClientWrapper(authenticatorMapper.getEscalatingAuthenticator()); + } + + @Override + public AuthenticatorHttpClientWrapper get() + { + return wrapper; + } + } +} diff --git 
a/server/src/main/java/io/druid/server/initialization/AuthenticatorMapperModule.java b/server/src/main/java/io/druid/server/initialization/AuthenticatorMapperModule.java new file mode 100644 index 000000000000..56b7b0097cd1 --- /dev/null +++ b/server/src/main/java/io/druid/server/initialization/AuthenticatorMapperModule.java @@ -0,0 +1,124 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.initialization; + +import com.fasterxml.jackson.databind.Module; +import com.google.common.base.Supplier; +import com.google.common.collect.Maps; +import com.google.inject.Binder; +import com.google.inject.Inject; +import com.google.inject.Injector; +import com.google.inject.Provider; +import io.druid.guice.JsonConfigProvider; +import io.druid.guice.JsonConfigurator; +import io.druid.guice.LazySingleton; +import io.druid.guice.LifecycleModule; +import io.druid.initialization.DruidModule; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; +import io.druid.server.security.AllowAllAuthenticator; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +public class AuthenticatorMapperModule implements DruidModule +{ + private static final String AUTHENTICATOR_PROPERTIES_FORMAT_STRING = "druid.auth.authenticator.%s"; + private static Logger log = new Logger(AuthenticatorMapperModule.class); + + @Override + public void configure(Binder binder) + { + binder.bind(AuthenticatorMapper.class) + .toProvider(new AuthenticatorMapperProvider()) + .in(LazySingleton.class); + + LifecycleModule.register(binder, AuthenticatorMapper.class); + } + + @SuppressWarnings("unchecked") + @Override + public List getJacksonModules() + { + return Collections.EMPTY_LIST; + } + + private static class AuthenticatorMapperProvider implements Provider + { + private AuthConfig authConfig; + private Injector injector; + private Properties props; + private JsonConfigurator configurator; + + @Inject + public void inject(Injector injector, Properties props, JsonConfigurator configurator) + { + this.authConfig = injector.getInstance(AuthConfig.class); + 
this.injector = injector; + this.props = props; + this.configurator = configurator; + } + + @Override + public AuthenticatorMapper get() + { + // order of the authenticators matters + Map authenticatorMap = Maps.newLinkedHashMap(); + + List authenticators = authConfig.getAuthenticatorChain(); + + // Default configuration is to allow all requests. + if (authenticators == null) { + authenticatorMap.put("allowAll", new AllowAllAuthenticator()); + return new AuthenticatorMapper(authenticatorMap, "allowAll"); + } + + if (authenticators.isEmpty()) { + throw new IAE("Must have at least one Authenticator configured."); + } + + for (String authenticatorName : authenticators) { + final String authenticatorPropertyBase = StringUtils.format(AUTHENTICATOR_PROPERTIES_FORMAT_STRING, authenticatorName); + final JsonConfigProvider authenticatorProvider = new JsonConfigProvider<>( + authenticatorPropertyBase, + Authenticator.class + ); + + authenticatorProvider.inject(props, configurator); + + Supplier authenticatorSupplier = authenticatorProvider.get(); + if (authenticatorSupplier == null) { + throw new ISE("Could not create authenticator with name: %s", authenticatorName); + } + Authenticator authenticator = authenticatorSupplier.get(); + authenticatorMap.put(authenticatorName, authenticator); + } + + return new AuthenticatorMapper(authenticatorMap, authConfig.getEscalatedAuthenticator()); + } + } +} diff --git a/server/src/main/java/io/druid/server/initialization/AuthorizerMapperModule.java b/server/src/main/java/io/druid/server/initialization/AuthorizerMapperModule.java new file mode 100644 index 000000000000..54e5eda36a70 --- /dev/null +++ b/server/src/main/java/io/druid/server/initialization/AuthorizerMapperModule.java @@ -0,0 +1,127 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.initialization; + +import com.fasterxml.jackson.databind.Module; +import com.google.common.base.Supplier; +import com.google.common.collect.Maps; +import com.google.inject.Binder; +import com.google.inject.Inject; +import com.google.inject.Injector; +import com.google.inject.Provider; +import io.druid.guice.JsonConfigProvider; +import io.druid.guice.JsonConfigurator; +import io.druid.guice.LazySingleton; +import io.druid.guice.LifecycleModule; +import io.druid.initialization.DruidModule; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; +import io.druid.server.security.AllowAllAuthorizer; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AuthorizerMapper; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +public class AuthorizerMapperModule implements DruidModule +{ + private static final String AUTHORIZER_PROPERTIES_FORMAT_STRING = "druid.auth.authorizer.%s"; + private static Logger log = new Logger(AuthorizerMapperModule.class); + + @Override + public void configure(Binder binder) + { + binder.bind(AuthorizerMapper.class) + .toProvider(new AuthorizerMapperProvider()) + .in(LazySingleton.class); 
+ + LifecycleModule.register(binder, AuthorizerMapper.class); + } + + @SuppressWarnings("unchecked") + @Override + public List getJacksonModules() + { + return Collections.EMPTY_LIST; + } + + private static class AuthorizerMapperProvider implements Provider + { + private AuthConfig authConfig; + private Injector injector; + private Properties props; + private JsonConfigurator configurator; + + @Inject + public void inject(Injector injector, Properties props, JsonConfigurator configurator) + { + this.authConfig = injector.getInstance(AuthConfig.class); + this.injector = injector; + this.props = props; + this.configurator = configurator; + } + + @Override + public AuthorizerMapper get() + { + Map authorizerMap = Maps.newHashMap(); + List authorizers = authConfig.getAuthorizers(); + + // Default is allow all + if (authorizers == null) { + return new AuthorizerMapper(null) { + @Override + public Authorizer getAuthorizer(String name) + { + return new AllowAllAuthorizer(); + } + }; + } + + if (authorizers.isEmpty()) { + throw new IAE("Must have at least one Authorizer configured."); + } + + for (String authorizerName : authorizers) { + final String authorizerPropertyBase = StringUtils.format(AUTHORIZER_PROPERTIES_FORMAT_STRING, authorizerName); + final JsonConfigProvider authorizerProvider = new JsonConfigProvider<>( + authorizerPropertyBase, + Authorizer.class + ); + + authorizerProvider.inject(props, configurator); + + Supplier authorizerSupplier = authorizerProvider.get(); + if (authorizerSupplier == null) { + throw new ISE("Could not create authorizer with name: %s", authorizerName); + } + Authorizer authorizer = authorizerSupplier.get(); + authorizerMap.put(authorizerName, authorizer); + } + + return new AuthorizerMapper(authorizerMap); + } + } +} diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ForbiddenExceptionMapper.java b/server/src/main/java/io/druid/server/initialization/jetty/ForbiddenExceptionMapper.java new file mode 100644 index 
000000000000..6c12a4fd2633 --- /dev/null +++ b/server/src/main/java/io/druid/server/initialization/jetty/ForbiddenExceptionMapper.java @@ -0,0 +1,44 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.initialization.jetty; + + +import com.google.common.collect.ImmutableMap; +import io.druid.server.security.ForbiddenException; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; + +@Provider +public class ForbiddenExceptionMapper implements ExceptionMapper +{ + @Override + public Response toResponse(ForbiddenException exception) + { + return Response.status(Response.Status.FORBIDDEN) + .type(MediaType.APPLICATION_JSON) + .entity(ImmutableMap.of( + "Access-Check-Result", exception.getMessage() + )) + .build(); + } +} diff --git a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java index a7c17757ed68..278d752a478a 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java +++ 
b/server/src/main/java/io/druid/server/initialization/jetty/JettyServerModule.java @@ -100,6 +100,7 @@ protected void configureServlets() binder.bind(GuiceContainer.class).to(DruidGuiceContainer.class); binder.bind(DruidGuiceContainer.class).in(Scopes.SINGLETON); binder.bind(CustomExceptionMapper.class).in(Singleton.class); + binder.bind(ForbiddenExceptionMapper.class).in(Singleton.class); serve("/*").with(DruidGuiceContainer.class); diff --git a/server/src/main/java/io/druid/server/log/StartupLoggingConfig.java b/server/src/main/java/io/druid/server/log/StartupLoggingConfig.java index 51776da266af..81568b4ba882 100644 --- a/server/src/main/java/io/druid/server/log/StartupLoggingConfig.java +++ b/server/src/main/java/io/druid/server/log/StartupLoggingConfig.java @@ -30,7 +30,7 @@ public class StartupLoggingConfig private boolean logProperties = false; @JsonProperty - private List maskProperties = ImmutableList.of("password"); + private List maskProperties = ImmutableList.of("Password", "password", "Secret", "secret"); public boolean isLogProperties() { diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java index 53770dfa7010..e73c8bd73ae9 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -47,7 +47,7 @@ import io.druid.concurrent.Execs; import io.druid.concurrent.LifecycleLock; import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.IAE; import io.druid.java.util.common.IOE; @@ -133,7 +133,7 @@ public class LookupCoordinatorManager @Inject public LookupCoordinatorManager( - final @Global HttpClient httpClient, + final @EscalatedGlobal HttpClient 
httpClient, final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, final @Smile ObjectMapper smileMapper, final JacksonConfigManager configManager, @@ -144,7 +144,11 @@ public LookupCoordinatorManager( druidNodeDiscoveryProvider, configManager, lookupCoordinatorManagerConfig, - new LookupsCommunicator(httpClient, lookupCoordinatorManagerConfig, smileMapper), + new LookupsCommunicator( + httpClient, + lookupCoordinatorManagerConfig, + smileMapper + ), null ); } diff --git a/server/src/main/java/io/druid/server/security/Access.java b/server/src/main/java/io/druid/server/security/Access.java index 19f6a6115753..c7bc32b9dd6e 100644 --- a/server/src/main/java/io/druid/server/security/Access.java +++ b/server/src/main/java/io/druid/server/security/Access.java @@ -23,6 +23,8 @@ public class Access { + public final static Access OK = new Access(true); + private final boolean allowed; private String message; diff --git a/server/src/main/java/io/druid/server/security/Action.java b/server/src/main/java/io/druid/server/security/Action.java index 2b7606b58dd8..99f6a7e6bbf5 100644 --- a/server/src/main/java/io/druid/server/security/Action.java +++ b/server/src/main/java/io/druid/server/security/Action.java @@ -19,8 +19,20 @@ package io.druid.server.security; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.druid.java.util.common.StringUtils; + public enum Action { READ, - WRITE + WRITE; + + @JsonCreator + public static Action fromString(String name) + { + if (name == null) { + return null; + } + return valueOf(StringUtils.toUpperCase(name)); + } } diff --git a/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java b/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java new file mode 100644 index 000000000000..b6a9ae8673d9 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AllowAllAuthenticator.java @@ -0,0 +1,123 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import com.metamx.http.client.HttpClient; + +import javax.servlet.DispatcherType; +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import java.io.IOException; +import java.util.EnumSet; +import java.util.Map; + +/** + * Should only be used in conjunction with AllowAllAuthorizer. 
+ */ +public class AllowAllAuthenticator implements Authenticator +{ + public static final AuthenticationResult ALLOW_ALL_RESULT = new AuthenticationResult("allowAll", "allowAll"); + + @Override + public Class getFilterClass() + { + return null; + } + + @Override + public Map getInitParameters() + { + return null; + } + + @Override + public String getPath() + { + return "/*"; + } + + @Override + public EnumSet getDispatcherType() + { + return null; + } + + @Override + public Filter getFilter() + { + return new Filter() + { + @Override + public void init(FilterConfig filterConfig) throws ServletException + { + + } + + @Override + public void doFilter( + ServletRequest request, ServletResponse response, FilterChain chain + ) throws IOException, ServletException + { + request.setAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT, ALLOW_ALL_RESULT); + chain.doFilter(request, response); + } + + @Override + public void destroy() + { + + } + }; + } + + @Override + public String getAuthChallengeHeader() + { + return null; + } + + @Override + public AuthenticationResult authenticateJDBCContext(Map context) + { + return ALLOW_ALL_RESULT; + } + + @Override + public HttpClient createEscalatedClient(HttpClient baseClient) + { + return baseClient; + } + + @Override + public org.eclipse.jetty.client.HttpClient createEscalatedJettyClient(org.eclipse.jetty.client.HttpClient baseClient) + { + return baseClient; + } + + @Override + public AuthenticationResult createEscalatedAuthenticationResult() + { + return ALLOW_ALL_RESULT; + } +} diff --git a/server/src/main/java/io/druid/server/security/AllowAllAuthorizer.java b/server/src/main/java/io/druid/server/security/AllowAllAuthorizer.java new file mode 100644 index 000000000000..12bd6e6597cf --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AllowAllAuthorizer.java @@ -0,0 +1,29 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +public class AllowAllAuthorizer implements Authorizer +{ + @Override + public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action) + { + return Access.OK; + } +} diff --git a/server/src/main/java/io/druid/server/security/AuthConfig.java b/server/src/main/java/io/druid/server/security/AuthConfig.java index 619ab158a753..e93d84939706 100644 --- a/server/src/main/java/io/druid/server/security/AuthConfig.java +++ b/server/src/main/java/io/druid/server/security/AuthConfig.java @@ -22,35 +22,69 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + public class AuthConfig { /** - * Use this String as the attribute name for the request attribute to pass {@link AuthorizationInfo} - * from the servlet filter to the jersey resource - * */ - public static final String DRUID_AUTH_TOKEN = "Druid-Auth-Token"; + * HTTP attribute that holds an AuthenticationResult, with info about a successful authentication check. 
+ */ + public static final String DRUID_AUTHENTICATION_RESULT = "Druid-Authentication-Result"; + + /** + * HTTP attribute set when a static method in AuthorizationUtils performs an authorization check on the request. + */ + public static final String DRUID_AUTHORIZATION_CHECKED = "Druid-Authorization-Checked"; public AuthConfig() { - this(false); + this(null, null, null); } @JsonCreator - public AuthConfig(@JsonProperty("enabled") boolean enabled) + public AuthConfig( + @JsonProperty("authenticatorChain") List authenticationChain, + @JsonProperty("escalatedAuthenticator") String escalatedAuthenticator, + @JsonProperty("authorizers") List authorizers + ) { - this.enabled = enabled; + this.authenticatorChain = authenticationChain; + this.escalatedAuthenticator = escalatedAuthenticator == null ? "allowAll" : escalatedAuthenticator; + this.authorizers = authorizers; } - /** - * If druid.auth.enabled is set to true then an implementation of AuthorizationInfo - * must be provided and it must be set as a request attribute possibly inside the servlet filter - * injected in the filter chain using your own extension - * */ + + @JsonProperty + private final List authenticatorChain; + + @JsonProperty + private final String escalatedAuthenticator; + @JsonProperty - private final boolean enabled; + private List authorizers; - public boolean isEnabled() + public List getAuthenticatorChain() { - return enabled; + return authenticatorChain; + } + + public String getEscalatedAuthenticator() + { + return escalatedAuthenticator; + } + + public List getAuthorizers() + { + return authorizers; + } + + @Override + public String toString() + { + return "AuthConfig{" + + "authenticatorChain='" + authenticatorChain + '\'' + + ", escalatedAuthenticator='" + escalatedAuthenticator + '\'' + + ", authorizers='" + authorizers + '\'' + + '}'; } @Override @@ -65,21 +99,26 @@ public boolean equals(Object o) AuthConfig that = (AuthConfig) o; - return enabled == that.enabled; + if 
(getAuthenticatorChain() != null + ? !getAuthenticatorChain().equals(that.getAuthenticatorChain()) + : that.getAuthenticatorChain() != null) { + return false; + } + if (getEscalatedAuthenticator() != null + ? !getEscalatedAuthenticator().equals(that.getEscalatedAuthenticator()) + : that.getEscalatedAuthenticator() != null) { + return false; + } + return getAuthorizers() != null ? getAuthorizers().equals(that.getAuthorizers()) : that.getAuthorizers() == null; } @Override public int hashCode() { - return (enabled ? 1 : 0); - } - - @Override - public String toString() - { - return "AuthConfig{" + - "enabled=" + enabled + - '}'; + int result = getAuthenticatorChain() != null ? getAuthenticatorChain().hashCode() : 0; + result = 31 * result + (getEscalatedAuthenticator() != null ? getEscalatedAuthenticator().hashCode() : 0); + result = 31 * result + (getAuthorizers() != null ? getAuthorizers().hashCode() : 0); + return result; } } diff --git a/server/src/main/java/io/druid/server/security/AuthTestUtils.java b/server/src/main/java/io/druid/server/security/AuthTestUtils.java new file mode 100644 index 000000000000..e06fa9c23030 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthTestUtils.java @@ -0,0 +1,44 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import com.google.common.collect.Maps; + +import java.util.Map; + +public class AuthTestUtils +{ + public static final AuthenticatorMapper TEST_AUTHENTICATOR_MAPPER; + public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER; + + static { + final Map defaultMap = Maps.newHashMap(); + defaultMap.put("allowAll", new AllowAllAuthenticator()); + TEST_AUTHENTICATOR_MAPPER = new AuthenticatorMapper(defaultMap, "allowAll"); + + TEST_AUTHORIZER_MAPPER = new AuthorizerMapper(null) { + @Override + public Authorizer getAuthorizer(String name) + { + return new AllowAllAuthorizer(); + } + }; + } +} diff --git a/server/src/main/java/io/druid/server/security/AuthenticationResult.java b/server/src/main/java/io/druid/server/security/AuthenticationResult.java new file mode 100644 index 000000000000..ac92665dcccd --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthenticationResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.security; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * An AuthenticationResult contains information about a successfully authenticated request. + */ +public class AuthenticationResult +{ + /** + * the identity of the requester + */ + private final String identity; + + /** + * the name of the Authorizer that should handle the authenticated request. + */ + private final String authorizerName; + + @JsonCreator + public AuthenticationResult( + @JsonProperty("identity") final String identity, + @JsonProperty("authorizerName") final String authorizerName + ) + { + this.identity = identity; + this.authorizerName = authorizerName; + } + + @JsonProperty + public String getIdentity() + { + return identity; + } + + @JsonProperty + public String getAuthorizerName() + { + return authorizerName; + } +} diff --git a/server/src/main/java/io/druid/server/security/AuthenticationUtils.java b/server/src/main/java/io/druid/server/security/AuthenticationUtils.java new file mode 100644 index 000000000000..cabaa828274c --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthenticationUtils.java @@ -0,0 +1,85 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.ServletContextHandler; + +import java.util.List; + +public class AuthenticationUtils +{ + public static void addAuthenticationFilterChain( + ServletContextHandler root, + List authenticators + ) + { + for (Authenticator authenticator : authenticators) { + FilterHolder holder = new FilterHolder( + new AuthenticationWrappingFilter(authenticator.getFilter()) + ); + if (authenticator.getInitParameters() != null) { + holder.setInitParameters(authenticator.getInitParameters()); + } + root.addFilter( + holder, + "/*", + null + ); + } + } + + public static void addNoopAuthorizationFilters(ServletContextHandler root, List unsecuredPaths) + { + for (String unsecuredPath : unsecuredPaths) { + root.addFilter(new FilterHolder(new UnsecuredResourceFilter()), unsecuredPath, null); + } + } + + public static void addSecuritySanityCheckFilter( + ServletContextHandler root, + ObjectMapper jsonMapper + ) + { + root.addFilter( + new FilterHolder( + new SecuritySanityCheckFilter(jsonMapper) + ), + "/*", + null + ); + } + + public static void addPreResponseAuthorizationCheckFilter( + ServletContextHandler root, + List authenticators, + ObjectMapper jsonMapper + ) + { + root.addFilter( + new FilterHolder( + new PreResponseAuthorizationCheckFilter(authenticators, jsonMapper) + ), + "/*", + null + ); + } +} diff --git a/server/src/main/java/io/druid/server/security/AuthenticationWrappingFilter.java b/server/src/main/java/io/druid/server/security/AuthenticationWrappingFilter.java new file mode 100644 index 000000000000..c5ce96374350 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthenticationWrappingFilter.java @@ -0,0 +1,69 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import java.io.IOException; + +/** + * Used to wrap Filters created by Authenticators, this wrapper filter skips itself if a request already + * has an authentication check (so that Authenticator implementations don't have to perform this check themselves) + */ +public class AuthenticationWrappingFilter implements Filter +{ + private final Filter delegate; + + public AuthenticationWrappingFilter( + final Filter delegate + ) + { + this.delegate = delegate; + } + + @Override + public void init(FilterConfig filterConfig) throws ServletException + { + delegate.init(filterConfig); + } + + @Override + public void doFilter( + ServletRequest request, ServletResponse response, FilterChain chain + ) throws IOException, ServletException + { + // If there's already an auth result, then we have authenticated already, skip this. 
+ if (request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) != null) { + chain.doFilter(request, response); + } else { + delegate.doFilter(request, response, chain); + } + } + + @Override + public void destroy() + { + delegate.destroy(); + } +} diff --git a/server/src/main/java/io/druid/server/security/Authenticator.java b/server/src/main/java/io/druid/server/security/Authenticator.java new file mode 100644 index 000000000000..50468cbfd5ea --- /dev/null +++ b/server/src/main/java/io/druid/server/security/Authenticator.java @@ -0,0 +1,129 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.security; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import com.metamx.http.client.HttpClient; +import io.druid.server.initialization.jetty.ServletFilterHolder; + +import javax.annotation.Nullable; +import javax.servlet.Filter; +import java.util.Map; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") +@JsonSubTypes(value = { + @JsonSubTypes.Type(name = "allowAll", value = AllowAllAuthenticator.class), +}) +/** + * This interface is essentially a ServletFilterHolder with additional requirements on the getFilter() method contract, plus: + * + * - A method that returns a WWW-Authenticate challenge header appropriate for the + * authentication mechanism, getAuthChallengeHeader(). + * - A method for creating a wrapped HTTP client that can authenticate using the Authenticator's authentication scheme, + * used for internal Druid node communications (e.g., broker -> historical messages), createEscalatedClient(). + * - A method for creating a wrapped Jetty HTTP client that can authenticate using the Authenticator's authentication scheme, + * used by the Druid router, createEscalatedJettyClient(). + * - A method for authenticating credentials contained in a JDBC connection context, used for authenticating Druid SQL + * requests received via JDBC, authenticateJDBCContext(). + */ +public interface Authenticator extends ServletFilterHolder +{ + /** + * Create a Filter that performs authentication checks on incoming HTTP requests. + *

+ * If the authentication succeeds, the Filter should set the "Druid-Authentication-Result" attribute in the request, + * containing an AuthenticationResult that represents the authenticated identity of the requester, along with + * the name of the Authorizer instance that should authorize the request. + *

+ * If the "Druid-Authentication-Result" attribute is already set (i.e., request has been authenticated by an + * earlier Filter), this Filter should skip any authentication checks and proceed to the next Filter. + *

+ * If a filter cannot recognize a request's format (e.g., the request does not have credentials compatible + * with a filter's authentication scheme), the filter should not send an error response, allowing other + * filters to handle the request. A challenge response will be sent if the filter chain is exhausted. + *

+ * If the authentication fails (i.e., a filter recognized the authentication scheme of a request, but the credentials + * failed to authenticate successfully) the Filter should send an error response, without needing to proceed to + * other filters in the chain. + * + * @return Filter that authenticates HTTP requests + */ + @Override + public Filter getFilter(); + + /** + * Return a WWW-Authenticate challenge scheme string appropriate for this Authenticator's authentication mechanism. + *

+ * For example, a Basic HTTP implementation should return "Basic", while a Kerberos implementation would return + * "Negotiate". If this method returns null, no authentication scheme will be added for that Authenticator + * implementation. + * + * @return Authentication scheme + */ + @Nullable + public String getAuthChallengeHeader(); + + /** + * Given a JDBC connection context, authenticate the identity represented by the information in the context. + * This is used to secure JDBC access for Druid SQL. + *

+ * For example, a Basic HTTP auth implementation could read the "user" and "password" fields from the JDBC context. + *

+ * The expected contents of the context are left to the implementation. + * + * @param context JDBC connection context + * + * @return AuthenticationResult of the identity represented by the context if it is successfully authenticated, + * null if authentication failed + */ + @Nullable + public AuthenticationResult authenticateJDBCContext(Map context); + + /** + * Return a client that sends requests with the format/information necessary to authenticate successfully + * against this Authenticator's authentication scheme using the identity of the internal system user. + *

+ * This HTTP client is used for internal communications between Druid nodes, such as when a broker communicates + * with a historical node during query processing. + * + * @param baseClient Base HTTP client for internal Druid communications + * + * @return metamx HttpClient that sends requests with the credentials of the internal system user + */ + public HttpClient createEscalatedClient(HttpClient baseClient); + + /** + * Return a client that sends requests with the format/information necessary to authenticate successfully + * against this Authenticator's authentication scheme using the identity of the internal system user. + *

+ * This HTTP client is used by the Druid Router node. + * + * @param baseClient Base Jetty HttpClient + * + * @return Jetty HttpClient that sends requests with the credentials of the internal system user + */ + public org.eclipse.jetty.client.HttpClient createEscalatedJettyClient(org.eclipse.jetty.client.HttpClient baseClient); + + /** + * @return an AuthenticationResult representing the identity of the internal system user. + */ + public AuthenticationResult createEscalatedAuthenticationResult(); +} diff --git a/server/src/main/java/io/druid/server/security/AuthenticatorHttpClientWrapper.java b/server/src/main/java/io/druid/server/security/AuthenticatorHttpClientWrapper.java new file mode 100644 index 000000000000..25d4cb7c3d25 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthenticatorHttpClientWrapper.java @@ -0,0 +1,43 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import com.metamx.http.client.HttpClient; + +/** + * Singleton utility object that creates escalated HttpClients using a configuration-specified Authenticator's + * getEscalatedClient() method. 
+ */ +public class AuthenticatorHttpClientWrapper +{ + private Authenticator escalatingAuthenticator; + + public AuthenticatorHttpClientWrapper( + final Authenticator escalatingAuthenticator + ) + { + this.escalatingAuthenticator = escalatingAuthenticator; + } + + public HttpClient getEscalatedClient(HttpClient baseClient) + { + return escalatingAuthenticator.createEscalatedClient(baseClient); + } +} diff --git a/server/src/main/java/io/druid/server/security/AuthenticatorMapper.java b/server/src/main/java/io/druid/server/security/AuthenticatorMapper.java new file mode 100644 index 000000000000..952f607cbaf8 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthenticatorMapper.java @@ -0,0 +1,58 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.security; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import io.druid.guice.ManageLifecycle; + +import java.util.List; +import java.util.Map; + +@ManageLifecycle +public class AuthenticatorMapper +{ + private Map authenticatorMap; + private Authenticator escalatingAuthenticator; + + public AuthenticatorMapper( + Map authenticatorMap, + String escalatingAuthenticatorName + ) + { + this.authenticatorMap = authenticatorMap; + this.escalatingAuthenticator = authenticatorMap.get(escalatingAuthenticatorName); + Preconditions.checkNotNull( + escalatingAuthenticator, + "Could not find escalating authenticator with name: %s", + escalatingAuthenticatorName + ); + } + + public Authenticator getEscalatingAuthenticator() + { + return escalatingAuthenticator; + } + + public List getAuthenticatorChain() + { + return Lists.newArrayList(authenticatorMap.values()); + } +} diff --git a/server/src/main/java/io/druid/server/security/AuthorizationInfo.java b/server/src/main/java/io/druid/server/security/AuthorizationInfo.java deleted file mode 100644 index 31097a935477..000000000000 --- a/server/src/main/java/io/druid/server/security/AuthorizationInfo.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.server.security; - -/** - * This interface should be used to store as well as process Authorization Information - * An extension can be used to inject servlet filter which will create objects of this type - * and set it as a request attribute with attribute name as {@link AuthConfig#DRUID_AUTH_TOKEN}. - * In the jersey resources if the authorization is enabled depending on {@link AuthConfig#enabled} - * the {@link #isAuthorized(Resource, Action)} method will be used to perform authorization checks - * */ -public interface AuthorizationInfo -{ - /** - * Perform authorization checks for the given {@link Resource} and {@link Action}. - * resource and action objects should be instantiated depending on - * the specific endPoint where the check is being performed. - * Modeling Principal and specific way of performing authorization checks is - * entirely implementation dependent. - * - * @param resource information about resource that is being accessed - * @param action action to be performed on the resource - * @return a {@link Access} object having {@link Access#allowed} set to true if authorized otherwise set to false - * and optionally {@link Access#message} set to appropriate message - * */ - Access isAuthorized(Resource resource, Action action); -} diff --git a/server/src/main/java/io/druid/server/security/AuthorizationUtils.java b/server/src/main/java/io/druid/server/security/AuthorizationUtils.java new file mode 100644 index 000000000000..5c8fb2e41361 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthorizationUtils.java @@ -0,0 +1,241 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import com.google.common.base.Function; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import io.druid.java.util.common.ISE; + +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Static utility functions for performing authorization checks. + */ +public class AuthorizationUtils +{ + /** + * Check a resource-action using the authorization fields from the request. + * + * If the resource-action fails the authorization check, this method returns the failed Access object + * from the check. Otherwise, if the resource-action is authorized, return ACCESS_OK. + * + * This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request. + * + * If this attribute is already set when this function is called, an exception is thrown. + * + * @param request HTTP request to be authorized + * @param resourceAction A resource identifier and the action to be taken on the resource. + * @param authorizerMapper The singleton AuthorizerMapper instance + * @return ACCESS_OK or the failed Access object returned by the Authorizer that checked the request. 
+ */ + public static Access authorizeResourceAction( + final HttpServletRequest request, + final ResourceAction resourceAction, + final AuthorizerMapper authorizerMapper + ) + { + return authorizeAllResourceActions( + request, + Lists.newArrayList(resourceAction), + authorizerMapper + ); + } + + /** + * Check a list of resource-actions to be performed by the identity represented by authenticationResult. + * + * If one of the resource-actions fails the authorization check, this method returns the failed + * Access object from the check. + * + * Otherwise, return ACCESS_OK if all resource-actions were successfully authorized. + * + * @param authenticationResult Authentication result representing identity of requester + * @param resourceActions An Iterable of resource-actions to authorize + * @return ACCESS_OK or the Access object from the first failed check + */ + public static Access authorizeAllResourceActions( + final AuthenticationResult authenticationResult, + final Iterable resourceActions, + final AuthorizerMapper authorizerMapper + ) + { + final Authorizer authorizer = authorizerMapper.getAuthorizer(authenticationResult.getAuthorizerName()); + if (authorizer == null) { + throw new ISE("No authorizer found with name: [%s].", authenticationResult.getAuthorizerName()); + } + + // this method returns on first failure, so only successful Access results are kept in the cache + final Set resultCache = Sets.newHashSet(); + + for (ResourceAction resourceAction : resourceActions) { + if (resultCache.contains(resourceAction)) { + continue; + } + final Access access = authorizer.authorize( + authenticationResult, + resourceAction.getResource(), + resourceAction.getAction() + ); + if (!access.isAllowed()) { + return access; + } else { + resultCache.add(resourceAction); + } + } + + return Access.OK; + } + + /** + * Check a list of resource-actions to be performed as a result of an HTTP request. 
+ * + * If one of the resource-actions fails the authorization check, this method returns the failed + * Access object from the check. + * + * Otherwise, return ACCESS_OK if all resource-actions were successfully authorized. + * + * This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request. + * + * If this attribute is already set when this function is called, an exception is thrown. + * + * @param request HTTP request to be authorized + * @param resourceActions An Iterable of resource-actions to authorize + * @return ACCESS_OK or the Access object from the first failed check + */ + public static Access authorizeAllResourceActions( + final HttpServletRequest request, + final Iterable resourceActions, + final AuthorizerMapper authorizerMapper + ) + { + if (request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED) != null) { + throw new ISE("Request already had authorization check."); + } + + final AuthenticationResult authenticationResult = (AuthenticationResult) request.getAttribute( + AuthConfig.DRUID_AUTHENTICATION_RESULT + ); + if (authenticationResult == null) { + throw new ISE("Null authentication result"); + } + + Access access = authorizeAllResourceActions( + authenticationResult, + resourceActions, + authorizerMapper + ); + + request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, access.isAllowed()); + return access; + } + + /** + * Filter a list of resource-actions using the request's authorization fields, returning a new list of + * resource-actions that were authorized. + * + * This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request. + * + * If this attribute is already set when this function is called, an exception is thrown. 
+ * + * @param request HTTP request to be authorized + * @param resources List of resources to be processed into resource-actions + * @param resourceActionGenerator Function that creates a resource-action from a resource + * @return A list containing the resource-actions from the resourceParser that were successfully authorized. + */ + public static List filterAuthorizedResources( + final HttpServletRequest request, + final Collection resources, + final Function resourceActionGenerator, + final AuthorizerMapper authorizerMapper + ) + { + final AuthenticationResult authenticationResult = (AuthenticationResult) request.getAttribute( + AuthConfig.DRUID_AUTHENTICATION_RESULT + ); + if (authenticationResult == null) { + throw new ISE("Null authentication result"); + } + + final Authorizer authorizer = authorizerMapper.getAuthorizer(authenticationResult.getAuthorizerName()); + if (authorizer == null) { + throw new ISE("No authorizer found with name: [%s].", authenticationResult.getAuthorizerName()); + } + + final Map resultCache = Maps.newHashMap(); + List filteredResources = new ArrayList<>(); + for (ResType resource : resources) { + final ResourceAction resourceAction = resourceActionGenerator.apply(resource); + Access access = resultCache.computeIfAbsent( + resourceAction, + ra -> authorizer.authorize( + authenticationResult, + ra.getResource(), + ra.getAction() + ) + ); + if (access.isAllowed()) { + filteredResources.add(resource); + } + } + + // We're filtering, so having access to none of the objects isn't an authorization failure (in terms of whether + // to send an error response or not.) + request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + return filteredResources; + } + + /** + * Function for the common pattern of generating a resource-action for reading from a datasource, using the + * datasource name. 
+ */ + public static Function DATASOURCE_READ_RA_GENERATOR = new Function() + { + @Override + public ResourceAction apply(String input) + { + return new ResourceAction( + new Resource(input, ResourceType.DATASOURCE), + Action.READ + ); + } + }; + + /** + * Function for the common pattern of generating a resource-action for writing to a datasource, using the + * datasource name. + */ + public static Function DATASOURCE_WRITE_RA_GENERATOR = new Function() + { + @Override + public ResourceAction apply(String input) + { + return new ResourceAction( + new Resource(input, ResourceType.DATASOURCE), + Action.WRITE + ); + } + }; +} diff --git a/server/src/main/java/io/druid/server/security/Authorizer.java b/server/src/main/java/io/druid/server/security/Authorizer.java new file mode 100644 index 000000000000..9cd586c9f9dc --- /dev/null +++ b/server/src/main/java/io/druid/server/security/Authorizer.java @@ -0,0 +1,52 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.security; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") +@JsonSubTypes(value = { + @JsonSubTypes.Type(name = "allowAll", value = AllowAllAuthorizer.class) +}) +/** + * An Authorizer is responsible for performing authorization checks for resource accesses. + * + * A single instance of each Authorizer implementation will be created per node. + * Security-sensitive endpoints will need to extract the identity string contained in the request's Druid-Authentication-Result + * attribute, previously set by an Authenticator. Each endpoint will pass this identity String to the + * Authorizer's authorize() method along with any Resource/Action pairs created for the request being + * handled. The endpoint can use these checks to filter out resources or deny the request as needed. + * After a request is authorized, a new attribute, "Druid-Authorization-Checked", should be set in the + * request with the result of the authorization decision. + */ +public interface Authorizer +{ + /** + * Check if the entity represented by {@code authenticationResult} is authorized to perform {@code action} on {@code resource}. + * + * @param authenticationResult The authentication result of the request + * @param resource The resource to be accessed + * @param action The action to perform on the resource + * + * @return An Access object representing the result of the authorization check. Must not be null. 
+ */ + Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action); +} diff --git a/server/src/main/java/io/druid/server/security/AuthorizerMapper.java b/server/src/main/java/io/druid/server/security/AuthorizerMapper.java new file mode 100644 index 000000000000..2c029aafe038 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/AuthorizerMapper.java @@ -0,0 +1,42 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.security; + +import io.druid.guice.ManageLifecycle; + +import java.util.Map; + +@ManageLifecycle +public class AuthorizerMapper +{ + private Map authorizerMap; + + public AuthorizerMapper( + Map authorizerMap + ) + { + this.authorizerMap = authorizerMap; + } + + public Authorizer getAuthorizer(String name) + { + return authorizerMap.get(name); + } +} diff --git a/server/src/main/java/io/druid/server/security/SystemAuthorizationInfo.java b/server/src/main/java/io/druid/server/security/ForbiddenException.java similarity index 66% rename from server/src/main/java/io/druid/server/security/SystemAuthorizationInfo.java rename to server/src/main/java/io/druid/server/security/ForbiddenException.java index f41983edc696..fc7324cb7013 100644 --- a/server/src/main/java/io/druid/server/security/SystemAuthorizationInfo.java +++ b/server/src/main/java/io/druid/server/security/ForbiddenException.java @@ -20,20 +20,18 @@ package io.druid.server.security; /** - * An AuthorizationInfo that is useful for actions generated internally by the system. It allows everything. + * Throw this when a request is unauthorized and we want to send a 403 response back, Jersey exception mapper will + * take care of sending the response. */ -public class SystemAuthorizationInfo implements AuthorizationInfo +public class ForbiddenException extends RuntimeException { - public static final SystemAuthorizationInfo INSTANCE = new SystemAuthorizationInfo(); - - private SystemAuthorizationInfo() + public ForbiddenException() { - // Singleton. 
+ super("Unauthorized."); } - @Override - public Access isAuthorized(final Resource resource, final Action action) + public ForbiddenException(String msg) { - return new Access(true); + super(msg); } } diff --git a/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java b/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java new file mode 100644 index 000000000000..6743af04c204 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/PreResponseAuthorizationCheckFilter.java @@ -0,0 +1,184 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.server.security; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Sets; +import com.metamx.emitter.EmittingLogger; +import io.druid.java.util.common.ISE; +import io.druid.query.QueryInterruptedException; +import io.druid.server.DruidNode; +import org.eclipse.jetty.server.Response; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; + +/** + * Filter that verifies that authorization checks were applied to an HTTP request, before sending a response. + *

+ * This filter is intended to help catch missing authorization checks arising from bugs/design omissions. + */ +public class PreResponseAuthorizationCheckFilter implements Filter +{ + private static final EmittingLogger log = new EmittingLogger(PreResponseAuthorizationCheckFilter.class); + + private final List authenticators; + private final ObjectMapper jsonMapper; + + public PreResponseAuthorizationCheckFilter( + List authenticators, + ObjectMapper jsonMapper + ) + { + this.authenticators = authenticators; + this.jsonMapper = jsonMapper; + } + + @Override + public void init(FilterConfig filterConfig) throws ServletException + { + + } + + @Override + public void doFilter( + ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain + ) throws IOException, ServletException + { + final HttpServletResponse response = (HttpServletResponse) servletResponse; + final HttpServletRequest request = (HttpServletRequest) servletRequest; + + if (servletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) == null) { + handleUnauthenticatedRequest(response); + return; + } + + filterChain.doFilter(servletRequest, servletResponse); + + Boolean authInfoChecked = (Boolean) servletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED); + if (authInfoChecked == null && !errorOverridesMissingAuth(response.getStatus())) { + // Note: rather than throwing an exception here, it would be nice to blank out the original response + // since the request didn't have any authorization checks performed. However, this breaks proxying + // (e.g. OverlordServletProxy), so this is not implemented for now. 
+ handleAuthorizationCheckError( + "Request did not have an authorization check performed.", + request, + response + ); + } + + if (authInfoChecked != null && !authInfoChecked && response.getStatus() != Response.SC_FORBIDDEN) { + handleAuthorizationCheckError( + "Request's authorization check failed but status code was not 403.", + request, + response + ); + } + } + + @Override + public void destroy() + { + + } + + private void handleUnauthenticatedRequest( + final HttpServletResponse response + ) throws IOException + { + // Since this is the last filter in the chain, some previous authentication filter + // should have placed an authentication result in the request. + // If not, send an authentication challenge. + Set supportedAuthSchemes = Sets.newHashSet(); + for (Authenticator authenticator : authenticators) { + String challengeHeader = authenticator.getAuthChallengeHeader(); + if (challengeHeader != null) { + supportedAuthSchemes.add(challengeHeader); + } + } + for (String authScheme : supportedAuthSchemes) { + response.addHeader("WWW-Authenticate", authScheme); + } + QueryInterruptedException unauthorizedError = new QueryInterruptedException( + QueryInterruptedException.UNAUTHORIZED, + null, + null, + DruidNode.getDefaultHost() + ); + unauthorizedError.setStackTrace(new StackTraceElement[0]); + OutputStream out = response.getOutputStream(); + sendJsonError(response, Response.SC_UNAUTHORIZED, jsonMapper.writeValueAsString(unauthorizedError), out); + out.close(); + return; + } + + private void handleAuthorizationCheckError( + String errorMsg, + HttpServletRequest servletRequest, + HttpServletResponse servletResponse + ) + { + // Send out an alert so there's a centralized collection point for seeing errors of this nature + log.makeAlert(errorMsg) + .addData("uri", servletRequest.getRequestURI()) + .addData("method", servletRequest.getMethod()) + .emit(); + + if (servletResponse.isCommitted()) { + throw new ISE(errorMsg); + } else { + try { + 
servletResponse.sendError(Response.SC_FORBIDDEN); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + private static boolean errorOverridesMissingAuth(int status) + { + return status == Response.SC_INTERNAL_SERVER_ERROR; + } + + public static void sendJsonError(HttpServletResponse resp, int error, String errorJson, OutputStream outputStream) + { + resp.setStatus(error); + resp.setContentType("application/json"); + resp.setCharacterEncoding("UTF-8"); + try { + outputStream.write(errorJson.getBytes(StandardCharsets.UTF_8)); + } + catch (IOException ioe) { + log.error("WTF? Can't get writer from HTTP response."); + } + } +} diff --git a/server/src/main/java/io/druid/server/security/Resource.java b/server/src/main/java/io/druid/server/security/Resource.java index d3c74fb52899..ca0a64a12af3 100644 --- a/server/src/main/java/io/druid/server/security/Resource.java +++ b/server/src/main/java/io/druid/server/security/Resource.java @@ -19,22 +19,31 @@ package io.druid.server.security; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + public class Resource { private final String name; private final ResourceType type; - public Resource(String name, ResourceType type) + @JsonCreator + public Resource( + @JsonProperty("name") String name, + @JsonProperty("type") ResourceType type + ) { this.name = name; this.type = type; } + @JsonProperty public String getName() { return name; } + @JsonProperty public ResourceType getType() { return type; @@ -66,4 +75,13 @@ public int hashCode() result = 31 * result + type.hashCode(); return result; } + + @Override + public String toString() + { + return "Resource{" + + "name='" + name + '\'' + + ", type=" + type + + '}'; + } } diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/AuthenticationKerberosConfig.java b/server/src/main/java/io/druid/server/security/ResourceAction.java similarity index 56% rename from 
extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/AuthenticationKerberosConfig.java rename to server/src/main/java/io/druid/server/security/ResourceAction.java index 7ed11fe59cc0..240f9280562f 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/AuthenticationKerberosConfig.java +++ b/server/src/main/java/io/druid/server/security/ResourceAction.java @@ -17,36 +17,36 @@ * under the License. */ -package io.druid.security.kerberos; - +package io.druid.server.security; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -public class AuthenticationKerberosConfig +public class ResourceAction { - @JsonProperty - private final String principal; - @JsonProperty - private final String keytab; + private final Resource resource; + private final Action action; @JsonCreator - public AuthenticationKerberosConfig(@JsonProperty("principal") String principal, @JsonProperty("keytab") String keytab) + public ResourceAction( + @JsonProperty("resource") Resource resource, + @JsonProperty("action") Action action + ) { - this.principal = principal; - this.keytab = keytab; + this.resource = resource; + this.action = action; } @JsonProperty - public String getPrincipal() + public Resource getResource() { - return principal; + return resource; } @JsonProperty - public String getKeytab() + public Action getAction() { - return keytab; + return action; } @Override @@ -55,24 +55,24 @@ public boolean equals(Object o) if (this == o) { return true; } - if (!(o instanceof AuthenticationKerberosConfig)) { + if (o == null || getClass() != o.getClass()) { return false; } - AuthenticationKerberosConfig that = (AuthenticationKerberosConfig) o; + ResourceAction that = (ResourceAction) o; - if (getPrincipal() != null ? !getPrincipal().equals(that.getPrincipal()) : that.getPrincipal() != null) { + if (!getResource().equals(that.getResource())) { return false; } - return getKeytab() != null ? 
getKeytab().equals(that.getKeytab()) : that.getKeytab() == null; + return getAction() == that.getAction(); } @Override public int hashCode() { - int result = getPrincipal() != null ? getPrincipal().hashCode() : 0; - result = 31 * result + (getKeytab() != null ? getKeytab().hashCode() : 0); + int result = getResource().hashCode(); + result = 31 * result + getAction().hashCode(); return result; } } diff --git a/server/src/main/java/io/druid/server/security/ResourceType.java b/server/src/main/java/io/druid/server/security/ResourceType.java index 818bf9ca947d..3d4de3ca065b 100644 --- a/server/src/main/java/io/druid/server/security/ResourceType.java +++ b/server/src/main/java/io/druid/server/security/ResourceType.java @@ -19,9 +19,21 @@ package io.druid.server.security; +import com.fasterxml.jackson.annotation.JsonCreator; +import io.druid.java.util.common.StringUtils; + public enum ResourceType { DATASOURCE, CONFIG, - STATE + STATE; + + @JsonCreator + public static ResourceType fromString(String name) + { + if (name == null) { + return null; + } + return valueOf(StringUtils.toUpperCase(name)); + } } diff --git a/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java b/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java new file mode 100644 index 000000000000..e53658197eaa --- /dev/null +++ b/server/src/main/java/io/druid/server/security/SecuritySanityCheckFilter.java @@ -0,0 +1,107 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import com.fasterxml.jackson.databind.ObjectMapper; +import io.druid.java.util.common.logger.Logger; +import io.druid.query.QueryInterruptedException; +import io.druid.server.DruidNode; +import org.eclipse.jetty.server.Response; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; + +public class SecuritySanityCheckFilter implements Filter +{ + private static final Logger log = new Logger(SecuritySanityCheckFilter.class); + + private final String unauthorizedMessage; + + public SecuritySanityCheckFilter( + ObjectMapper jsonMapper + ) + { + try { + QueryInterruptedException unauthorizedError = new QueryInterruptedException( + QueryInterruptedException.UNAUTHORIZED, + null, + null, + DruidNode.getDefaultHost() + ); + unauthorizedError.setStackTrace(new StackTraceElement[0]); + this.unauthorizedMessage = jsonMapper.writeValueAsString(unauthorizedError); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void init(FilterConfig filterConfig) throws ServletException + { + } + + @Override + public void doFilter( + ServletRequest request, ServletResponse response, FilterChain chain + ) throws IOException, ServletException + { + HttpServletResponse httpResponse 
= (HttpServletResponse) response; + OutputStream out = httpResponse.getOutputStream(); + + // make sure the original request isn't trying to fake the auth token checks + Boolean authInfoChecked = (Boolean) request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED); + AuthenticationResult result = (AuthenticationResult) request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT); + if (authInfoChecked != null || result != null) { + sendJsonError(httpResponse, Response.SC_FORBIDDEN, unauthorizedMessage, out); + out.close(); + return; + } + + chain.doFilter(request, response); + } + + @Override + public void destroy() + { + + } + + public static void sendJsonError(HttpServletResponse resp, int error, String errorJson, OutputStream outputStream) + { + resp.setStatus(error); + resp.setContentType("application/json"); + resp.setCharacterEncoding("UTF-8"); + try { + outputStream.write(errorJson.getBytes(StandardCharsets.UTF_8)); + } + catch (IOException ioe) { + log.error("WTF? Can't get writer from HTTP response."); + } + } +} diff --git a/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java b/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java new file mode 100644 index 000000000000..1fbbc7013c83 --- /dev/null +++ b/server/src/main/java/io/druid/server/security/UnsecuredResourceFilter.java @@ -0,0 +1,52 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.security; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import java.io.IOException; + +public class UnsecuredResourceFilter implements Filter +{ + @Override + public void init(FilterConfig filterConfig) throws ServletException + { + + } + + @Override + public void doFilter( + ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain + ) throws IOException, ServletException + { + servletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + filterChain.doFilter(servletRequest, servletResponse); + } + + @Override + public void destroy() + { + + } +} diff --git a/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java b/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java index c29c6451ef01..8fdb412cd929 100644 --- a/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java +++ b/server/src/test/java/io/druid/discovery/DruidLeaderClientTest.java @@ -96,7 +96,6 @@ public void configure(Binder binder) } ) ); - httpClient = injector.getInstance(ClientHolder.class).getClient(); return injector; } diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java index 94c87c953c37..8e17dcce08c5 100644 --- 
a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java @@ -31,6 +31,9 @@ import io.druid.java.util.common.ISE; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; +import io.druid.server.security.AllowAllAuthenticator; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import org.apache.commons.io.IOUtils; import org.easymock.EasyMock; import org.junit.Assert; @@ -74,7 +77,8 @@ public void setUp() throws Exception null, new DefaultObjectMapper(), new DefaultObjectMapper(), - register + register, + AuthTestUtils.TEST_AUTHORIZER_MAPPER ); firehose = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory.connect( new MapInputRowParser( @@ -95,6 +99,14 @@ public void setUp() throws Exception @Test public void testSingleThread() throws IOException { + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT) + .anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); EasyMock.expect(req.getContentType()).andReturn("application/json").times(NUM_EVENTS); EasyMock.replay(req); @@ -137,6 +149,15 @@ public void testSingleThread() throws IOException @Test public void testMultipleThreads() throws InterruptedException, IOException, TimeoutException, ExecutionException { + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT) + .anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, 
true); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(req.getContentType()).andReturn("application/json").times(2 * NUM_EVENTS); EasyMock.replay(req); @@ -205,7 +226,8 @@ public void testDuplicateRegistering() throws IOException null, new DefaultObjectMapper(), new DefaultObjectMapper(), - register + register, + AuthTestUtils.TEST_AUTHORIZER_MAPPER ); EventReceiverFirehoseFactory.EventReceiverFirehose firehose2 = (EventReceiverFirehoseFactory.EventReceiverFirehose) eventReceiverFirehoseFactory2 @@ -228,7 +250,17 @@ public void testDuplicateRegistering() throws IOException @Test(timeout = 40_000L) public void testShutdownWithPrevTime() throws Exception { - firehose.shutdown(DateTimes.nowUtc().minusMinutes(2).toString()); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT) + .anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); + EasyMock.replay(req); + + firehose.shutdown(DateTimes.nowUtc().minusMinutes(2).toString(), req); while (!firehose.isClosed()) { Thread.sleep(50); } @@ -237,7 +269,17 @@ public void testShutdownWithPrevTime() throws Exception @Test(timeout = 40_000L) public void testShutdown() throws Exception { - firehose.shutdown(DateTimes.nowUtc().plusMillis(100).toString()); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT) + .anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); + EasyMock.replay(req); + + firehose.shutdown(DateTimes.nowUtc().plusMillis(100).toString(), req); while (!firehose.isClosed()) { Thread.sleep(50); } diff --git 
a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java index a3ca079f9cc9..340503bcadd1 100644 --- a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java +++ b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java @@ -51,6 +51,10 @@ import io.druid.server.log.RequestLogger; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.server.router.QueryHostFinder; +import io.druid.server.security.AllowAllAuthorizer; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AuthorizerMapper; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; @@ -112,6 +116,16 @@ public void configure(Binder binder) new DruidNode("test", "localhost", null, null, new ServerConfig()) ); binder.bind(JettyServerInitializer.class).to(ProxyJettyServerInit.class).in(LazySingleton.class); + binder.bind(AuthorizerMapper.class).toInstance( + new AuthorizerMapper(null) { + + @Override + public Authorizer getAuthorizer(String name) + { + return new AllowAllAuthorizer(); + } + } + ); Jerseys.addResource(binder, SlowResource.class); Jerseys.addResource(binder, ExceptionResource.class); Jerseys.addResource(binder, DefaultResource.class); @@ -238,7 +252,8 @@ public void log(RequestLogLine requestLogLine) throws IOException // noop } }, - new DefaultGenericQueryMetricsFactory(jsonMapper) + new DefaultGenericQueryMetricsFactory(jsonMapper), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER ) { @Override diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index a397c749171a..7a3156d45f4a 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ 
-415,7 +415,13 @@ private ClientInfoResource getResourceTestHelper( SegmentMetadataQueryConfig segmentMetadataQueryConfig ) { - return new ClientInfoResource(serverInventoryView, timelineServerView, segmentMetadataQueryConfig, new AuthConfig()) + return new ClientInfoResource( + serverInventoryView, + timelineServerView, + segmentMetadataQueryConfig, + new AuthConfig(), + null + ) { @Override protected DateTime getCurrentTime() diff --git a/server/src/test/java/io/druid/server/QueryResourceTest.java b/server/src/test/java/io/druid/server/QueryResourceTest.java index 1dc226b979f8..879672b5dc19 100644 --- a/server/src/test/java/io/druid/server/QueryResourceTest.java +++ b/server/src/test/java/io/druid/server/QueryResourceTest.java @@ -45,7 +45,11 @@ import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.ForbiddenException; import io.druid.server.security.Resource; import org.easymock.EasyMock; import org.joda.time.Interval; @@ -72,6 +76,9 @@ public class QueryResourceTest { private static final QueryToolChestWarehouse warehouse = new MapQueryToolChestWarehouse(ImmutableMap., QueryToolChest>of()); private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final AuthenticationResult authenticationResult = new AuthenticationResult("druid", "druid"); + + public static final ServerConfig serverConfig = new ServerConfig() { @Override @@ -113,6 +120,7 @@ public QueryRunner getQueryRunnerForSegments( } }; + private static final ServiceEmitter noopServiceEmitter = new NoopServiceEmitter(); private QueryResource queryResource; @@ -139,12 +147,15 @@ public void setup() new NoopServiceEmitter(), new 
NoopRequestLogger(), serverConfig, - new AuthConfig() + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER ), jsonMapper, jsonMapper, queryManager, - new AuthConfig() + new AuthConfig(), + null, + new DefaultGenericQueryMetricsFactory(jsonMapper) ); } @@ -166,16 +177,17 @@ public void setup() @Test public void testGoodQuery() throws IOException { - EasyMock.expect(testServletRequest.getAttribute(EasyMock.anyString())).andReturn( - new AuthorizationInfo() - { - @Override - public Access isAuthorized(Resource resource, Action action) - { - return new Access(true); - } - } - ).times(1); + EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + + EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .anyTimes(); + + testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); + EasyMock.replay(testServletRequest); Response response = queryResource.doPost( new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), @@ -201,13 +213,30 @@ public void testBadQuery() throws IOException @Test public void testSecuredQuery() throws Exception { - EasyMock.expect(testServletRequest.getAttribute(EasyMock.anyString())).andReturn( - new AuthorizationInfo() + EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + + EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .anyTimes(); + + testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false); + EasyMock.expectLastCall().times(1); + + testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().times(1); + + EasyMock.replay(testServletRequest); + + AuthorizerMapper authMapper = new AuthorizerMapper(null) { + @Override + 
public Authorizer getAuthorizer(String name) + { + return new Authorizer() { @Override - public Access isAuthorized( - Resource resource, Action action - ) + public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action) { if (resource.getName().equals("allow")) { return new Access(true); @@ -215,9 +244,10 @@ public Access isAuthorized( return new Access(false); } } - } - ).times(2); - EasyMock.replay(testServletRequest); + + }; + } + }; queryResource = new QueryResource( new QueryLifecycleFactory( @@ -227,22 +257,30 @@ public Access isAuthorized( new NoopServiceEmitter(), new NoopRequestLogger(), serverConfig, - new AuthConfig(true) + new AuthConfig(null, null, null), + authMapper ), jsonMapper, jsonMapper, queryManager, - new AuthConfig(true) + new AuthConfig(null, null, null), + authMapper, + new DefaultGenericQueryMetricsFactory(jsonMapper) ); - Response response = queryResource.doPost( - new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), - null /*pretty*/, - testServletRequest - ); - Assert.assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus()); - response = queryResource.doPost( + try { + queryResource.doPost( + new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), + null /*pretty*/, + testServletRequest + ); + Assert.fail("doPost did not throw ForbiddenException for an unauthorized query"); + } + catch (ForbiddenException e) { + } + + Response response = queryResource.doPost( new ByteArrayInputStream("{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"}".getBytes("UTF-8")), null /*pretty*/, testServletRequest @@ -260,13 +298,27 @@ public void testSecuredGetServer() throws Exception final CountDownLatch startAwaitLatch = new CountDownLatch(1); final CountDownLatch cancelledCountDownLatch = new CountDownLatch(1); - EasyMock.expect(testServletRequest.getAttribute(EasyMock.anyString())).andReturn( - new AuthorizationInfo() + 
EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + + EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .anyTimes(); + + testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().times(1); + + EasyMock.replay(testServletRequest); + + AuthorizerMapper authMapper = new AuthorizerMapper(null) { + @Override + public Authorizer getAuthorizer(String name) + { + return new Authorizer() { @Override - public Access isAuthorized( - Resource resource, Action action - ) + public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action) { // READ action corresponds to the query // WRITE corresponds to cancellation of query @@ -289,9 +341,10 @@ public Access isAuthorized( return new Access(true); } } - } - ).times(2); - EasyMock.replay(testServletRequest); + + }; + } + }; queryResource = new QueryResource( new QueryLifecycleFactory( @@ -301,12 +354,15 @@ public Access isAuthorized( new NoopServiceEmitter(), new NoopRequestLogger(), serverConfig, - new AuthConfig(true) + new AuthConfig(null, null, null), + authMapper ), jsonMapper, jsonMapper, queryManager, - new AuthConfig(true) + new AuthConfig(null, null, null), + authMapper, + new DefaultGenericQueryMetricsFactory(jsonMapper) ); final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"," @@ -366,13 +422,31 @@ public void testDenySecuredGetServer() throws Exception final CountDownLatch waitFinishLatch = new CountDownLatch(2); final CountDownLatch startAwaitLatch = new CountDownLatch(1); - EasyMock.expect(testServletRequest.getAttribute(EasyMock.anyString())).andReturn( - new AuthorizationInfo() + EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + + 
EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .anyTimes(); + + testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().times(1); + + testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false); + EasyMock.expectLastCall().times(1); + + EasyMock.replay(testServletRequest); + + AuthorizerMapper authMapper = new AuthorizerMapper(null) + { + @Override + public Authorizer getAuthorizer(String name) + { + return new Authorizer() { @Override - public Access isAuthorized( - Resource resource, Action action - ) + public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action) { // READ action corresponds to the query // WRITE corresponds to cancellation of query @@ -389,9 +463,10 @@ public Access isAuthorized( return new Access(false); } } - } - ).times(2); - EasyMock.replay(testServletRequest); + + }; + } + }; queryResource = new QueryResource( new QueryLifecycleFactory( @@ -401,12 +476,15 @@ public Access isAuthorized( new NoopServiceEmitter(), new NoopRequestLogger(), serverConfig, - new AuthConfig(true) + new AuthConfig(null, null, null), + authMapper ), jsonMapper, jsonMapper, queryManager, - new AuthConfig(true) + new AuthConfig(null, null, null), + authMapper, + new DefaultGenericQueryMetricsFactory(jsonMapper) ); final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"," @@ -448,10 +526,13 @@ public void run() @Override public void run() { - Response response = queryResource.getServer("id_1", testServletRequest); - Assert.assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus()); - waitForCancellationLatch.countDown(); - waitFinishLatch.countDown(); + try { + queryResource.getServer("id_1", testServletRequest); + } + catch (ForbiddenException e) { + waitForCancellationLatch.countDown(); + waitFinishLatch.countDown(); + } } } ); diff 
--git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index a254ff9d47e1..3b98960b8e13 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -30,7 +30,10 @@ import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.Resource; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; @@ -121,8 +124,17 @@ public void testGetFullQueryableDataSources() throws Exception EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); - EasyMock.replay(inventoryView, server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + EasyMock.replay(inventoryView, server, request); + DatasourcesResource datasourcesResource = new DatasourcesResource( + inventoryView, + null, + null, + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER + ); Response response = datasourcesResource.getQueryableDataSources("full", null, request); Set result = (Set) response.getEntity(); DruidDataSource[] resultantDruidDataSources = new DruidDataSource[result.size()]; @@ -143,19 +155,28 @@ public void testGetFullQueryableDataSources() throws Exception @Test public void testSecuredGetFullQueryableDataSources() throws Exception { + AuthenticationResult authenticationResult = new 
AuthenticationResult("druid", "druid"); + + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .anyTimes(); EasyMock.expect(server.getDataSources()).andReturn( ImmutableList.of(listDataSources.get(0), listDataSources.get(1)) ).atLeastOnce(); + EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); - EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTH_TOKEN)).andReturn( - new AuthorizationInfo() + EasyMock.replay(inventoryView, server, request); + + AuthorizerMapper authMapper = new AuthorizerMapper(null) { + @Override + public Authorizer getAuthorizer(String name) + { + return new Authorizer() { @Override - public Access isAuthorized( - Resource resource, Action action - ) + public Access authorize(AuthenticationResult authenticationResult1, Resource resource, Action action) { if (resource.getName().equals("datasource1")) { return new Access(true); @@ -163,11 +184,18 @@ public Access isAuthorized( return new Access(false); } } - } - ).times(2); - EasyMock.replay(inventoryView, server, request); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(true)); + }; + } + }; + + DatasourcesResource datasourcesResource = new DatasourcesResource( + inventoryView, + null, + null, + new AuthConfig(null, null, null), + authMapper + ); Response response = datasourcesResource.getQueryableDataSources("full", null, request); Set result = (Set) response.getEntity(); DruidDataSource[] resultantDruidDataSources = new DruidDataSource[result.size()]; @@ -204,9 +232,18 @@ public void testGetSimpleQueryableDataSources() throws Exception EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); - EasyMock.replay(inventoryView, 
server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + EasyMock.replay(inventoryView, server, request); + DatasourcesResource datasourcesResource = new DatasourcesResource( + inventoryView, + null, + null, + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER + ); Response response = datasourcesResource.getQueryableDataSources(null, "simple", request); Assert.assertEquals(200, response.getStatus()); List> results = (List>) response.getEntity(); @@ -233,7 +270,7 @@ public void testFullGetTheDataSource() throws Exception ).atLeastOnce(); EasyMock.replay(inventoryView, server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Response response = datasourcesResource.getTheDataSource("datasource1", "full"); DruidDataSource result = (DruidDataSource) response.getEntity(); Assert.assertEquals(200, response.getStatus()); @@ -250,7 +287,7 @@ public void testNullGetTheDataSource() throws Exception ).atLeastOnce(); EasyMock.replay(inventoryView, server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Assert.assertEquals(204, datasourcesResource.getTheDataSource("none", null).getStatus()); EasyMock.verify(inventoryView, server); } @@ -272,7 +309,7 @@ public void testSimpleGetTheDataSource() throws Exception ).atLeastOnce(); EasyMock.replay(inventoryView, server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Response response = 
datasourcesResource.getTheDataSource("datasource1", null); Assert.assertEquals(200, response.getStatus()); Map> result = (Map>) response.getEntity(); @@ -311,7 +348,7 @@ public void testSimpleGetTheDataSourceManyTiers() throws Exception ).atLeastOnce(); EasyMock.replay(inventoryView, server, server2, server3); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Response response = datasourcesResource.getTheDataSource("datasource1", null); Assert.assertEquals(200, response.getStatus()); Map> result = (Map>) response.getEntity(); @@ -342,7 +379,7 @@ public void testGetSegmentDataSourceIntervals() List expectedIntervals = new ArrayList<>(); expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Response response = datasourcesResource.getSegmentDataSourceIntervals("invalidDataSource", null, null); Assert.assertEquals(response.getEntity(), null); @@ -389,7 +426,7 @@ public void testGetSegmentDataSourceSpecificInterval() ).atLeastOnce(); EasyMock.replay(inventoryView); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig(), null); Response response = datasourcesResource.getSegmentDataSourceSpecificInterval( "invalidDataSource", "2010-01-01/P1D", @@ -456,7 +493,13 @@ public void testDeleteDataSourceSpecificInterval() throws Exception EasyMock.expectLastCall().once(); 
EasyMock.replay(indexingServiceClient, server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, indexingServiceClient, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource( + inventoryView, + null, + indexingServiceClient, + new AuthConfig(), + null + ); Response response = datasourcesResource.deleteDataSourceSpecificInterval("datasource1", interval); Assert.assertEquals(200, response.getStatus()); @@ -469,7 +512,13 @@ public void testDeleteDataSource() { IndexingServiceClient indexingServiceClient = EasyMock.createStrictMock(IndexingServiceClient.class); EasyMock.replay(indexingServiceClient, server); - DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, indexingServiceClient, new AuthConfig()); + DatasourcesResource datasourcesResource = new DatasourcesResource( + inventoryView, + null, + indexingServiceClient, + new AuthConfig(), + null + ); Response response = datasourcesResource.deleteDataSource("datasource", "true", "???"); Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); diff --git a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java index 5ec8f6857d9a..c6d8eff10be6 100644 --- a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java +++ b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java @@ -25,6 +25,8 @@ import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthenticationResult; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; import org.joda.time.Interval; @@ -106,12 +108,19 @@ public void testGetIntervals() EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); - 
EasyMock.replay(inventoryView); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + EasyMock.replay(inventoryView, request); List expectedIntervals = new ArrayList<>(); expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); - IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); + IntervalsResource intervalsResource = new IntervalsResource( + inventoryView, + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER + ); Response response = intervalsResource.getIntervals(request); TreeMap>> actualIntervals = (TreeMap) response.getEntity(); @@ -133,11 +142,18 @@ public void testSimpleGetSpecificIntervals() EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); - EasyMock.replay(inventoryView); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + EasyMock.replay(inventoryView, request); List expectedIntervals = new ArrayList<>(); expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); + IntervalsResource intervalsResource = new IntervalsResource( + inventoryView, + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER + ); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", "simple", null, request); Map> actualIntervals = (Map) response.getEntity(); @@ -154,11 +170,18 @@ public void testFullGetSpecificIntervals() EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); - EasyMock.replay(inventoryView); + 
EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + EasyMock.replay(inventoryView, request); List expectedIntervals = new ArrayList<>(); expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); + IntervalsResource intervalsResource = new IntervalsResource( + inventoryView, + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER + ); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", null, "full", request); TreeMap>> actualIntervals = (TreeMap) response.getEntity(); @@ -177,16 +200,22 @@ public void testGetSpecificIntervals() EasyMock.expect(inventoryView.getInventory()).andReturn( ImmutableList.of(server) ).atLeastOnce(); - EasyMock.replay(inventoryView); + EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn( + new AuthenticationResult("druid", "druid") + ).atLeastOnce(); + EasyMock.replay(inventoryView, request); - IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); + IntervalsResource intervalsResource = new IntervalsResource( + inventoryView, + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER + ); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", null, null, request); Map actualIntervals = (Map) response.getEntity(); Assert.assertEquals(2, actualIntervals.size()); Assert.assertEquals(25L, actualIntervals.get("size")); Assert.assertEquals(2, actualIntervals.get("count")); - } @After diff --git a/server/src/test/java/io/druid/server/http/security/PreResponseAuthorizationCheckFilterTest.java b/server/src/test/java/io/druid/server/http/security/PreResponseAuthorizationCheckFilterTest.java new file mode 100644 index 000000000000..e9cf0a23fa4b --- /dev/null +++ 
b/server/src/test/java/io/druid/server/http/security/PreResponseAuthorizationCheckFilterTest.java @@ -0,0 +1,134 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.http.security; + +import com.google.common.collect.Lists; +import com.metamx.emitter.EmittingLogger; +import com.metamx.emitter.service.ServiceEmitter; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.ISE; +import io.druid.server.security.AllowAllAuthenticator; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authenticator; +import io.druid.server.security.PreResponseAuthorizationCheckFilter; +import org.easymock.EasyMock; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import javax.servlet.FilterChain; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.util.List; + +public class PreResponseAuthorizationCheckFilterTest +{ + private static List authenticators = Lists.newArrayList(new AllowAllAuthenticator()); + + @Rule + public ExpectedException 
expectedException = ExpectedException.none(); + + @Test + public void testValidRequest() throws Exception + { + AuthenticationResult authenticationResult = new AuthenticationResult("so-very-valid", "so-very-valid"); + + HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); + HttpServletResponse resp = EasyMock.createStrictMock(HttpServletResponse.class); + FilterChain filterChain = EasyMock.createNiceMock(FilterChain.class); + ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class); + + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(authenticationResult).once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(true).once(); + EasyMock.replay(req, resp, filterChain, outputStream); + + PreResponseAuthorizationCheckFilter filter = new PreResponseAuthorizationCheckFilter( + authenticators, + new DefaultObjectMapper() + ); + filter.doFilter(req, resp, filterChain); + EasyMock.verify(req, resp, filterChain, outputStream); + } + + @Test + public void testAuthenticationFailedRequest() throws Exception + { + HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); + HttpServletResponse resp = EasyMock.createStrictMock(HttpServletResponse.class); + FilterChain filterChain = EasyMock.createNiceMock(FilterChain.class); + ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class); + + EasyMock.expect(resp.getOutputStream()).andReturn(outputStream).once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(null).once(); + resp.setStatus(401); + EasyMock.expectLastCall().once(); + resp.setContentType("application/json"); + EasyMock.expectLastCall().once(); + resp.setCharacterEncoding("UTF-8"); + EasyMock.expectLastCall().once(); + EasyMock.replay(req, resp, filterChain, outputStream); + + PreResponseAuthorizationCheckFilter filter = new PreResponseAuthorizationCheckFilter( 
+ authenticators, + new DefaultObjectMapper() + ); + filter.doFilter(req, resp, filterChain); + EasyMock.verify(req, resp, filterChain, outputStream); + } + + @Test + public void testMissingAuthorizationCheck() throws Exception + { + EmittingLogger.registerEmitter(EasyMock.createNiceMock(ServiceEmitter.class)); + + expectedException.expect(ISE.class); + expectedException.expectMessage("Request did not have an authorization check performed."); + + AuthenticationResult authenticationResult = new AuthenticationResult("so-very-valid", "so-very-valid"); + + HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); + HttpServletResponse resp = EasyMock.createStrictMock(HttpServletResponse.class); + FilterChain filterChain = EasyMock.createNiceMock(FilterChain.class); + ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class); + + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(authenticationResult).once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).once(); + EasyMock.expect(resp.getStatus()).andReturn(200).once(); + EasyMock.expect(req.getRequestURI()).andReturn("uri").once(); + EasyMock.expect(req.getMethod()).andReturn("GET").once(); + EasyMock.expect(resp.isCommitted()).andReturn(true).once(); + resp.setStatus(403); + EasyMock.expectLastCall().once(); + resp.setContentType("application/json"); + EasyMock.expectLastCall().once(); + resp.setCharacterEncoding("UTF-8"); + EasyMock.expectLastCall().once(); + EasyMock.replay(req, resp, filterChain, outputStream); + + PreResponseAuthorizationCheckFilter filter = new PreResponseAuthorizationCheckFilter( + authenticators, + new DefaultObjectMapper() + ); + filter.doFilter(req, resp, filterChain); + EasyMock.verify(req, resp, filterChain, outputStream); + } +} diff --git a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java 
b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java index c594a14bd2bf..eae5e99cdfed 100644 --- a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java +++ b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java @@ -36,7 +36,9 @@ import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; -import io.druid.server.security.AuthorizationInfo; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authorizer; +import io.druid.server.security.AuthorizerMapper; import io.druid.server.security.Resource; import org.easymock.EasyMock; @@ -56,18 +58,19 @@ public class ResourceFilterTestHelper { public HttpServletRequest req; - public AuthorizationInfo authorizationInfo; + public AuthorizerMapper authorizerMapper; public ContainerRequest request; public void setUp(ResourceFilter resourceFilter) throws Exception { req = EasyMock.createStrictMock(HttpServletRequest.class); request = EasyMock.createStrictMock(ContainerRequest.class); - authorizationInfo = EasyMock.createStrictMock(AuthorizationInfo.class); + authorizerMapper = EasyMock.createStrictMock(AuthorizerMapper.class); // Memory barrier synchronized (this) { ((AbstractResourceFilter) resourceFilter).setReq(req); + ((AbstractResourceFilter) resourceFilter).setAuthorizerMapper(authorizerMapper); } } @@ -107,14 +110,26 @@ public MultivaluedMap getMatrixParameters() ) ).anyTimes(); EasyMock.expect(request.getMethod()).andReturn(requestMethod).anyTimes(); - EasyMock.expect(req.getAttribute(EasyMock.anyString())).andReturn(authorizationInfo).atLeastOnce(); - EasyMock.expect(authorizationInfo.isAuthorized( - EasyMock.anyObject(Resource.class), - EasyMock.anyObject(Action.class) + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes(); + AuthenticationResult authenticationResult = new 
AuthenticationResult("druid", "druid"); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(authenticationResult) + .atLeastOnce(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, authCheckResult); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(authorizerMapper.getAuthorizer( + EasyMock.anyString() )).andReturn( - new Access(authCheckResult) - ).atLeastOnce(); + new Authorizer() + { + @Override + public Access authorize(AuthenticationResult authenticationResult1, Resource resource, Action action) + { + return new Access(authCheckResult); + } + } + ).atLeastOnce(); } public static Collection getRequestPaths(final Class clazz) @@ -122,6 +137,11 @@ public static Collection getRequestPaths(final Class clazz) return getRequestPaths(clazz, ImmutableList.>of(), ImmutableList.>of()); } + public static Collection getRequestPathsWithAuthorizer(final Class clazz) + { + return getRequestPaths(clazz, ImmutableList.>of(AuthorizerMapper.class), ImmutableList.>of()); + } + public static Collection getRequestPaths( final Class clazz, final Iterable> mockableInjections @@ -162,7 +182,7 @@ public void configure(Binder binder) for (Key key : mockableKeys) { binder.bind((Key) key).toInstance(EasyMock.createNiceMock(key.getTypeLiteral().getRawType())); } - binder.bind(AuthConfig.class).toInstance(new AuthConfig(true)); + binder.bind(AuthConfig.class).toInstance(new AuthConfig(null, null, null)); } } ); diff --git a/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java b/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java index 31803052e427..92e9ee79b276 100644 --- a/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java +++ b/server/src/test/java/io/druid/server/http/security/SecurityResourceFilterTest.java @@ -37,6 +37,7 @@ import io.druid.server.http.RulesResource; import io.druid.server.http.ServersResource; import 
io.druid.server.http.TiersResource; +import io.druid.server.security.ForbiddenException; import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Before; @@ -44,8 +45,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Response; import java.util.Collection; @RunWith(Parameterized.class) @@ -56,20 +55,20 @@ public static Collection data() { return ImmutableList.copyOf( Iterables.concat( - getRequestPaths(CoordinatorResource.class), - getRequestPaths(DatasourcesResource.class), - getRequestPaths(BrokerResource.class), - getRequestPaths(HistoricalResource.class), - getRequestPaths(IntervalsResource.class), - getRequestPaths(MetadataResource.class), - getRequestPaths(RulesResource.class), - getRequestPaths(ServersResource.class), - getRequestPaths(TiersResource.class), - getRequestPaths(ClientInfoResource.class), - getRequestPaths(CoordinatorDynamicConfigsResource.class), - getRequestPaths(QueryResource.class), - getRequestPaths(StatusResource.class), - getRequestPaths(BrokerQueryResource.class) + getRequestPathsWithAuthorizer(CoordinatorResource.class), + getRequestPathsWithAuthorizer(DatasourcesResource.class), + getRequestPathsWithAuthorizer(BrokerResource.class), + getRequestPathsWithAuthorizer(HistoricalResource.class), + getRequestPathsWithAuthorizer(IntervalsResource.class), + getRequestPathsWithAuthorizer(MetadataResource.class), + getRequestPathsWithAuthorizer(RulesResource.class), + getRequestPathsWithAuthorizer(ServersResource.class), + getRequestPathsWithAuthorizer(TiersResource.class), + getRequestPathsWithAuthorizer(ClientInfoResource.class), + getRequestPathsWithAuthorizer(CoordinatorDynamicConfigsResource.class), + getRequestPathsWithAuthorizer(QueryResource.class), + getRequestPathsWithAuthorizer(StatusResource.class), + getRequestPathsWithAuthorizer(BrokerQueryResource.class) ) ); } @@ -102,35 +101,34 @@ public void setUp() throws Exception 
public void testResourcesFilteringAccess() { setUpMockExpectations(requestPath, true, requestMethod); - EasyMock.replay(req, request, authorizationInfo); + EasyMock.replay(req, request, authorizerMapper); Assert.assertTrue(((AbstractResourceFilter) resourceFilter.getRequestFilter()).isApplicable(requestPath)); resourceFilter.getRequestFilter().filter(request); - EasyMock.verify(req, request, authorizationInfo); + EasyMock.verify(req, request, authorizerMapper); } - @Test(expected = WebApplicationException.class) + @Test(expected = ForbiddenException.class) public void testResourcesFilteringNoAccess() { setUpMockExpectations(requestPath, false, requestMethod); - EasyMock.replay(req, request, authorizationInfo); + EasyMock.replay(req, request, authorizerMapper); Assert.assertTrue(((AbstractResourceFilter) resourceFilter.getRequestFilter()).isApplicable(requestPath)); try { resourceFilter.getRequestFilter().filter(request); + Assert.fail(); } - catch (WebApplicationException e) { - Assert.assertEquals(Response.Status.FORBIDDEN.getStatusCode(), e.getResponse().getStatus()); + catch (ForbiddenException e) { throw e; } - EasyMock.verify(req, request, authorizationInfo); + EasyMock.verify(req, request, authorizerMapper); } @Test public void testResourcesFilteringBadPath() { - EasyMock.replay(req, request, authorizationInfo); + EasyMock.replay(req, request, authorizerMapper); final String badRequestPath = requestPath.replaceAll("\\w+", "droid"); Assert.assertFalse(((AbstractResourceFilter) resourceFilter.getRequestFilter()).isApplicable(badRequestPath)); - EasyMock.verify(req, request, authorizationInfo); + EasyMock.verify(req, request, authorizerMapper); } - } diff --git a/server/src/test/java/io/druid/server/http/security/SecuritySanityCheckFilterTest.java b/server/src/test/java/io/druid/server/http/security/SecuritySanityCheckFilterTest.java new file mode 100644 index 000000000000..9a8e34e3b72c --- /dev/null +++ 
b/server/src/test/java/io/druid/server/http/security/SecuritySanityCheckFilterTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.http.security; + +import io.druid.jackson.DefaultObjectMapper; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.SecuritySanityCheckFilter; +import org.easymock.EasyMock; +import org.junit.Test; + +import javax.servlet.FilterChain; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +public class SecuritySanityCheckFilterTest +{ + @Test + public void testValidRequest() throws Exception + { + HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); + HttpServletResponse resp = EasyMock.createStrictMock(HttpServletResponse.class); + FilterChain filterChain = EasyMock.createStrictMock(FilterChain.class); + + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(null).once(); + filterChain.doFilter(req, resp); + 
EasyMock.expectLastCall().once(); + EasyMock.replay(req, filterChain); + SecuritySanityCheckFilter filter = new SecuritySanityCheckFilter(new DefaultObjectMapper()); + filter.doFilter(req, resp, filterChain); + EasyMock.verify(req, filterChain); + } + + @Test + public void testInvalidRequest() throws Exception + { + HttpServletRequest req = EasyMock.createStrictMock(HttpServletRequest.class); + HttpServletResponse resp = EasyMock.createStrictMock(HttpServletResponse.class); + FilterChain filterChain = EasyMock.createStrictMock(FilterChain.class); + ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class); + + AuthenticationResult authenticationResult = new AuthenticationResult("does-not-belong", "does-not-belong"); + + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(true).once(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(authenticationResult).once(); + EasyMock.expect(resp.getOutputStream()).andReturn(outputStream).once(); + resp.setStatus(403); + EasyMock.expectLastCall().once(); + resp.setContentType("application/json"); + EasyMock.expectLastCall().once(); + resp.setCharacterEncoding("UTF-8"); + EasyMock.expectLastCall().once(); + + EasyMock.replay(req, resp, filterChain, outputStream); + SecuritySanityCheckFilter filter = new SecuritySanityCheckFilter(new DefaultObjectMapper()); + filter.doFilter(req, resp, filterChain); + EasyMock.verify(req, resp, filterChain, outputStream); + } +} diff --git a/server/src/test/java/io/druid/server/initialization/JettyQosTest.java b/server/src/test/java/io/druid/server/initialization/JettyQosTest.java index 8897c7f553f2..f9f1219b7bfc 100644 --- a/server/src/test/java/io/druid/server/initialization/JettyQosTest.java +++ b/server/src/test/java/io/druid/server/initialization/JettyQosTest.java @@ -43,6 +43,8 @@ import io.druid.server.DruidNode; import io.druid.server.initialization.jetty.JettyBindings; import 
io.druid.server.initialization.jetty.JettyServerInitializer; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthorizerMapper; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -75,6 +77,7 @@ public void configure(Binder binder) Jerseys.addResource(binder, SlowResource.class); Jerseys.addResource(binder, ExceptionResource.class); Jerseys.addResource(binder, DefaultResource.class); + binder.bind(AuthorizerMapper.class).toInstance(AuthTestUtils.TEST_AUTHORIZER_MAPPER); JettyBindings.addQosFilter(binder, "/slow/*", 2); final ServerConfig serverConfig = new ObjectMapper().convertValue( ImmutableMap.of("numThreads", "10"), diff --git a/server/src/test/java/io/druid/server/initialization/JettyTest.java b/server/src/test/java/io/druid/server/initialization/JettyTest.java index a38c1340f536..1e9206f0b230 100644 --- a/server/src/test/java/io/druid/server/initialization/JettyTest.java +++ b/server/src/test/java/io/druid/server/initialization/JettyTest.java @@ -41,6 +41,8 @@ import io.druid.server.DruidNode; import io.druid.server.initialization.jetty.JettyServerInitializer; import io.druid.server.initialization.jetty.ServletFilterHolder; +import io.druid.server.security.AuthTestUtils; +import io.druid.server.security.AuthorizerMapper; import org.apache.commons.io.IOUtils; import org.eclipse.jetty.server.Server; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -126,6 +128,7 @@ public EnumSet getDispatcherType() Jerseys.addResource(binder, SlowResource.class); Jerseys.addResource(binder, ExceptionResource.class); Jerseys.addResource(binder, DefaultResource.class); + binder.bind(AuthorizerMapper.class).toInstance(AuthTestUtils.TEST_AUTHORIZER_MAPPER); LifecycleModule.register(binder, Server.class); } } diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index 
c3db181df839..c13a5424fa6f 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -19,7 +19,9 @@ package io.druid.cli; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; @@ -46,6 +48,7 @@ import io.druid.guice.ListProvider; import io.druid.guice.ManageLifecycle; import io.druid.guice.PolyBind; +import io.druid.guice.annotations.Json; import io.druid.indexing.common.actions.LocalTaskActionClientFactory; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.actions.TaskActionToolbox; @@ -86,6 +89,10 @@ import io.druid.server.http.RedirectInfo; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationUtils; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import io.druid.tasklogs.TaskLogStreamer; import io.druid.tasklogs.TaskLogs; import org.eclipse.jetty.server.Handler; @@ -109,6 +116,14 @@ public class CliOverlord extends ServerRunnable { private static Logger log = new Logger(CliOverlord.class); + private static List UNSECURED_PATHS = Lists.newArrayList( + "/", + "/console.html", + "/old-console/*", + "/images/*", + "/js/*" + ); + public CliOverlord() { super(log); @@ -299,8 +314,28 @@ public void initialize(Server server, Injector injector) } ) ); + + final AuthConfig authConfig = injector.getInstance(AuthConfig.class); + final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class); + + List authenticators = null; + 
AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); + authenticators = authenticatorMapper.getAuthenticatorChain(); + AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); + JettyServerInitUtils.addExtensionFilters(root, injector); + // perform no-op authorization for these static resources + AuthenticationUtils.addNoopAuthorizationFilters(root, UNSECURED_PATHS); + + // Check that requests were authorized before sending responses + AuthenticationUtils.addPreResponseAuthorizationCheckFilter( + root, + authenticators, + jsonMapper + ); + // /status should not redirect, so add first root.addFilter(GuiceFilter.class, "/status/*", null); diff --git a/services/src/main/java/io/druid/cli/CliPeon.java b/services/src/main/java/io/druid/cli/CliPeon.java index 953ef3b4b6f6..9676cb41c387 100644 --- a/services/src/main/java/io/druid/cli/CliPeon.java +++ b/services/src/main/java/io/druid/cli/CliPeon.java @@ -139,7 +139,7 @@ protected List getModules() public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/peon"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(-1); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); PolyBind.createChoice( diff --git a/services/src/main/java/io/druid/cli/CliRouter.java b/services/src/main/java/io/druid/cli/CliRouter.java index faebceb9b4d6..8e06948d4766 100644 --- a/services/src/main/java/io/druid/cli/CliRouter.java +++ b/services/src/main/java/io/druid/cli/CliRouter.java @@ -41,7 +41,7 @@ import io.druid.guice.QueryRunnerFactoryModule; import io.druid.guice.QueryableModule; import io.druid.guice.RouterProcessingModule; -import io.druid.guice.annotations.Global; +import io.druid.guice.annotations.EscalatedGlobal; import io.druid.guice.annotations.Self; import io.druid.guice.http.JettyHttpClientModule; import 
io.druid.java.util.common.logger.Logger; @@ -138,7 +138,7 @@ public ServerDiscoverySelector getCoordinatorServerDiscoverySelector( @Provides @ManageLifecycle public DruidLeaderClient getLeaderHttpClient( - @Global HttpClient httpClient, + @EscalatedGlobal HttpClient httpClient, DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, ServerDiscoverySelector serverDiscoverySelector ) diff --git a/services/src/main/java/io/druid/cli/CoordinatorJettyServerInitializer.java b/services/src/main/java/io/druid/cli/CoordinatorJettyServerInitializer.java index 14c5d758023f..6f3e96cb2e63 100644 --- a/services/src/main/java/io/druid/cli/CoordinatorJettyServerInitializer.java +++ b/services/src/main/java/io/druid/cli/CoordinatorJettyServerInitializer.java @@ -19,14 +19,23 @@ package io.druid.cli; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Injector; +import com.google.inject.Key; import com.google.inject.servlet.GuiceFilter; +import io.druid.guice.annotations.Json; +import io.druid.java.util.common.logger.Logger; import io.druid.server.coordinator.DruidCoordinatorConfig; import io.druid.server.http.OverlordProxyServlet; import io.druid.server.http.RedirectFilter; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationUtils; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.HandlerList; @@ -37,12 +46,27 @@ import org.eclipse.jetty.util.resource.Resource; import org.eclipse.jetty.util.resource.ResourceCollection; +import java.util.List; import java.util.Properties; /** */ class CoordinatorJettyServerInitializer implements 
JettyServerInitializer { + private static List UNSECURED_PATHS = Lists.newArrayList( + "/favicon.ico", + "/css/*", + "/druid.js", + "/druid.css", + "/pages/*", + "/fonts/*", + "/old-console/*", + "/coordinator/false", + "/overlord/false" + ); + + private static Logger log = new Logger(CoordinatorJettyServerInitializer.class); + private final DruidCoordinatorConfig config; private final boolean beOverlord; @@ -81,8 +105,28 @@ public void initialize(Server server, Injector injector) // used for console development root.setResourceBase(config.getConsoleStatic()); } + + final AuthConfig authConfig = injector.getInstance(AuthConfig.class); + final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class); + + List authenticators = null; + AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); + authenticators = authenticatorMapper.getAuthenticatorChain(); + AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); + JettyServerInitUtils.addExtensionFilters(root, injector); + // perform no-op authorization for these static resources + AuthenticationUtils.addNoopAuthorizationFilters(root, UNSECURED_PATHS); + + // Check that requests were authorized before sending responses + AuthenticationUtils.addPreResponseAuthorizationCheckFilter( + root, + authenticators, + jsonMapper + ); + // /status should not redirect, so add first root.addFilter(GuiceFilter.class, "/status/*", null); diff --git a/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java b/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java index 48c99baa6edc..f66676b9205f 100644 --- a/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java +++ b/services/src/main/java/io/druid/cli/MiddleManagerJettyServerInitializer.java @@ -19,10 +19,18 @@ package io.druid.cli; +import 
com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Injector; +import com.google.inject.Key; import com.google.inject.servlet.GuiceFilter; +import io.druid.guice.annotations.Json; +import io.druid.java.util.common.logger.Logger; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationUtils; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.DefaultHandler; @@ -31,16 +39,39 @@ import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; +import java.util.List; + /** */ class MiddleManagerJettyServerInitializer implements JettyServerInitializer { + private static Logger log = new Logger(MiddleManagerJettyServerInitializer.class); + @Override public void initialize(Server server, Injector injector) { final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS); root.addServlet(new ServletHolder(new DefaultServlet()), "/*"); + + final AuthConfig authConfig = injector.getInstance(AuthConfig.class); + final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class); + + List authenticators = null; + AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); + authenticators = authenticatorMapper.getAuthenticatorChain(); + AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); + + JettyServerInitUtils.addExtensionFilters(root, injector); + + // Check that requests were authorized before sending responses + AuthenticationUtils.addPreResponseAuthorizationCheckFilter( + root, + authenticators, + 
jsonMapper + ); + root.addFilter(GuiceFilter.class, "/*", null); final HandlerList handlerList = new HandlerList(); diff --git a/services/src/main/java/io/druid/cli/QueryJettyServerInitializer.java b/services/src/main/java/io/druid/cli/QueryJettyServerInitializer.java index d3d3f8b9fd43..7ebaa2d2f20a 100644 --- a/services/src/main/java/io/druid/cli/QueryJettyServerInitializer.java +++ b/services/src/main/java/io/druid/cli/QueryJettyServerInitializer.java @@ -19,12 +19,20 @@ package io.druid.cli; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.inject.Inject; import com.google.inject.Injector; +import com.google.inject.Key; import com.google.inject.servlet.GuiceFilter; +import io.druid.guice.annotations.Json; +import io.druid.java.util.common.logger.Logger; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationUtils; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.HandlerList; @@ -39,6 +47,8 @@ */ public class QueryJettyServerInitializer implements JettyServerInitializer { + private static Logger log = new Logger(QueryJettyServerInitializer.class); + private final List extensionHandlers; @Inject @@ -52,8 +62,25 @@ public void initialize(Server server, Injector injector) { final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS); root.addServlet(new ServletHolder(new DefaultServlet()), "/*"); + + final AuthConfig authConfig = injector.getInstance(AuthConfig.class); + final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + final AuthenticatorMapper authenticatorMapper = 
injector.getInstance(AuthenticatorMapper.class); + + List authenticators = null; + AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); + authenticators = authenticatorMapper.getAuthenticatorChain(); + AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); + JettyServerInitUtils.addExtensionFilters(root, injector); + // Check that requests were authorized before sending responses + AuthenticationUtils.addPreResponseAuthorizationCheckFilter( + root, + authenticators, + jsonMapper + ); + root.addFilter(GuiceFilter.class, "/*", null); final HandlerList handlerList = new HandlerList(); diff --git a/services/src/main/java/io/druid/cli/RouterJettyServerInitializer.java b/services/src/main/java/io/druid/cli/RouterJettyServerInitializer.java index 2cd95bed80f6..f7acecaa1027 100644 --- a/services/src/main/java/io/druid/cli/RouterJettyServerInitializer.java +++ b/services/src/main/java/io/druid/cli/RouterJettyServerInitializer.java @@ -19,14 +19,22 @@ package io.druid.cli; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; import com.google.inject.Injector; +import com.google.inject.Key; import com.google.inject.servlet.GuiceFilter; +import io.druid.guice.annotations.Json; import io.druid.guice.http.DruidHttpClientConfig; +import io.druid.java.util.common.logger.Logger; import io.druid.server.AsyncQueryForwardingServlet; import io.druid.server.initialization.jetty.JettyServerInitUtils; import io.druid.server.initialization.jetty.JettyServerInitializer; import io.druid.server.router.Router; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationUtils; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.HandlerList; @@ -34,10 +42,14 @@ import org.eclipse.jetty.servlet.ServletContextHandler; import 
org.eclipse.jetty.servlet.ServletHolder; +import java.util.List; + /** */ public class RouterJettyServerInitializer implements JettyServerInitializer { + private static Logger log = new Logger(RouterJettyServerInitializer.class); + private final AsyncQueryForwardingServlet asyncQueryForwardingServlet; private final DruidHttpClientConfig httpClientConfig; @@ -69,7 +81,26 @@ public void initialize(Server server, Injector injector) sh.setInitParameter("timeout", Long.toString(httpClientConfig.getReadTimeout().getMillis())); root.addServlet(sh, "/druid/v2/*"); + + final AuthConfig authConfig = injector.getInstance(AuthConfig.class); + final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)); + final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class); + + List authenticators = null; + AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper); + authenticators = authenticatorMapper.getAuthenticatorChain(); + AuthenticationUtils.addAuthenticationFilterChain(root, authenticators); + JettyServerInitUtils.addExtensionFilters(root, injector); + + // Check that requests were authorized before sending responses + AuthenticationUtils.addPreResponseAuthorizationCheckFilter( + root, + authenticators, + jsonMapper + ); + + // Can't use '/*' here because of Guice conflicts with AsyncQueryForwardingServlet path root.addFilter(GuiceFilter.class, "/status/*", null); root.addFilter(GuiceFilter.class, "/druid/router/*", null); diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java index e6779458b57c..471efce6a904 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidAvaticaHandler.java @@ -56,9 +56,6 @@ public void handle( final HttpServletResponse response ) throws IOException, ServletException { - // This is not integrated with the experimental 
authorization framework. - // (Non-trivial since we don't know the dataSources up-front) - if (request.getRequestURI().equals(AVATICA_PATH)) { super.handle(target, baseRequest, request, response); } diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/io/druid/sql/avatica/DruidConnection.java index 8e78800f22fa..8af20c2b8caa 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidConnection.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidConnection.java @@ -20,14 +20,20 @@ package io.druid.sql.avatica; import com.google.common.base.Preconditions; +import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSortedMap; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; +import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import java.util.HashMap; import java.util.Map; +import java.util.Set; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -38,6 +44,9 @@ public class DruidConnection { private static final Logger log = new Logger(DruidConnection.class); + private static final Set SENSITIVE_CONTEXT_FIELDS = Sets.newHashSet( + "user", "password" + ); private final String connectionId; private final int maxStatements; @@ -74,13 +83,33 @@ public DruidStatement createStatement() throw new ISE("Too many open statements, limit is[%,d]", maxStatements); } - final DruidStatement statement = new DruidStatement(connectionId, statementId, context, () -> { - // onClose function for the statement - synchronized (statements) { - log.debug("Connection[%s] closed statement[%s].", connectionId, statementId); - statements.remove(statementId); - } - }); + // remove sensitive fields from the 
context, only the connection's context needs to have authentication + // credentials + Map sanitizedContext = Maps.newHashMap(); + sanitizedContext = Maps.filterEntries( + context, + new Predicate>() + { + @Override + public boolean apply(@Nullable Map.Entry input) + { + return !SENSITIVE_CONTEXT_FIELDS.contains(input.getKey()); + } + } + ); + + final DruidStatement statement = new DruidStatement( + connectionId, + statementId, + ImmutableSortedMap.copyOf(sanitizedContext), + () -> { + // onClose function for the statement + synchronized (statements) { + log.debug("Connection[%s] closed statement[%s].", connectionId, statementId); + statements.remove(statementId); + } + } + ); statements.put(statementId, statement); log.debug("Connection[%s] opened statement[%s].", connectionId, statementId); @@ -138,4 +167,9 @@ public DruidConnection sync(final Future newTimeoutFuture) } return this; } + + public Map context() + { + return context; + } } diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java index f3c819ca05b1..eb1a32961565 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java @@ -29,10 +29,16 @@ import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.inject.Inject; +import com.google.inject.Injector; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; +import io.druid.server.security.ForbiddenException; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.PlannerFactory; import 
org.apache.calcite.avatica.MetaImpl; @@ -48,7 +54,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -58,11 +63,12 @@ public class DruidMeta extends MetaImpl { private static final Logger log = new Logger(DruidMeta.class); - private static final Set SKIP_PROPERTIES = ImmutableSet.of("user", "password"); private final PlannerFactory plannerFactory; private final ScheduledExecutorService exec; private final AvaticaServerConfig config; + private final AuthConfig authConfig; + private final List authenticators; // Used to track logical connections. private final Map connections = new ConcurrentHashMap<>(); @@ -72,17 +78,26 @@ public class DruidMeta extends MetaImpl private final AtomicInteger connectionCount = new AtomicInteger(); @Inject - public DruidMeta(final PlannerFactory plannerFactory, final AvaticaServerConfig config) + public DruidMeta( + final PlannerFactory plannerFactory, + final AvaticaServerConfig config, + final AuthConfig authConfig, + final Injector injector + ) { super(null); this.plannerFactory = Preconditions.checkNotNull(plannerFactory, "plannerFactory"); this.config = config; + this.authConfig = authConfig; this.exec = Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder() .setNameFormat(StringUtils.format("DruidMeta@%s-ScheduledExecutor", Integer.toHexString(hashCode()))) .setDaemon(true) .build() ); + + final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class); + this.authenticators = authenticatorMapper.getAuthenticatorChain(); } @Override @@ -91,9 +106,7 @@ public void openConnection(final ConnectionHandle ch, final Map // Build connection context. 
final ImmutableMap.Builder context = ImmutableMap.builder(); for (Map.Entry entry : info.entrySet()) { - if (!SKIP_PROPERTIES.contains(entry.getKey())) { - context.put(entry); - } + context.put(entry); } openDruidConnection(ch.id, context.build()); } @@ -132,7 +145,12 @@ public StatementHandle prepare( { final StatementHandle statement = createStatement(ch); final DruidStatement druidStatement = getDruidStatement(statement); - statement.signature = druidStatement.prepare(plannerFactory, sql, maxRowCount).getSignature(); + final DruidConnection druidConnection = getDruidConnection(statement.connectionId); + AuthenticationResult authenticationResult = authenticateConnection(druidConnection); + if (authenticationResult == null) { + throw new ForbiddenException("Authentication failed."); + } + statement.signature = druidStatement.prepare(plannerFactory, sql, maxRowCount, authenticationResult).getSignature(); return statement; } @@ -160,7 +178,12 @@ public ExecuteResult prepareAndExecute( { // Ignore "callback", this class is designed for use with LocalService which doesn't use it. 
final DruidStatement druidStatement = getDruidStatement(statement); - final Signature signature = druidStatement.prepare(plannerFactory, sql, maxRowCount).getSignature(); + final DruidConnection druidConnection = getDruidConnection(statement.connectionId); + AuthenticationResult authenticationResult = authenticateConnection(druidConnection); + if (authenticationResult == null) { + throw new ForbiddenException("Authentication failed."); + } + final Signature signature = druidStatement.prepare(plannerFactory, sql, maxRowCount, authenticationResult).getSignature(); final Frame firstFrame = druidStatement.execute() .nextFrame( DruidStatement.START_OFFSET, @@ -487,6 +510,18 @@ void closeAllConnections() } } + private AuthenticationResult authenticateConnection(final DruidConnection connection) + { + Map context = connection.context(); + for (Authenticator authenticator : authenticators) { + AuthenticationResult authenticationResult = authenticator.authenticateJDBCContext(context); + if (authenticationResult != null) { + return authenticationResult; + } + } + return null; + } + private DruidConnection openDruidConnection(final String connectionId, final Map context) { if (connectionCount.incrementAndGet() > config.getMaxConnections()) { diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java index 07e8e26f5881..e617e6b8b3a5 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java @@ -29,6 +29,7 @@ import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.guava.Yielder; import io.druid.java.util.common.guava.Yielders; +import io.druid.server.security.AuthenticationResult; import io.druid.sql.calcite.planner.DruidPlanner; import io.druid.sql.calcite.planner.PlannerFactory; import io.druid.sql.calcite.planner.PlannerResult; @@ -152,12 +153,17 @@ public static List createColumnMetaData(final 
RelDataType rowTyp return columns; } - public DruidStatement prepare(final PlannerFactory plannerFactory, final String query, final long maxRowCount) + public DruidStatement prepare( + final PlannerFactory plannerFactory, + final String query, + final long maxRowCount, + final AuthenticationResult authenticationResult + ) { try (final DruidPlanner planner = plannerFactory.createPlanner(queryContext)) { synchronized (lock) { ensure(State.NEW); - this.plannerResult = planner.plan(query); + this.plannerResult = planner.plan(query, null, authenticationResult); this.maxRowCount = maxRowCount; this.query = query; this.signature = Meta.Signature.create( diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidPlanner.java index 773e4b67f6bf..d9bf59fba875 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidPlanner.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidPlanner.java @@ -23,8 +23,17 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; +import io.druid.server.security.Access; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.AuthenticatorMapper; +import io.druid.server.security.AuthorizerMapper; +import io.druid.server.security.AuthorizationUtils; +import io.druid.server.security.ForbiddenException; import io.druid.sql.calcite.rel.DruidConvention; import io.druid.sql.calcite.rel.DruidRel; import org.apache.calcite.DataContext; @@ -34,9 +43,11 @@ import org.apache.calcite.interpreter.Bindables; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.plan.RelOptPlanner; +import org.apache.calcite.plan.RelOptTable; 
import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelRoot; +import org.apache.calcite.rel.RelVisitor; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; @@ -50,22 +61,47 @@ import org.apache.calcite.tools.ValidationException; import org.apache.calcite.util.Pair; +import javax.servlet.http.HttpServletRequest; import java.io.Closeable; import java.util.ArrayList; import java.util.List; +import java.util.Set; public class DruidPlanner implements Closeable { private final Planner planner; private final PlannerContext plannerContext; + private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; + private final AuthenticatorMapper authenticatorMapper; - public DruidPlanner(final Planner planner, final PlannerContext plannerContext) + public DruidPlanner( + final Planner planner, + final PlannerContext plannerContext, + final AuthConfig authConfig, + final AuthorizerMapper authorizerMapper, + final AuthenticatorMapper authenticatorMapper + ) { this.planner = planner; this.plannerContext = plannerContext; + this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; + this.authenticatorMapper = authenticatorMapper; } public PlannerResult plan(final String sql) throws SqlParseException, ValidationException, RelConversionException + { + AuthenticationResult authenticationResult = authenticatorMapper.getEscalatingAuthenticator() + .createEscalatedAuthenticationResult(); + return plan(sql, null, authenticationResult); + } + + public PlannerResult plan( + final String sql, + final HttpServletRequest request, + final AuthenticationResult authenticationResult + ) throws SqlParseException, ValidationException, RelConversionException, ForbiddenException { SqlExplain explain = null; SqlNode parsed = planner.parse(sql); @@ -77,12 +113,12 @@ public PlannerResult plan(final String sql) throws 
SqlParseException, Validation final RelRoot root = planner.rel(validated); try { - return planWithDruidConvention(explain, root); + return planWithDruidConvention(explain, root, request, authenticationResult); } catch (RelOptPlanner.CannotPlanException e) { // Try again with BINDABLE convention. Used for querying Values, metadata tables, and fallback. try { - return planWithBindableConvention(explain, root); + return planWithBindableConvention(explain, root, request, authenticationResult); } catch (Exception e2) { e.addSuppressed(e2); @@ -104,8 +140,10 @@ public void close() private PlannerResult planWithDruidConvention( final SqlExplain explain, - final RelRoot root - ) throws RelConversionException + final RelRoot root, + final HttpServletRequest request, + final AuthenticationResult authenticationResult + ) throws RelConversionException, ForbiddenException { final DruidRel druidRel = (DruidRel) planner.transform( Rules.DRUID_CONVENTION_RULES, @@ -115,6 +153,32 @@ private PlannerResult planWithDruidConvention( root.rel ); + List datasourceNames = druidRel.getDatasourceNames(); + // we'll eventually run a second authorization check at QueryLifecycle.runSimple(), so store the + // authentication result in the planner context. 
+ Access authResult; + if (request != null) { + authResult = AuthorizationUtils.authorizeAllResourceActions( + request, + Iterables.transform(datasourceNames, AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR), + authorizerMapper + ); + plannerContext.setAuthenticationResult( + (AuthenticationResult) request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) + ); + } else { + authResult = AuthorizationUtils.authorizeAllResourceActions( + authenticationResult, + Iterables.transform(datasourceNames, AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR), + authorizerMapper + ); + plannerContext.setAuthenticationResult(authenticationResult); + } + + if (!authResult.isAllowed()) { + throw new ForbiddenException(authResult.toString()); + } + if (explain != null) { return planExplanation(druidRel, explain); } else { @@ -149,9 +213,57 @@ public Object[] apply(final Object[] input) } } + private Access authorizeBindableRel( + BindableRel rel, + final PlannerContext plannerContext, + HttpServletRequest req, + final AuthenticationResult authenticationResult + ) + { + Set datasourceNames = Sets.newHashSet(); + rel.childrenAccept( + new RelVisitor() + { + @Override + public void visit(RelNode node, int ordinal, RelNode parent) + { + if (node instanceof DruidRel) { + datasourceNames.addAll(((DruidRel) node).getDatasourceNames()); + } + if (node instanceof Bindables.BindableTableScan) { + Bindables.BindableTableScan bts = (Bindables.BindableTableScan) node; + RelOptTable table = bts.getTable(); + String tableName = table.getQualifiedName().get(0); + datasourceNames.add(tableName); + } + node.childrenAccept(this); + } + } + ); + if (req != null) { + plannerContext.setAuthenticationResult( + (AuthenticationResult) req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT) + ); + return AuthorizationUtils.authorizeAllResourceActions( + req, + Iterables.transform(datasourceNames, AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR), + authorizerMapper + ); + } else { + 
plannerContext.setAuthenticationResult(authenticationResult); + return AuthorizationUtils.authorizeAllResourceActions( + authenticationResult, + Iterables.transform(datasourceNames, AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR), + authorizerMapper + ); + } + } + private PlannerResult planWithBindableConvention( final SqlExplain explain, - final RelRoot root + final RelRoot root, + final HttpServletRequest request, + final AuthenticationResult authenticationResult ) throws RelConversionException { BindableRel bindableRel = (BindableRel) planner.transform( @@ -178,6 +290,11 @@ private PlannerResult planWithBindableConvention( ); } + Access accessResult = authorizeBindableRel(bindableRel, plannerContext, request, authenticationResult); + if (!accessResult.isAllowed()) { + throw new ForbiddenException(accessResult.toString()); + } + if (explain != null) { return planExplanation(bindableRel, explain); } else { diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java index cc4b6116a630..a385a0be1ba9 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerContext.java @@ -21,7 +21,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import io.druid.math.expr.ExprMacroTable; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.AuthorizerMapper; import org.apache.calcite.DataContext; import org.apache.calcite.adapter.java.JavaTypeFactory; import org.apache.calcite.linq4j.QueryProvider; @@ -40,6 +43,7 @@ public class PlannerContext { public static final String CTX_SQL_CURRENT_TIMESTAMP = "sqlCurrentTimestamp"; public static final String CTX_SQL_TIME_ZONE = "sqlTimeZone"; + public static final String CTX_AUTHENTICATION_RESULT = "authenticationResult"; private final 
DruidOperatorTable operatorTable; private final ExprMacroTable macroTable; @@ -47,27 +51,33 @@ public class PlannerContext private final DateTime localNow; private final long queryStartTimeMillis; private final Map queryContext; + private final AuthorizerMapper authorizerMapper; + + private AuthenticationResult authenticationResult; private PlannerContext( final DruidOperatorTable operatorTable, final ExprMacroTable macroTable, final PlannerConfig plannerConfig, final DateTime localNow, + final AuthorizerMapper authorizerMapper, final Map queryContext ) { this.operatorTable = operatorTable; this.macroTable = macroTable; this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig"); - this.queryContext = queryContext != null ? ImmutableMap.copyOf(queryContext) : ImmutableMap.of(); + this.queryContext = queryContext != null ? Maps.newHashMap(queryContext) : Maps.newHashMap(); this.localNow = Preconditions.checkNotNull(localNow, "localNow"); this.queryStartTimeMillis = System.currentTimeMillis(); + this.authorizerMapper = authorizerMapper; } public static PlannerContext create( final DruidOperatorTable operatorTable, final ExprMacroTable macroTable, final PlannerConfig plannerConfig, + final AuthorizerMapper authorizerMapper, final Map queryContext ) { @@ -99,6 +109,7 @@ public static PlannerContext create( macroTable, plannerConfig.withOverrides(queryContext), utcNow.withZone(timeZone), + authorizerMapper, queryContext ); } @@ -138,6 +149,16 @@ public long getQueryStartTimeMillis() return queryStartTimeMillis; } + public AuthenticationResult getAuthenticationResult() + { + return authenticationResult; + } + + public void setAuthenticationResult(AuthenticationResult authenticationResult) + { + this.authenticationResult = authenticationResult; + } + public DataContext createDataContext(final JavaTypeFactory typeFactory) { class DruidDataContext implements DataContext diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java 
b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java index 087047be9e4a..be8092f30eb3 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java @@ -24,6 +24,9 @@ import io.druid.guice.annotations.Json; import io.druid.math.expr.ExprMacroTable; import io.druid.server.QueryLifecycleFactory; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthenticatorMapper; +import io.druid.server.security.AuthorizerMapper; import io.druid.sql.calcite.rel.QueryMaker; import io.druid.sql.calcite.schema.DruidSchema; import org.apache.calcite.avatica.util.Casing; @@ -57,6 +60,10 @@ public class PlannerFactory private final PlannerConfig plannerConfig; private final ObjectMapper jsonMapper; + private final AuthConfig authConfig; + private final AuthorizerMapper authorizerMapper; + private final AuthenticatorMapper authenticatorMapper; + @Inject public PlannerFactory( final DruidSchema druidSchema, @@ -64,6 +71,9 @@ public PlannerFactory( final DruidOperatorTable operatorTable, final ExprMacroTable macroTable, final PlannerConfig plannerConfig, + final AuthConfig authConfig, + final AuthenticatorMapper authenticatorMapper, + final AuthorizerMapper authorizerMapper, final @Json ObjectMapper jsonMapper ) { @@ -72,14 +82,24 @@ public PlannerFactory( this.operatorTable = operatorTable; this.macroTable = macroTable; this.plannerConfig = plannerConfig; + this.authConfig = authConfig; + this.authorizerMapper = authorizerMapper; + this.authenticatorMapper = authenticatorMapper; this.jsonMapper = jsonMapper; } public DruidPlanner createPlanner(final Map queryContext) { final SchemaPlus rootSchema = Calcites.createRootSchema(druidSchema); - final PlannerContext plannerContext = PlannerContext.create(operatorTable, macroTable, plannerConfig, queryContext); + final PlannerContext plannerContext = PlannerContext.create( + operatorTable, + macroTable, + 
plannerConfig, + authorizerMapper, + queryContext + ); final QueryMaker queryMaker = new QueryMaker(queryLifecycleFactory, plannerContext, jsonMapper); + final FrameworkConfig frameworkConfig = Frameworks .newConfigBuilder() .parserConfig(PARSER_CONFIG) @@ -94,6 +114,12 @@ public DruidPlanner createPlanner(final Map queryContext) .typeSystem(DruidTypeSystem.INSTANCE) .build(); - return new DruidPlanner(Frameworks.getPlanner(frameworkConfig), plannerContext); + return new DruidPlanner( + Frameworks.getPlanner(frameworkConfig), + plannerContext, + authConfig, + authorizerMapper, + authenticatorMapper + ); } } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java index df80ee0ed8e0..e2456be5f105 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java @@ -181,6 +181,12 @@ public RelNode copy(final RelTraitSet traitSet, final List inputs) ); } + @Override + public List getDatasourceNames() + { + return ((DruidRel) sourceRel).getDatasourceNames(); + } + @Override public RelWriter explainTerms(RelWriter pw) { diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java index f4c6f3de3937..e2e2a8595baa 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java @@ -21,6 +21,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; @@ -477,7 +478,7 @@ public TimeseriesQuery toTimeseriesQuery( queryGranularity, grouping.getAggregatorFactories(), grouping.getPostAggregators(), - theContext + 
ImmutableSortedMap.copyOf(theContext) ); } @@ -552,7 +553,7 @@ public TopNQuery toTopNQuery( Granularities.ALL, grouping.getAggregatorFactories(), grouping.getPostAggregators(), - plannerContext.getQueryContext() + ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ); } @@ -587,7 +588,7 @@ public GroupByQuery toGroupByQuery( grouping.getPostAggregators(), having != null ? new DimFilterHavingSpec(having) : null, limitSpec, - plannerContext.getQueryContext() + ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ); } @@ -637,7 +638,7 @@ public ScanQuery toScanQuery( filtration.getDimFilter(), Ordering.natural().sortedCopy(ImmutableSet.copyOf(getRowOrder())), false, - plannerContext.getQueryContext() + ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ); } @@ -718,7 +719,7 @@ public SelectQuery toSelectQuery( metrics.stream().sorted().distinct().collect(Collectors.toList()), getVirtualColumns(plannerContext.getExprMacroTable()), pagingSpec, - plannerContext.getQueryContext() + ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ); } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java index 95afcb76a471..2e7af5c4af16 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java @@ -36,6 +36,7 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; +import java.util.List; import javax.annotation.Nullable; import java.io.IOException; @@ -134,6 +135,12 @@ public DruidQueryRel asDruidConvention() ); } + @Override + public List getDatasourceNames() + { + return druidTable.getDataSource().getNames(); + } + @Override public RowSignature getSourceRowSignature() { diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java index 5b4a22ffc88a..d6153867d59d 
100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java @@ -35,6 +35,7 @@ import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.AbstractRelNode; +import java.util.List; import javax.annotation.Nullable; public abstract class DruidRel extends AbstractRelNode implements BindableRel @@ -93,6 +94,11 @@ public PlannerContext getPlannerContext() public abstract T asDruidConvention(); + /** + * Get a list of names of datasources read by this DruidRel + */ + public abstract List getDatasourceNames(); + @Override public Class getElementType() { diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java index bd011a4590e5..0ef006036590 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java @@ -194,6 +194,17 @@ public DruidSemiJoin asDruidConvention() ); } + @Override + public List getDatasourceNames() + { + final DruidRel druidRight = (DruidRel) this.right; + Set datasourceNames = Sets.newLinkedHashSet(); + datasourceNames.addAll(left.getDatasourceNames()); + datasourceNames.addAll(druidRight.getDatasourceNames()); + + return Lists.newArrayList(datasourceNames); + } + @Override public int getQueryCount() { diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java index 6a4bac009bf6..8ede04f3eff3 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java @@ -49,6 +49,7 @@ import io.druid.segment.DimensionHandlerUtils; import io.druid.segment.column.Column; import io.druid.server.QueryLifecycleFactory; +import io.druid.server.security.AuthenticationResult; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.PlannerContext; import 
org.apache.calcite.avatica.ColumnMetaData; @@ -267,10 +268,8 @@ public void remove() private Sequence runQuery(final Query query) { Hook.QUERY_PLAN.run(query); - - // Authorization really should be applied in planning. At this point the query has already begun to execute. - // So, use "null" authorizationInfo to force the query to fail if security is enabled. - return queryLifecycleFactory.factorize().runSimple(query, null, null); + final AuthenticationResult authenticationResult = plannerContext.getAuthenticationResult(); + return queryLifecycleFactory.factorize().runSimple(query, authenticationResult, null); } private Sequence executeTimeseries( diff --git a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java index a662fc04f0a9..a0246920ae4b 100644 --- a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java +++ b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java @@ -50,7 +50,9 @@ import io.druid.segment.column.ValueType; import io.druid.server.QueryLifecycleFactory; import io.druid.server.coordination.DruidServerMetadata; -import io.druid.server.security.SystemAuthorizationInfo; +import io.druid.server.security.AuthenticationResult; +import io.druid.server.security.Authenticator; +import io.druid.server.security.AuthenticatorMapper; import io.druid.sql.calcite.planner.PlannerConfig; import io.druid.sql.calcite.table.DruidTable; import io.druid.sql.calcite.table.RowSignature; @@ -115,6 +117,9 @@ public class DruidSchema extends AbstractSchema // All segments that need to be refreshed. private final TreeSet segmentsNeedingRefresh = new TreeSet<>(SEGMENT_ORDER); + // Escalating authenticator, so we can attach an authentication result to queries we generate. 
+ private final Authenticator escalatingAuthenticator; + private boolean refreshImmediately = false; private long lastRefresh = 0L; private boolean isServerViewInitialized = false; @@ -124,7 +129,8 @@ public DruidSchema( final QueryLifecycleFactory queryLifecycleFactory, final TimelineServerView serverView, final PlannerConfig config, - final ViewManager viewManager + final ViewManager viewManager, + final AuthenticatorMapper authenticatorMapper ) { this.queryLifecycleFactory = Preconditions.checkNotNull(queryLifecycleFactory, "queryLifecycleFactory"); @@ -133,6 +139,7 @@ public DruidSchema( this.viewManager = Preconditions.checkNotNull(viewManager, "viewManager"); this.cacheExec = ScheduledExecutors.fixed(1, "DruidSchema-Cache-%d"); this.tables = Maps.newConcurrentMap(); + this.escalatingAuthenticator = authenticatorMapper.getEscalatingAuthenticator(); serverView.registerTimelineCallback( MoreExecutors.sameThreadExecutor(), @@ -401,7 +408,8 @@ private Set refreshSegmentsForDataSource( final Set retVal = new HashSet<>(); final Sequence sequence = runSegmentMetadataQuery( queryLifecycleFactory, - Iterables.limit(segments, MAX_SEGMENTS_PER_QUERY) + Iterables.limit(segments, MAX_SEGMENTS_PER_QUERY), + escalatingAuthenticator.createEscalatedAuthenticationResult() ); Yielder yielder = Yielders.each(sequence); @@ -471,7 +479,8 @@ private DruidTable buildDruidTable(final String dataSource) private static Sequence runSegmentMetadataQuery( final QueryLifecycleFactory queryLifecycleFactory, - final Iterable segments + final Iterable segments, + final AuthenticationResult authenticationResult ) { // Sanity check: getOnlyElement of a set, to ensure all segments have the same dataSource. @@ -496,8 +505,7 @@ private static Sequence runSegmentMetadataQuery( false ); - // Use SystemAuthorizationInfo since this is a query generated by Druid itself. 
- return queryLifecycleFactory.factorize().runSimple(segmentMetadataQuery, SystemAuthorizationInfo.INSTANCE, null); + return queryLifecycleFactory.factorize().runSimple(segmentMetadataQuery, authenticationResult, null); } private static RowSignature analysisToRowSignature(final SegmentAnalysis analysis) diff --git a/sql/src/main/java/io/druid/sql/http/SqlResource.java b/sql/src/main/java/io/druid/sql/http/SqlResource.java index 9c8cf6bdff14..8cc1d1f4d8d0 100644 --- a/sql/src/main/java/io/druid/sql/http/SqlResource.java +++ b/sql/src/main/java/io/druid/sql/http/SqlResource.java @@ -40,11 +40,13 @@ import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; +import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.StreamingOutput; @@ -74,16 +76,16 @@ public SqlResource( @POST @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) - public Response doPost(final SqlQuery sqlQuery) throws SQLException, IOException + public Response doPost( + final SqlQuery sqlQuery, + @Context final HttpServletRequest req + ) throws SQLException, IOException { - // This is not integrated with the experimental authorization framework. - // (Non-trivial since we don't know the dataSources up-front) - final PlannerResult plannerResult; final DateTimeZone timeZone; try (final DruidPlanner planner = plannerFactory.createPlanner(sqlQuery.getContext())) { - plannerResult = planner.plan(sqlQuery.getQuery()); + plannerResult = planner.plan(sqlQuery.getQuery(), req, null); timeZone = planner.getPlannerContext().getTimeZone(); // Remember which columns are time-typed, so we can emit ISO8601 instead of millis values. 
diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java index 1e6a5bb9a0b9..75e75f335199 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -30,12 +30,20 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; +import com.google.inject.Binder; +import com.google.inject.Injector; +import com.google.inject.Module; +import com.google.inject.name.Names; +import io.druid.guice.GuiceInjectors; +import io.druid.initialization.Initialization; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.server.DruidNode; import io.druid.server.initialization.ServerConfig; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; import io.druid.sql.calcite.planner.PlannerConfig; @@ -110,6 +118,7 @@ public int getMaxStatementsPerConnection() private Connection clientLosAngeles; private DruidMeta druidMeta; private String url; + private Injector injector; @Before public void setUp() throws Exception @@ -120,6 +129,22 @@ public void setUp() throws Exception final DruidSchema druidSchema = CalciteTests.createMockSchema(walker, plannerConfig); final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable(); final ExprMacroTable macroTable = CalciteTests.createExprMacroTable(); + + injector = Initialization.makeInjectorWithModules( + GuiceInjectors.makeStartupInjector(), + ImmutableList.of( + new Module() + { + @Override + public void configure(Binder binder) + { + 
binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); + } + } + ) + ); druidMeta = new DruidMeta( new PlannerFactory( druidSchema, @@ -127,13 +152,18 @@ public void setUp() throws Exception operatorTable, macroTable, plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ), - AVATICA_CONFIG + AVATICA_CONFIG, + new AuthConfig(), + injector ); final DruidAvaticaHandler handler = new DruidAvaticaHandler( druidMeta, - new DruidNode("dummy", "dummy", 1, null, new ServerConfig()), + new DruidNode("dummy", "dummy", 1, null, new ServerConfig()), new AvaticaMonitor() ); final int port = new Random().nextInt(9999) + 10000; @@ -145,7 +175,7 @@ public void setUp() throws Exception port, DruidAvaticaHandler.AVATICA_PATH ); - client = DriverManager.getConnection(url); + client = DriverManager.getConnection(url, "admin", "druid"); final Properties propertiesLosAngeles = new Properties(); propertiesLosAngeles.setProperty("sqlTimeZone", "America/Los_Angeles"); @@ -558,7 +588,6 @@ public int getMaxRowsPerFrame() final DruidSchema druidSchema = CalciteTests.createMockSchema(walker, plannerConfig); final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable(); final ExprMacroTable macroTable = CalciteTests.createExprMacroTable(); - final List frames = new ArrayList<>(); DruidMeta smallFrameDruidMeta = new DruidMeta( new PlannerFactory( @@ -567,9 +596,14 @@ public int getMaxRowsPerFrame() operatorTable, macroTable, plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ), - smallFrameConfig + smallFrameConfig, + new AuthConfig(), + injector ) { @Override diff --git 
a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java index 88e9fbd2415a..7cd37b3ade77 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java @@ -21,8 +21,12 @@ import com.google.common.base.Function; import com.google.common.collect.Lists; + import io.druid.java.util.common.DateTimes; import io.druid.math.expr.ExprMacroTable; +import io.druid.server.security.AllowAllAuthenticator; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; import io.druid.sql.calcite.planner.PlannerConfig; @@ -71,6 +75,9 @@ public void setUp() throws Exception operatorTable, macroTable, plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ); } @@ -86,7 +93,8 @@ public void tearDown() throws Exception public void testSignature() throws Exception { final String sql = "SELECT * FROM druid.foo"; - final DruidStatement statement = new DruidStatement("", 0, null, () -> {}).prepare(plannerFactory, sql, -1); + final DruidStatement statement = new DruidStatement("", 0, null, () -> { + }).prepare(plannerFactory, sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); // Check signature. 
final Meta.Signature signature = statement.getSignature(); @@ -125,7 +133,8 @@ public List apply(final ColumnMetaData columnMetaData) public void testSelectAllInFirstFrame() throws Exception { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; - final DruidStatement statement = new DruidStatement("", 0, null, () -> {}).prepare(plannerFactory, sql, -1); + final DruidStatement statement = new DruidStatement("", 0, null, () -> { + }).prepare(plannerFactory, sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); // First frame, ask for all rows. Meta.Frame frame = statement.execute().nextFrame(DruidStatement.START_OFFSET, 6); @@ -151,8 +160,9 @@ public void testSelectAllInFirstFrame() throws Exception public void testSelectSplitOverTwoFrames() throws Exception { final String sql = "SELECT __time, cnt, dim1, dim2, m1 FROM druid.foo"; - final DruidStatement statement = new DruidStatement("", 0, null, () -> {}).prepare(plannerFactory, sql, -1); - + final DruidStatement statement = new DruidStatement("", 0, null, () -> { + }).prepare(plannerFactory, sql, -1, AllowAllAuthenticator.ALLOW_ALL_RESULT); + // First frame, ask for 2 rows. 
Meta.Frame frame = statement.execute().nextFrame(DruidStatement.START_OFFSET, 2); Assert.assertEquals( diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 52975b80cf67..9839fb93e80f 100644 --- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import io.druid.hll.HLLCV1; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Intervals; @@ -86,6 +87,8 @@ import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.filtration.Filtration; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; @@ -201,13 +204,15 @@ public int getMaxQueryCount() ); // Matches QUERY_CONTEXT_LOS_ANGELES - public static final Map TIMESERIES_CONTEXT_LOS_ANGELES = ImmutableMap.of( - PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z", - PlannerContext.CTX_SQL_TIME_ZONE, LOS_ANGELES, - "skipEmptyBuckets", true, - QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS, - QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE - ); + public static final Map TIMESERIES_CONTEXT_LOS_ANGELES = Maps.newHashMap(); + { + TIMESERIES_CONTEXT_LOS_ANGELES.put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z"); + TIMESERIES_CONTEXT_LOS_ANGELES.put(PlannerContext.CTX_SQL_TIME_ZONE, LOS_ANGELES); + TIMESERIES_CONTEXT_LOS_ANGELES.put("skipEmptyBuckets", true); + TIMESERIES_CONTEXT_LOS_ANGELES.put(QueryContexts.DEFAULT_TIMEOUT_KEY, 
QueryContexts.DEFAULT_TIMEOUT_MILLIS); + TIMESERIES_CONTEXT_LOS_ANGELES.put(QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE); + } + private static final PagingSpec FIRST_PAGING_SPEC = new PagingSpec(null, 1000, true); @Rule @@ -445,7 +450,7 @@ public void testExplainSelectStar() throws Exception ImmutableList.of(), ImmutableList.of( new Object[]{ - "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" } ) ); @@ -700,8 +705,8 @@ public void testExplainSelfJoinWithFallback() throws Exception new Object[]{ "BindableProject(dim1=[$9], dim10=[$2], dim2=[$3])\n" + " BindableJoin(condition=[=($9, $3)], joinType=[inner])\n" - + " 
DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" - + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" + + " 
DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" } ) ); @@ -3223,9 +3228,9 @@ public void testExplainDoubleNestedGroupBy() throws Exception ImmutableList.of(), ImmutableList.of( new Object[]{ - "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"a0\",\"fieldName\":\"a0\",\"expression\":null},{\"type\":\"count\",\"name\":\"a1\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" - + " 
DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"d1\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"a0\",\"fieldName\":\"a0\",\"expression\":null}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" - + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\"},{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d1\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + 
"DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"a0\",\"fieldName\":\"a0\",\"expression\":null},{\"type\":\"count\",\"name\":\"a1\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" + + " DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"d1\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"a0\",\"fieldName\":\"a0\",\"expression\":null}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" + + " 
DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\"},{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d1\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" } ) ); @@ -3454,9 +3459,9 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception ImmutableList.of(), ImmutableList.of( new Object[]{ - "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" - + " 
DruidSemiJoin(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}], leftExpressions=[[DruidExpression{simpleExtraction=null, expression='substring(\"dim2\", 0, 1)'}]], rightKeys=[[0]])\n" - + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + 
"DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" + + " DruidSemiJoin(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], leftExpressions=[[DruidExpression{simpleExtraction=null, expression='substring(\"dim2\", 0, 1)'}]], rightKeys=[[0]])\n" + + " 
DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}])\n" } ) ); @@ -5510,12 +5515,16 @@ private List getResults( final DruidSchema druidSchema = CalciteTests.createMockSchema(walker, plannerConfig, viewManager); final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable(); final ExprMacroTable macroTable = CalciteTests.createExprMacroTable(); + final PlannerFactory plannerFactory = new PlannerFactory( druidSchema, CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, macroTable, plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ); diff --git a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java index fcef10cd7306..e27b9eee219f 100644 --- a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java @@ -29,6 +29,7 @@ import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.query.extraction.TimeFormatExtractionFn; import io.druid.segment.column.ValueType; +import 
io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.PlannerConfig; import io.druid.sql.calcite.planner.PlannerContext; @@ -63,6 +64,7 @@ public class ExpressionsTest CalciteTests.createOperatorTable(), CalciteTests.createExprMacroTable(), new PlannerConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER, ImmutableMap.of() ); private final RowSignature rowSignature = RowSignature diff --git a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java index 05684b41bf34..772a3adffe20 100644 --- a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java @@ -29,6 +29,9 @@ import io.druid.math.expr.ExprMacroTable; import io.druid.query.QueryInterruptedException; import io.druid.query.ResourceLimitExceededException; +import io.druid.server.security.AllowAllAuthenticator; +import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; import io.druid.sql.calcite.planner.PlannerConfig; @@ -41,6 +44,7 @@ import io.druid.sql.http.SqlQuery; import io.druid.sql.http.SqlResource; import org.apache.calcite.tools.ValidationException; +import org.easymock.EasyMock; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -48,6 +52,7 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; +import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; import javax.ws.rs.core.StreamingOutput; import java.io.ByteArrayOutputStream; @@ -68,15 +73,32 @@ public class SqlResourceTest private SqlResource resource; + private HttpServletRequest req; + @Before public void setUp() throws Exception { Calcites.setSystemProperties(); walker = CalciteTests.createMockWalker(temporaryFolder.newFolder()); + final 
PlannerConfig plannerConfig = new PlannerConfig(); final DruidSchema druidSchema = CalciteTests.createMockSchema(walker, plannerConfig); final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable(); final ExprMacroTable macroTable = CalciteTests.createExprMacroTable(); + req = EasyMock.createStrictMock(HttpServletRequest.class); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)) + .andReturn(null) + .anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT) + .anyTimes(); + req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) + .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT) + .anyTimes(); + EasyMock.replay(req); + resource = new SqlResource( JSON_MAPPER, new PlannerFactory( @@ -85,6 +107,9 @@ public void setUp() throws Exception operatorTable, macroTable, plannerConfig, + new AuthConfig(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper() ) ); @@ -243,7 +268,7 @@ public void testResourceLimitExceeded() throws Exception // Returns either an error or a result. 
private Pair>> doPost(final SqlQuery query) throws Exception { - final Response response = resource.doPost(query); + final Response response = resource.doPost(query, req); if (response.getStatus() == 200) { final StreamingOutput output = (StreamingOutput) response.getEntity(); final ByteArrayOutputStream baos = new ByteArrayOutputStream(); diff --git a/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java index 119342966bdf..695646b1e3a3 100644 --- a/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java @@ -32,6 +32,7 @@ import io.druid.segment.QueryableIndex; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndexSchema; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.PlannerConfig; import io.druid.sql.calcite.table.DruidTable; @@ -139,11 +140,13 @@ public void setUp() throws Exception index2 ); + schema = new DruidSchema( CalciteTests.createMockQueryLifecycleFactory(walker), new TestServerInventoryView(walker.getSegments()), PLANNER_CONFIG_DEFAULT, - new NoopViewManager() + new NoopViewManager(), + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER ); schema.start(); diff --git a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java index 5e73d86e2203..e9d12f65ccad 100644 --- a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java @@ -93,6 +93,7 @@ import io.druid.server.initialization.ServerConfig; import io.druid.server.log.NoopRequestLogger; import io.druid.server.security.AuthConfig; +import io.druid.server.security.AuthTestUtils; import io.druid.sql.calcite.aggregation.SqlAggregator; import io.druid.sql.calcite.expression.SqlOperatorConversion; 
import io.druid.sql.calcite.planner.DruidOperatorTable; @@ -327,7 +328,8 @@ public > QueryToolChest getToolChest new ServiceEmitter("dummy", "dummy", new NoopEmitter()), new NoopRequestLogger(), new ServerConfig(), - new AuthConfig() + new AuthConfig(), + AuthTestUtils.TEST_AUTHORIZER_MAPPER ); } @@ -420,7 +422,8 @@ public static DruidSchema createMockSchema( CalciteTests.createMockQueryLifecycleFactory(walker), new TestServerInventoryView(walker.getSegments()), plannerConfig, - viewManager + viewManager, + AuthTestUtils.TEST_AUTHENTICATOR_MAPPER ); schema.start();