Skip to content
3 changes: 2 additions & 1 deletion docs/content/configuration/historical.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,8 @@ The historical node uses several of the global configs in [Configuration](../con
|`druid.segmentCache.dropSegmentDelayMillis`|How long a node delays before completely dropping segment.|30000 (30 seconds)|
|`druid.segmentCache.infoDir`|Historical nodes keep track of the segments they are serving so that when the process is restarted they can reload the same segments without waiting for the Coordinator to reassign. This path defines where this metadata is kept. Directory will be created if needed.|${first_location}/info_dir|
|`druid.segmentCache.announceIntervalMillis`|How frequently to announce segments while segments are loading from cache. Set this value to zero to wait for all segments to be loaded before announcing.|5000 (5 seconds)|
|`druid.segmentCache.numBootstrapThreads`|How many segments to load concurrently from local storage at startup.|1|
|`druid.segmentCache.numLoadingThreads`|How many segments to load concurrently from deep storage.|1|
|`druid.segmentCache.numBootstrapThreads`|How many segments to load concurrently from local storage at startup.|Same as numLoadingThreads|

### Query Configs

Expand Down
6 changes: 5 additions & 1 deletion java-util/src/main/java/io/druid/java/util/common/Pair.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,13 @@ public static <T1, T2> Pair<T1, T2> of(T1 lhs, T2 rhs)
}

public final T1 lhs;

public final T2 rhs;

public Pair(T1 lhs, T2 rhs)
public Pair(
T1 lhs,
T2 rhs
)
{
this.lhs = lhs;
this.rhs = rhs;
Expand Down
10 changes: 10 additions & 0 deletions server/src/main/java/io/druid/client/ImmutableDruidServer.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package io.druid.client;

import com.google.common.collect.ImmutableMap;
import com.metamx.common.StringUtils;
import io.druid.server.coordination.DruidServerMetadata;
import io.druid.server.coordination.ServerType;
import io.druid.timeline.DataSegment;
Expand Down Expand Up @@ -108,6 +109,15 @@ public Map<String, DataSegment> getSegments()
return segments;
}

/**
 * Returns the base URL for reaching this server, preferring HTTPS when the
 * server announces a TLS host-and-port, and falling back to plain HTTP otherwise.
 */
public String getURL()
{
  final String tlsHostAndPort = metadata.getHostAndTlsPort();
  return tlsHostAndPort != null
         ? StringUtils.safeFormat("https://%s", tlsHostAndPort)
         : StringUtils.safeFormat("http://%s", metadata.getHostAndPort());
}

@Override
public String toString()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,6 @@
*/
public class SegmentLoaderConfig
{
private static final int DEFAULT_NUM_BOOTSTRAP_THREADS = 1;

@JsonProperty
@NotEmpty
private List<StorageLocationConfig> locations = null;
Expand All @@ -46,12 +44,18 @@ public class SegmentLoaderConfig
@JsonProperty("announceIntervalMillis")
private int announceIntervalMillis = 0; // do not background announce

@JsonProperty("numLoadingThreads")
private int numLoadingThreads = 1;

@JsonProperty("numBootstrapThreads")
private Integer numBootstrapThreads = null;

@JsonProperty
private File infoDir = null;

@JsonProperty
private int statusQueueMaxSize = 100;
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You did not document this, was that intentional?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes, will let it run on our servers for a while before documenting. hopefully we'll have enough confidence in this before 0.11.1 release so will get documented before that release.


public List<StorageLocationConfig> getLocations()
{
return locations;
Expand All @@ -72,9 +76,14 @@ public int getAnnounceIntervalMillis()
return announceIntervalMillis;
}

/**
 * Number of threads used to load segments concurrently from deep storage.
 * Backed by the {@code druid.segmentCache.numLoadingThreads} property (default 1).
 */
public int getNumLoadingThreads()
{
return numLoadingThreads;
}

/**
 * Number of threads used to load segments concurrently from local storage at startup.
 * If {@code druid.segmentCache.numBootstrapThreads} is not explicitly configured,
 * this defaults to {@link #getNumLoadingThreads()}.
 */
public int getNumBootstrapThreads()
{
  // Diff artifact removed: the pre-change line referencing the deleted
  // DEFAULT_NUM_BOOTSTRAP_THREADS constant made this an unreachable duplicate return.
  return numBootstrapThreads == null ? numLoadingThreads : numBootstrapThreads;
}

public File getInfoDir()
Expand All @@ -90,6 +99,11 @@ public File getInfoDir()
return infoDir;
}

/**
 * Maximum size of the segment load/drop status queue (default 100).
 * NOTE(review): intentionally undocumented in historical.md per the PR discussion —
 * to be documented once the feature has baked in production.
 */
public int getStatusQueueMaxSize()
{
return statusQueueMaxSize;
}

public SegmentLoaderConfig withLocations(List<StorageLocationConfig> locations)
{
SegmentLoaderConfig retVal = new SegmentLoaderConfig();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,6 @@
/**
 * Callback invoked when a segment change request (load/drop) has been processed.
 */
public interface DataSegmentChangeCallback
{
  // "public" is redundant on interface members and was dropped for idiomatic style.
  void execute();

  /** Shared no-op instance for callers that do not need a completion callback. */
  DataSegmentChangeCallback NOOP = () -> {};
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@
import io.druid.java.util.common.StringUtils;
import io.druid.timeline.DataSegment;

import java.util.Objects;

/**
*/
public class SegmentChangeRequestDrop implements DataSegmentChangeRequest
Expand Down Expand Up @@ -58,6 +60,25 @@ public String asString()
return StringUtils.format("DROP: %s", segment.getIdentifier());
}

/**
 * Two drop requests are equal iff they reference equal segments.
 * Uses an exact class check (not instanceof) to preserve symmetry with subclasses.
 */
@Override
public boolean equals(Object o)
{
  if (o == this) {
    return true;
  }
  if (o == null || o.getClass() != getClass()) {
    return false;
  }
  return Objects.equals(segment, ((SegmentChangeRequestDrop) o).segment);
}

// Consistent with equals(): hash is derived solely from the segment.
@Override
public int hashCode()
{
return Objects.hash(segment);
}

@Override
public String toString()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@
import io.druid.java.util.common.StringUtils;
import io.druid.timeline.DataSegment;

import java.util.Objects;

/**
*/
public class SegmentChangeRequestLoad implements DataSegmentChangeRequest
Expand Down Expand Up @@ -58,6 +60,25 @@ public String asString()
return StringUtils.format("LOAD: %s", segment.getIdentifier());
}

/**
 * Two load requests are equal iff they reference equal segments.
 * Uses an exact class check (not instanceof) to preserve symmetry with subclasses.
 */
@Override
public boolean equals(Object o)
{
  if (o == this) {
    return true;
  }
  if (o == null || o.getClass() != getClass()) {
    return false;
  }
  return Objects.equals(segment, ((SegmentChangeRequestLoad) o).segment);
}

// Consistent with equals(): hash is derived solely from the segment.
@Override
public int hashCode()
{
return Objects.hash(segment);
}

@Override
public String toString()
{
Expand Down
Loading