53 commits
b2d8fcd
Limit the number of balance task (#674)
morningman Feb 28, 2019
acf839c
Remove the running partition mark when delete operation encounter err…
morningman Mar 1, 2019
f2bd98f
Clear etl job files when job finished (#680)
yiguolei Mar 1, 2019
49dddb4
Fix some tablet scheduler bug (#686)
morningman Mar 5, 2019
4dbbd32
Remove sensitive info (#692)
morningman Mar 6, 2019
7965a71
Add esquery function (#652)
Salieri1969 Mar 8, 2019
584b437
Fix balance with diff storage medium (#705)
morningman Mar 11, 2019
b3fd53a
Fix bug that compareTo in PartitionKey throws cast error (#720)
chenhao7253886 Mar 11, 2019
d67aeb8
Clean timeout tablets channel in TabletWriterMgr (#718)
kangkaisen Mar 11, 2019
e2717e1
Fix the error of variable_length for Decimal (#724)
lide-reed Mar 11, 2019
5f9e82b
Support calculate unix_timestamp() on Frontend (#732)
morningman Mar 13, 2019
7feb27e
Fix not matched error code (#740)
imay Mar 13, 2019
cc2fd43
Rollback the fix of variable_length for Decimal (#744)
lide-reed Mar 14, 2019
e970f28
Fix transaction non-idempotency error (#749)
Mar 14, 2019
297a5f2
Add comment to avoid modification for variable_length (#750)
lide-reed Mar 14, 2019
c11e78c
Fix bug of invalid replica last failed version (#746)
morningman Mar 14, 2019
4a3d9dd
Fix bug that balance slot may not be released when balance task is do…
morningman Mar 14, 2019
28ea424
Update compile instruction in README.md (#763)
morningman Mar 17, 2019
2a152e0
Remove colocate table meta when drop db (#761)
kangkaisen Mar 17, 2019
5e80dca
Update curl version (#766)
litao91 Mar 18, 2019
1f092bb
Add EsTableDescriptor in be (#775)
yiguolei Mar 19, 2019
fb4e77d
Add http post feature for HttpClient (#773)
wuyunfeng Mar 19, 2019
11307b2
Fix bug: stream load ignore last line with no-newline (#785)
imay Mar 21, 2019
4d8f0dc
Fix add_version () core dump on acquiring delta (#788)
Mar 21, 2019
e60b71d
Release SegmentGroup reference count (#790)
Mar 22, 2019
c34b306
Decimal optimize branch #695 (#727)
lide-reed Mar 22, 2019
722a4db
Fix bug that throws exception when pruning partition type is date (#792)
chenhao7253886 Mar 22, 2019
f4a63b2
Fix doris on es bug (#791)
yiguolei Mar 22, 2019
504fbc3
Fix bug that Greatest get wrong function's symbol (#796)
chenhao7253886 Mar 23, 2019
d47600e
Modify the logic of setting password (#798)
morningman Mar 25, 2019
bb3d5f2
Implement the routine load process of Kafka on Backend (#671)
morningman Feb 28, 2019
970e4e7
Add unit test (#675)
morningman Feb 28, 2019
33c57f2
Submit routine load task immediately (#682)
EmmyMiao87 Mar 4, 2019
e8ea90e
Modify interface (#684)
morningman Mar 4, 2019
872c4dc
Put begin txn into task scheduler (#687)
EmmyMiao87 Mar 5, 2019
c20548e
Add a data consumer pool to reuse the data consumer (#691)
morningman Mar 6, 2019
6d2f9ba
Add missing files (#696)
morningman Mar 6, 2019
a19ebaa
Missing to set auth code (#699)
morningman Mar 7, 2019
09777e5
Change the relationship between txn and task (#703)
EmmyMiao87 Mar 7, 2019
12d9385
Add some logs (#711)
morningman Mar 8, 2019
86217bd
Fix bug that data consumer should be removed from pool when being got…
morningman Mar 11, 2019
3bb9f36
Add attachment in rollback txn (#725)
EmmyMiao87 Mar 11, 2019
de51fc2
Stream load with no data will abort txn (#735)
EmmyMiao87 Mar 12, 2019
f649a6c
Add routine load job cleaner (#742)
EmmyMiao87 Mar 14, 2019
11ecd71
Add persist operations for routine load job (#754)
morningman Mar 14, 2019
3b99673
modify the replay logic of routine load job (#762)
morningman Mar 17, 2019
dce0dd4
Modify some task scheduler logic (#767)
morningman Mar 18, 2019
4ba4001
Fix routine load replay bugs (#770)
morningman Mar 19, 2019
5be079d
Add a cleaner bg thread to clean idle data consumer (#776)
morningman Mar 20, 2019
d918df5
Implement ShowRoutineLoadStmt and ShowRoutineLoadTaskStmt (#786)
EmmyMiao87 Mar 21, 2019
e95d081
Fix some routine load bugs (#787)
morningman Mar 22, 2019
1f7f3db
Add metrics for routine load (#795)
morningman Mar 25, 2019
f459ddf
Merge master and fix BE ut
morningman Mar 25, 2019
2 changes: 2 additions & 0 deletions .gitignore
@@ -8,3 +8,5 @@ gensrc/build
fe/target
thirdparty/src
*.so.tmp
.DS_Store
*.iml
26 changes: 20 additions & 6 deletions README.md
@@ -23,7 +23,9 @@ The simplicity (of developing, deploying and using) and meeting many data servin

Currently, compiling Doris is only supported in a Docker environment or on a Linux OS, such as Ubuntu and CentOS.

### 4.1 For Docker
### 4.1 Compile in Docker environment (Recommended)

We offer a Docker image as a Doris compilation environment. You can compile Doris from source inside it and run the output binaries in other Linux environments.

First, you must install and start the Docker service.

@@ -45,15 +47,25 @@ apachedoris/doris-dev build-env f8bc5d4024e0 21 hours ago

#### Step2: Run the Docker image

You can run image directyly:
You can run the image directly:

```
$ docker run -it apachedoris/doris-dev:build-env
```

Or, if you want to compile the source code located on your local host, you can mount the local directory into the container by running:

```
$ docker run -it -v /your/local/path/incubator-doris-DORIS-x.x.x-release/:/root/incubator-doris-DORIS-x.x.x-release/ apachedoris/doris-dev:build-env
```

#### Step3: Download Doris source

Now you should be in the Docker environment.

You can download the Doris source via a release package or by `git clone` inside the image.
(If you have downloaded source and it is not in image, you can map its path to image in Step2.)

(If you have already downloaded the source on your local host and mapped it into the image in Step 2, you can skip this step.)

```
$ wget https://dist.apache.org/repos/dist/dev/incubator/doris/xxx.tar.gz
@@ -62,13 +74,14 @@ $ git clone https://github.com/apache/incubator-doris.git
```

#### Step4: Build Doris
Now you should in docker environment, and you can enter Doris source path and build Doris.

Enter the Doris source directory and build Doris.

```
$ sh build.sh
```

After successfully building, it will install binary files in the directory output/.
After the build succeeds, the binary files will be installed in the `output/` directory.

### 4.2 For Linux OS

@@ -98,7 +111,7 @@ Run the following script; it will compile the third-party libraries and build the whole Doris project
sh build.sh
```

After successfully building, it will install binary files in the directory output/.
After the build succeeds, the binary files will be installed in the `output/` directory.

## 5. Reporting Issues

@@ -116,3 +129,4 @@ If you find any bugs, please file a [GitHub issue](https://github.com/apache/inc
* Deploy and Upgrade - <https://github.com/apache/incubator-doris/wiki/Doris-Deploy-%26-Upgrade>
* User Manual - <https://github.com/apache/incubator-doris/wiki/Doris-Create%2C-Load-and-Delete>
* FAQs - <https://github.com/apache/incubator-doris/wiki/Doris-FAQ>

12 changes: 6 additions & 6 deletions be/CMakeLists.txt
@@ -178,12 +178,12 @@ set_target_properties(brpc PROPERTIES IMPORTED_LOCATION ${THIRDPARTY_DIR}/lib64/
add_library(rocksdb STATIC IMPORTED)
set_target_properties(rocksdb PROPERTIES IMPORTED_LOCATION ${THIRDPARTY_DIR}/lib/librocksdb.a)

add_library(librdkafka STATIC IMPORTED)
set_target_properties(librdkafka PROPERTIES IMPORTED_LOCATION ${THIRDPARTY_DIR}/lib/librdkafka.a)

add_library(librdkafka_cpp STATIC IMPORTED)
set_target_properties(librdkafka_cpp PROPERTIES IMPORTED_LOCATION ${THIRDPARTY_DIR}/lib/librdkafka++.a)

add_library(librdkafka STATIC IMPORTED)
set_target_properties(librdkafka PROPERTIES IMPORTED_LOCATION ${THIRDPARTY_DIR}/lib/librdkafka.a)

find_program(THRIFT_COMPILER thrift ${CMAKE_SOURCE_DIR}/bin)

# llvm-config
@@ -440,9 +440,10 @@ set(DORIS_LINK_LIBS

# Set thirdparty libraries
set(DORIS_DEPENDENCIES
${WL_START_GROUP}
rocksdb
librdkafka
librdkafka_cpp
librdkafka
lzo
snappy
${Boost_LIBRARIES}
@@ -456,16 +457,15 @@ set(DORIS_DEPENDENCIES
libevent
mysql
curl
${WL_START_GROUP}
${LIBZ}
${LIBBZ2}
gflags
brpc
protobuf
openssl
crypto
${WL_START_GROUP}
leveldb
${WL_END_GROUP}
)

# Add all external dependencies. They should come after the palo libs.
1 change: 1 addition & 0 deletions be/src/common/config.h
@@ -265,6 +265,7 @@ namespace config {
CONF_Int32(number_tablet_writer_threads, "16");

CONF_Int64(streaming_load_max_mb, "10240");
CONF_Int32(streaming_load_rpc_max_alive_time_sec, "600");

// Fragment thread pool
CONF_Int32(fragment_pool_thread_num, "64");
2 changes: 2 additions & 0 deletions be/src/common/daemon.cpp
@@ -45,6 +45,7 @@
#include "exprs/es_functions.h"
#include "exprs/timestamp_functions.h"
#include "exprs/decimal_operators.h"
#include "exprs/decimalv2_operators.h"
#include "exprs/utility_functions.h"
#include "exprs/json_functions.h"
#include "exprs/hll_hash_function.h"
@@ -182,6 +183,7 @@ void init_daemon(int argc, char** argv, const std::vector<StorePath>& paths) {
EncryptionFunctions::init();
TimestampFunctions::init();
DecimalOperators::init();
DecimalV2Operators::init();
UtilityFunctions::init();
CompoundPredicate::init();
JsonFunctions::init();
3 changes: 3 additions & 0 deletions be/src/common/status.cpp
@@ -34,6 +34,9 @@ const Status Status::MEM_LIMIT_EXCEEDED(
const Status Status::THRIFT_RPC_ERROR(
TStatusCode::THRIFT_RPC_ERROR, "Thrift RPC failed", true);

const Status Status::TIMEOUT(
TStatusCode::TIMEOUT, "timeout", true);

Status::ErrorDetail::ErrorDetail(const TStatus& status) :
error_code(status.status_code),
error_msgs(status.error_msgs) {
1 change: 1 addition & 0 deletions be/src/common/status.h
@@ -54,6 +54,7 @@ class Status {
static const Status CANCELLED;
static const Status MEM_LIMIT_EXCEEDED;
static const Status THRIFT_RPC_ERROR;
static const Status TIMEOUT;

// copy c'tor makes copy of error detail so Status can be returned by value
Status(const Status& status) : _error_detail(
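A minimal sketch (not part of this patch) of how the new predefined `Status::TIMEOUT` constant can be returned by BE code that waits with a deadline; the function and its condition are hypothetical, and `Status::OK` is assumed to exist alongside the other predefined constants.

```
// Hypothetical call site, for illustration only.
#include "common/status.h"

namespace doris {

// Returns the shared TIMEOUT constant instead of constructing a new
// Status(TStatusCode::TIMEOUT, ...) at every call site.
Status wait_for_data(bool got_data_before_deadline) {
    if (!got_data_before_deadline) {
        return Status::TIMEOUT;
    }
    return Status::OK; // assumed predefined OK constant
}

} // namespace doris
```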
52 changes: 52 additions & 0 deletions be/src/common/utils.h
@@ -0,0 +1,52 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

#pragma once

#include <string>

namespace doris {

struct AuthInfo {
    std::string user;
    std::string passwd;
    std::string cluster;
    std::string user_ip;
    // -1 as unset
    int64_t auth_code = -1;
};

template<class T>
void set_request_auth(T* req, const AuthInfo& auth) {
    if (auth.auth_code != -1) {
        // if auth_code is set, there is no need to set the other info
        req->__set_auth_code(auth.auth_code);
        // user name and passwd are unused, but they are required fields,
        // so they have to be set
        req->user = "";
        req->passwd = "";
    } else {
        req->user = auth.user;
        req->passwd = auth.passwd;
        if (!auth.cluster.empty()) {
            req->__set_cluster(auth.cluster);
        }
        req->__set_user_ip(auth.user_ip);
    }
}

}
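A small usage sketch, not part of the patch: `FakeAuthRequest` below only mimics the fields and `__set_*` setters that Thrift-generated request structs expose, so that `set_request_auth` can be shown end to end.

```
// Illustration only: FakeAuthRequest is not a real Doris/Thrift type.
#include <cstdint>
#include <string>

#include "common/utils.h"

struct FakeAuthRequest {
    std::string user;
    std::string passwd;
    std::string cluster;
    std::string user_ip;
    int64_t auth_code = 0;

    void __set_auth_code(int64_t code) { auth_code = code; }
    void __set_cluster(const std::string& c) { cluster = c; }
    void __set_user_ip(const std::string& ip) { user_ip = ip; }
};

void fill_auth_example() {
    doris::AuthInfo auth;
    auth.user = "some_user";
    auth.passwd = "some_password";
    auth.user_ip = "192.168.0.1";
    // auth.auth_code stays -1, so user, passwd and user_ip are copied
    // into the request instead of an auth code.

    FakeAuthRequest req;
    doris::set_request_auth(&req, auth);
}
```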
2 changes: 1 addition & 1 deletion be/src/exec/CMakeLists.txt
@@ -75,7 +75,7 @@ set(EXEC_FILES
schema_scanner/schema_columns_scanner.cpp
schema_scanner/schema_charsets_scanner.cpp
schema_scanner/schema_collations_scanner.cpp
schema_scanner/frontend_helper.cpp
schema_scanner/schema_helper.cpp
partitioned_hash_table.cc
partitioned_hash_table_ir.cc
partitioned_aggregation_node.cc
3 changes: 2 additions & 1 deletion be/src/exec/broker_scan_node.cpp
@@ -344,7 +344,7 @@ Status BrokerScanNode::scanner_scan(
tuple = reinterpret_cast<Tuple*>(new_tuple);
counter->num_rows_returned++;
} else {
counter->num_rows_filtered++;
counter->num_rows_unselected++;
}
}

@@ -409,6 +409,7 @@ void BrokerScanNode::scanner_worker(int start_idx, int length) {
// Update stats
_runtime_state->update_num_rows_load_success(counter.num_rows_returned);
_runtime_state->update_num_rows_load_filtered(counter.num_rows_filtered);
_runtime_state->update_num_rows_load_unselected(counter.num_rows_unselected);

// scanner is going to finish
{
5 changes: 3 additions & 2 deletions be/src/exec/broker_scanner.cpp
@@ -24,8 +24,8 @@
#include "runtime/exec_env.h"
#include "runtime/mem_tracker.h"
#include "runtime/raw_value.h"
#include "runtime/load_stream_mgr.h"
#include "runtime/stream_load_pipe.h"
#include "runtime/stream_load/load_stream_mgr.h"
#include "runtime/stream_load/stream_load_pipe.h"
#include "runtime/tuple.h"
#include "exprs/expr.h"
#include "exec/text_converter.h"
@@ -237,6 +237,7 @@ Status BrokerScanner::open_file_reader() {
case TFileType::FILE_STREAM: {
_stream_load_pipe = _state->exec_env()->load_stream_mgr()->get(range.load_id);
if (_stream_load_pipe == nullptr) {
VLOG(3) << "unknown stream load id: " << UniqueId(range.load_id);
return Status("unknown stream load id");
}
_cur_file_reader = _stream_load_pipe.get();
10 changes: 7 additions & 3 deletions be/src/exec/broker_scanner.h
@@ -49,11 +49,15 @@ class RuntimeProfile;
class StreamLoadPipe;

struct BrokerScanCounter {
BrokerScanCounter() : num_rows_returned(0), num_rows_filtered(0) {
BrokerScanCounter() :
num_rows_returned(0),
num_rows_filtered(0),
num_rows_unselected(0) {
}

int64_t num_rows_returned;
int64_t num_rows_filtered;
int64_t num_rows_returned; // qualified rows
int64_t num_rows_filtered; // unqualified rows
int64_t num_rows_unselected; // rows filtered out by predicates
};

// Broker scanner converts the data read from the broker into Doris tuples.