Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,8 @@ private static void runJsonDatasetExample(SparkSession spark) {
// an RDD[String] storing one JSON object per string.
List<String> jsonData = Arrays.asList(
"{\"name\":\"Yin\",\"address\":{\"city\":\"Columbus\",\"state\":\"Ohio\"}}");
JavaRDD<String> anotherPeopleRDD = new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);
JavaRDD<String> anotherPeopleRDD =
new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);
Dataset anotherPeople = spark.read().json(anotherPeopleRDD);
anotherPeople.show();
// +---------------+----+
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ public final class FixedLengthRowBasedKeyValueBatch extends RowBasedKeyValueBatc
private final int vlen;
private final int recordLength;

private final long getKeyOffsetForFixedLengthRecords(int rowId) {
/**
 * Computes the byte offset of the key for the record at {@code rowId}.
 *
 * <p>Records in this batch all share the same fixed {@code recordLength}, so the
 * offset is a simple linear function of the row id relative to
 * {@code recordStartOffset}. The multiplication is widened to {@code long} before
 * it happens so a large {@code rowId * recordLength} product cannot overflow int.
 *
 * @param rowId zero-based index of the record within this batch
 * @return absolute offset (in bytes) at which the record's key begins
 */
private long getKeyOffsetForFixedLengthRecords(int rowId) {
  final long relativeOffset = (long) recordLength * rowId;
  return recordStartOffset + relativeOffset;
}

Expand All @@ -43,7 +43,7 @@ private final long getKeyOffsetForFixedLengthRecords(int rowId) {
* Returns an UnsafeRow pointing to the value if succeeds, otherwise returns null.
*/
@Override
public final UnsafeRow appendRow(Object kbase, long koff, int klen,
public UnsafeRow appendRow(Object kbase, long koff, int klen,
Object vbase, long voff, int vlen) {
// if run out of max supported rows or page size, return null
if (numRows >= capacity || page == null || page.size() - pageCursor < recordLength) {
Expand Down Expand Up @@ -71,7 +71,7 @@ public final UnsafeRow appendRow(Object kbase, long koff, int klen,
* Returns the key row in this batch at `rowId`. Returned key row is reused across calls.
*/
@Override
public final UnsafeRow getKeyRow(int rowId) {
public UnsafeRow getKeyRow(int rowId) {
assert(rowId >= 0);
assert(rowId < numRows);
if (keyRowId != rowId) { // if keyRowId == rowId, desired keyRow is already cached
Expand All @@ -90,7 +90,7 @@ public final UnsafeRow getKeyRow(int rowId) {
* In most times, 1) is skipped because `getKeyRow(id)` is often called before `getValueRow(id)`.
*/
@Override
protected final UnsafeRow getValueFromKey(int rowId) {
protected UnsafeRow getValueFromKey(int rowId) {
if (keyRowId != rowId) {
getKeyRow(rowId);
}
Expand All @@ -103,7 +103,7 @@ protected final UnsafeRow getValueFromKey(int rowId) {
* Returns an iterator to go through all rows
*/
@Override
public final org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() {
private final UnsafeRow key = new UnsafeRow(keySchema.length());
private final UnsafeRow value = new UnsafeRow(valueSchema.length());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ public final void close() {
}
}

private final boolean acquirePage(long requiredSize) {
private boolean acquirePage(long requiredSize) {
try {
page = allocatePage(requiredSize);
} catch (OutOfMemoryError e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public final class VariableLengthRowBasedKeyValueBatch extends RowBasedKeyValueB
* Returns an UnsafeRow pointing to the value if succeeds, otherwise returns null.
*/
@Override
public final UnsafeRow appendRow(Object kbase, long koff, int klen,
public UnsafeRow appendRow(Object kbase, long koff, int klen,
Object vbase, long voff, int vlen) {
final long recordLength = 8 + klen + vlen + 8;
// if run out of max supported rows or page size, return null
Expand Down Expand Up @@ -94,7 +94,7 @@ public UnsafeRow getKeyRow(int rowId) {
* In most times, 1) is skipped because `getKeyRow(id)` is often called before `getValueRow(id)`.
*/
@Override
public final UnsafeRow getValueFromKey(int rowId) {
public UnsafeRow getValueFromKey(int rowId) {
if (keyRowId != rowId) {
getKeyRow(rowId);
}
Expand All @@ -110,7 +110,7 @@ public final UnsafeRow getValueFromKey(int rowId) {
* Returns an iterator to go through all rows
*/
@Override
public final org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() {
private final UnsafeRow key = new UnsafeRow(keySchema.length());
private final UnsafeRow value = new UnsafeRow(valueSchema.length());
Expand Down