Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Handle map and array types in binary record data #3434

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.cdc.common.data.binary;

import org.apache.flink.cdc.common.annotation.Internal;
import org.apache.flink.cdc.common.data.MapData;
import org.apache.flink.cdc.common.types.DataType;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

import java.util.HashMap;
import java.util.Map;

import static org.apache.flink.cdc.common.utils.Preconditions.checkArgument;

/**
 * Binary layout: [4 byte (key array size in bytes)] + [Key BinaryArray] + [Value BinaryArray].
 *
 * <p>{@code BinaryMap} are influenced by Apache Spark UnsafeMapData.
 */
@Internal
public class BinaryMapData extends BinarySection implements MapData {
    // Views over the key/value regions of the underlying segments; re-pointed on every pointTo().
    private final BinaryArrayData keys;
    private final BinaryArrayData values;

    public BinaryMapData() {
        keys = new BinaryArrayData();
        values = new BinaryArrayData();
    }

    /** Returns the number of key-value pairs in this map. */
    public int size() {
        return keys.size();
    }

    @Override
    public void pointTo(MemorySegment[] segments, int offset, int sizeInBytes) {
        // Read the numBytes of the key array from the first 4 bytes.
        final int keyArrayBytes = BinarySegmentUtils.getInt(segments, offset);
        assert keyArrayBytes >= 0 : "keyArraySize (" + keyArrayBytes + ") should >= 0";
        final int valueArrayBytes = sizeInBytes - keyArrayBytes - 4;
        assert valueArrayBytes >= 0 : "valueArraySize (" + valueArrayBytes + ") should >= 0";

        keys.pointTo(segments, offset + 4, keyArrayBytes);
        values.pointTo(segments, offset + 4 + keyArrayBytes, valueArrayBytes);

        // A well-formed map must hold exactly one value per key.
        assert keys.size() == values.size();

        this.segments = segments;
        this.offset = offset;
        this.sizeInBytes = sizeInBytes;
    }

    public BinaryArrayData keyArray() {
        return keys;
    }

    public BinaryArrayData valueArray() {
        return values;
    }

    /**
     * Converts this binary map into a Java {@link Map}; the keys and values of the returned map are
     * the internal objects produced by {@link BinaryArrayData#toObjectArray}.
     */
    public Map<?, ?> toJavaMap(DataType keyType, DataType valueType) {
        Object[] keyArray = keys.toObjectArray(keyType);
        Object[] valueArray = values.toObjectArray(valueType);

        // Presize so filling the map does not trigger rehashing.
        Map<Object, Object> map = new HashMap<>(keyArray.length);
        for (int i = 0; i < keyArray.length; i++) {
            map.put(keyArray[i], valueArray[i]);
        }
        return map;
    }

    public BinaryMapData copy() {
        return copy(new BinaryMapData());
    }

    /**
     * Copies the underlying bytes into {@code reuse} so that it no longer shares memory with this
     * map, and returns {@code reuse}.
     */
    public BinaryMapData copy(BinaryMapData reuse) {
        byte[] bytes = BinarySegmentUtils.copyToBytes(segments, offset, sizeInBytes);
        reuse.pointTo(MemorySegmentFactory.wrap(bytes), 0, sizeInBytes);
        return reuse;
    }

    @Override
    public int hashCode() {
        // Hashing is defined over the raw binary representation.
        return BinarySegmentUtils.hashByWords(segments, offset, sizeInBytes);
    }

    // ------------------------------------------------------------------------------------------
    // Construction Utilities
    // ------------------------------------------------------------------------------------------

    /**
     * Builds a {@link BinaryMapData} from a key array and a value array; both arrays must reside in
     * a single memory segment.
     */
    public static BinaryMapData valueOf(BinaryArrayData key, BinaryArrayData value) {
        // Use the accessor consistently for both arrays (the original mixed `key.segments`
        // field access with `value.getSegments()`).
        checkArgument(key.getSegments().length == 1 && value.getSegments().length == 1);
        byte[] bytes = new byte[4 + key.sizeInBytes + value.sizeInBytes];
        MemorySegment segment = MemorySegmentFactory.wrap(bytes);
        segment.putInt(0, key.sizeInBytes);
        key.getSegments()[0].copyTo(key.getOffset(), segment, 4, key.sizeInBytes);
        value.getSegments()[0].copyTo(
                value.getOffset(), segment, 4 + key.sizeInBytes, value.sizeInBytes);
        BinaryMapData map = new BinaryMapData();
        map.pointTo(segment, 0, bytes.length);
        return map;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -254,12 +254,14 @@ public byte[] getBinary(int pos) {

@Override
public ArrayData getArray(int pos) {
    // Previously threw UnsupportedOperationException; arrays are now materialized from the
    // variable-length part. The fixed-length slot at `pos` holds a long whose upper 32 bits
    // are the relative offset and lower 32 bits the size in bytes.
    assertIndexIsValid(pos);
    return BinarySegmentUtils.readArrayData(segments, offset, getLong(pos));
}

@Override
public MapData getMap(int pos) {
    // Previously threw UnsupportedOperationException; maps are now materialized from the
    // variable-length part via the same (offset << 32 | size) encoding used for arrays.
    assertIndexIsValid(pos);
    return BinarySegmentUtils.readMapData(segments, offset, getLong(pos));
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@
package org.apache.flink.cdc.common.data.binary;

import org.apache.flink.cdc.common.annotation.Internal;
import org.apache.flink.cdc.common.data.ArrayData;
import org.apache.flink.cdc.common.data.DecimalData;
import org.apache.flink.cdc.common.data.LocalZonedTimestampData;
import org.apache.flink.cdc.common.data.MapData;
import org.apache.flink.cdc.common.data.RecordData;
import org.apache.flink.cdc.common.data.StringData;
import org.apache.flink.cdc.common.data.TimestampData;
Expand Down Expand Up @@ -1154,4 +1156,24 @@ private static int findInMultiSegments(
}
return -1;
}

/** Gets an instance of {@link MapData} from underlying {@link MemorySegment}. */
public static MapData readMapData(
        MemorySegment[] segments, int baseOffset, long offsetAndSize) {
    // Upper 32 bits encode the offset relative to baseOffset, lower 32 bits the byte size.
    final int relativeOffset = (int) (offsetAndSize >> 32);
    final int numBytes = (int) offsetAndSize;
    final BinaryMapData result = new BinaryMapData();
    result.pointTo(segments, baseOffset + relativeOffset, numBytes);
    return result;
}

/** Gets an instance of {@link ArrayData} from underlying {@link MemorySegment}. */
public static ArrayData readArrayData(
        MemorySegment[] segments, int baseOffset, long offsetAndSize) {
    // Upper 32 bits encode the offset relative to baseOffset, lower 32 bits the byte size.
    final int relativeOffset = (int) (offsetAndSize >> 32);
    final int numBytes = (int) offsetAndSize;
    final BinaryArrayData result = new BinaryArrayData();
    result.pointTo(segments, baseOffset + relativeOffset, numBytes);
    return result;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.cdc.common.data.util;

import org.apache.flink.cdc.common.data.binary.BinaryRecordData;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.core.memory.MemoryUtils;

/**
 * Utilities for {@link BinaryRecordData}. Many of the methods in this class are used in code
 * generation.
 */
public class BinaryRecordDataDataUtil {

    public static final sun.misc.Unsafe UNSAFE = MemoryUtils.UNSAFE;
    public static final int BYTE_ARRAY_BASE_OFFSET = UNSAFE.arrayBaseOffset(byte[].class);

    /** A shared, empty (zero-field) record, backed by its own zeroed byte array. */
    public static final BinaryRecordData EMPTY_ROW = new BinaryRecordData(0);

    static {
        int size = EMPTY_ROW.getFixedLengthPartSize();
        byte[] bytes = new byte[size];
        EMPTY_ROW.pointTo(MemorySegmentFactory.wrap(bytes), 0, size);
    }

    private BinaryRecordDataDataUtil() {
        // Utility class: no instances.
    }

    /** Compares the first {@code length} bytes of {@code left} and {@code right} for equality. */
    public static boolean byteArrayEquals(byte[] left, byte[] right, int length) {
        return byteArrayEquals(left, BYTE_ARRAY_BASE_OFFSET, right, BYTE_ARRAY_BASE_OFFSET, length);
    }

    /**
     * Compares {@code length} bytes starting at the given unsafe base-object/offset pairs: 8 bytes
     * at a time on the fast path, then byte-by-byte for the remaining tail.
     */
    public static boolean byteArrayEquals(
            Object left, long leftOffset, Object right, long rightOffset, int length) {
        int i = 0;

        // Fast path: compare word-sized (8-byte) chunks.
        while (i <= length - 8) {
            if (UNSAFE.getLong(left, leftOffset + i) != UNSAFE.getLong(right, rightOffset + i)) {
                return false;
            }
            i += 8;
        }

        // Tail: compare any remaining bytes one at a time.
        while (i < length) {
            if (UNSAFE.getByte(left, leftOffset + i) != UNSAFE.getByte(right, rightOffset + i)) {
                return false;
            }
            i += 1;
        }
        return true;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.cdc.common.data.util;

import org.apache.flink.cdc.common.data.ArrayData;
import org.apache.flink.cdc.common.data.MapData;
import org.apache.flink.cdc.common.types.DataType;

import java.util.HashMap;
import java.util.Map;

/** Utilities for {@link MapData}. */
public final class MapDataUtil {

    private MapDataUtil() {
        // Utility class: no instances.
    }

    /**
     * Converts a {@link MapData} into a Java {@link Map}; the keys and values of the Java map still
     * hold objects of internal data structures.
     *
     * <p>Note: if the {@link MapData} contains duplicate keys, later entries overwrite earlier
     * ones, and the returned map's iteration order is unspecified ({@link HashMap}).
     */
    public static Map<Object, Object> convertToJavaMap(
            MapData map, DataType keyType, DataType valueType) {
        ArrayData keyArray = map.keyArray();
        ArrayData valueArray = map.valueArray();
        Map<Object, Object> javaMap = new HashMap<>();
        ArrayData.ElementGetter keyGetter = ArrayData.createElementGetter(keyType);
        ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(valueType);
        for (int i = 0; i < map.size(); i++) {
            Object key = keyGetter.getElementOrNull(keyArray, i);
            Object value = valueGetter.getElementOrNull(valueArray, i);
            javaMap.put(key, value);
        }
        return javaMap;
    }
}
Loading
Loading