Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AVRO-4068: Java Code Cleanup #3192

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ public BigDecimal fromBytes(final ByteBuffer value, final Schema schema, final L
BigInteger bg = null;
ByteBuffer buffer = decoder.readBytes(null);
byte[] array = buffer.array();
if (array != null && array.length > 0) {
if (array.length > 0) {
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This cannot be null: ByteBuffer.array() never returns null — it either returns the backing array or throws, so the null check is dead code.

bg = new BigInteger(array);
}

Expand Down
12 changes: 6 additions & 6 deletions lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
Original file line number Diff line number Diff line change
Expand Up @@ -150,9 +150,9 @@ private Null() {
// Also, we only ever ADD to the collection, never changing a value, so
// putIfAbsent is the
// only modifier
private ConcurrentMap<String, JsonNode> props = new ConcurrentHashMap<String, JsonNode>() {
private final ConcurrentMap<String, JsonNode> props = new ConcurrentHashMap<>() {
private static final long serialVersionUID = 1L;
private Queue<MapEntry<String, JsonNode>> propOrder = new ConcurrentLinkedQueue<>();
private final Queue<MapEntry<String, JsonNode>> propOrder = new ConcurrentLinkedQueue<>();

@Override
public JsonNode putIfAbsent(String key, JsonNode value) {
Expand All @@ -170,10 +170,10 @@ public JsonNode put(String key, JsonNode value) {

@Override
public Set<Map.Entry<String, JsonNode>> entrySet() {
return new AbstractSet<Map.Entry<String, JsonNode>>() {
return new AbstractSet<>() {
@Override
public Iterator<Map.Entry<String, JsonNode>> iterator() {
return new Iterator<Map.Entry<String, JsonNode>>() {
return new Iterator<>() {
Iterator<MapEntry<String, JsonNode>> it = propOrder.iterator();

@Override
Expand All @@ -196,7 +196,7 @@ public int size() {
}
};

private Set<String> reserved;
private final Set<String> reserved;

JsonProperties(Set<String> reserved) {
this.reserved = reserved;
Expand All @@ -206,7 +206,7 @@ public int size() {
this.reserved = reserved;
for (Entry<String, ?> a : propMap.entrySet()) {
Object v = a.getValue();
JsonNode json = null;
JsonNode json;
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This local is always assigned in every branch before it is read, so the null initializer is redundant.

if (v instanceof String) {
json = TextNode.valueOf((String) v);
} else if (v instanceof JsonNode) {
Expand Down
11 changes: 4 additions & 7 deletions lang/java/avro/src/main/java/org/apache/avro/Protocol.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
Expand Down Expand Up @@ -71,11 +70,9 @@ public class Protocol extends JsonProperties {
public static final long VERSION = 1;

// Support properties for both Protocol and Message objects
private static final Set<String> MESSAGE_RESERVED = Collections
.unmodifiableSet(new HashSet<>(Arrays.asList("doc", "response", "request", "errors", "one-way")));
private static final Set<String> MESSAGE_RESERVED = Set.of("doc", "response", "request", "errors", "one-way");
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Use the Set.of(...) factory method (Java 9+) instead of Collections.unmodifiableSet(new HashSet<>(Arrays.asList(...))).


private static final Set<String> FIELD_RESERVED = Collections
.unmodifiableSet(new HashSet<>(Arrays.asList("name", "type", "doc", "default", "aliases")));
private static final Set<String> FIELD_RESERVED = Set.of("name", "type", "doc", "default", "aliases");

/** A protocol message. */
public class Message extends JsonProperties {
Expand Down Expand Up @@ -255,8 +252,8 @@ void toJson1(Set<String> knownNames, JsonGenerator gen) throws IOException {
/** Union type for generating system errors. */
public static final Schema SYSTEM_ERRORS = Schema.createUnion(Collections.singletonList(SYSTEM_ERROR));

private static final Set<String> PROTOCOL_RESERVED = Collections
.unmodifiableSet(new HashSet<>(Arrays.asList("namespace", "protocol", "doc", "messages", "types", "errors")));
private static final Set<String> PROTOCOL_RESERVED = Set.of("namespace", "protocol", "doc", "messages", "types",
"errors");

private Protocol() {
super(PROTOCOL_RESERVED);
Expand Down
2 changes: 1 addition & 1 deletion lang/java/avro/src/main/java/org/apache/avro/Resolver.java
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,7 @@ public static class RecordAdjust extends Action {
* fields that will be read from the writer: these <i>n</i> are in the order
* dictated by writer's schema. The remaining <i>m</i> fields will be read from
* default values (actions for these default values are found in
* {@link RecordAdjust#defaults}.
* {@link RecordAdjust#defaults}).
*/
public final Field[] readerOrder;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ public ByteBuffer decompress(ByteBuffer compressedData) throws IOException {

try (BZip2CompressorInputStream inputStream = new BZip2CompressorInputStream(bais)) {

int readCount = -1;
int readCount;
while ((readCount = inputStream.read(buffer, compressedData.position(), buffer.length)) > 0) {
baos.write(buffer, 0, readCount);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ private DataFileConstants() {

public static final byte VERSION = 1;
public static final byte[] MAGIC = new byte[] { (byte) 'O', (byte) 'b', (byte) 'j', VERSION };
public static final long FOOTER_BLOCK = -1;
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Never used

public static final int SYNC_SIZE = 16;
public static final int DEFAULT_SYNC_INTERVAL = 4000 * SYNC_SIZE;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ private Header() {
}
}

private DatumReader<D> reader;
private final DatumReader<D> reader;
private long blockSize;
private boolean availableBlock = false;
private Header header;
Expand Down Expand Up @@ -94,7 +94,7 @@ public DataFileStream(InputStream in, DatumReader<D> reader) throws IOException
/**
* create an uninitialized DataFileStream
*/
protected DataFileStream(DatumReader<D> reader) throws IOException {
protected DataFileStream(DatumReader<D> reader) {
this.reader = reader;
}

Expand Down Expand Up @@ -147,7 +147,7 @@ void initialize(InputStream in, byte[] magic) throws IOException {
}

/** Initialize the stream without reading from it. */
void initialize(Header header) throws IOException {
void initialize(Header header) {
this.header = header;
this.codec = resolveCodec();
reader.setSchema(header.schema);
Expand Down Expand Up @@ -303,7 +303,7 @@ boolean hasNextBlock() {
blockRemaining = vin.readLong(); // read block count
blockSize = vin.readLong(); // read block size
if (blockSize > Integer.MAX_VALUE || blockSize < 0) {
throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize);
throw new IOException("Block size invalid or too large for this implementation: " + blockSize);
}
blockCount = blockRemaining;
availableBlock = true;
Expand Down Expand Up @@ -366,22 +366,6 @@ private DataBlock(long numEntries, int blockSize) {
this.numEntries = numEntries;
}

byte[] getData() {
return data;
}

long getNumEntries() {
return numEntries;
}

int getBlockSize() {
return blockSize;
}

boolean isFlushOnWrite() {
return flushOnWrite;
}

Comment on lines -369 to -384
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Never used

void setFlushOnWrite(boolean flushOnWrite) {
this.flushOnWrite = flushOnWrite;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.NonCopyingByteArrayOutputStream;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.IOUtils;

import java.io.BufferedOutputStream;
import java.io.Closeable;
Expand Down Expand Up @@ -56,7 +56,7 @@
*/
public class DataFileWriter<D> implements Closeable, Flushable {
private Schema schema;
private DatumWriter<D> dout;
private final DatumWriter<D> dout;

private OutputStream underlyingStream;

Expand Down Expand Up @@ -117,11 +117,10 @@ public DataFileWriter<D> setCodec(CodecFactory c) {
* is written. In this case, the {@linkplain #flush()} must be called to flush
* the stream.
*
* Invalid values throw IllegalArgumentException
*
* @param syncInterval the approximate number of uncompressed bytes to write in
* each block
* @return this DataFileWriter
* @throws IllegalArgumentException if syncInterval is invalid
*/
public DataFileWriter<D> setSyncInterval(int syncInterval) {
if (syncInterval < 32 || syncInterval > (1 << 30)) {
Expand Down Expand Up @@ -193,7 +192,7 @@ public DataFileWriter<D> create(Schema schema, OutputStream outs, byte[] sync) t
* Set whether this writer should flush the block to the stream every time a
* sync marker is written. By default, the writer will flush the buffer each
* time a sync marker is written (if the block size limit is reached or the
* {@linkplain #sync()} is called.
* {@linkplain #sync()} is called).
*
* @param flushOnEveryBlock - If set to false, this writer will not flush the
* block to the stream until {@linkplain #flush()} is
Expand Down Expand Up @@ -475,11 +474,11 @@ public void close() throws IOException {
}
}

private class BufferedFileOutputStream extends BufferedOutputStream {
private static class BufferedFileOutputStream extends BufferedOutputStream {
private long position; // start of buffer

private class PositionFilter extends FilterOutputStream {
public PositionFilter(OutputStream out) throws IOException {
public PositionFilter(OutputStream out) {
super(out);
}

Expand All @@ -490,7 +489,7 @@ public void write(byte[] b, int off, int len) throws IOException {
}
}

public BufferedFileOutputStream(OutputStream out) throws IOException {
public BufferedFileOutputStream(OutputStream out) {
super(null);
this.out = new PositionFilter(out);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ public class DeflateCodec extends Codec {
private static final int DEFAULT_BUFFER_SIZE = 8192;

static class Option extends CodecFactory {
private int compressionLevel;
private final int compressionLevel;

Option(int compressionLevel) {
this.compressionLevel = compressionLevel;
Expand All @@ -55,8 +55,8 @@ protected Codec createInstance() {
private Deflater deflater;
private Inflater inflater;
// currently only do 'nowrap' -- RFC 1951, not zlib
private boolean nowrap = true;
private int compressionLevel;
private final boolean nowrap = true;
private final int compressionLevel;

public DeflateCodec(int compressionLevel) {
this.compressionLevel = compressionLevel;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,15 +31,15 @@ public interface FileReader<D> extends Iterator<D>, Iterable<D>, Closeable {

/**
* Read the next datum from the file.
*
*
* @param reuse an instance to reuse.
* @throws NoSuchElementException if no more remain in the file.
*/
D next(D reuse) throws IOException;

/**
* Move to the next synchronization point after a position. To process a range
* of file entires, call this with the starting position, then check
* of file entries, call this with the starting position, then check
* {@link #pastSync(long)} with the end point before each call to
* {@link #next()}.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@

/** * Implements Snappy compression and decompression. */
public class SnappyCodec extends Codec {
private CRC32 crc32 = new CRC32();
private final CRC32 crc32 = new CRC32();

static class Option extends CodecFactory {
static {
Expand Down Expand Up @@ -72,7 +72,7 @@ public ByteBuffer decompress(ByteBuffer in) throws IOException {

crc32.reset();
crc32.update(out.array(), 0, size);
if (in.getInt(((Buffer) in).limit() - 4) != (int) crc32.getValue())
if (in.getInt(in.limit() - 4) != (int) crc32.getValue())
throw new IOException("Checksum failure");

return out;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,14 @@
import org.apache.avro.util.NonCopyingByteArrayOutputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
import org.apache.commons.compress.utils.IOUtils;
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Deprecated


/** * Implements xz compression and decompression. */
public class XZCodec extends Codec {
public final static int DEFAULT_COMPRESSION = 6;
private static final int DEFAULT_BUFFER_SIZE = 8192;

static class Option extends CodecFactory {
private int compressionLevel;
private final int compressionLevel;

Option(int compressionLevel) {
this.compressionLevel = compressionLevel;
Expand All @@ -46,7 +45,7 @@ protected Codec createInstance() {
}
}

private int compressionLevel;
private final int compressionLevel;

public XZCodec(int compressionLevel) {
this.compressionLevel = compressionLevel;
Expand All @@ -72,7 +71,7 @@ public ByteBuffer decompress(ByteBuffer data) throws IOException {
InputStream bytesIn = new ByteArrayInputStream(data.array(), computeOffset(data), data.remaining());

try (InputStream ios = new XZCompressorInputStream(bytesIn)) {
IOUtils.copy(ios, baos);
ios.transferTo(baos);
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Uses InputStream.transferTo(OutputStream), available since JDK 9, instead of the deprecated commons-compress IOUtils.copy.

}
return baos.asByteBuffer();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import java.nio.ByteBuffer;

import org.apache.avro.util.NonCopyingByteArrayOutputStream;
import org.apache.commons.compress.utils.IOUtils;

public class ZstandardCodec extends Codec {
public final static int DEFAULT_COMPRESSION = 3;
Expand Down Expand Up @@ -82,7 +81,7 @@ public ByteBuffer decompress(ByteBuffer compressedData) throws IOException {
InputStream bytesIn = new ByteArrayInputStream(compressedData.array(), computeOffset(compressedData),
compressedData.remaining());
try (InputStream ios = ZstandardLoader.input(bytesIn, useBufferPool)) {
IOUtils.copy(ios, baos);
ios.transferTo(baos);
}
return baos.asByteBuffer();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,9 +137,9 @@ private void loadConversions() {
}
}

private Map<String, Conversion<?>> conversions = new HashMap<>();
private final Map<String, Conversion<?>> conversions = new HashMap<>();

private Map<Class<?>, Map<String, Conversion<?>>> conversionsByClass = new IdentityHashMap<>();
private final Map<Class<?>, Map<String, Conversion<?>>> conversionsByClass = new IdentityHashMap<>();

public Collection<Conversion<?>> getConversions() {
return conversions.values();
Expand Down Expand Up @@ -364,7 +364,7 @@ public int hashCode() {

@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
return new Iterator<>() {
private int position = 0;

@Override
Expand Down Expand Up @@ -551,8 +551,8 @@ public int compareTo(Fixed that) {

/** Default implementation of {@link GenericEnumSymbol}. */
public static class EnumSymbol implements GenericEnumSymbol<EnumSymbol> {
private Schema schema;
private String symbol;
private final Schema schema;
private final String symbol;

public EnumSymbol(Schema schema, String symbol) {
this.schema = schema;
Expand Down Expand Up @@ -1259,9 +1259,7 @@ protected int compareMaps(final Map<?, ?> m1, final Map<?, ?> m2) {
}
}
}
} catch (ClassCastException unused) {
return 1;
} catch (NullPointerException unused) {
} catch (ClassCastException | NullPointerException unused) {
return 1;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -519,7 +519,7 @@ public int hashCode() {

@Override
public boolean equals(Object obj) {
if (obj == null || !(obj instanceof GenericDatumReader.IdentitySchemaKey)) {
if (!(obj instanceof GenericDatumReader.IdentitySchemaKey)) {
return false;
}
IdentitySchemaKey key = (IdentitySchemaKey) obj;
Expand Down
Loading
Loading