Commit

Merge branch 'add-decode-type-to-transaction-decoder' of https://github.com/Gabriel-Trintinalia/besu into add-decode-type-to-transaction-decoder
Gabriel-Trintinalia committed Sep 4, 2023
2 parents 0d62138 + 25a78ed commit ea4651d
Showing 8 changed files with 95 additions and 9 deletions.
CHANGELOG.md (14 additions, 1 deletion)
@@ -1,5 +1,19 @@
# Changelog

## 23.7.3
### Additions and Improvements

### Breaking Changes
- Removed support for Kotti network (ETC) [#5816](https://github.com/hyperledger/besu/pull/5816)

### Additions and Improvements

### Bug Fixes
- do not create ignorable storage on revert storage-variables subcommand [#5830](https://github.com/hyperledger/besu/pull/5830)

### Download Links


## 23.7.2

### Additions and Improvements
@@ -30,7 +44,6 @@
### Breaking Changes
- Removed deprecated GoQuorum permissioning interop [#5607](https://github.com/hyperledger/besu/pull/5607)
- Removed support for version 0 of the database as it is no longer used by any active node. [#5698](https://github.com/hyperledger/besu/pull/5698)
- Removed support for Kotti network (ETC) [#5816](https://github.com/hyperledger/besu/pull/5816)

### Additions and Improvements
- `evmtool` launcher binaries now ship as part of the standard distribution. [#5701](https://github.com/hyperledger/besu/pull/5701)
besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java (2 additions, 1 deletion)
@@ -3489,7 +3489,8 @@ private void setMergeConfigOptions() {
getActualGenesisConfigOptions().getTerminalTotalDifficulty().isPresent());
}

private void setIgnorableStorageSegments() {
/** Set ignorable segments in RocksDB Storage Provider plugin. */
public void setIgnorableStorageSegments() {
if (!unstableChainPruningOptions.getChainDataPruningEnabled()) {
rocksDBPlugin.addIgnorableSegmentIdentifier(KeyValueSegmentIdentifier.CHAIN_PRUNER_STATE);
}
@@ -99,6 +99,8 @@ public void run() {
}

private StorageProvider getStorageProvider() {
// init collection of ignorable segments
parentCommand.parentCommand.setIgnorableStorageSegments();
return parentCommand.parentCommand.getStorageProvider();
}

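For context on the bug fix above ("do not create ignorable storage on revert storage-variables subcommand"), here is a minimal, self-contained sketch of the intended behaviour. It uses toy classes, not Besu's real storage API: segments registered as ignorable before the store is opened are not created when missing, while column families that already exist on disk are still opened (as the storage tests further down also assert). The segment names are illustrative.

```java
import java.util.HashSet;
import java.util.Set;

// Toy model only (not Besu's API): why setIgnorableStorageSegments() must run
// before the storage provider is built in the revert-variables subcommand.
public class IgnorableSegmentsSketch {
  private static final Set<String> IGNORABLE = new HashSet<>();

  // Mirrors the intent of BesuCommand#setIgnorableStorageSegments: when chain
  // pruning is disabled, the pruner's segment should not be created.
  static void setIgnorableStorageSegments(final boolean chainPruningEnabled) {
    if (!chainPruningEnabled) {
      IGNORABLE.add("CHAIN_PRUNER_STATE");
    }
  }

  // Column families that already exist are always opened; missing ones are
  // created only if they are not marked ignorable.
  static Set<String> open(final Set<String> requested, final Set<String> existing) {
    final Set<String> opened = new HashSet<>(existing);
    for (final String segment : requested) {
      if (!IGNORABLE.contains(segment)) {
        opened.add(segment);
      }
    }
    return opened;
  }

  public static void main(final String[] args) {
    setIgnorableStorageSegments(false); // register ignorable segments first...
    // ...then open storage: CHAIN_PRUNER_STATE is skipped rather than created.
    System.out.println(open(Set.of("BLOCKCHAIN", "CHAIN_PRUNER_STATE"), Set.of("BLOCKCHAIN")));
  }
}
```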
@@ -1095,10 +1095,26 @@ public String toString() {
sb.append("value=").append(getValue()).append(", ");
sb.append("sig=").append(getSignature()).append(", ");
if (chainId.isPresent()) sb.append("chainId=").append(getChainId().get()).append(", ");
sb.append("payload=").append(getPayload());
if (transactionType.equals(TransactionType.ACCESS_LIST)) {
sb.append(", ").append("accessList=").append(maybeAccessList);
sb.append("accessList=").append(maybeAccessList).append(", ");
}
if (versionedHashes.isPresent()) {
final List<VersionedHash> vhs = versionedHashes.get();
if (!vhs.isEmpty()) {
sb.append("versionedHashes=[");
sb.append(
vhs.get(0)
.toString()); // can't be empty if present, as this is checked in the constructor
for (int i = 1; i < vhs.size(); i++) {
sb.append(", ").append(vhs.get(i).toString());
}
sb.append("], ");
}
}
if (transactionType.supportsBlob() && this.blobsWithCommitments.isPresent()) {
sb.append("numberOfBlobs=").append(blobsWithCommitments.get().getBlobs().size()).append(", ");
}
sb.append("payload=").append(getPayload());
return sb.append("}").toString();
}

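As a rough illustration of the new toString ordering (access list, then versioned hashes, then blob count, with the payload appended last), here is a tiny self-contained snippet; the values are invented, and the real Transaction#toString also prints nonce, gas fields, signature, and more.

```java
// Illustrative only: approximates the tail of Transaction#toString for a
// hypothetical blob transaction after this change. All values are made up.
public class ToStringOrderingExample {
  public static void main(final String[] args) {
    final StringBuilder sb = new StringBuilder("Transaction{");
    sb.append("chainId=1, ");
    sb.append("versionedHashes=[0x01aa..., 0x01bb...], "); // only when present and non-empty
    sb.append("numberOfBlobs=2, ");                        // only for blob-capable types
    sb.append("payload=0x");                               // payload is now appended last
    System.out.println(sb.append("}"));
  }
}
```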
@@ -63,7 +63,7 @@ public OptimisticRocksDBColumnarKeyValueStorage(
initColumnHandles();

} catch (final RocksDBException e) {
throw new StorageException(e);
throw parseRocksDBException(e, segments, ignorableSegments);
}
}

@@ -40,7 +40,10 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;

import com.google.common.base.Splitter;
import com.google.common.collect.Streams;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.tuweni.bytes.Bytes;
import org.rocksdb.BlockBasedTableConfig;
import org.rocksdb.BloomFilter;
import org.rocksdb.ColumnFamilyDescriptor;
@@ -161,7 +164,52 @@ public RocksDBColumnarKeyValueStorage(
txOptions = new TransactionDBOptions();
columnHandles = new ArrayList<>(columnDescriptors.size());
} catch (RocksDBException e) {
throw new StorageException(e);
throw parseRocksDBException(e, defaultSegments, ignorableSegments);
}
}

/**
* Parse RocksDBException and wrap in StorageException
*
* @param ex RocksDBException
* @param defaultSegments segments requested to open
* @param ignorableSegments segments which are ignorable if not present
* @return StorageException wrapping the RocksDB Exception
*/
protected static StorageException parseRocksDBException(
final RocksDBException ex,
final List<SegmentIdentifier> defaultSegments,
final List<SegmentIdentifier> ignorableSegments) {
String message = ex.getMessage();
List<SegmentIdentifier> knownSegments =
Streams.concat(defaultSegments.stream(), ignorableSegments.stream()).distinct().toList();

// parse out unprintable segment names for a more useful exception:
String columnExceptionMessagePrefix = "Column families not opened: ";
if (message.contains(columnExceptionMessagePrefix)) {
String substring = message.substring(message.indexOf(": ") + 2);

List<String> unHandledSegments = new ArrayList<>();
Splitter.on(", ")
.splitToStream(substring)
.forEach(
part -> {
byte[] bytes = part.getBytes(StandardCharsets.UTF_8);
unHandledSegments.add(
knownSegments.stream()
.filter(seg -> Arrays.equals(seg.getId(), bytes))
.findFirst()
.map(seg -> new SegmentRecord(seg.getName(), seg.getId()))
.orElse(new SegmentRecord(part, bytes))
.forDisplay());
});

return new StorageException(
"RocksDBException: Unhandled column families: ["
+ unHandledSegments.stream().collect(Collectors.joining(", "))
+ "]");
} else {
return new StorageException(ex);
}
}

@@ -356,4 +404,10 @@ void throwIfClosed() {
}

abstract RocksDB getDB();

record SegmentRecord(String name, byte[] id) {
public String forDisplay() {
return String.format("'%s'(%s)", name, Bytes.of(id).toHexString());
}
}
}
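To make the improved error message concrete, here is a minimal standalone sketch of SegmentRecord#forDisplay; the segment name and id below are hypothetical, the real values come from the SegmentIdentifier implementations.

```java
import org.apache.tuweni.bytes.Bytes;

// Standalone copy of the record for illustration; name and id are hypothetical.
public class SegmentRecordExample {
  record SegmentRecord(String name, byte[] id) {
    public String forDisplay() {
      return String.format("'%s'(%s)", name, Bytes.of(id).toHexString());
    }
  }

  public static void main(final String[] args) {
    final SegmentRecord record = new SegmentRecord("CHAIN_PRUNER_STATE", new byte[] {0x11});
    System.out.println(record.forDisplay()); // prints: 'CHAIN_PRUNER_STATE'(0x11)
    // A failed open would then report something like:
    // RocksDBException: Unhandled column families: ['CHAIN_PRUNER_STATE'(0x11)]
  }
}
```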
@@ -66,7 +66,7 @@ public TransactionDBRocksDBColumnarKeyValueStorage(
initColumnHandles();

} catch (final RocksDBException e) {
throw new StorageException(e);
throw parseRocksDBException(e, segments, ignorableSegments);
}
}

@@ -222,7 +222,7 @@ public void dbShouldNotIgnoreExperimentalSegmentsIfExisted(@TempDir final Path t
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
fail("DB without knowledge of experimental column family should fail");
} catch (StorageException e) {
assertThat(e.getMessage()).contains("Column families not opened");
assertThat(e.getMessage()).contains("Unhandled column families");
}

// Even if the column family is marked as ignored, as long as it exists, it will not be ignored
@@ -265,7 +265,7 @@ public void dbWillBeBackwardIncompatibleAfterExperimentalSegmentsAreAdded(
createSegmentedStore(testPath, Arrays.asList(TestSegment.FOO, TestSegment.BAR), List.of());
fail("DB without knowledge of experimental column family should fail");
} catch (StorageException e) {
assertThat(e.getMessage()).contains("Column families not opened");
assertThat(e.getMessage()).contains("Unhandled column families");
}
}

