From 24602b01ace54c1853bd76dca740f20bd8121ec4 Mon Sep 17 00:00:00 2001 From: galrogo Date: Sun, 14 Jan 2018 16:13:22 +0200 Subject: [PATCH 01/45] create safeutils --- src/main/java/com/iota/iri/utils/SafeUtils.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 src/main/java/com/iota/iri/utils/SafeUtils.java diff --git a/src/main/java/com/iota/iri/utils/SafeUtils.java b/src/main/java/com/iota/iri/utils/SafeUtils.java new file mode 100644 index 0000000000..25e229cfdf --- /dev/null +++ b/src/main/java/com/iota/iri/utils/SafeUtils.java @@ -0,0 +1,15 @@ +package com.iota.iri.utils; + +import java.util.Collection; +import java.util.stream.Stream; + + +/** + * Null safe utils + */ +public class SafeUtils { + + public static boolean isContaining(Collection collection, T element) { + return collection != null && element != null && collection.contains(element); + } +} From ceada96db9c4aae20dedddc7fcdad7900f708237 Mon Sep 17 00:00:00 2001 From: galrogo Date: Mon, 15 Jan 2018 18:16:38 +0200 Subject: [PATCH 02/45] create BoundedHashSet --- .../collections/impl/BoundedHashSet.java | 94 +++++++++++++++++++ .../interfaces/BoundedCollection.java | 39 ++++++++ .../collections/interfaces/BoundedSet.java | 11 +++ .../iota/iri/utils/BoundedHashSetTest.java | 39 ++++++++ 4 files changed, 183 insertions(+) create mode 100644 src/main/java/com/iota/iri/utils/collections/impl/BoundedHashSet.java create mode 100644 src/main/java/com/iota/iri/utils/collections/interfaces/BoundedCollection.java create mode 100644 src/main/java/com/iota/iri/utils/collections/interfaces/BoundedSet.java create mode 100644 src/test/java/com/iota/iri/utils/BoundedHashSetTest.java diff --git a/src/main/java/com/iota/iri/utils/collections/impl/BoundedHashSet.java b/src/main/java/com/iota/iri/utils/collections/impl/BoundedHashSet.java new file mode 100644 index 0000000000..9e6280ebec --- /dev/null +++ b/src/main/java/com/iota/iri/utils/collections/impl/BoundedHashSet.java @@ -0,0 +1,94 @@ 
+package com.iota.iri.utils.collections.impl; + +import com.iota.iri.utils.collections.interfaces.BoundedSet; + +import java.util.Collection; +import java.util.HashSet; +import java.util.stream.Collectors; + + +/** + * A set that doesn't allow to add elements to it once it is full + * + * @param the type parameter + */ +public class BoundedHashSet extends HashSet implements BoundedSet{ + final private int maxSize; + + /** + * Instantiates a new Bounded hash set. + * + * @param initialCapacity the initial capacity + * @param loadFactor the load factor of the hashmap + * @param maxSize the max size + */ + public BoundedHashSet(int initialCapacity, float loadFactor, int maxSize) { + super(initialCapacity, loadFactor); + this.maxSize = maxSize; + } + + /** + * Instantiates a new Bounded hash set. + * + * @param initialCapacity the initial capacity + * @param maxSize the max size + */ + public BoundedHashSet(int initialCapacity, int maxSize) { + super(initialCapacity); + this.maxSize = maxSize; + } + + /** + * Instantiates a new Bounded hash set. + * + * @param maxSize the max size + */ + public BoundedHashSet(int maxSize) { + super(); + this.maxSize = maxSize; + } + + /** + * Instantiates a new Bounded hash set. 
+ * + * @param c the collection from which you create the set from + * @param maxSize the max size + * @throws NullPointerException if the specified collection is null + */ + public BoundedHashSet(Collection c, int maxSize) { + this(maxSize); + c = c.stream() + .limit(maxSize) + .collect(Collectors.toSet()); + this.addAll(c); + } + + @Override + public int getMaxSize() { + return maxSize; + } + + @Override + public boolean add(E e) { + if (isFull()) { + return false; + } + + return super.add(e); + } + + @Override + public boolean addAll(Collection c) { + if (isFull()) { + return false; + } + + if (!canCollectionBeFullyAdded(c)) { + int remainingSize = getMaxSize() - this.size(); + c = c.stream() + .limit(remainingSize) + .collect(Collectors.toSet()); + } + return super.addAll(c); + } +} diff --git a/src/main/java/com/iota/iri/utils/collections/interfaces/BoundedCollection.java b/src/main/java/com/iota/iri/utils/collections/interfaces/BoundedCollection.java new file mode 100644 index 0000000000..3ace00162a --- /dev/null +++ b/src/main/java/com/iota/iri/utils/collections/interfaces/BoundedCollection.java @@ -0,0 +1,39 @@ +package com.iota.iri.utils.collections.interfaces; + +import java.util.Collection; + +/** + * A collection that can't hold more than {@link #getMaxSize()} elements + * + * @author galrogo on 08/02/18 + **/ +public interface BoundedCollection extends Collection { + + /** + * + * @return the maximal number of elements that the collection cha hold + */ + int getMaxSize(); + + /** + * @return true if no more elements can be added + */ + default boolean isFull() { + return getMaxSize() <= this.size(); + } + + /** + * + * @param c collection to be added + * @return true only if all the elements in {@code c} can be added to this collection + * else return false + */ + default boolean canCollectionBeFullyAdded(Collection c) { + if (isFull()) { + return false; + } + + int remainingSize = getMaxSize() - this.size(); + return (c.size() <= remainingSize); + } 
+} diff --git a/src/main/java/com/iota/iri/utils/collections/interfaces/BoundedSet.java b/src/main/java/com/iota/iri/utils/collections/interfaces/BoundedSet.java new file mode 100644 index 0000000000..277a2910b0 --- /dev/null +++ b/src/main/java/com/iota/iri/utils/collections/interfaces/BoundedSet.java @@ -0,0 +1,11 @@ +package com.iota.iri.utils.collections.interfaces; + +import java.util.Set; + +/** + * A set that can't hold more than {@link #getMaxSize()} elements + * + * @author galrogo on 08/02/18 + **/ +public interface BoundedSet extends BoundedCollection, Set{ +} diff --git a/src/test/java/com/iota/iri/utils/BoundedHashSetTest.java b/src/test/java/com/iota/iri/utils/BoundedHashSetTest.java new file mode 100644 index 0000000000..76760f6b03 --- /dev/null +++ b/src/test/java/com/iota/iri/utils/BoundedHashSetTest.java @@ -0,0 +1,39 @@ +package com.iota.iri.utils; + +import com.iota.iri.utils.collections.impl.BoundedHashSet; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; + +public class BoundedHashSetTest { + + @Test + public void createBoundedHashSetWithCollectionTest() { + List list = Arrays.asList(1, 2 ,3, 4, 5, 6); + BoundedHashSet boundedSet = new BoundedHashSet<>(list, 4); + Assert.assertEquals(new HashSet<>(Arrays.asList(1, 2, 3, 4)), boundedSet); + } + + @Test + public void testAdd() { + BoundedHashSet boundedSet = new BoundedHashSet<>(3); + Assert.assertTrue("can't add to unfull set", boundedSet.add(1)); + Assert.assertTrue("can't add to unfull set", boundedSet.add(2)); + Assert.assertTrue("can't add to unfull set", boundedSet.add(3)); + Assert.assertFalse("can add to full set", boundedSet.add(4)); + Assert.assertEquals("bounded set doesn't have expected contents", + new HashSet<>(Arrays.asList(1, 2, 3)), boundedSet); + } + + @Test + public void testAddAll() { + BoundedHashSet boundedSet = new BoundedHashSet<>(3); + Assert.assertTrue("set did not change after add", 
boundedSet.addAll(Arrays.asList(5, 6, 7, 8, 9))); + Assert.assertEquals("bounded set doesn't have expected contents", + new HashSet<>(Arrays.asList(5, 6, 7)), boundedSet); + } + +} From 22585aa2588591c5c73fbd85301a1ee4f599ca98 Mon Sep 17 00:00:00 2001 From: galrogo Date: Tue, 23 Jan 2018 18:41:05 +0200 Subject: [PATCH 03/45] add apache-commons-collections4 --- pom.xml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index ae39c977c9..df6e60973e 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,14 @@ 3.5 - + + + org.apache.commons + commons-collections4 + 4.1 + + + org.slf4j slf4j-api From 2de9dbae01c86d894234b96c20f5da553b73c1e2 Mon Sep 17 00:00:00 2001 From: galrogo Date: Wed, 31 Jan 2018 14:51:43 +0200 Subject: [PATCH 04/45] Transaction View Model Implements Equals and Hash Code --- .../iri/controllers/TransactionViewModel.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/main/java/com/iota/iri/controllers/TransactionViewModel.java b/src/main/java/com/iota/iri/controllers/TransactionViewModel.java index 4af504de05..9f4f707459 100644 --- a/src/main/java/com/iota/iri/controllers/TransactionViewModel.java +++ b/src/main/java/com/iota/iri/controllers/TransactionViewModel.java @@ -438,4 +438,21 @@ public void updateSender(String sender) throws Exception { public String getSender() { return transaction.sender; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TransactionViewModel other = (TransactionViewModel) o; + return Objects.equals(getHash(), other.getHash()); + } + + @Override + public int hashCode() { + return Objects.hash(getHash()); + } } From b43446c9e36f74e758aac9ae198e203091968fd9 Mon Sep 17 00:00:00 2001 From: galrogo Date: Thu, 11 Jan 2018 18:49:33 +0200 Subject: [PATCH 05/45] Tip Selection - calculating cumulative weight while being memory efficient --- 
.../com/iota/iri/service/TipsManager.java | 230 ++++++++++++------ 1 file changed, 159 insertions(+), 71 deletions(-) diff --git a/src/main/java/com/iota/iri/service/TipsManager.java b/src/main/java/com/iota/iri/service/TipsManager.java index 31968a58f8..3ce2574c84 100644 --- a/src/main/java/com/iota/iri/service/TipsManager.java +++ b/src/main/java/com/iota/iri/service/TipsManager.java @@ -3,19 +3,26 @@ import java.util.*; import com.iota.iri.LedgerValidator; -import com.iota.iri.Snapshot; +import com.iota.iri.Milestone; import com.iota.iri.TransactionValidator; +import com.iota.iri.controllers.ApproveeViewModel; +import com.iota.iri.controllers.MilestoneViewModel; +import com.iota.iri.controllers.TipsViewModel; +import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.model.Hash; -import com.iota.iri.controllers.*; import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.collections.BoundedSetValuedHashMap; import com.iota.iri.zmq.MessageQ; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.multimap.AbstractSetValuedMap; +import org.apache.commons.lang3.ObjectUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.iota.iri.Milestone; - public class TipsManager { + public static final int MAX_ANCESTORS_SIZE = 10000; + private final Logger log = LoggerFactory.getLogger(TipsManager.class); private final Tangle tangle; private final TipsViewModel tipsViewModel; @@ -113,17 +120,16 @@ Hash transactionToApprove(final Set visitedHashes, final Map d if (milestone.latestSolidSubtangleMilestoneIndex > Milestone.MILESTONE_START_INDEX || milestone.latestMilestoneIndex == Milestone.MILESTONE_START_INDEX) { - Map ratings = new HashMap<>(); Set analyzedTips = new HashSet<>(); Set maxDepthOk = new HashSet<>(); try { Hash tip = entryPoint(reference, extraTip, depth); - serialUpdateRatings(visitedHashes, tip, ratings, analyzedTips, extraTip); + Map cumulativeWeights = 
calculateCumulativeWeight(visitedHashes, tip, + extraTip != null, new HashSet<>()); analyzedTips.clear(); if (ledgerValidator.updateDiff(visitedHashes, diff, tip)) { - return markovChainMonteCarlo(visitedHashes, diff, tip, extraTip, ratings, iterations, milestone.latestSolidSubtangleMilestoneIndex - depth * 2, maxDepthOk, seed); - } - else { + return markovChainMonteCarlo(visitedHashes, diff, tip, extraTip, cumulativeWeights, iterations, milestone.latestSolidSubtangleMilestoneIndex - depth * 2, maxDepthOk, seed); + } else { throw new RuntimeException("starting tip failed consistency check: " + tip.toString()); } } catch (Exception e) { @@ -154,12 +160,14 @@ Hash entryPoint(final Hash reference, final Hash extraTip, final int depth) thro return milestone.latestSolidSubtangleMilestone; } - Hash markovChainMonteCarlo(final Set visitedHashes, final Map diff, final Hash tip, final Hash extraTip, final Map ratings, final int iterations, final int maxDepth, final Set maxDepthOk, final Random seed) throws Exception { + Hash markovChainMonteCarlo(final Set visitedHashes, final Map diff, Hash tip, Hash extraTip, Map cumulativeWeight, + int iterations, int maxDepth, Set maxDepthOk, Random seed) throws Exception { Map monteCarloIntegrations = new HashMap<>(); Hash tail; - for (int i = iterations; i-- > 0; ) { - tail = randomWalk(visitedHashes, diff, tip, extraTip, ratings, maxDepth, maxDepthOk, seed); - if (monteCarloIntegrations.containsKey(tail)) { + for(int i = iterations; i-- > 0; ) { + tail = randomWalk(visitedHashes, diff, tip, extraTip, cumulativeWeight, + maxDepth, maxDepthOk, seed); + if(monteCarloIntegrations.containsKey(tail)) { monteCarloIntegrations.put(tail, monteCarloIntegrations.get(tail) + 1); } else { @@ -182,7 +190,25 @@ else if (seed.nextBoolean()) { }).map(Map.Entry::getKey).orElse(null); } - Hash randomWalk(final Set visitedHashes, final Map diff, final Hash start, final Hash extraTip, final Map ratings, final int maxDepth, final Set maxDepthOk, Random 
rnd) throws Exception { + /** + * Performs a walk from {@code start} until you reach a tip or {@code extraTip}. The path depends of the values + * of transaction weights given in {@code cumulativeWeights}. If a tx weight is missing, then calculate it on + * the fly. + * + * @param visitedHashes hashes of transactions that were validated and their weights can be disregarded when we have + * {@code extraTip} is not {@code null}. + * @param diff map of address to change in balance since last snapshot. + * @param start hash of the transaction that starts the walk. + * @param extraTip an extra ending point for the walk. If not null the walk will ignore the weights of + * {@code visitedHashes}. + * @param cumulativeWeights maps transaction hashes to weights. Missing data is computed by this method. + * @param maxDepth the transactions we are traversing may not be below this depth measured in number of snapshots. + * @param maxDepthOk transaction hashes that we know are not below {@code maxDepth} + * @param rnd generates random doubles to make the walk less deterministic + * @return a tip's hash + * @throws Exception + */ + Hash randomWalk(final Set visitedHashes, final Map diff, final Hash start, final Hash extraTip, final Map cumulativeWeights, final int maxDepth, final Set maxDepthOk, Random rnd) throws Exception { Hash tip = start, tail = tip; Hash[] tips; Set tipSet; @@ -248,17 +274,18 @@ else if (tipSet.size() == 1) { else { // walk to the next approver tips = tipSet.toArray(new Hash[tipSet.size()]); - if (!ratings.containsKey(tip)) { - serialUpdateRatings(myApprovedHashes, tip, ratings, analyzedTips, extraTip); + if (!cumulativeWeights.containsKey(tip)) { + cumulativeWeights.putAll(calculateCumulativeWeight(myApprovedHashes, tip, extraTip != null, + analyzedTips)); analyzedTips.clear(); } walkRatings = new double[tips.length]; double maxRating = 0; - long tipRating = ratings.get(tip); + long tipRating = cumulativeWeights.get(tip); for (int i = 0; i < tips.length; 
i++) { //transition probability = ((Hx-Hy)^-3)/maxRating - walkRatings[i] = Math.pow(tipRating - ratings.getOrDefault(tips[i], 0L), -3); + walkRatings[i] = Math.pow(tipRating - cumulativeWeights.getOrDefault(tips[i],0), -3); maxRating += walkRatings[i]; } ratingWeight = rnd.nextDouble() * maxRating; @@ -288,74 +315,135 @@ static long capSum(long a, long b, long max) { return a + b; } - void serialUpdateRatings(final Set visitedHashes, final Hash txHash, final Map ratings, final Set analyzedTips, final Hash extraTip) throws Exception { - Stack hashesToRate = new Stack<>(); - hashesToRate.push(txHash); - Hash currentHash; - boolean addedBack; - while (!hashesToRate.empty()) { - currentHash = hashesToRate.pop(); - TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, currentHash); - addedBack = false; - Set approvers = transactionViewModel.getApprovers(tangle).getHashes(); - for (Hash approver : approvers) { - if (ratings.get(approver) == null && !approver.equals(currentHash)) { - if (!addedBack) { - addedBack = true; - hashesToRate.push(currentHash); + /** + * Updates the cumulative weight of txs. + * A cumulative weight of each tx is 1 + the number of ancestors it has. 
+ * + * See https://github.com/alongalky/iota-docs/blob/master/cumulative.md + * + * + * @param myApprovedHashes the current hashes of the snapshot at the time of calculation + * @param currentTxHash the transaction from where the analysis starts + * @param confirmLeftBehind if true attempt to give more weight to previously + * unconfirmed txs + * @throws Exception if there is a problem accessing the db + */ + Map calculateCumulativeWeight(Set myApprovedHashes, Hash currentTxHash, boolean confirmLeftBehind, + Set analyzedTips) throws Exception { + Collection txsToRate = sortTransactionsInTopologicalOrder(currentTxHash); + return calculateCwInOrder(txsToRate, myApprovedHashes, confirmLeftBehind, analyzedTips); + } + + private Set sortTransactionsInTopologicalOrder(Hash startTx) throws Exception { + Set sortedTxs = new LinkedHashSet<>(); + Set temporary = new HashSet<>(); + Deque stack = new ArrayDeque<>(); + Map> txToDirectApprovers = new HashMap<>(); + + stack.push(TransactionViewModel.fromHash(tangle, startTx)); + while (CollectionUtils.isNotEmpty(stack)) { + TransactionViewModel tx = stack.peek(); + if (!sortedTxs.contains(tx)) { + Collection appHashes = getTxDirectApproversHashes(tx, txToDirectApprovers); + if (CollectionUtils.isNotEmpty(appHashes)) { + TransactionViewModel txApp = getAndRemoveApprover(appHashes); + if (!temporary.add(txApp)) { + throw new IllegalStateException("A circle was found in a subtangle on hash: " + txApp.getHash()); } - hashesToRate.push(approver); + stack.push(txApp); + continue; } } - if (!addedBack && analyzedTips.add(currentHash)) { - long rating = (extraTip != null && visitedHashes.contains(currentHash) ? 
0 : 1) + approvers.stream().map(ratings::get).filter(Objects::nonNull) - .reduce((a, b) -> capSum(a, b, Long.MAX_VALUE / 2)).orElse(0L); - ratings.put(currentHash, rating); + else { + temporary.remove(stack.pop()); + continue; } + sortedTxs.add(tx); } + + return sortedTxs; + } + + private TransactionViewModel getAndRemoveApprover(Collection appHashes) { + Iterator hashIterator = appHashes.iterator(); + TransactionViewModel txApp = hashIterator.next(); + hashIterator.remove(); + return txApp; } - Set updateHashRatings(Hash txHash, Map> ratings, Set analyzedTips) throws Exception { - Set rating; - if (analyzedTips.add(txHash)) { - TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); - rating = new HashSet<>(Collections.singleton(txHash)); - Set approverHashes = transactionViewModel.getApprovers(tangle).getHashes(); - for (Hash approver : approverHashes) { - rating.addAll(updateHashRatings(approver, ratings, analyzedTips)); + private Collection getTxDirectApproversHashes(TransactionViewModel tx, + Map> txToDirectApprovers) throws Exception { + Collection txApprovers = txToDirectApprovers.get(tx); + if (txApprovers == null) { + ApproveeViewModel approvers = tx.getApprovers(tangle); + Collection appHashes = CollectionUtils.emptyIfNull(approvers.getHashes()); + txApprovers = new HashSet<>(appHashes.size()); + for (Hash appHash : appHashes) { + //if not genesis (the tx that confirms itself) + if (ObjectUtils.notEqual(Hash.NULL_HASH, appHash)) { + TransactionViewModel txApp = TransactionViewModel.fromHash(tangle, appHash); + txApprovers.add(txApp); + } } - ratings.put(txHash, rating); + txToDirectApprovers.put(tx, txApprovers); } - else { - if (ratings.containsKey(txHash)) { - rating = ratings.get(txHash); - } - else { - rating = new HashSet<>(); + return txApprovers; + } + + private Map calculateCwInOrder(Collection txsToRate, + Set myApprovedHashes, boolean confirmLeftBehind, Set analyzedTips) throws Exception { + 
AbstractSetValuedMap txToApprovers = + new BoundedSetValuedHashMap<>(MAX_ANCESTORS_SIZE); + HashMap txToCumulativeWeight = new HashMap<>(); + + for (TransactionViewModel transactionViewModel : txsToRate) { + if (analyzedTips.add(transactionViewModel.getHash())) { + txToCumulativeWeight = updateCw(txToApprovers, txToCumulativeWeight, transactionViewModel, myApprovedHashes, + confirmLeftBehind); } + txToApprovers = updateApproversAndReleaseMemory(txToApprovers, transactionViewModel, myApprovedHashes, + confirmLeftBehind); } - return rating; + + return txToCumulativeWeight; } - long recursiveUpdateRatings(Hash txHash, Map ratings, Set analyzedTips) throws Exception { - long rating = 1; - if (analyzedTips.add(txHash)) { - TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); - Set approverHashes = transactionViewModel.getApprovers(tangle).getHashes(); - for (Hash approver : approverHashes) { - rating = capSum(rating, recursiveUpdateRatings(approver, ratings, analyzedTips), Long.MAX_VALUE / 2); - } - ratings.put(txHash, rating); + + private AbstractSetValuedMap updateApproversAndReleaseMemory( + AbstractSetValuedMap txToApprovers, + TransactionViewModel transactionViewModel, Set myApprovedHashes, boolean confirmLeftBehind) throws Exception { + Set approvers = txToApprovers.get(transactionViewModel); + + TransactionViewModel trunkTransaction = transactionViewModel.getTrunkTransaction(tangle); + txToApprovers.putAll(trunkTransaction, approvers); + TransactionViewModel branchTransaction = transactionViewModel.getBranchTransaction(tangle); + txToApprovers.putAll(branchTransaction, approvers); + if (shouldIncludeTransaction(transactionViewModel, myApprovedHashes, confirmLeftBehind)) { + txToApprovers.put(trunkTransaction, transactionViewModel); + txToApprovers.put(branchTransaction, transactionViewModel); } - else { - if (ratings.containsKey(txHash)) { - rating = ratings.get(txHash); - } - else { - rating = 0; - } + + 
txToApprovers.remove(transactionViewModel); + + return txToApprovers; + } + + private static boolean shouldIncludeTransaction(TransactionViewModel tx, Set myApprovedHashes, + boolean confirmLeftBehind) { + return tx != null + && !(confirmLeftBehind && myApprovedHashes.contains(tx.getHash())); + } + + private HashMap updateCw(AbstractSetValuedMap txToApprovers, + HashMap txToCumulativeWeight, TransactionViewModel transactionViewModel, + Set myApprovedHashes, boolean confirmLeftBehind) { + Set approvers = txToApprovers.get(transactionViewModel); + int weight = CollectionUtils.emptyIfNull(approvers).size(); + if (shouldIncludeTransaction(transactionViewModel, myApprovedHashes, confirmLeftBehind)) { + ++weight; } - return rating; + txToCumulativeWeight.put(transactionViewModel.getHash(), weight); + return txToCumulativeWeight; } public int getMaxDepth() { From 76293f55b12a1a532d355e896658e154fdc1ea0a Mon Sep 17 00:00:00 2001 From: galrogo Date: Mon, 15 Jan 2018 11:46:54 +0200 Subject: [PATCH 06/45] Tip Selection - unit tests for new weight algorithm --- .../com/iota/iri/service/TipsManagerTest.java | 285 ++++++++++++++---- 1 file changed, 224 insertions(+), 61 deletions(-) diff --git a/src/test/java/com/iota/iri/service/TipsManagerTest.java b/src/test/java/com/iota/iri/service/TipsManagerTest.java index 52d838c958..f7a25bfa00 100644 --- a/src/test/java/com/iota/iri/service/TipsManagerTest.java +++ b/src/test/java/com/iota/iri/service/TipsManagerTest.java @@ -16,6 +16,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.*; @@ -30,13 +32,16 @@ public class TipsManagerTest { private static final TemporaryFolder dbFolder = new TemporaryFolder(); private static final TemporaryFolder logFolder = new TemporaryFolder(); + private static final String TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT = + "tx%d cumulative weight is not as expected"; private static 
Tangle tangle; private static TipsManager tipsManager; + private final Logger log = LoggerFactory.getLogger(this.getClass()); @Test - public void capSum() throws Exception { - long a = 0, b, max = Long.MAX_VALUE/2; - for(b = 0; b < max; b+= max/100) { + public void capSum() { + long a = 0, b, max = Long.MAX_VALUE / 2; + for (b = 0; b < max; b += max / 100) { a = TipsManager.capSum(a, b, max); Assert.assertTrue("a should never go above max", a <= max); } @@ -47,15 +52,19 @@ public static void setUp() throws Exception { tangle = new Tangle(); dbFolder.create(); logFolder.create(); - tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder.getRoot().getAbsolutePath(),1000)); + tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder + .getRoot().getAbsolutePath(), 1000)); tangle.init(); TipsViewModel tipsViewModel = new TipsViewModel(); MessageQ messageQ = new MessageQ(0, null, 1, false); TransactionRequester transactionRequester = new TransactionRequester(tangle, messageQ); - TransactionValidator transactionValidator = new TransactionValidator(tangle, tipsViewModel, transactionRequester, messageQ); - Milestone milestone = new Milestone(tangle, Hash.NULL_HASH, Snapshot.initialSnapshot.clone(), transactionValidator, true, messageQ); + TransactionValidator transactionValidator = new TransactionValidator(tangle, tipsViewModel, + transactionRequester, messageQ); + Milestone milestone = new Milestone(tangle, Hash.NULL_HASH, Snapshot.initialSnapshot.clone(), + transactionValidator, true, messageQ); LedgerValidator ledgerValidator = new LedgerValidator(tangle, milestone, transactionRequester, messageQ); - tipsManager = new TipsManager(tangle, ledgerValidator, transactionValidator, tipsViewModel, milestone, 15, messageQ); + tipsManager = new TipsManager(tangle, ledgerValidator, transactionValidator, tipsViewModel, milestone, 15, + messageQ); } @AfterClass @@ -65,111 +74,265 
@@ public static void tearDown() throws Exception { } @Test - public void updateLinearRatingsTestWorks() throws Exception { - TransactionViewModel transaction, transaction1, transaction2; + public void testCalculateCumulativeWeight() throws Exception { + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); - transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); - transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), transaction1.getHash()), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), + transaction1.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), + transaction1.getHash()), getRandomTransactionHash()); + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), + transaction3.getHash()), getRandomTransactionHash()); transaction.store(tangle); transaction1.store(tangle); transaction2.store(tangle); - Map> ratings = new HashMap<>(); - tipsManager.updateHashRatings(transaction.getHash(), ratings, new HashSet<>()); - Assert.assertEquals(ratings.get(transaction.getHash()).size(), 3); - Assert.assertEquals(ratings.get(transaction1.getHash()).size(), 2); - Assert.assertEquals(ratings.get(transaction2.getHash()).size(), 1); + transaction3.store(tangle); + transaction4.store(tangle); + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + transaction.getHash(), false, new HashSet<>()); + + 
Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), + 1, txToCw.get(transaction4.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), + 2, txToCw.get(transaction3.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), + 3, txToCw.get(transaction2.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), + 4, txToCw.get(transaction1.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), + 5, txToCw.get(transaction.getHash()).intValue()); + } + + @Test + public void testCalculateCumulativeWeightDiamond() throws Exception { + TransactionViewModel transaction, transaction1, transaction2, transaction3; + transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), + transaction2.getHash()), getRandomTransactionHash()); + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + log.debug("printing transaction in diamond shape \n {} \n{} {}\n {}", + transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash()); + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + transaction.getHash(), false, new HashSet<>()); + + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), + 1, txToCw.get(transaction3.getHash()).intValue()); + 
Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), + 2, txToCw.get(transaction1.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), + 2, txToCw.get(transaction2.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), + 4, txToCw.get(transaction.getHash()).intValue()); } @Test - public void updateRatingsTestWorks() throws Exception { + public void testCalculateCumulativeWeightLinear() throws Exception { TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); - transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); - transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), transaction1.getHash()), getRandomTransactionHash()); - transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), transaction1.getHash()), getRandomTransactionHash()); - transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), transaction3.getHash()), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction1.getHash(), transaction1.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction2.getHash(), transaction2.getHash()), getRandomTransactionHash()); + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction3.getHash(), transaction3.getHash()), 
getRandomTransactionHash()); transaction.store(tangle); transaction1.store(tangle); transaction2.store(tangle); transaction3.store(tangle); transaction4.store(tangle); - Map> ratings = new HashMap<>(); - tipsManager.updateHashRatings(transaction.getHash(), ratings, new HashSet<>()); - Assert.assertEquals(ratings.get(transaction.getHash()).size(), 5); - Assert.assertEquals(ratings.get(transaction1.getHash()).size(),4); - Assert.assertEquals(ratings.get(transaction2.getHash()).size(), 3); + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + transaction.getHash(), false, new HashSet<>()); + + + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), + 1, txToCw.get(transaction4.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), + 2, txToCw.get(transaction3.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), + 3, txToCw.get(transaction2.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), + 4, txToCw.get(transaction1.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), + 5, txToCw.get(transaction.getHash()).intValue()); } @Test - public void updateRatings2TestWorks() throws Exception { - TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + public void testCalculateCumulativeWeightAlon() throws Exception { + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4, transaction5, + transaction6; transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); - transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); - transaction2 = new 
TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), transaction1.getHash()), getRandomTransactionHash()); - transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), transaction2.getHash()), getRandomTransactionHash()); - transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction3.getHash(), transaction3.getHash()), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction5 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction3.getHash(), transaction2.getHash()), getRandomTransactionHash()); + transaction6 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction4.getHash(), transaction5.getHash()), getRandomTransactionHash()); + transaction.store(tangle); transaction1.store(tangle); transaction2.store(tangle); transaction3.store(tangle); transaction4.store(tangle); - Map ratings = new HashMap<>(); - tipsManager.recursiveUpdateRatings(transaction.getHash(), ratings, new HashSet<>()); - Assert.assertTrue(ratings.get(transaction.getHash()).equals(5L)); + transaction5.store(tangle); + transaction6.store(tangle); + + log.debug("printing transactions in order \n{}\n{}\n{}\n{}\n{}\n{}\n{}", + transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash(), + transaction4, 
transaction5, transaction6); + + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + transaction.getHash(), false, new HashSet<>()); + + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 6), + 1, txToCw.get(transaction6.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 5), + 2, txToCw.get(transaction5.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), + 2, txToCw.get(transaction4.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), + 3, txToCw.get(transaction3.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), + 3, txToCw.get(transaction2.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), + 1, txToCw.get(transaction1.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), + 7, txToCw.get(transaction.getHash()).intValue()); } @Test - public void updateRatingsSerialWorks() throws Exception { - Hash[] hashes = new Hash[5]; + public void cwCalculationSameAsLegacy() throws Exception { + Hash[] hashes = new Hash[100]; hashes[0] = getRandomTransactionHash(); - new TransactionViewModel(getRandomTransactionTrits(), hashes[0]).store(tangle); - for(int i = 1; i < hashes.length; i ++) { + TransactionViewModel transactionViewModel1 = new TransactionViewModel(getRandomTransactionTrits(), hashes[0]); + transactionViewModel1.store(tangle); + //constant seed for consistent results + Random random = new Random(181783497276652981L); + for (int i = 1; i < hashes.length; i++) { hashes[i] = getRandomTransactionHash(); - new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(hashes[i-1], hashes[i-1]), hashes[i]).store(tangle); + TransactionViewModel transactionViewModel = new 
TransactionViewModel( + getRandomTransactionWithTrunkAndBranch(hashes[i - random.nextInt(i) - 1], + hashes[i - random.nextInt(i) - 1]), hashes[i]); + transactionViewModel.store(tangle); + log.debug(String.format("current transaction %.4s \n with trunk %.4s \n and branch %.4s", hashes[i], + transactionViewModel.getTrunkTransactionHash(), + transactionViewModel.getBranchTransactionHash())); } - Map ratings = new HashMap<>(); - tipsManager.recursiveUpdateRatings(hashes[0], ratings, new HashSet<>()); - Assert.assertTrue(ratings.get(hashes[0]).equals(5L)); + Map> ratings = new HashMap<>(); + updateApproversRecursively(hashes[0], ratings, new HashSet<>()); + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + hashes[0], false, new HashSet<>()); + + Assert.assertEquals("missing txs from new calculation", ratings.size(), txToCw.size()); + ratings.forEach((hash, weight) -> { + log.debug(String.format("tx %.4s has expected weight of %d", hash, weight.size())); + Assert.assertEquals( + "new calculation weight is not as expected for hash " + hash, + weight.size(), txToCw.get(hash).intValue()); + }); } @Test - public void updateRatingsSerialWorks2() throws Exception { - Hash[] hashes = new Hash[5]; - hashes[0] = getRandomTransactionHash(); - new TransactionViewModel(getRandomTransactionTrits(), hashes[0]).store(tangle); - for(int i = 1; i < hashes.length; i ++) { - hashes[i] = getRandomTransactionHash(); - new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(hashes[i-1], hashes[i-(i > 1 ?2:1)]), hashes[i]).store(tangle); - } - Map ratings = new HashMap<>(); - tipsManager.recursiveUpdateRatings(hashes[0], ratings, new HashSet<>()); - Assert.assertTrue(ratings.get(hashes[0]).equals(12L)); + public void testCalculateCommulativeWeightWithLeftBehind() throws Exception { + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + transaction = new TransactionViewModel(getRandomTransactionTrits(), 
getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction3.getHash(), + transaction1.getHash()), getRandomTransactionHash()); + Set approvedHashes = new HashSet<>(); + transaction.store(tangle); + transaction1.store(tangle); + approvedHashes.add(transaction2.getHash()); + transaction2.store(tangle); + approvedHashes.add(transaction3.getHash()); + transaction3.store(tangle); + transaction4.store(tangle); + + Map cumulativeWeight = tipsManager.calculateCumulativeWeight(approvedHashes, + transaction.getHash(), true, new HashSet<>()); + + log.info(cumulativeWeight.toString()); + String msg = "Cumulative weight is wrong for tx"; + Assert.assertEquals(msg + 4, 1, cumulativeWeight.get(transaction4.getHash()).intValue()); + Assert.assertEquals(msg + 3, 1, cumulativeWeight.get(transaction3.getHash()).intValue()); + Assert.assertEquals(msg + 2, 1, cumulativeWeight.get(transaction2.getHash()).intValue()); + Assert.assertEquals(msg + 1, 2, cumulativeWeight.get(transaction1.getHash()).intValue()); + Assert.assertEquals(msg + 0, 3, cumulativeWeight.get(transaction.getHash()).intValue()); } - //@Test + // @Test + //To be removed once CI tests are ready public void testUpdateRatingsTime() throws Exception { int max = 100001; long time; List times = new LinkedList<>(); - for(int size = 1; size < max; size *= 10) { + for (int size = 1; size < max; size *= 10) { time = ratingTime(size); times.add(time); } Assert.assertEquals(1, 1); } - public long 
ratingTime(int size) throws Exception { + private long ratingTime(int size) throws Exception { Hash[] hashes = new Hash[size]; hashes[0] = getRandomTransactionHash(); new TransactionViewModel(getRandomTransactionTrits(), hashes[0]).store(tangle); Random random = new Random(); - for(int i = 1; i < hashes.length; i ++) { + for (int i = 1; i < hashes.length; i++) { hashes[i] = getRandomTransactionHash(); - new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(hashes[i-random.nextInt(i)-1], hashes[i-random.nextInt(i)-1]), hashes[i]).store(tangle); + new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(hashes[i - random.nextInt(i) - 1], + hashes[i - random.nextInt(i) - 1]), hashes[i]).store(tangle); } Map ratings = new HashMap<>(); long start = System.currentTimeMillis(); - tipsManager.serialUpdateRatings(new HashSet<>(), hashes[0], ratings, new HashSet<>(), null); - return System.currentTimeMillis() - start; +// tipsManager.serialUpdateRatings(new Snapshot(Snapshot.initialSnapshot), hashes[0], ratings, new HashSet<>() +// , null); + tipsManager.calculateCumulativeWeight(new HashSet<>(), hashes[0], false, new HashSet<>()); + long time = System.currentTimeMillis() - start; + System.out.println(time); + return time; + } + + //Simple recursive algorithm that maps each tx hash to its approvers' hashes + private static Set updateApproversRecursively(Hash txHash, Map> txToApprovers, + Set analyzedTips) throws Exception { + Set approvers; + if (analyzedTips.add(txHash)) { + TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); + approvers = new HashSet<>(Collections.singleton(txHash)); + Set approverHashes = transactionViewModel.getApprovers(tangle).getHashes(); + for (Hash approver : approverHashes) { + approvers.addAll(updateApproversRecursively(approver, txToApprovers, analyzedTips)); + } + txToApprovers.put(txHash, approvers); + } else { + if (txToApprovers.containsKey(txHash)) { + approvers = 
txToApprovers.get(txHash); + } else { + approvers = new HashSet<>(); + } + } + return approvers; } } \ No newline at end of file From 83fdb607e1b226402a6b9e75a7e9e75a76e05996 Mon Sep 17 00:00:00 2001 From: galrogo Date: Sun, 11 Feb 2018 16:52:53 +0200 Subject: [PATCH 07/45] Tip Selection - use subhashes to save memory --- src/main/java/com/iota/iri/model/Hash.java | 12 ++ .../com/iota/iri/service/TipsManager.java | 174 ++++++++++-------- .../com/iota/iri/service/TipsManagerTest.java | 71 +++---- 3 files changed, 151 insertions(+), 106 deletions(-) diff --git a/src/main/java/com/iota/iri/model/Hash.java b/src/main/java/com/iota/iri/model/Hash.java index dfa570b830..ea5df15f61 100644 --- a/src/main/java/com/iota/iri/model/Hash.java +++ b/src/main/java/com/iota/iri/model/Hash.java @@ -7,6 +7,7 @@ import com.iota.iri.utils.Converter; import java.io.Serializable; +import java.nio.ByteBuffer; import java.util.Arrays; public class Hash implements Serializable, Indexable { @@ -15,6 +16,7 @@ public class Hash implements Serializable, Indexable { public static final int SIZE_IN_BYTES = 49; public static final Hash NULL_HASH = new Hash(new int[Curl.HASH_LENGTH]); + public static final int SUBHASH_LENGTH = 16; private byte[] bytes; private int[] trits; @@ -130,6 +132,16 @@ private void fullRead(byte[] bytes, int offset, int size) { hashCode = Arrays.hashCode(this.bytes); } + /** + * Used to create low-memory index keys. 
+ * + * @return a {@link ByteBuffer} that holds a subarray of {@link #bytes()} + * that has a size of {@value #SUBHASH_LENGTH} + */ + public ByteBuffer getSubHash() { + return ByteBuffer.wrap(Arrays.copyOf(bytes(), SUBHASH_LENGTH)); + } + @Override public void read(byte[] bytes) { fullRead(bytes, 0, SIZE_IN_BYTES); diff --git a/src/main/java/com/iota/iri/service/TipsManager.java b/src/main/java/com/iota/iri/service/TipsManager.java index 3ce2574c84..440605e8be 100644 --- a/src/main/java/com/iota/iri/service/TipsManager.java +++ b/src/main/java/com/iota/iri/service/TipsManager.java @@ -1,7 +1,5 @@ package com.iota.iri.service; -import java.util.*; - import com.iota.iri.LedgerValidator; import com.iota.iri.Milestone; import com.iota.iri.TransactionValidator; @@ -11,17 +9,23 @@ import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.model.Hash; import com.iota.iri.storage.Tangle; -import com.iota.iri.utils.collections.BoundedSetValuedHashMap; +import com.iota.iri.utils.SafeUtils; +import com.iota.iri.utils.collections.impl.BoundedHashSet; +import com.iota.iri.utils.collections.interfaces.BoundedSet; import com.iota.iri.zmq.MessageQ; import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.collections4.multimap.AbstractSetValuedMap; +import org.apache.commons.collections4.SetUtils; import org.apache.commons.lang3.ObjectUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.nio.Buffer; +import java.nio.ByteBuffer; +import java.util.*; + public class TipsManager { - public static final int MAX_ANCESTORS_SIZE = 10000; + public static final int MAX_ANCESTORS_SIZE = 1000; private final Logger log = LoggerFactory.getLogger(TipsManager.class); private final Tangle tangle; @@ -124,7 +128,7 @@ Hash transactionToApprove(final Set visitedHashes, final Map d Set maxDepthOk = new HashSet<>(); try { Hash tip = entryPoint(reference, extraTip, depth); - Map cumulativeWeights = calculateCumulativeWeight(visitedHashes, 
tip, + Map cumulativeWeights = calculateCumulativeWeight(visitedHashes, tip, extraTip != null, new HashSet<>()); analyzedTips.clear(); if (ledgerValidator.updateDiff(visitedHashes, diff, tip)) { @@ -160,7 +164,7 @@ Hash entryPoint(final Hash reference, final Hash extraTip, final int depth) thro return milestone.latestSolidSubtangleMilestone; } - Hash markovChainMonteCarlo(final Set visitedHashes, final Map diff, Hash tip, Hash extraTip, Map cumulativeWeight, + Hash markovChainMonteCarlo(final Set visitedHashes, final Map diff, Hash tip, Hash extraTip, Map cumulativeWeight, int iterations, int maxDepth, Set maxDepthOk, Random seed) throws Exception { Map monteCarloIntegrations = new HashMap<>(); Hash tail; @@ -208,7 +212,7 @@ else if (seed.nextBoolean()) { * @return a tip's hash * @throws Exception */ - Hash randomWalk(final Set visitedHashes, final Map diff, final Hash start, final Hash extraTip, final Map cumulativeWeights, final int maxDepth, final Set maxDepthOk, Random rnd) throws Exception { + Hash randomWalk(final Set visitedHashes, final Map diff, final Hash start, final Hash extraTip, final Map cumulativeWeights, final int maxDepth, final Set maxDepthOk, Random rnd) throws Exception { Hash tip = start, tail = tip; Hash[] tips; Set tipSet; @@ -218,10 +222,6 @@ Hash randomWalk(final Set visitedHashes, final Map diff, final int approverIndex; double ratingWeight; double[] walkRatings; - List extraTipList = null; - if (extraTip != null) { - extraTipList = Collections.singletonList(extraTip); - } Map myDiff = new HashMap<>(diff); Set myApprovedHashes = new HashSet<>(visitedHashes); @@ -274,7 +274,7 @@ else if (tipSet.size() == 1) { else { // walk to the next approver tips = tipSet.toArray(new Hash[tipSet.size()]); - if (!cumulativeWeights.containsKey(tip)) { + if (!cumulativeWeights.containsKey(tip.getSubHash())) { cumulativeWeights.putAll(calculateCumulativeWeight(myApprovedHashes, tip, extraTip != null, analyzedTips)); analyzedTips.clear(); @@ -282,10 +282,12 
@@ else if (tipSet.size() == 1) { walkRatings = new double[tips.length]; double maxRating = 0; - long tipRating = cumulativeWeights.get(tip); + ByteBuffer subHash = tip.getSubHash(); + long tipRating = cumulativeWeights.get(subHash); for (int i = 0; i < tips.length; i++) { + subHash = tips[i].getSubHash(); //transition probability = ((Hx-Hy)^-3)/maxRating - walkRatings[i] = Math.pow(tipRating - cumulativeWeights.getOrDefault(tips[i],0), -3); + walkRatings[i] = Math.pow(tipRating - cumulativeWeights.getOrDefault(subHash,0), -3); maxRating += walkRatings[i]; } ratingWeight = rnd.nextDouble() * maxRating; @@ -328,121 +330,147 @@ static long capSum(long a, long b, long max) { * unconfirmed txs * @throws Exception if there is a problem accessing the db */ - Map calculateCumulativeWeight(Set myApprovedHashes, Hash currentTxHash, boolean confirmLeftBehind, + Map calculateCumulativeWeight(Set myApprovedHashes, Hash currentTxHash, boolean confirmLeftBehind, Set analyzedTips) throws Exception { - Collection txsToRate = sortTransactionsInTopologicalOrder(currentTxHash); - return calculateCwInOrder(txsToRate, myApprovedHashes, confirmLeftBehind, analyzedTips); + log.info("Start calculating cw starting with tx hash {}", currentTxHash); + log.debug("Start topological sort"); + long start = System.currentTimeMillis(); + LinkedHashSet txHashesToRate = sortTransactionsInTopologicalOrder(currentTxHash); + log.debug("Subtangle size: {}", txHashesToRate.size()); + log.debug("Topological sort done. 
Start traversing on txs in order and calculate weight"); + Map cumulativeWeights = calculateCwInOrder(txHashesToRate, myApprovedHashes, confirmLeftBehind, + analyzedTips); + log.debug("Cumulative weights calculation done in {} ms", System.currentTimeMillis() - start); + return cumulativeWeights; } - private Set sortTransactionsInTopologicalOrder(Hash startTx) throws Exception { - Set sortedTxs = new LinkedHashSet<>(); - Set temporary = new HashSet<>(); - Deque stack = new ArrayDeque<>(); - Map> txToDirectApprovers = new HashMap<>(); + private LinkedHashSet sortTransactionsInTopologicalOrder(Hash startTx) throws Exception { + LinkedHashSet sortedTxs = new LinkedHashSet<>(); + Set temporary = new HashSet<>(); + Deque stack = new ArrayDeque<>(); + Map> txToDirectApprovers = new HashMap<>(); - stack.push(TransactionViewModel.fromHash(tangle, startTx)); + stack.push(startTx); while (CollectionUtils.isNotEmpty(stack)) { - TransactionViewModel tx = stack.peek(); - if (!sortedTxs.contains(tx)) { - Collection appHashes = getTxDirectApproversHashes(tx, txToDirectApprovers); + Hash txHash = stack.peek(); + if (!sortedTxs.contains(txHash)) { + Collection appHashes = getTxDirectApproversHashes(txHash, txToDirectApprovers); if (CollectionUtils.isNotEmpty(appHashes)) { - TransactionViewModel txApp = getAndRemoveApprover(appHashes); + Hash txApp = getAndRemoveApprover(appHashes); if (!temporary.add(txApp)) { - throw new IllegalStateException("A circle was found in a subtangle on hash: " + txApp.getHash()); + throw new IllegalStateException("A circle or a collision was found in a subtangle on hash: " + + txApp); } stack.push(txApp); continue; } } else { - temporary.remove(stack.pop()); + txHash = stack.pop(); + temporary.remove(txHash); continue; } - sortedTxs.add(tx); + sortedTxs.add(txHash); } return sortedTxs; } - private TransactionViewModel getAndRemoveApprover(Collection appHashes) { - Iterator hashIterator = appHashes.iterator(); - TransactionViewModel txApp = 
hashIterator.next(); + private Hash getAndRemoveApprover(Collection appHashes) { + Iterator hashIterator = appHashes.iterator(); + Hash txApp = hashIterator.next(); hashIterator.remove(); return txApp; } - private Collection getTxDirectApproversHashes(TransactionViewModel tx, - Map> txToDirectApprovers) throws Exception { - Collection txApprovers = txToDirectApprovers.get(tx); + private Collection getTxDirectApproversHashes(Hash txHash, + Map> txToDirectApprovers) throws Exception { + Collection txApprovers = txToDirectApprovers.get(txHash); if (txApprovers == null) { - ApproveeViewModel approvers = tx.getApprovers(tangle); + ApproveeViewModel approvers = TransactionViewModel.fromHash(tangle, txHash).getApprovers(tangle); Collection appHashes = CollectionUtils.emptyIfNull(approvers.getHashes()); txApprovers = new HashSet<>(appHashes.size()); for (Hash appHash : appHashes) { //if not genesis (the tx that confirms itself) if (ObjectUtils.notEqual(Hash.NULL_HASH, appHash)) { - TransactionViewModel txApp = TransactionViewModel.fromHash(tangle, appHash); - txApprovers.add(txApp); + txApprovers.add(appHash); } } - txToDirectApprovers.put(tx, txApprovers); + txToDirectApprovers.put(txHash, txApprovers); } return txApprovers; } - private Map calculateCwInOrder(Collection txsToRate, + //must specify using LinkedHashSet since Java has no interface that guarantees uniqueness and insertion order + private Map calculateCwInOrder(LinkedHashSet txsToRate, Set myApprovedHashes, boolean confirmLeftBehind, Set analyzedTips) throws Exception { - AbstractSetValuedMap txToApprovers = - new BoundedSetValuedHashMap<>(MAX_ANCESTORS_SIZE); - HashMap txToCumulativeWeight = new HashMap<>(); - - for (TransactionViewModel transactionViewModel : txsToRate) { - if (analyzedTips.add(transactionViewModel.getHash())) { - txToCumulativeWeight = updateCw(txToApprovers, txToCumulativeWeight, transactionViewModel, myApprovedHashes, - confirmLeftBehind); + Map> txSubHashToApprovers = new HashMap<>(); + 
Map txSubHashToCumulativeWeight = new HashMap<>(); + + Iterator txHashIterator = txsToRate.iterator(); + while (txHashIterator.hasNext()) { + Hash txHash = txHashIterator.next(); + if (analyzedTips.add(txHash)) { + txSubHashToCumulativeWeight = updateCw(txSubHashToApprovers, txSubHashToCumulativeWeight, txHash, + myApprovedHashes, confirmLeftBehind); } - txToApprovers = updateApproversAndReleaseMemory(txToApprovers, transactionViewModel, myApprovedHashes, + txSubHashToApprovers = updateApproversAndReleaseMemory(txSubHashToApprovers, txHash, myApprovedHashes, confirmLeftBehind); + txHashIterator.remove(); } - return txToCumulativeWeight; + return txSubHashToCumulativeWeight; } - private AbstractSetValuedMap updateApproversAndReleaseMemory( - AbstractSetValuedMap txToApprovers, - TransactionViewModel transactionViewModel, Set myApprovedHashes, boolean confirmLeftBehind) throws Exception { - Set approvers = txToApprovers.get(transactionViewModel); + private Map> updateApproversAndReleaseMemory( + Map> txSubHashToApprovers, + Hash txHash, Set myApprovedHashes, boolean confirmLeftBehind) throws Exception { + ByteBuffer txSubHash = txHash.getSubHash(); + BoundedSet approvers = + new BoundedHashSet<>(SetUtils.emptyIfNull(txSubHashToApprovers.get(txSubHash)), MAX_ANCESTORS_SIZE); - TransactionViewModel trunkTransaction = transactionViewModel.getTrunkTransaction(tangle); - txToApprovers.putAll(trunkTransaction, approvers); - TransactionViewModel branchTransaction = transactionViewModel.getBranchTransaction(tangle); - txToApprovers.putAll(branchTransaction, approvers); - if (shouldIncludeTransaction(transactionViewModel, myApprovedHashes, confirmLeftBehind)) { - txToApprovers.put(trunkTransaction, transactionViewModel); - txToApprovers.put(branchTransaction, transactionViewModel); + if (shouldIncludeTransaction(txHash, myApprovedHashes, confirmLeftBehind)) { + approvers.add(txSubHash); } - txToApprovers.remove(transactionViewModel); + TransactionViewModel 
transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); + Hash trunkHash = transactionViewModel.getTrunkTransactionHash(); + Buffer trunkSubHash = trunkHash.getSubHash(); + Hash branchHash = transactionViewModel.getBranchTransactionHash(); + Buffer branchSubHash = branchHash.getSubHash(); + if (!approvers.isFull()) { + Set trunkApprovers = new BoundedHashSet<>(approvers, MAX_ANCESTORS_SIZE); + trunkApprovers.addAll(CollectionUtils.emptyIfNull(txSubHashToApprovers.get(trunkSubHash))); + Set branchApprovers = new BoundedHashSet<>(approvers, MAX_ANCESTORS_SIZE); + branchApprovers.addAll(CollectionUtils.emptyIfNull(txSubHashToApprovers.get(branchSubHash))); + txSubHashToApprovers.put(trunkSubHash, trunkApprovers); + txSubHashToApprovers.put(branchSubHash, branchApprovers); + } + else { + txSubHashToApprovers.put(trunkSubHash, approvers); + txSubHashToApprovers.put(branchSubHash, approvers); + } + txSubHashToApprovers.remove(txSubHash); - return txToApprovers; + return txSubHashToApprovers; } - private static boolean shouldIncludeTransaction(TransactionViewModel tx, Set myApprovedHashes, + private static boolean shouldIncludeTransaction(Hash txHash, Set myApprovedSubHashes, boolean confirmLeftBehind) { - return tx != null - && !(confirmLeftBehind && myApprovedHashes.contains(tx.getHash())); + return !confirmLeftBehind || !SafeUtils.isContaining(myApprovedSubHashes, txHash); } - private HashMap updateCw(AbstractSetValuedMap txToApprovers, - HashMap txToCumulativeWeight, TransactionViewModel transactionViewModel, + private Map updateCw(Map> txSubHashToApprovers, + Map txToCumulativeWeight, Hash txHash, Set myApprovedHashes, boolean confirmLeftBehind) { - Set approvers = txToApprovers.get(transactionViewModel); + ByteBuffer txSubHash = txHash.getSubHash(); + Set approvers = txSubHashToApprovers.get(txSubHash); int weight = CollectionUtils.emptyIfNull(approvers).size(); - if (shouldIncludeTransaction(transactionViewModel, myApprovedHashes, confirmLeftBehind)) 
{ + if (shouldIncludeTransaction(txHash, myApprovedHashes, confirmLeftBehind)) { ++weight; } - txToCumulativeWeight.put(transactionViewModel.getHash(), weight); + txToCumulativeWeight.put(txSubHash, weight); return txToCumulativeWeight; } diff --git a/src/test/java/com/iota/iri/service/TipsManagerTest.java b/src/test/java/com/iota/iri/service/TipsManagerTest.java index f7a25bfa00..fc5dabe91c 100644 --- a/src/test/java/com/iota/iri/service/TipsManagerTest.java +++ b/src/test/java/com/iota/iri/service/TipsManagerTest.java @@ -19,6 +19,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.nio.Buffer; import java.util.*; import static com.iota.iri.controllers.TransactionViewModelTest.getRandomTransactionHash; @@ -90,19 +91,19 @@ public void testCalculateCumulativeWeight() throws Exception { transaction2.store(tangle); transaction3.store(tangle); transaction4.store(tangle); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), transaction.getHash(), false, new HashSet<>()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 1, txToCw.get(transaction4.getHash()).intValue()); + 1, txToCw.get(transaction4.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 2, txToCw.get(transaction3.getHash()).intValue()); + 2, txToCw.get(transaction3.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(transaction2.getHash()).intValue()); + 3, txToCw.get(transaction2.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 4, txToCw.get(transaction1.getHash()).intValue()); + 4, txToCw.get(transaction1.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 
5, txToCw.get(transaction.getHash()).intValue()); + 5, txToCw.get(transaction.getHash().getSubHash()).intValue()); } @Test @@ -121,17 +122,17 @@ public void testCalculateCumulativeWeightDiamond() throws Exception { transaction3.store(tangle); log.debug("printing transaction in diamond shape \n {} \n{} {}\n {}", transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash()); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), transaction.getHash(), false, new HashSet<>()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 1, txToCw.get(transaction3.getHash()).intValue()); + 1, txToCw.get(transaction3.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 2, txToCw.get(transaction1.getHash()).intValue()); + 2, txToCw.get(transaction1.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 2, txToCw.get(transaction2.getHash()).intValue()); + 2, txToCw.get(transaction2.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 4, txToCw.get(transaction.getHash()).intValue()); + 4, txToCw.get(transaction.getHash().getSubHash()).intValue()); } @Test @@ -151,20 +152,24 @@ public void testCalculateCumulativeWeightLinear() throws Exception { transaction2.store(tangle); transaction3.store(tangle); transaction4.store(tangle); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + + log.info(String.format("Linear ordered hashes from tip %.4s, %.4s, %.4s, %.4s, %.4s", transaction4.getHash(), + transaction3.getHash(), transaction2.getHash(), transaction1.getHash(), transaction.getHash())); + + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), transaction.getHash(), false, new 
HashSet<>()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 1, txToCw.get(transaction4.getHash()).intValue()); + 1, txToCw.get(transaction4.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 2, txToCw.get(transaction3.getHash()).intValue()); + 2, txToCw.get(transaction3.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(transaction2.getHash()).intValue()); + 3, txToCw.get(transaction2.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 4, txToCw.get(transaction1.getHash()).intValue()); + 4, txToCw.get(transaction1.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 5, txToCw.get(transaction.getHash()).intValue()); + 5, txToCw.get(transaction.getHash().getSubHash()).intValue()); } @Test @@ -197,23 +202,23 @@ public void testCalculateCumulativeWeightAlon() throws Exception { transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash(), transaction4, transaction5, transaction6); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), transaction.getHash(), false, new HashSet<>()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 6), - 1, txToCw.get(transaction6.getHash()).intValue()); + 1, txToCw.get(transaction6.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 5), - 2, txToCw.get(transaction5.getHash()).intValue()); + 2, txToCw.get(transaction5.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 2, 
txToCw.get(transaction4.getHash()).intValue()); + 2, txToCw.get(transaction4.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 3, txToCw.get(transaction3.getHash()).intValue()); + 3, txToCw.get(transaction3.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(transaction2.getHash()).intValue()); + 3, txToCw.get(transaction2.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 1, txToCw.get(transaction1.getHash()).intValue()); + 1, txToCw.get(transaction1.getHash().getSubHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 7, txToCw.get(transaction.getHash()).intValue()); + 7, txToCw.get(transaction.getHash().getSubHash()).intValue()); } @Test @@ -236,7 +241,7 @@ public void cwCalculationSameAsLegacy() throws Exception { } Map> ratings = new HashMap<>(); updateApproversRecursively(hashes[0], ratings, new HashSet<>()); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), + Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), hashes[0], false, new HashSet<>()); Assert.assertEquals("missing txs from new calculation", ratings.size(), txToCw.size()); @@ -244,7 +249,7 @@ public void cwCalculationSameAsLegacy() throws Exception { log.debug(String.format("tx %.4s has expected weight of %d", hash, weight.size())); Assert.assertEquals( "new calculation weight is not as expected for hash " + hash, - weight.size(), txToCw.get(hash).intValue()); + weight.size(), txToCw.get(hash.getSubHash()).intValue()); }); } @@ -269,16 +274,16 @@ public void testCalculateCommulativeWeightWithLeftBehind() throws Exception { transaction3.store(tangle); transaction4.store(tangle); - Map cumulativeWeight = tipsManager.calculateCumulativeWeight(approvedHashes, + Map cumulativeWeight 
= tipsManager.calculateCumulativeWeight(approvedHashes, transaction.getHash(), true, new HashSet<>()); log.info(cumulativeWeight.toString()); String msg = "Cumulative weight is wrong for tx"; - Assert.assertEquals(msg + 4, 1, cumulativeWeight.get(transaction4.getHash()).intValue()); - Assert.assertEquals(msg + 3, 1, cumulativeWeight.get(transaction3.getHash()).intValue()); - Assert.assertEquals(msg + 2, 1, cumulativeWeight.get(transaction2.getHash()).intValue()); - Assert.assertEquals(msg + 1, 2, cumulativeWeight.get(transaction1.getHash()).intValue()); - Assert.assertEquals(msg + 0, 3, cumulativeWeight.get(transaction.getHash()).intValue()); + Assert.assertEquals(msg + 4, 1, cumulativeWeight.get(transaction4.getHash().getSubHash()).intValue()); + Assert.assertEquals(msg + 3, 1, cumulativeWeight.get(transaction3.getHash().getSubHash()).intValue()); + Assert.assertEquals(msg + 2, 1, cumulativeWeight.get(transaction2.getHash().getSubHash()).intValue()); + Assert.assertEquals(msg + 1, 2, cumulativeWeight.get(transaction1.getHash().getSubHash()).intValue()); + Assert.assertEquals(msg + 0, 3, cumulativeWeight.get(transaction.getHash().getSubHash()).intValue()); } // @Test From 884ca1eb47e42ed10d65357669aefb9297564927 Mon Sep 17 00:00:00 2001 From: Gal Rogozinski Date: Tue, 6 Mar 2018 11:53:14 +0200 Subject: [PATCH 08/45] move getSubHash to IotaUtils --- src/main/java/com/iota/iri/model/Hash.java | 11 ---- .../com/iota/iri/service/TipsManager.java | 16 +++-- .../java/com/iota/iri/utils/IotaUtils.java | 24 +++++++ .../com/iota/iri/service/TipsManagerTest.java | 64 +++++++++++-------- 4 files changed, 70 insertions(+), 45 deletions(-) create mode 100644 src/main/java/com/iota/iri/utils/IotaUtils.java diff --git a/src/main/java/com/iota/iri/model/Hash.java b/src/main/java/com/iota/iri/model/Hash.java index ea5df15f61..9f9c57d785 100644 --- a/src/main/java/com/iota/iri/model/Hash.java +++ b/src/main/java/com/iota/iri/model/Hash.java @@ -16,7 +16,6 @@ public class Hash 
implements Serializable, Indexable { public static final int SIZE_IN_BYTES = 49; public static final Hash NULL_HASH = new Hash(new int[Curl.HASH_LENGTH]); - public static final int SUBHASH_LENGTH = 16; private byte[] bytes; private int[] trits; @@ -132,16 +131,6 @@ private void fullRead(byte[] bytes, int offset, int size) { hashCode = Arrays.hashCode(this.bytes); } - /** - * Used to create low-memory index keys. - * - * @return a {@link ByteBuffer} that holds a subarray of {@link #bytes()} - * that has a size of {@value #SUBHASH_LENGTH} - */ - public ByteBuffer getSubHash() { - return ByteBuffer.wrap(Arrays.copyOf(bytes(), SUBHASH_LENGTH)); - } - @Override public void read(byte[] bytes) { fullRead(bytes, 0, SIZE_IN_BYTES); diff --git a/src/main/java/com/iota/iri/service/TipsManager.java b/src/main/java/com/iota/iri/service/TipsManager.java index 440605e8be..1cc7fc85b4 100644 --- a/src/main/java/com/iota/iri/service/TipsManager.java +++ b/src/main/java/com/iota/iri/service/TipsManager.java @@ -9,6 +9,7 @@ import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.model.Hash; import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.IotaUtils; import com.iota.iri.utils.SafeUtils; import com.iota.iri.utils.collections.impl.BoundedHashSet; import com.iota.iri.utils.collections.interfaces.BoundedSet; @@ -35,6 +36,7 @@ public class TipsManager { private final TransactionValidator transactionValidator; private final MessageQ messageQ; + public static final int SUBHASH_LENGTH = 16; private int RATING_THRESHOLD = 75; // Must be in [0..100] range private boolean shuttingDown = false; private int RESCAN_TX_TO_REQUEST_INTERVAL = 750; @@ -274,7 +276,7 @@ else if (tipSet.size() == 1) { else { // walk to the next approver tips = tipSet.toArray(new Hash[tipSet.size()]); - if (!cumulativeWeights.containsKey(tip.getSubHash())) { + if (!cumulativeWeights.containsKey(IotaUtils.getSubHash(tip, SUBHASH_LENGTH))) { 
cumulativeWeights.putAll(calculateCumulativeWeight(myApprovedHashes, tip, extraTip != null, analyzedTips)); analyzedTips.clear(); @@ -282,10 +284,10 @@ else if (tipSet.size() == 1) { walkRatings = new double[tips.length]; double maxRating = 0; - ByteBuffer subHash = tip.getSubHash(); + ByteBuffer subHash = IotaUtils.getSubHash(tip, SUBHASH_LENGTH); long tipRating = cumulativeWeights.get(subHash); for (int i = 0; i < tips.length; i++) { - subHash = tips[i].getSubHash(); + subHash = IotaUtils.getSubHash(tip, SUBHASH_LENGTH); //transition probability = ((Hx-Hy)^-3)/maxRating walkRatings[i] = Math.pow(tipRating - cumulativeWeights.getOrDefault(subHash,0), -3); maxRating += walkRatings[i]; @@ -426,7 +428,7 @@ private Map calculateCwInOrder(LinkedHashSet txsToRate, private Map> updateApproversAndReleaseMemory( Map> txSubHashToApprovers, Hash txHash, Set myApprovedHashes, boolean confirmLeftBehind) throws Exception { - ByteBuffer txSubHash = txHash.getSubHash(); + ByteBuffer txSubHash = IotaUtils.getSubHash(txHash, SUBHASH_LENGTH); BoundedSet approvers = new BoundedHashSet<>(SetUtils.emptyIfNull(txSubHashToApprovers.get(txSubHash)), MAX_ANCESTORS_SIZE); @@ -436,9 +438,9 @@ private Map> updateApproversAndReleaseMemory( TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); Hash trunkHash = transactionViewModel.getTrunkTransactionHash(); - Buffer trunkSubHash = trunkHash.getSubHash(); + Buffer trunkSubHash = IotaUtils.getSubHash(trunkHash, SUBHASH_LENGTH); Hash branchHash = transactionViewModel.getBranchTransactionHash(); - Buffer branchSubHash = branchHash.getSubHash(); + Buffer branchSubHash = IotaUtils.getSubHash(branchHash, SUBHASH_LENGTH); if (!approvers.isFull()) { Set trunkApprovers = new BoundedHashSet<>(approvers, MAX_ANCESTORS_SIZE); trunkApprovers.addAll(CollectionUtils.emptyIfNull(txSubHashToApprovers.get(trunkSubHash))); @@ -464,7 +466,7 @@ private static boolean shouldIncludeTransaction(Hash txHash, Set myApprove private Map 
updateCw(Map> txSubHashToApprovers, Map txToCumulativeWeight, Hash txHash, Set myApprovedHashes, boolean confirmLeftBehind) { - ByteBuffer txSubHash = txHash.getSubHash(); + ByteBuffer txSubHash = IotaUtils.getSubHash(txHash, SUBHASH_LENGTH); Set approvers = txSubHashToApprovers.get(txSubHash); int weight = CollectionUtils.emptyIfNull(approvers).size(); if (shouldIncludeTransaction(txHash, myApprovedHashes, confirmLeftBehind)) { diff --git a/src/main/java/com/iota/iri/utils/IotaUtils.java b/src/main/java/com/iota/iri/utils/IotaUtils.java new file mode 100644 index 0000000000..a0f3d93e8c --- /dev/null +++ b/src/main/java/com/iota/iri/utils/IotaUtils.java @@ -0,0 +1,24 @@ +package com.iota.iri.utils; + +import com.iota.iri.model.Hash; + +import java.nio.ByteBuffer; +import java.util.Arrays; + +public class IotaUtils { + /** + * Used to create low-memory index keys. + * + * @param hash the hash we create the key from + * @param length the length of the desired subhash + * @return a {@link ByteBuffer} that holds a subarray of {@link Hash#bytes()} + * that has the specified {@code length} + */ + public static ByteBuffer getSubHash(Hash hash, int length) { + if (hash == null) { + return null; + } + + return ByteBuffer.wrap(Arrays.copyOf(hash.bytes(), length)); + } +} diff --git a/src/test/java/com/iota/iri/service/TipsManagerTest.java b/src/test/java/com/iota/iri/service/TipsManagerTest.java index fc5dabe91c..bb8c1bb72d 100644 --- a/src/test/java/com/iota/iri/service/TipsManagerTest.java +++ b/src/test/java/com/iota/iri/service/TipsManagerTest.java @@ -10,6 +10,7 @@ import com.iota.iri.network.TransactionRequester; import com.iota.iri.storage.Tangle; import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; +import com.iota.iri.utils.IotaUtils; import com.iota.iri.zmq.MessageQ; import org.junit.AfterClass; import org.junit.Assert; @@ -95,15 +96,15 @@ public void testCalculateCumulativeWeight() throws Exception { transaction.getHash(), false, new HashSet<>()); 
Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 1, txToCw.get(transaction4.getHash().getSubHash()).intValue()); + 1, txToCw.get(IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 2, txToCw.get(transaction3.getHash().getSubHash()).intValue()); + 2, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(transaction2.getHash().getSubHash()).intValue()); + 3, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 4, txToCw.get(transaction1.getHash().getSubHash()).intValue()); + 4, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 5, txToCw.get(transaction.getHash().getSubHash()).intValue()); + 5, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); } @Test @@ -126,13 +127,16 @@ public void testCalculateCumulativeWeightDiamond() throws Exception { transaction.getHash(), false, new HashSet<>()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 1, txToCw.get(transaction3.getHash().getSubHash()).intValue()); + 1, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)) + .intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 2, txToCw.get(transaction1.getHash().getSubHash()).intValue()); + 2, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)) + .intValue()); 
Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 2, txToCw.get(transaction2.getHash().getSubHash()).intValue()); + 2, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)) + .intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 4, txToCw.get(transaction.getHash().getSubHash()).intValue()); + 4, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); } @Test @@ -161,15 +165,15 @@ public void testCalculateCumulativeWeightLinear() throws Exception { Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 1, txToCw.get(transaction4.getHash().getSubHash()).intValue()); + 1, txToCw.get(IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 2, txToCw.get(transaction3.getHash().getSubHash()).intValue()); + 2, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(transaction2.getHash().getSubHash()).intValue()); + 3, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 4, txToCw.get(transaction1.getHash().getSubHash()).intValue()); + 4, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 5, txToCw.get(transaction.getHash().getSubHash()).intValue()); + 5, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); } @Test @@ -206,19 +210,19 @@ public void testCalculateCumulativeWeightAlon() throws Exception { 
transaction.getHash(), false, new HashSet<>()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 6), - 1, txToCw.get(transaction6.getHash().getSubHash()).intValue()); + 1, txToCw.get(IotaUtils.getSubHash(transaction6.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 5), - 2, txToCw.get(transaction5.getHash().getSubHash()).intValue()); + 2, txToCw.get(IotaUtils.getSubHash(transaction5.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 2, txToCw.get(transaction4.getHash().getSubHash()).intValue()); + 2, txToCw.get(IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 3, txToCw.get(transaction3.getHash().getSubHash()).intValue()); + 3, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(transaction2.getHash().getSubHash()).intValue()); + 3, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 1, txToCw.get(transaction1.getHash().getSubHash()).intValue()); + 1, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 7, txToCw.get(transaction.getHash().getSubHash()).intValue()); + 7, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); } @Test @@ -249,7 +253,8 @@ public void cwCalculationSameAsLegacy() throws Exception { log.debug(String.format("tx %.4s has expected weight of %d", hash, 
weight.size())); Assert.assertEquals( "new calculation weight is not as expected for hash " + hash, - weight.size(), txToCw.get(hash.getSubHash()).intValue()); + weight.size(), txToCw.get(IotaUtils.getSubHash(hash, TipsManager.SUBHASH_LENGTH)) + .intValue()); }); } @@ -279,11 +284,16 @@ public void testCalculateCommulativeWeightWithLeftBehind() throws Exception { log.info(cumulativeWeight.toString()); String msg = "Cumulative weight is wrong for tx"; - Assert.assertEquals(msg + 4, 1, cumulativeWeight.get(transaction4.getHash().getSubHash()).intValue()); - Assert.assertEquals(msg + 3, 1, cumulativeWeight.get(transaction3.getHash().getSubHash()).intValue()); - Assert.assertEquals(msg + 2, 1, cumulativeWeight.get(transaction2.getHash().getSubHash()).intValue()); - Assert.assertEquals(msg + 1, 2, cumulativeWeight.get(transaction1.getHash().getSubHash()).intValue()); - Assert.assertEquals(msg + 0, 3, cumulativeWeight.get(transaction.getHash().getSubHash()).intValue()); + Assert.assertEquals(msg + 4, 1, cumulativeWeight.get( + IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + Assert.assertEquals(msg + 3, 1, cumulativeWeight.get( + IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + Assert.assertEquals(msg + 2, 1, cumulativeWeight.get( + IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + Assert.assertEquals(msg + 1, 2, cumulativeWeight.get( + IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + Assert.assertEquals(msg + 0, 3, cumulativeWeight.get( + IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); } // @Test From b1e59584bc113b8b111d99d247fc15fb8d960681 Mon Sep 17 00:00:00 2001 From: Gal Rogozinski Date: Tue, 6 Mar 2018 13:41:47 +0200 Subject: [PATCH 09/45] remove unused import --- src/main/java/com/iota/iri/utils/SafeUtils.java | 1 - 1 file changed, 1 deletion(-) diff --git 
a/src/main/java/com/iota/iri/utils/SafeUtils.java b/src/main/java/com/iota/iri/utils/SafeUtils.java index 25e229cfdf..58eaf1944d 100644 --- a/src/main/java/com/iota/iri/utils/SafeUtils.java +++ b/src/main/java/com/iota/iri/utils/SafeUtils.java @@ -1,7 +1,6 @@ package com.iota.iri.utils; import java.util.Collection; -import java.util.stream.Stream; /** From a9052890074adbbdeec9cd852e6333a0b6a08e83 Mon Sep 17 00:00:00 2001 From: galrogo Date: Sun, 15 Apr 2018 19:19:07 +0300 Subject: [PATCH 10/45] check config file before loading default testnet values and add testne db flag --- src/main/java/com/iota/iri/IRI.java | 83 ++++++++++++++++--- .../java/com/iota/iri/conf/Configuration.java | 2 + 2 files changed, 74 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/iota/iri/IRI.java b/src/main/java/com/iota/iri/IRI.java index d7e7a4bccb..659f7c89af 100644 --- a/src/main/java/com/iota/iri/IRI.java +++ b/src/main/java/com/iota/iri/IRI.java @@ -109,6 +109,7 @@ public static void validateParams(final Configuration configuration, final Strin final Option dnsResolutionFalse = parser.addBooleanOption("dns-resolution-false"); final Option maxPeers = parser.addStringOption("max-peers"); final Option testnetCoordinator = parser.addStringOption("testnet-coordinator"); + final Option testnetDbPath = parser.addStringOption("db-path"); final Option disableCooValidation = parser.addBooleanOption("testnet-no-coo-validation"); final Option snapshot = parser.addStringOption("snapshot"); final Option snapshotSignature = parser.addStringOption("snapshot-sig"); @@ -139,18 +140,69 @@ public static void validateParams(final Configuration configuration, final Strin || configuration.booling(DefaultConfSettings.TESTNET); if (isTestnet) { configuration.put(DefaultConfSettings.TESTNET, "true"); - configuration.put(DefaultConfSettings.DB_PATH.name(), "testnetdb"); - configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), "testnetdb.log"); - 
configuration.put(DefaultConfSettings.COORDINATOR, Configuration.TESTNET_COORDINATOR_ADDRESS); - configuration.put(DefaultConfSettings.SNAPSHOT_FILE, Configuration.TESTNET_SNAPSHOT_FILE); - configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, Configuration.TESTNET_MILESTONE_START_INDEX); + + String dbPath = configuration.string(DefaultConfSettings.DB_PATH); + if (StringUtils.isEmpty(dbPath)) { + dbPath = Configuration.TESTNETDB; + } + configuration.put(DefaultConfSettings.DB_PATH.name(), dbPath); + + String dbLog = configuration.string(DefaultConfSettings.DB_LOG_PATH); + if (StringUtils.isEmpty(dbLog)) { + dbLog = Configuration.TESTNETDB_LOG; + } + configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), dbLog); + + String coordinator_address = configuration.string(DefaultConfSettings.COORDINATOR); + if (StringUtils.isEmpty(coordinator_address)) { + coordinator_address = Configuration.TESTNET_COORDINATOR_ADDRESS; + } + configuration.put(DefaultConfSettings.COORDINATOR, coordinator_address); + + String snapshotFile = configuration.string(DefaultConfSettings.SNAPSHOT_FILE); + if (StringUtils.isEmpty(snapshotFile)) { + snapshotFile = Configuration.TESTNET_SNAPSHOT_FILE; + } + configuration.put(DefaultConfSettings.SNAPSHOT_FILE, snapshotFile); + + String milestoneStart = configuration.string(DefaultConfSettings.MILESTONE_START_INDEX); + if (StringUtils.isEmpty(milestoneStart)) { + milestoneStart = Configuration.TESTNET_MILESTONE_START_INDEX; + } + configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, milestoneStart); + + //this should always be empty configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); - configuration.put(DefaultConfSettings.MWM, Configuration.TESTNET_MWM); - configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, - Configuration.TESTNET_NUM_KEYS_IN_MILESTONE); - configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, Configuration.TESTNET_PACKET_SIZE); - 
configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, Configuration.TESTNET_REQ_HASH_SIZE); - configuration.put(DefaultConfSettings.SNAPSHOT_TIME, Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME); + + String mwm = configuration.string(DefaultConfSettings.MWM); + if (StringUtils.isEmpty(mwm)) { + mwm = Configuration.TESTNET_MWM; + } + configuration.put(DefaultConfSettings.MWM, mwm); + + String keysInMilestone = configuration.string(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE); + if (StringUtils.isEmpty(keysInMilestone)) { + keysInMilestone = Configuration.TESTNET_NUM_KEYS_IN_MILESTONE; + } + configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, keysInMilestone); + + String transactionPacketSize = configuration.string(DefaultConfSettings.TRANSACTION_PACKET_SIZE); + if (StringUtils.isEmpty(transactionPacketSize)) { + transactionPacketSize = Configuration.TESTNET_PACKET_SIZE; + } + configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, transactionPacketSize); + + String reqHashSize = configuration.string(DefaultConfSettings.REQUEST_HASH_SIZE); + if (StringUtils.isEmpty(reqHashSize)) { + reqHashSize = Configuration.TESTNET_REQ_HASH_SIZE; + } + configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, reqHashSize); + + String globalSnapshotTime = configuration.string(DefaultConfSettings.SNAPSHOT_TIME); + if (StringUtils.isEmpty(globalSnapshotTime)) { + globalSnapshotTime = Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME; + } + configuration.put(DefaultConfSettings.SNAPSHOT_TIME, globalSnapshotTime); } // mandatory args @@ -222,6 +274,15 @@ public static void validateParams(final Configuration configuration, final Strin StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory()); } + final String dbPath = parser.getOptionValue(testnetDbPath); + if (dbPath != null) { + if (isTestnet) { + configuration.put(DefaultConfSettings.DB_PATH, dbPath); + configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), dbPath + ".log"); + } else { + 
log.warn(TESTNET_FLAG_REQUIRED + testnetDbPath.longForm()); + } + } final String coordinatorAddress = parser.getOptionValue(testnetCoordinator); if (coordinatorAddress != null) { diff --git a/src/main/java/com/iota/iri/conf/Configuration.java b/src/main/java/com/iota/iri/conf/Configuration.java index d708190d93..15939d006e 100644 --- a/src/main/java/com/iota/iri/conf/Configuration.java +++ b/src/main/java/com/iota/iri/conf/Configuration.java @@ -48,6 +48,8 @@ public class Configuration { public static final String TESTNET_PACKET_SIZE = "1653"; public static final String REQ_HASH_SIZE = "46"; public static final String TESTNET_REQ_HASH_SIZE = "49"; + public static final String TESTNETDB = "testnetdb"; + public static final String TESTNETDB_LOG = "testnetdb.log"; From f31b2b89f3837643739cc18cd746386ff73bee7c Mon Sep 17 00:00:00 2001 From: galrogo Date: Sun, 15 Apr 2018 19:38:53 +0300 Subject: [PATCH 11/45] create a method for testnet configs --- src/main/java/com/iota/iri/IRI.java | 132 ++++++++++++++-------------- 1 file changed, 68 insertions(+), 64 deletions(-) diff --git a/src/main/java/com/iota/iri/IRI.java b/src/main/java/com/iota/iri/IRI.java index 659f7c89af..1e1d61a574 100644 --- a/src/main/java/com/iota/iri/IRI.java +++ b/src/main/java/com/iota/iri/IRI.java @@ -139,70 +139,7 @@ public static void validateParams(final Configuration configuration, final Strin final boolean isTestnet = Optional.ofNullable(parser.getOptionValue(testnet)).orElse(Boolean.FALSE) || configuration.booling(DefaultConfSettings.TESTNET); if (isTestnet) { - configuration.put(DefaultConfSettings.TESTNET, "true"); - - String dbPath = configuration.string(DefaultConfSettings.DB_PATH); - if (StringUtils.isEmpty(dbPath)) { - dbPath = Configuration.TESTNETDB; - } - configuration.put(DefaultConfSettings.DB_PATH.name(), dbPath); - - String dbLog = configuration.string(DefaultConfSettings.DB_LOG_PATH); - if (StringUtils.isEmpty(dbLog)) { - dbLog = Configuration.TESTNETDB_LOG; - } - 
configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), dbLog); - - String coordinator_address = configuration.string(DefaultConfSettings.COORDINATOR); - if (StringUtils.isEmpty(coordinator_address)) { - coordinator_address = Configuration.TESTNET_COORDINATOR_ADDRESS; - } - configuration.put(DefaultConfSettings.COORDINATOR, coordinator_address); - - String snapshotFile = configuration.string(DefaultConfSettings.SNAPSHOT_FILE); - if (StringUtils.isEmpty(snapshotFile)) { - snapshotFile = Configuration.TESTNET_SNAPSHOT_FILE; - } - configuration.put(DefaultConfSettings.SNAPSHOT_FILE, snapshotFile); - - String milestoneStart = configuration.string(DefaultConfSettings.MILESTONE_START_INDEX); - if (StringUtils.isEmpty(milestoneStart)) { - milestoneStart = Configuration.TESTNET_MILESTONE_START_INDEX; - } - configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, milestoneStart); - - //this should always be empty - configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); - - String mwm = configuration.string(DefaultConfSettings.MWM); - if (StringUtils.isEmpty(mwm)) { - mwm = Configuration.TESTNET_MWM; - } - configuration.put(DefaultConfSettings.MWM, mwm); - - String keysInMilestone = configuration.string(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE); - if (StringUtils.isEmpty(keysInMilestone)) { - keysInMilestone = Configuration.TESTNET_NUM_KEYS_IN_MILESTONE; - } - configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, keysInMilestone); - - String transactionPacketSize = configuration.string(DefaultConfSettings.TRANSACTION_PACKET_SIZE); - if (StringUtils.isEmpty(transactionPacketSize)) { - transactionPacketSize = Configuration.TESTNET_PACKET_SIZE; - } - configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, transactionPacketSize); - - String reqHashSize = configuration.string(DefaultConfSettings.REQUEST_HASH_SIZE); - if (StringUtils.isEmpty(reqHashSize)) { - reqHashSize = Configuration.TESTNET_REQ_HASH_SIZE; - } - 
configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, reqHashSize); - - String globalSnapshotTime = configuration.string(DefaultConfSettings.SNAPSHOT_TIME); - if (StringUtils.isEmpty(globalSnapshotTime)) { - globalSnapshotTime = Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME; - } - configuration.put(DefaultConfSettings.SNAPSHOT_TIME, globalSnapshotTime); + setTestnetConfigs(configuration); } // mandatory args @@ -358,6 +295,73 @@ public static void validateParams(final Configuration configuration, final Strin } } + private static void setTestnetConfigs(Configuration configuration) { + configuration.put(DefaultConfSettings.TESTNET, "true"); + + String dbPath = configuration.string(DefaultConfSettings.DB_PATH); + if (StringUtils.isEmpty(dbPath)) { + dbPath = Configuration.TESTNETDB; + } + configuration.put(DefaultConfSettings.DB_PATH.name(), dbPath); + + String dbLog = configuration.string(DefaultConfSettings.DB_LOG_PATH); + if (StringUtils.isEmpty(dbLog)) { + dbLog = Configuration.TESTNETDB_LOG; + } + configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), dbLog); + + String coordinator_address = configuration.string(DefaultConfSettings.COORDINATOR); + if (StringUtils.isEmpty(coordinator_address)) { + coordinator_address = Configuration.TESTNET_COORDINATOR_ADDRESS; + } + configuration.put(DefaultConfSettings.COORDINATOR, coordinator_address); + + String snapshotFile = configuration.string(DefaultConfSettings.SNAPSHOT_FILE); + if (StringUtils.isEmpty(snapshotFile)) { + snapshotFile = Configuration.TESTNET_SNAPSHOT_FILE; + } + configuration.put(DefaultConfSettings.SNAPSHOT_FILE, snapshotFile); + + String milestoneStart = configuration.string(DefaultConfSettings.MILESTONE_START_INDEX); + if (StringUtils.isEmpty(milestoneStart)) { + milestoneStart = Configuration.TESTNET_MILESTONE_START_INDEX; + } + configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, milestoneStart); + + //this should always be empty + 
configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); + + String mwm = configuration.string(DefaultConfSettings.MWM); + if (StringUtils.isEmpty(mwm)) { + mwm = Configuration.TESTNET_MWM; + } + configuration.put(DefaultConfSettings.MWM, mwm); + + String keysInMilestone = configuration.string(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE); + if (StringUtils.isEmpty(keysInMilestone)) { + keysInMilestone = Configuration.TESTNET_NUM_KEYS_IN_MILESTONE; + } + configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, keysInMilestone); + + String transactionPacketSize = configuration.string(DefaultConfSettings.TRANSACTION_PACKET_SIZE); + if (StringUtils.isEmpty(transactionPacketSize)) { + transactionPacketSize = Configuration.TESTNET_PACKET_SIZE; + } + configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, transactionPacketSize); + + String reqHashSize = configuration.string(DefaultConfSettings.REQUEST_HASH_SIZE); + if (StringUtils.isEmpty(reqHashSize)) { + reqHashSize = Configuration.TESTNET_REQ_HASH_SIZE; + } + configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, reqHashSize); + + String globalSnapshotTime = configuration.string(DefaultConfSettings.SNAPSHOT_TIME); + if (StringUtils.isEmpty(globalSnapshotTime)) { + globalSnapshotTime = Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME; + } + configuration.put(DefaultConfSettings.SNAPSHOT_TIME, globalSnapshotTime); + } + private static void printUsage() { log.info("Usage: java -jar {}-{}.jar " + "[{-n,--neighbors} ''] " + From b34409bf3bafe72b6437bd9f0afb9a602bca6ce4 Mon Sep 17 00:00:00 2001 From: footloosejava Date: Wed, 25 Apr 2018 04:58:39 -0700 Subject: [PATCH 12/45] Refactor SignedFiles.java (#730) * - Removed local exception capturing. - Added null check for trytes result. - Fixed reader configuration. Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * Added exception throw message. 
Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * Added exception throw message. Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * Added throws and code for handling IOException in Snapshpt SignedFiles... Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> --- src/main/java/com/iota/iri/Iota.java | 13 +-- src/main/java/com/iota/iri/SignedFiles.java | 104 ++++++++----------- src/main/java/com/iota/iri/Snapshot.java | 2 +- src/main/java/com/iota/iri/service/API.java | 2 +- src/test/java/com/iota/iri/SnapshotTest.java | 29 ++++-- 5 files changed, 70 insertions(+), 80 deletions(-) diff --git a/src/main/java/com/iota/iri/Iota.java b/src/main/java/com/iota/iri/Iota.java index b03d94b9d7..1157b39cac 100644 --- a/src/main/java/com/iota/iri/Iota.java +++ b/src/main/java/com/iota/iri/Iota.java @@ -3,25 +3,22 @@ import com.iota.iri.conf.Configuration; import com.iota.iri.controllers.*; import com.iota.iri.hash.SpongeFactory; -import com.iota.iri.network.TransactionRequester; import com.iota.iri.model.Hash; import com.iota.iri.network.Node; +import com.iota.iri.network.TransactionRequester; import com.iota.iri.network.UDPReceiver; import com.iota.iri.network.replicator.Replicator; -import com.iota.iri.zmq.MessageQ; import com.iota.iri.service.TipsManager; -import com.iota.iri.storage.FileExportProvider; -import com.iota.iri.storage.Indexable; -import com.iota.iri.storage.Persistable; -import com.iota.iri.storage.Tangle; -import com.iota.iri.storage.ZmqPublishProvider; +import com.iota.iri.storage.*; import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; import com.iota.iri.utils.Pair; +import com.iota.iri.zmq.MessageQ; import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.util.List; /** @@ -50,7 +47,7 @@ public class Iota { public final 
int tcpPort; public final int maxTipSearchDepth; - public Iota(Configuration configuration) { + public Iota(Configuration configuration) throws IOException { this.configuration = configuration; testnet = configuration.booling(Configuration.DefaultConfSettings.TESTNET); maxPeers = configuration.integer(Configuration.DefaultConfSettings.MAX_PEERS); diff --git a/src/main/java/com/iota/iri/SignedFiles.java b/src/main/java/com/iota/iri/SignedFiles.java index 8b16fab23d..ad82eb469d 100644 --- a/src/main/java/com/iota/iri/SignedFiles.java +++ b/src/main/java/com/iota/iri/SignedFiles.java @@ -5,91 +5,75 @@ import com.iota.iri.hash.Sponge; import com.iota.iri.hash.SpongeFactory; import com.iota.iri.utils.Converter; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.ArrayUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.*; import java.util.Arrays; -/** - * Created by alon on 26/01/18. - */ public class SignedFiles { - private static final Logger log = LoggerFactory.getLogger(SignedFiles.class); - public static boolean isFileSignatureValid(String filename, String signatureFilename, String publicKey, int depth, int index) { - int[] trits = new int[Curl.HASH_LENGTH * 3]; - Sponge curl = SpongeFactory.create(SpongeFactory.Mode.KERL); - BufferedReader reader = null; - //digest file - try { - InputStream inputStream = SignedFiles.class.getResourceAsStream(filename); - //if resource doesn't exist, read from file system - if (inputStream == null) { - inputStream = new FileInputStream(filename); - } - BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream); - reader = new BufferedReader(new InputStreamReader(bufferedInputStream)); - String line; - while ((line = reader.readLine()) != null) { - Converter.trits(Converter.asciiToTrytes(line), trits, 0); - curl.absorb(trits, 0, trits.length); - Arrays.fill(trits, 0); - } - } catch (IOException e) { - log.error("Can't read file " + filename, e); - return false; - } 
finally { - IOUtils.closeQuietly(reader); - } + public static boolean isFileSignatureValid(String filename, String signatureFilename, String publicKey, int depth, int index) throws IOException { + int[] signature = digestFile(filename, SpongeFactory.create(SpongeFactory.Mode.KERL)); + return validateSignature(signatureFilename, publicKey, depth, index, signature); + } + + private static boolean validateSignature(String signatureFilename, String publicKey, int depth, int index, int[] digest) throws IOException { //validate signature - trits = new int[Curl.HASH_LENGTH]; - curl.squeeze(trits, 0, Curl.HASH_LENGTH); SpongeFactory.Mode mode = SpongeFactory.Mode.CURLP81; int[] digests = new int[0]; - int[] bundle = ISS.normalizedBundle(trits); + int[] bundle = ISS.normalizedBundle(digest); int[] root; int i; - try { - InputStream inputStream = SignedFiles.class.getResourceAsStream(signatureFilename); - if (inputStream == null) { - inputStream = new FileInputStream(signatureFilename); - } - BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream); - reader = new BufferedReader(new InputStreamReader(bufferedInputStream)); + + try (InputStream inputStream = SignedFiles.class.getResourceAsStream(signatureFilename); + BufferedReader reader = new BufferedReader((inputStream == null) + ? 
new FileReader(signatureFilename) : new InputStreamReader(inputStream))) { + String line; for (i = 0; i < 3 && (line = reader.readLine()) != null; i++) { int[] lineTrits = Converter.allocateTritsForTrytes(line.length()); Converter.trits(line, lineTrits, 0); - digests = ArrayUtils.addAll( - digests, - ISS.digest(mode - , Arrays.copyOfRange(bundle, i * ISS.NORMALIZED_FRAGMENT_LENGTH, (i + 1) * ISS.NORMALIZED_FRAGMENT_LENGTH) - , lineTrits)); + int[] normalizedBundleFragment = Arrays.copyOfRange(bundle, i * ISS.NORMALIZED_FRAGMENT_LENGTH, (i + 1) * ISS.NORMALIZED_FRAGMENT_LENGTH); + int[] issDigest = ISS.digest(mode, normalizedBundleFragment, lineTrits); + digests = ArrayUtils.addAll(digests, issDigest); } + if ((line = reader.readLine()) != null) { int[] lineTrits = Converter.allocateTritsForTrytes(line.length()); Converter.trits(line, lineTrits, 0); root = ISS.getMerkleRoot(mode, ISS.address(mode, digests), lineTrits, 0, index, depth); - } - else { + } else { root = ISS.address(mode, digests); } int[] pubkeyTrits = Converter.allocateTritsForTrytes(publicKey.length()); Converter.trits(publicKey, pubkeyTrits, 0); - if (Arrays.equals(pubkeyTrits, root)) { - //valid - return true; - } - } catch (IOException e) { - log.error("Can't read signature file " + filename, e); - return false; - } finally { - IOUtils.closeQuietly(reader); + return Arrays.equals(pubkeyTrits, root); // valid + } + } + + private static int[] digestFile(String filename, Sponge curl) throws IOException { + try (InputStream inputStream = SignedFiles.class.getResourceAsStream(filename); + BufferedReader reader = new BufferedReader((inputStream == null) + ? new FileReader(filename) : new InputStreamReader(inputStream))) { + + int[] buffer = new int[Curl.HASH_LENGTH * 3]; + + reader.lines().forEach(line -> { + String trytes = Converter.asciiToTrytes(line); // can return a null + if (trytes == null) { + throw new IllegalArgumentException("TRYTES IS NULL. 
INPUT= '" + line + "'"); + } + Converter.trits(trytes, buffer, 0); + curl.absorb(buffer, 0, buffer.length); + Arrays.fill(buffer, 0); + }); + + int[] signature = new int[Curl.HASH_LENGTH]; + curl.squeeze(signature, 0, Curl.HASH_LENGTH); + return signature; + } catch (UncheckedIOException e) { + throw e.getCause(); } - return false; } -} +} \ No newline at end of file diff --git a/src/main/java/com/iota/iri/Snapshot.java b/src/main/java/com/iota/iri/Snapshot.java index 7564b38318..7100d4ab54 100644 --- a/src/main/java/com/iota/iri/Snapshot.java +++ b/src/main/java/com/iota/iri/Snapshot.java @@ -27,7 +27,7 @@ public class Snapshot { public final ReadWriteLock rwlock = new ReentrantReadWriteLock(); - public static Snapshot init(String snapshotPath, String snapshotSigPath, boolean testnet) { + public static Snapshot init(String snapshotPath, String snapshotSigPath, boolean testnet) throws IOException { //This is not thread-safe (and it is ok) if (initialSnapshot == null) { if (!testnet && !SignedFiles.isFileSignatureValid(snapshotPath, snapshotSigPath, SNAPSHOT_PUBKEY, diff --git a/src/main/java/com/iota/iri/service/API.java b/src/main/java/com/iota/iri/service/API.java index 26032bcda8..2dade751fe 100644 --- a/src/main/java/com/iota/iri/service/API.java +++ b/src/main/java/com/iota/iri/service/API.java @@ -145,7 +145,7 @@ public void handleRequest(final HttpServerExchange exchange) throws Exception { server.start(); } - private void readPreviousEpochsSpentAddresses(boolean isTestnet) { + private void readPreviousEpochsSpentAddresses(boolean isTestnet) throws IOException { if (isTestnet) { return; } diff --git a/src/test/java/com/iota/iri/SnapshotTest.java b/src/test/java/com/iota/iri/SnapshotTest.java index 939207db48..5f42842786 100644 --- a/src/test/java/com/iota/iri/SnapshotTest.java +++ b/src/test/java/com/iota/iri/SnapshotTest.java @@ -3,34 +3,43 @@ import com.iota.iri.conf.Configuration; import com.iota.iri.model.Hash; import org.junit.Assert; +import 
org.junit.BeforeClass; import org.junit.Test; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import static org.junit.Assert.*; +import static org.junit.Assert.assertFalse; -/** - * Created by paul on 4/12/17. - */ public class SnapshotTest { - private static final Snapshot initSnapshot = Snapshot.init(Configuration.MAINNET_SNAPSHOT_FILE, - Configuration.MAINNET_SNAPSHOT_SIG_FILE, false); + private static Snapshot initSnapshot; + + @BeforeClass + public static void beforeClass() { + try { + initSnapshot = Snapshot.init(Configuration.MAINNET_SNAPSHOT_FILE, + Configuration.MAINNET_SNAPSHOT_SIG_FILE, false); + } catch (IOException e) { + throw new UncheckedIOException("Problem initiating snapshot", e); + } + } @Test - public void getState() throws Exception { + public void getState() { //Assert.assertTrue(latestSnapshot.getState().equals(Snapshot.initialState)); } @Test - public void isConsistent() throws Exception { + public void isConsistent() { Assert.assertTrue("Initial confirmed should be consistent", Snapshot.isConsistent(initSnapshot.state)); } @Test - public void patch() throws Exception { + public void patch() { Map.Entry firstOne = initSnapshot.state.entrySet().iterator().next(); Hash someHash = new Hash("PSRQPWWIECDGDDZXHGJNMEVJNSVOSMECPPVRPEVRZFVIZYNNXZNTOTJOZNGCZNQVSPXBXTYUJUOXYASLS"); Map diff = new HashMap<>(); @@ -41,7 +50,7 @@ public void patch() throws Exception { } @Test - public void applyShouldFail() throws Exception { + public void applyShouldFail() { Snapshot latestSnapshot = initSnapshot.clone(); Map badMap = new HashMap<>(); badMap.put(new Hash("PSRQPWWIECDGDDZEHGJNMEVJNSVOSMECPPVRPEVRZFVIZYNNXZNTOTJOZNGCZNQVSPXBXTYUJUOXYASLS"), 100L); From 88df68792bcca5363559fb43dd09f3d09c3f8fa1 Mon Sep 17 00:00:00 2001 From: galrogo Date: Wed, 25 Apr 2018 15:34:33 +0300 Subject: [PATCH 13/45] auto-format pom (#725) --- pom.xml | 932 
+++++++++++++++++++++++++++++--------------------------- 1 file changed, 481 insertions(+), 451 deletions(-) diff --git a/pom.xml b/pom.xml index 55e36d4229..c4af111c85 100644 --- a/pom.xml +++ b/pom.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 com.iota @@ -37,457 +37,487 @@ - - - org.bouncycastle - bcprov-jdk15on - 1.58 - - - - - org.apache.commons - commons-lang3 - 3.5 - - - - - org.slf4j - slf4j-api - 1.7.25 - - - - - ch.qos.logback - logback-classic - 1.2.3 - - - - - commons-io - commons-io - 2.5 - - - - - org.rocksdb - rocksdbjni - 5.7.3 - - - - - com.google.code.gson - gson - 2.8.1 - - - - - io.undertow - undertow-core - ${undertow.version} - - - - io.undertow - undertow-servlet - ${undertow.version} - - - - io.undertow - undertow-websockets-jsr - ${undertow.version} - - - - - com.sanityinc - jargs - 2.0-SNAPSHOT - - - - - - - com.jayway.restassured - rest-assured - 2.9.0 - test - - - - - com.jayway.jsonpath - json-path - 2.2.0 - test - - - - junit - junit - 4.12 - test - - - - uk.co.froot.maven.enforcer - digest-enforcer-rules - 0.0.1 - compile - - - - org.ini4j - ini4j - 0.5.4 - - - - org.zeromq - jeromq - 0.4.3 - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.3 - - ${java-version} - ${java-version} - ${java-version} - ${java-version} - - - - org.apache.maven.plugins - maven-shade-plugin - 2.4.3 - - - package - - shade - - - iri-${project.version} - ${project.basedir}/target - com.iota.iri.IRI - - - com.iota.iri.IRI - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.0.0 - - - validate - validate - - checkstyle.xml - UTF-8 - true - true - false - - - check - - - - - - - org.apache.maven.plugins - maven-enforcer-plugin - 1.4 - - - enforce - package - - enforce - - - - - - - true - - - - - - 
org.apache.commons:commons-lang3:3.5:jar:null:compile:6c6c702c89bfff3cd9e80b04d668c5e190d588c6 - - org.slf4j:slf4j-api:1.7.25:jar:null:compile:da76ca59f6a57ee3102f8f9bd9cee742973efa8a - ch.qos.logback:logback-classic:1.2.3:jar:null:compile:7c4f3c474fb2c041d8028740440937705ebb473a - commons-io:commons-io:2.5:jar:null:compile:2852e6e05fbb95076fc091f6d1780f1f8fe35e0f - org.rocksdb:rocksdbjni:5.7.3:jar:null:compile:421b44ad957a2b6cce5adedc204db551831b553d - com.google.code.gson:gson:2.8.1:jar:null:compile:02a8e0aa38a2e21cb39e2f5a7d6704cbdc941da0 - io.undertow:undertow-core:${undertow.version}:jar:null:compile:e5764e5017bfe8c2dd421dc80035e5165501bfda - io.undertow:undertow-servlet:${undertow.version}:jar:null:compile:0e2850a558e70a2d72d9a3e782c7b6fde2d9f1c7 - io.undertow:undertow-websockets-jsr:${undertow.version}:jar:null:compile:a76941dade81de7847520755c2efc7fab67043cb + + + org.bouncycastle + bcprov-jdk15on + 1.58 + + + + + org.apache.commons + commons-lang3 + 3.5 + + + + + org.slf4j + slf4j-api + 1.7.25 + + + + + ch.qos.logback + logback-classic + 1.2.3 + + + + + commons-io + commons-io + 2.5 + + + + + org.rocksdb + rocksdbjni + 5.7.3 + + + + + com.google.code.gson + gson + 2.8.1 + + + + + io.undertow + undertow-core + ${undertow.version} + + + + io.undertow + undertow-servlet + ${undertow.version} + + + + io.undertow + undertow-websockets-jsr + ${undertow.version} + + + + + com.sanityinc + jargs + 2.0-SNAPSHOT + + + + + + + com.jayway.restassured + rest-assured + 2.9.0 + test + + + + + com.jayway.jsonpath + json-path + 2.2.0 + test + + + + junit + junit + 4.12 + test + + + + uk.co.froot.maven.enforcer + digest-enforcer-rules + 0.0.1 + compile + + + + org.ini4j + ini4j + 0.5.4 + + + + org.zeromq + jeromq + 0.4.3 + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.3 + + ${java-version} + ${java-version} + ${java-version} + ${java-version} + + + + org.apache.maven.plugins + maven-shade-plugin + 2.4.3 + + + package + + shade + + + 
iri-${project.version} + ${project.basedir}/target + com.iota.iri.IRI + + + com.iota.iri.IRI + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.0.0 + + + validate + validate + + checkstyle.xml + UTF-8 + true + true + false + + + check + + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + 1.4 + + + enforce + package + + enforce + + + + + + + true + + + + + + + org.apache.commons:commons-lang3:3.5:jar:null:compile:6c6c702c89bfff3cd9e80b04d668c5e190d588c6 + + + + org.slf4j:slf4j-api:1.7.25:jar:null:compile:da76ca59f6a57ee3102f8f9bd9cee742973efa8a + + + ch.qos.logback:logback-classic:1.2.3:jar:null:compile:7c4f3c474fb2c041d8028740440937705ebb473a + + + commons-io:commons-io:2.5:jar:null:compile:2852e6e05fbb95076fc091f6d1780f1f8fe35e0f + + + org.rocksdb:rocksdbjni:5.7.3:jar:null:compile:421b44ad957a2b6cce5adedc204db551831b553d + + + com.google.code.gson:gson:2.8.1:jar:null:compile:02a8e0aa38a2e21cb39e2f5a7d6704cbdc941da0 + + + io.undertow:undertow-core:${undertow.version}:jar:null:compile:e5764e5017bfe8c2dd421dc80035e5165501bfda + + + io.undertow:undertow-servlet:${undertow.version}:jar:null:compile:0e2850a558e70a2d72d9a3e782c7b6fde2d9f1c7 + + + io.undertow:undertow-websockets-jsr:${undertow.version}:jar:null:compile:a76941dade81de7847520755c2efc7fab67043cb + + + + com.jayway.restassured:rest-assured:2.9.0:jar:null:test:d0d5b6720a58472ab99287c931a8205373d6e7b2 + + + com.jayway.jsonpath:json-path:2.2.0:jar:null:test:22290d17944bd239fabf5ac69005a60a7ecbbbcb + + junit:junit:4.12:jar:null:test:2973d150c0dc1fefe998f834810d68f278ea58ec + + + org.ini4j:ini4j:0.5.4:jar:null:compile:4a3ee4146a90c619b20977d65951825f5675b560 + + + org.bouncycastle:bcprov-jdk15on:1.58:jar:null:compile:2c9aa1c4e3372b447ba5daabade4adf2a2264b12 + + + + + uk.co.froot.maven.enforcer:digest-enforcer-rules:0.0.1:jar:null:runtime:16a9e04f3fe4bb143c42782d07d5faf65b32106f + + + + + + + + + + + + + 
uk.co.froot.maven.enforcer + digest-enforcer-rules + 0.0.1 + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.21.0 + + + integration/*.java + + + + + integration-test + + test + + integration-test + + + none + + + integration/*.java + + + + + + + org.jacoco + jacoco-maven-plugin + 0.7.9 + + + + prepare-agent + + + + report + integration-test + + report + + + + + + + + + + + build-extras + + + build + full + + + + ${env.GPG_KEYNAME} + ${env.GPG_PASSPHRASE} + + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.4 + + private + true + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + package + + sign + + + + + + jdeb + org.vafer + 1.5 + + + package + + jdeb + + - com.jayway.restassured:rest-assured:2.9.0:jar:null:test:d0d5b6720a58472ab99287c931a8205373d6e7b2 - com.jayway.jsonpath:json-path:2.2.0:jar:null:test:22290d17944bd239fabf5ac69005a60a7ecbbbcb - junit:junit:4.12:jar:null:test:2973d150c0dc1fefe998f834810d68f278ea58ec - org.ini4j:ini4j:0.5.4:jar:null:compile:4a3ee4146a90c619b20977d65951825f5675b560 - org.bouncycastle:bcprov-jdk15on:1.58:jar:null:compile:2c9aa1c4e3372b447ba5daabade4adf2a2264b12 - - - uk.co.froot.maven.enforcer:digest-enforcer-rules:0.0.1:jar:null:runtime:16a9e04f3fe4bb143c42782d07d5faf65b32106f - - - - - - - - - - - - uk.co.froot.maven.enforcer - digest-enforcer-rules - 0.0.1 - - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.21.0 - - - integration/*.java - - - - - integration-test - - test - - integration-test - - - none - - - integration/*.java - - - - - - - org.jacoco - jacoco-maven-plugin - 0.7.9 - - - - prepare-agent - - - - report - integration-test - - report - - - - - - - - - - - build-extras - - - build - full - - - - ${env.GPG_KEYNAME} - ${env.GPG_PASSPHRASE} - - - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - - jar - - - - 
- - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.4 - - private - true - - - - attach-javadocs - - jar - - - - - - org.apache.maven.plugins - maven-gpg-plugin - 1.6 - - - sign-artifacts - package - - sign - - - - - - jdeb - org.vafer - 1.5 - - - package - - jdeb - - - - true - true - - USER - true - ${basedir}/src/deb/control - - - - ${project.build.directory}/${project.build.finalName}.jar - file - - perm - /usr/share/iota/lib - loader - loader - - - - - link - true - /usr/share/iota/iri.jar - /usr/share/iota/lib/${project.build.finalName}.jar - - - - ${basedir}/src/deb/init.d - directory - - perm - /etc/init.d - loader - loader - - - - - template - - etc/${project.artifactId} - var/lib/${project.artifactId} - var/log/${project.artifactId} - var/run/${project.artifactId} - - - perm - loader - loader - - - - - - - - - - - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 2.17 - - - - checkstyle - - - - - - + true + true + + USER + true + ${basedir}/src/deb/control + + + + ${project.build.directory}/${project.build.finalName}.jar + file + + perm + /usr/share/iota/lib + loader + loader + + + + + link + true + /usr/share/iota/iri.jar + /usr/share/iota/lib/${project.build.finalName}.jar + + + + ${basedir}/src/deb/init.d + directory + + perm + /etc/init.d + loader + loader + + + + + template + + etc/${project.artifactId} + var/lib/${project.artifactId} + var/log/${project.artifactId} + var/run/${project.artifactId} + + + perm + loader + loader + + + + + + + + + + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 2.17 + + + + checkstyle + + + + + + From 43b739729e721e18de4726310f7f33d53c97033b Mon Sep 17 00:00:00 2001 From: footloosejava Date: Thu, 26 Apr 2018 01:15:51 -0700 Subject: [PATCH 14/45] edit readme (replaces old) (#731) * chnages as per reviews Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * Added backk codacy and build status icons at top Signed-off-by: footloosejava 
<32090281+footloosejava@users.noreply.github.com> --- README.md | 64 ++++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 5c1ecfefa0..27bb49be73 100644 --- a/README.md +++ b/README.md @@ -6,26 +6,43 @@ ## IOTA -This is the main branch of the main IRI repository, as this is a IOTA reference implementation that is utilized specifically for what we have setup. It is a complete [[IOTA]](http://iota.org/) Node with a JSON-REST HTTP interface. +The IRI repository is the main IOTA Reference Implementation and the embodiment of the IOTA network specification. -It allows to connect easily using java directly to a local or a remote [[IOTA node]](http://learn.iota.org/). +This is a full-featured [[IOTA]](https://iota.org/) node with a convenient JSON-REST HTTP interface. +It allows users to become part of the [[IOTA]](https://iota.org) network as both a transaction relay +and network information provider through the easy-to-use [[API]](https://iota.readme.io/reference). + +It is specially designed for users seeking a fast, efficient and fully-compatible network setup. + +Running an IRI node also allows light wallet users a node to directly connect to for their own wallet transactions. * **Latest release:** 1.4.2.2 Release * **License:** GPLv3 # How to get started -Obviously, because this is its own, independent network, you have to go through the same process as in the main network: **find neighbors**. You can find neighbors in the [[Discord Community]](https://discord.gg/7Gu2mG5), or on [[our forum]](https://forum.iota.org/). Community members are usually very happy to help you out and get you connected. If you want to get tokens for your testcase, please just ask in one of the communication channels as well. 
+The IOTA network is an independent peer-to-peer network with a first-user, friend-to-friend, network structure: -## Reporting Issues +- As a 'first-user' network, to access the data streams and APIs that other users provide, you must first exchange your IP and port configuration with a current user. -If you notice any issues or irregularities in this release. Please make sure to submit an issue on github. +- As a 'friend-to-friend' network, you have the privilege of joining new users into the network through your node +by adding them to your approved neighbors list — ensuring that you both broadcast to them and also receive their broadcasts. +You can **find neighbors** quickly at both our [[Discord Community]](https://discord.gg/7Gu2mG5) and [[forum.iota.org]](https://forum.iota.org/). + +Everyone will be welcoming and very happy to help you get connected. +If you want to get tokens for your testcase, please just ask in one of the communication channels. -# Installing +## Reporting Issues + +If you notice any bugs, problems or other irregularities with this release, +please submit an issue on github [[submit new issue]](https://github.com/iotaledger/iri/issues/new). -You have two options, the preferred option is that you compile yourself. The second option is that you utilize the provided jar, which is released regularly (when new updates occur) here: [Github Releases](https://github.com/iotaledger/iri/releases). +# Installing +The preferred option is that you compile yourself. +The second option is that you utilize the provided jar, +which is released whenever there is a new update here: [Github Releases](https://github.com/iotaledger/iri/releases). ### Compiling yourself @@ -39,13 +56,22 @@ $ mvn clean compile $ mvn package ``` -This will create a `target` directory in which you will find the executable jar file that you can use for the +This will create a `target` directory in which you will find the executable jar file that you can use. 
### How to run IRI #### Locally -Running IRI is pretty simple, and you don't even have to run it under admin rights. Below is a list of command line options. Here is an example script: +Running IRI is quick and easy, and you can usually run it without admin rights. +Below is a list of command line options. + +At a minimum, the port must be specified on the command-line — e.g., '`-p 14265`' +or in the `iota.ini` file — e.g., '`PORT = 14265`'. + +If the '`iota.ini`' file exists, it will be read. +The port and all the command line options below take precedence over values specified in the ini config file. + +Here is an example script that specifies only the port, with all other setting to be read from the ini file **if it exists**: ``` java -jar iri.jar -p 14265 @@ -63,29 +89,27 @@ command line arguments. Option | Shortened version | Description | Example Input --- | --- | --- | --- -`--port` | `-p` | This is a *mandatory* option that defines the port to be used to send API commands to your node | `-p 14800` +`--port` | `-p` | This is a *mandatory* option that defines the port to be used to send API commands to your node | `-p 14265` `--neighbors` | `-n` | Neighbors that you are connected with will be added via this option. | `-n "udp://148.148.148.148:14265 udp://[2001:db8:a0b:12f0::1]:14265"` `--config` | `-c` | Config INI file that can be used instead of CLI options. See more below | `-c iri.ini` -`--udp-receiver-port` | `-u` | UDP receiver port | `-u 14800` -`--tcp-receiver-port` | `-t` | TCP receiver port | `-t 14800` +`--udp-receiver-port` | `-u` | UDP receiver port | `-u 14600` +`--tcp-receiver-port` | `-t` | TCP receiver port | `-t 15600` `--testnet` | | Makes it possible to run IRI with the IOTA testnet | `--testnet` `--remote` | | Remotely access your node and send API commands | `--remote` `--remote-auth` | | Require authentication password for accessing remotely. 
Requires a correct `username:hashedpassword` combination | `--remote-auth iotatoken:LL9EZFNCHZCMLJLVUBCKJSWKFEXNYRHHMYS9XQLUZRDEKUUDOCMBMRBWJEMEDDXSDPHIGQULENCRVEYMO` `--remote-limit-api` | | Exclude certain API calls from being able to be accessed remotely | `--remote-limit-api "attachToTangle, addNeighbors"` `--send-limit`| | Limit the outbound bandwidth consumption. Limit is set to mbit/s | `--send-limit 1.0` `--max-peers` | | Limit the number of max accepted peers. Default is set to 0 (mutual tethering) | `--max-peers 8` -`--dns-resolution-false` | | Ignores DNS resolution refreshing | --dns-resolution-false +`--dns-resolution-false` | | Ignores DNS resolution refreshing | `--dns-resolution-false` ### INI File You can also provide an ini file to store all of your command line options and easily update (especially neighbors) if needed. You can enable it via the `--config` flag. Here is an example INI file: ``` [IRI] -PORT = 14700 -UDP_RECEIVER_PORT = 14700 -NEIGHBORS = udp://my.favorite.com:15600 +PORT = 14265 +UDP_RECEIVER_PORT = 14600 +NEIGHBORS = udp://my.favorite.com:14600 IXI_DIR = ixi -HEADLESS = true -DEBUG = true +DEBUG = false DB_PATH = db -``` - +``` \ No newline at end of file From d95c71613e5ebd362d39086f94221cf47c6fbc43 Mon Sep 17 00:00:00 2001 From: footloosejava Date: Wed, 2 May 2018 04:13:29 -0700 Subject: [PATCH 15/45] =?UTF-8?q?Kerl.java=20absorb=20now=20=F0=9F=98=88?= =?UTF-8?q?=20666%=20faster=20=F0=9F=98=88=20-=20plus=20=F0=9F=8D=A6=20cle?= =?UTF-8?q?aner=20and=20=F0=9F=8D=A6=20simplified=20code=20(#628)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * Kerl.java absorb(...) now 600% faster! Snapshot initial now loading 368% faster. Big time hog was bigIntFromTrits(...) - Created a fast track for Radix power multiplication. - Created a native math routine to munch 36 trits at a time into the main math routine. 
- Added test to ensure that there is now math overflow. LOADING SNAPSHOT TIMES INITIAL BEFORE AFTER SNAPSHOT 9693 2879 ms - ABSORB 7920 1306 ms - SQUEEZE 800828 832151 ns 10 rounds BEFORE AFTER SNAPSHOT 8839 2452 ms ABSORB 7060 1266 ms SQUEEZE 125591 121977 ns AVG 20 rounds BEFORE AFTER SNAPSHOT 8760 2378 ms ABSORB 7195 1197 ms SQUEEZE 187030 186910 ns --- src/main/java/com/iota/iri/hash/Kerl.java | 148 ++++++++---------- src/test/java/com/iota/iri/hash/KerlTest.java | 83 ++++++---- 2 files changed, 114 insertions(+), 117 deletions(-) diff --git a/src/main/java/com/iota/iri/hash/Kerl.java b/src/main/java/com/iota/iri/hash/Kerl.java index b8394db91a..9ae1bf82f9 100644 --- a/src/main/java/com/iota/iri/hash/Kerl.java +++ b/src/main/java/com/iota/iri/hash/Kerl.java @@ -6,142 +6,122 @@ import java.math.BigInteger; import java.security.DigestException; import java.util.Arrays; +import java.util.stream.IntStream; + +public final class Kerl implements Sponge { -public class Kerl implements Sponge { public static final int BIT_HASH_LENGTH = 384; public static final int BYTE_HASH_LENGTH = BIT_HASH_LENGTH / 8; - private byte[] byte_state; - private int[] trit_state; + public static final BigInteger RADIX = BigInteger.valueOf(Converter.RADIX); + public static final int MAX_POWERS_LONG = 40; + private static final BigInteger[] RADIX_POWERS = IntStream.range(0, MAX_POWERS_LONG + 1).mapToObj(RADIX::pow).toArray(BigInteger[]::new); + private final Keccak.Digest384 keccak; protected Kerl() { - this.keccak = new Keccak.Digest384(); - this.byte_state = new byte[BYTE_HASH_LENGTH]; - this.trit_state = new int[Sponge.HASH_LENGTH]; } @Override public void reset() { - this.keccak.reset(); } @Override - public void absorb(final int[] trits, int offset, int length) { - + public void absorb(final int[] trits, final int offset, final int length) { if (length % 243 != 0) { throw new RuntimeException("Illegal length: " + length); } - - do { - //copy trits[offset:offset+length] - 
System.arraycopy(trits, offset, trit_state, 0, HASH_LENGTH); - - //convert to bits - trit_state[HASH_LENGTH - 1] = 0; - bytesFromBigInt(bigIntFromTrits(trit_state, 0, HASH_LENGTH), byte_state, 0); - - //run keccak - keccak.update(byte_state); - offset += HASH_LENGTH; - - } while ((length -= HASH_LENGTH) > 0); + for (int pos = offset; pos < offset + length; pos += HASH_LENGTH) { + //convert to bytes && update + byte[] state = new byte[BYTE_HASH_LENGTH]; + trits[pos + HASH_LENGTH - 1] = 0; + bytesFromBigInt(bigIntFromTrits(trits, pos, HASH_LENGTH), state); + keccak.update(state); + } } @Override - public void squeeze(final int[] trits, int offset, int length) { - + public void squeeze(final int[] trits, final int offset, final int length) { if (length % 243 != 0) { - throw new RuntimeException("Illegal length: " + length); + throw new IllegalArgumentException("Illegal length: " + length); } - try { - do { - this.keccak.digest(byte_state, 0, BYTE_HASH_LENGTH); - //convert to trits - tritsFromBigInt(bigIntFromBytes(byte_state, 0, BYTE_HASH_LENGTH), trit_state, 0, Sponge.HASH_LENGTH); - - //copy with offset - trit_state[HASH_LENGTH - 1] = 0; - System.arraycopy(trit_state, 0, trits, offset, HASH_LENGTH); + for (int pos = offset; pos < offset + length; pos += HASH_LENGTH) { - //calculate hash again - for (int i = byte_state.length; i-- > 0; ) { + byte[] state = new byte[BYTE_HASH_LENGTH]; + keccak.digest(state, 0, BYTE_HASH_LENGTH); - byte_state[i] = (byte) (byte_state[i] ^ 0xFF); - } - keccak.update(byte_state); - offset += HASH_LENGTH; + //convert into trits + BigInteger value = new BigInteger(state); + tritsFromBigInt(value, trits, pos, Sponge.HASH_LENGTH); + trits[pos + HASH_LENGTH - 1] = 0; - } while ((length -= HASH_LENGTH) > 0); + //calculate hash again + for (int i = state.length; i-- > 0; ) { + state[i] = (byte) (state[i] ^ 0xFF); + } + keccak.update(state); + } } catch (DigestException e) { - e.printStackTrace(); + e.printStackTrace(System.err); throw new 
RuntimeException(e); } } public static BigInteger bigIntFromTrits(final int[] trits, final int offset, final int size) { - + for (int i = offset; i < offset + size; i++) { + if (trits[i] < -1 || trits[i] > 1) { + throw new IllegalArgumentException("not a trit: " + trits[i]); + } + } BigInteger value = BigInteger.ZERO; - - for (int i = size; i-- > 0; ) { - - value = value.multiply(BigInteger.valueOf(Converter.RADIX)).add(BigInteger.valueOf(trits[offset + i])); + for (int n = offset + size - 1; n >= offset; ) { + int count = 0; + long num = 0L; + while (n >= offset && count < MAX_POWERS_LONG) { + num = 3 * num + trits[n--]; + count++; + } + value = value.multiply(RADIX_POWERS[count]).add(BigInteger.valueOf(num)); } - return value; } - public static BigInteger bigIntFromBytes(final byte[] bytes, final int offset, final int size) { - - return new BigInteger(Arrays.copyOfRange(bytes, offset, offset + size)); - } + public static void tritsFromBigInt(final BigInteger value, final int[] destination, final int offset, final int size) { - public static void tritsFromBigInt(final BigInteger value, int[] destination, int offset, int size) { - - if(destination.length - offset < size) { + if (destination.length - offset < size) { throw new IllegalArgumentException("Destination array has invalid size"); } - - BigInteger absoluteValue = value.compareTo(BigInteger.ZERO) < 0 ? 
value.negate() : value; + final int signum = value.signum(); + if (signum == 0) { + Arrays.fill(destination, offset, size, 0); + return; + } + BigInteger absoluteValue = value.abs(); for (int i = 0; i < size; i++) { - - BigInteger[] divRemainder = absoluteValue.divideAndRemainder(BigInteger.valueOf(Converter.RADIX)); - int remainder = divRemainder[1].intValue(); + BigInteger[] divRemainder = absoluteValue.divideAndRemainder(RADIX); absoluteValue = divRemainder[0]; + int remainder = divRemainder[1].intValue(); if (remainder > Converter.MAX_TRIT_VALUE) { - remainder = Converter.MIN_TRIT_VALUE; absoluteValue = absoluteValue.add(BigInteger.ONE); } - destination[offset + i] = remainder; - } - - if (value.compareTo(BigInteger.ZERO) < 0) { - - for (int i = 0; i < size; i++) { - - destination[offset + i] = -destination[offset + i]; - } + destination[offset + i] = signum < 0 ? -remainder : remainder; } } - public static void bytesFromBigInt(final BigInteger value, byte[] destination, int offset) { - if(destination.length - offset < BYTE_HASH_LENGTH) { - throw new IllegalArgumentException("Destination array has invalid size."); - } - - final byte[] bytes = value.toByteArray(); - int i = 0; - while (i + bytes.length < BYTE_HASH_LENGTH) { - destination[i++] = (byte) (bytes[0] < 0 ? -1 : 0); + public static void bytesFromBigInt(final BigInteger value, final byte[] destination) { + if (destination.length < BYTE_HASH_LENGTH) { + throw new IllegalArgumentException("Destination array has invalid size."); } - for (int j = bytes.length; j-- > 0; ) { - - destination[i++] = bytes[bytes.length - 1 - j]; + byte[] bytes = value.toByteArray(); + int start = BYTE_HASH_LENGTH - bytes.length; + Arrays.fill(destination, 0, start, (byte) (value.signum() < 0 ? 
-1 : 0)); + for (int i = 0; i < bytes.length; i++) { + destination[start++] = bytes[i]; } } -} \ No newline at end of file +} diff --git a/src/test/java/com/iota/iri/hash/KerlTest.java b/src/test/java/com/iota/iri/hash/KerlTest.java index fa5e878e64..da68414896 100644 --- a/src/test/java/com/iota/iri/hash/KerlTest.java +++ b/src/test/java/com/iota/iri/hash/KerlTest.java @@ -36,8 +36,8 @@ public void bytesFromBigInt() throws Exception { int byte_size = 48; BigInteger bigInteger = new BigInteger("13190295509826637194583200125168488859623001289643321872497025844241981297292953903419783680940401133507992851240799"); byte[] outBytes = new byte[Kerl.BYTE_HASH_LENGTH]; - Kerl.bytesFromBigInt(bigInteger,outBytes, 0); - BigInteger out_bigInteger = Kerl.bigIntFromBytes(outBytes,0,outBytes.length); + Kerl.bytesFromBigInt(bigInteger, outBytes); + BigInteger out_bigInteger = new BigInteger(outBytes); Assert.assertTrue(bigInteger.equals(out_bigInteger)); } @@ -49,16 +49,16 @@ public void loopRandBytesFromBigInt() throws Exception { byte[] inBytes = new byte[byte_size]; int[] trits = new int[Kerl.HASH_LENGTH]; byte[] outBytes = new byte[Kerl.BYTE_HASH_LENGTH]; - for (int i = 0; i<10_000; i++) { + for (int i = 0; i < 10_000; i++) { seed.nextBytes(inBytes); - BigInteger in_bigInteger = Kerl.bigIntFromBytes(inBytes,0,inBytes.length); + BigInteger in_bigInteger = new BigInteger(inBytes); Kerl.tritsFromBigInt(in_bigInteger, trits, 0, trit_size); BigInteger out_bigInteger = Kerl.bigIntFromTrits(trits, 0, trit_size); - Kerl.bytesFromBigInt(out_bigInteger,outBytes, 0); - if(i % 1_000 == 0) { - System.out.println(String.format("%d iteration: %s",i, in_bigInteger )); + Kerl.bytesFromBigInt(out_bigInteger, outBytes); + if (i % 1_000 == 0) { + System.out.println(String.format("%d iteration: %s", i, in_bigInteger)); } - Assert.assertTrue(String.format("bigInt that failed: %s",in_bigInteger),Arrays.equals(inBytes,outBytes)); + Assert.assertTrue(String.format("bigInt that failed: %s", 
in_bigInteger), Arrays.equals(inBytes, outBytes)); } } @@ -70,26 +70,42 @@ public void loopRandTritsFromBigInt() throws Exception { int[] inTrits; byte[] bytes = new byte[Kerl.BYTE_HASH_LENGTH]; int[] outTrits = new int[Kerl.HASH_LENGTH]; - for (int i = 0; i<10_000; i++) { + for (int i = 0; i < 10_000; i++) { inTrits = getRandomTrits(trit_size); inTrits[242] = 0; BigInteger in_bigInteger = Kerl.bigIntFromTrits(inTrits, 0, trit_size); - Kerl.bytesFromBigInt(in_bigInteger,bytes,0); - BigInteger out_bigInteger = Kerl.bigIntFromBytes(bytes,0,bytes.length); + Kerl.bytesFromBigInt(in_bigInteger, bytes); + BigInteger out_bigInteger = new BigInteger(bytes); Kerl.tritsFromBigInt(out_bigInteger, outTrits, 0, trit_size); - if(i % 1_000 == 0) { - System.out.println(String.format("%d iteration: %s",i, in_bigInteger )); + if (i % 1_000 == 0) { + System.out.println(String.format("%d iteration: %s", i, in_bigInteger)); } - Assert.assertTrue(String.format("bigInt that failed: %s",in_bigInteger),Arrays.equals(inTrits,outTrits)); + Assert.assertTrue(String.format("bigInt that failed: %s", in_bigInteger), Arrays.equals(inTrits, outTrits)); } } + @Test + public void limitBigIntFromTrits() { + // this confirms that the long math does not produce an overflow. 
+ int[] trits = new int[Kerl.MAX_POWERS_LONG]; + + Arrays.fill(trits, 1); + BigInteger result = Kerl.bigIntFromTrits(trits, 0, trits.length); + + Arrays.fill(trits, 1); + BigInteger expected = BigInteger.ZERO; + for (int i = trits.length; i-- > 0; ) { + expected = expected.multiply(BigInteger.valueOf(Converter.RADIX)).add(BigInteger.valueOf(trits[i])); + } + Assert.assertTrue("Overflow in long math", expected.equals(result)); + } + //@Test public void generateBytesFromBigInt() throws Exception { System.out.println("bigInteger,ByteArray"); - for (int i = 0; i<100_000; i++) { + for (int i = 0; i < 100_000; i++) { int byte_size = 48; byte[] outBytes = new byte[byte_size]; seed.nextBytes(outBytes); @@ -104,12 +120,12 @@ public void benchmarkCurl() { int i; Hash hash; long start, diff; - long maxdiff=0, sumdiff = 0, subSumDiff = 0; + long maxdiff = 0, sumdiff = 0, subSumDiff = 0; int max = 100;// was 10000; int interval = 1000; String test = "curl"; - for (i = 0; i++ < max;) { + for (i = 0; i++ < max; ) { //pre int size = 8019; int[] in_trits = getRandomTrits(size); @@ -133,21 +149,22 @@ public void benchmarkCurl() { String out_trytes = Converter.trytes(hash_trits); sumdiff += diff; - subSumDiff +=diff; - if (diff>maxdiff) { + subSumDiff += diff; + if (diff > maxdiff) { maxdiff = diff; } - if(i % interval == 0) { + if (i % interval == 0) { //log.info("{}", new String(new char[(int) ((diff / 10000))]).replace('\0', '|')); } - if(i % interval == 0) { + if (i % interval == 0) { log.info("Run time for {}: {} us.\tInterval Time: {} us.\tMax time per iter: {} us. 
\tAverage: {} us.\t Total time: {} us.", i, - (diff / 1000) , subSumDiff/1000, (maxdiff/ 1000), sumdiff/i/1000, sumdiff/1000 ); + (diff / 1000), subSumDiff / 1000, (maxdiff / 1000), sumdiff / i / 1000, sumdiff / 1000); subSumDiff = 0; maxdiff = 0; } } } + @Test public void kerlOneAbsorb() throws Exception { int[] initial_value = Converter.allocatingTritsFromTrytes("EMIDYNHBWMBCXVDEFOFWINXTERALUKYYPPHKP9JJFGJEIUY9MUDVNFZHMMWZUYUSWAIOWEVTHNWMHANBH"); @@ -182,7 +199,7 @@ public void kerlMultiAbsorbMultiSqueeze() throws Exception { } public static int[] getRandomTrits(int length) { - return Arrays.stream(new int[length]).map(i -> seed.nextInt(3)-1).toArray(); + return Arrays.stream(new int[length]).map(i -> seed.nextInt(3) - 1).toArray(); } public static Hash getRandomTransactionHash() { @@ -192,7 +209,7 @@ public static Hash getRandomTransactionHash() { //@Test public void generateTrytesAndHashes() throws Exception { System.out.println("trytes,Kerl_hash"); - for (int i = 0; i< 10000 ; i++) { + for (int i = 0; i < 10000; i++) { Hash trytes = getRandomTransactionHash(); int[] initial_value = trytes.trits(); Sponge k = SpongeFactory.create(SpongeFactory.Mode.KERL); @@ -200,14 +217,14 @@ public void generateTrytesAndHashes() throws Exception { int[] hash_value = new int[Curl.HASH_LENGTH]; k.squeeze(hash_value, 0, hash_value.length); String hash = Converter.trytes(hash_value); - System.out.println(String.format("%s,%s",trytes,hash)); + System.out.println(String.format("%s,%s", trytes, hash)); } } //@Test public void generateTrytesAndMultiSqueeze() throws Exception { System.out.println("trytes,Kerl_squeeze1,Kerl_squeeze2,Kerl_squeeze3"); - for (int i = 0; i< 10000 ; i++) { + for (int i = 0; i < 10000; i++) { Hash trytes = getRandomTransactionHash(); int[] initial_value = trytes.trits(); Sponge k = SpongeFactory.create(SpongeFactory.Mode.KERL); @@ -219,22 +236,22 @@ public void generateTrytesAndMultiSqueeze() throws Exception { String hash2 = Converter.trytes(hash_value); 
k.squeeze(hash_value, 0, hash_value.length); String hash3 = Converter.trytes(hash_value); - System.out.println(String.format("%s,%s,%s,%s",trytes,hash1,hash2,hash3)); + System.out.println(String.format("%s,%s,%s,%s", trytes, hash1, hash2, hash3)); } } //@Test public void generateMultiTrytesAndHash() throws Exception { System.out.println("multiTrytes,Kerl_hash"); - for (int i = 0; i< 10000 ; i++) { - String multi = String.format("%s%s%s",getRandomTransactionHash(),getRandomTransactionHash(),getRandomTransactionHash()); + for (int i = 0; i < 10000; i++) { + String multi = String.format("%s%s%s", getRandomTransactionHash(), getRandomTransactionHash(), getRandomTransactionHash()); int[] initial_value = Converter.allocatingTritsFromTrytes(multi); Sponge k = SpongeFactory.create(SpongeFactory.Mode.KERL); k.absorb(initial_value, 0, initial_value.length); int[] hash_value = new int[Curl.HASH_LENGTH]; k.squeeze(hash_value, 0, hash_value.length); String hash = Converter.trytes(hash_value); - System.out.println(String.format("%s,%s",multi,hash)); + System.out.println(String.format("%s,%s", multi, hash)); } } @@ -242,7 +259,7 @@ public void generateMultiTrytesAndHash() throws Exception { //@Test public void generateHashes() throws Exception { //System.out.println("trytes,Kerl_hash"); - for (int i = 0; i< 1_000_000 ; i++) { + for (int i = 0; i < 1_000_000; i++) { Hash trytes = getRandomTransactionHash(); int[] initial_value = trytes.trits(); Sponge k = SpongeFactory.create(SpongeFactory.Mode.KERL); @@ -251,8 +268,8 @@ public void generateHashes() throws Exception { k.squeeze(hash_value, 0, hash_value.length); String hash = Converter.trytes(hash_value); //System.out.println(String.format("%s,%s",trytes,hash)); - System.out.println(String.format("%s",hash)); + System.out.println(String.format("%s", hash)); } } -} \ No newline at end of file +} From bcb58fe06e2fa29e90e86622da9689a6a925406c Mon Sep 17 00:00:00 2001 From: galrogo Date: Wed, 2 May 2018 18:49:04 +0300 Subject: [PATCH 
16/45] Revert "Check ini file before loading default testnet values and add testnetdb flag" (#740) --- src/main/java/com/iota/iri/IRI.java | 91 +++---------------- .../java/com/iota/iri/conf/Configuration.java | 2 - 2 files changed, 13 insertions(+), 80 deletions(-) diff --git a/src/main/java/com/iota/iri/IRI.java b/src/main/java/com/iota/iri/IRI.java index d05fbc53a2..e653860c5a 100644 --- a/src/main/java/com/iota/iri/IRI.java +++ b/src/main/java/com/iota/iri/IRI.java @@ -109,7 +109,6 @@ public static void validateParams(final Configuration configuration, final Strin final Option dnsResolutionFalse = parser.addBooleanOption("dns-resolution-false"); final Option maxPeers = parser.addStringOption("max-peers"); final Option testnetCoordinator = parser.addStringOption("testnet-coordinator"); - final Option testnetDbPath = parser.addStringOption("db-path"); final Option disableCooValidation = parser.addBooleanOption("testnet-no-coo-validation"); final Option snapshot = parser.addStringOption("snapshot"); final Option snapshotSignature = parser.addStringOption("snapshot-sig"); @@ -139,7 +138,19 @@ public static void validateParams(final Configuration configuration, final Strin final boolean isTestnet = Optional.ofNullable(parser.getOptionValue(testnet)).orElse(Boolean.FALSE) || configuration.booling(DefaultConfSettings.TESTNET); if (isTestnet) { - setTestnetConfigs(configuration); + configuration.put(DefaultConfSettings.TESTNET, "true"); + configuration.put(DefaultConfSettings.DB_PATH.name(), "testnetdb"); + configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), "testnetdb.log"); + configuration.put(DefaultConfSettings.COORDINATOR, Configuration.TESTNET_COORDINATOR_ADDRESS); + configuration.put(DefaultConfSettings.SNAPSHOT_FILE, Configuration.TESTNET_SNAPSHOT_FILE); + configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, Configuration.TESTNET_MILESTONE_START_INDEX); + configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); + 
configuration.put(DefaultConfSettings.MWM, Configuration.TESTNET_MWM); + configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, + Configuration.TESTNET_NUM_KEYS_IN_MILESTONE); + configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, Configuration.TESTNET_PACKET_SIZE); + configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, Configuration.TESTNET_REQ_HASH_SIZE); + configuration.put(DefaultConfSettings.SNAPSHOT_TIME, Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME); } // mandatory args @@ -211,15 +222,6 @@ public static void validateParams(final Configuration configuration, final Strin StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory()); } - final String dbPath = parser.getOptionValue(testnetDbPath); - if (dbPath != null) { - if (isTestnet) { - configuration.put(DefaultConfSettings.DB_PATH, dbPath); - configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), dbPath + ".log"); - } else { - log.warn(TESTNET_FLAG_REQUIRED + testnetDbPath.longForm()); - } - } final String coordinatorAddress = parser.getOptionValue(testnetCoordinator); if (coordinatorAddress != null) { @@ -295,73 +297,6 @@ public static void validateParams(final Configuration configuration, final Strin } } - private static void setTestnetConfigs(Configuration configuration) { - configuration.put(DefaultConfSettings.TESTNET, "true"); - - String dbPath = configuration.string(DefaultConfSettings.DB_PATH); - if (StringUtils.isEmpty(dbPath)) { - dbPath = Configuration.TESTNETDB; - } - configuration.put(DefaultConfSettings.DB_PATH.name(), dbPath); - - String dbLog = configuration.string(DefaultConfSettings.DB_LOG_PATH); - if (StringUtils.isEmpty(dbLog)) { - dbLog = Configuration.TESTNETDB_LOG; - } - configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), dbLog); - - String coordinator_address = configuration.string(DefaultConfSettings.COORDINATOR); - if (StringUtils.isEmpty(coordinator_address)) { - coordinator_address = Configuration.TESTNET_COORDINATOR_ADDRESS; - } - 
configuration.put(DefaultConfSettings.COORDINATOR, coordinator_address); - - String snapshotFile = configuration.string(DefaultConfSettings.SNAPSHOT_FILE); - if (StringUtils.isEmpty(snapshotFile)) { - snapshotFile = Configuration.TESTNET_SNAPSHOT_FILE; - } - configuration.put(DefaultConfSettings.SNAPSHOT_FILE, snapshotFile); - - String milestoneStart = configuration.string(DefaultConfSettings.MILESTONE_START_INDEX); - if (StringUtils.isEmpty(milestoneStart)) { - milestoneStart = Configuration.TESTNET_MILESTONE_START_INDEX; - } - configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, milestoneStart); - - //this should always be empty - configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); - - String mwm = configuration.string(DefaultConfSettings.MWM); - if (StringUtils.isEmpty(mwm)) { - mwm = Configuration.TESTNET_MWM; - } - configuration.put(DefaultConfSettings.MWM, mwm); - - String keysInMilestone = configuration.string(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE); - if (StringUtils.isEmpty(keysInMilestone)) { - keysInMilestone = Configuration.TESTNET_NUM_KEYS_IN_MILESTONE; - } - configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, keysInMilestone); - - String transactionPacketSize = configuration.string(DefaultConfSettings.TRANSACTION_PACKET_SIZE); - if (StringUtils.isEmpty(transactionPacketSize)) { - transactionPacketSize = Configuration.TESTNET_PACKET_SIZE; - } - configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, transactionPacketSize); - - String reqHashSize = configuration.string(DefaultConfSettings.REQUEST_HASH_SIZE); - if (StringUtils.isEmpty(reqHashSize)) { - reqHashSize = Configuration.TESTNET_REQ_HASH_SIZE; - } - configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, reqHashSize); - - String globalSnapshotTime = configuration.string(DefaultConfSettings.SNAPSHOT_TIME); - if (StringUtils.isEmpty(globalSnapshotTime)) { - globalSnapshotTime = Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME; - } - 
configuration.put(DefaultConfSettings.SNAPSHOT_TIME, globalSnapshotTime); - } - private static void printUsage() { log.info("Usage: java -jar {}-{}.jar " + "[{-n,--neighbors} ''] " + diff --git a/src/main/java/com/iota/iri/conf/Configuration.java b/src/main/java/com/iota/iri/conf/Configuration.java index 9d258c9ebf..80d4a1c60b 100644 --- a/src/main/java/com/iota/iri/conf/Configuration.java +++ b/src/main/java/com/iota/iri/conf/Configuration.java @@ -48,8 +48,6 @@ public class Configuration { public static final String TESTNET_PACKET_SIZE = "1653"; public static final String REQ_HASH_SIZE = "46"; public static final String TESTNET_REQ_HASH_SIZE = "49"; - public static final String TESTNETDB = "testnetdb"; - public static final String TESTNETDB_LOG = "testnetdb.log"; From 43a67dc1dad51201afe7b431ebeeaa18996a550a Mon Sep 17 00:00:00 2001 From: Rajiv Shah Date: Sun, 6 May 2018 05:04:15 -0400 Subject: [PATCH 17/45] Add milestoneStartIndex to getNodeInfo (#588) * Add milestoneStartIndex to getNodeInfo() * Make suggested changes * Incorporate milestoneStartIndex into tests * Incorporate milestoneStartIndex into tests * Remove DS_Store * Update milestoneStartIndex integration * Change visibility of milestoneStartIndex --- src/main/java/com/iota/iri/Milestone.java | 2 +- src/main/java/com/iota/iri/service/API.java | 4 ++-- .../com/iota/iri/service/dto/GetNodeInfoResponse.java | 10 +++++++++- .../com/iota/iri/integration/APIIntegrationTests.java | 3 ++- 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/iota/iri/Milestone.java b/src/main/java/com/iota/iri/Milestone.java index decaa0f1ae..bc5e21c858 100644 --- a/src/main/java/com/iota/iri/Milestone.java +++ b/src/main/java/com/iota/iri/Milestone.java @@ -42,7 +42,6 @@ enum Validity { private final boolean testnet; private final MessageQ messageQ; private final int numOfKeysInMilestone; - private final int milestoneStartIndex; private final boolean acceptAnyTestnetCoo; public Snapshot 
latestSnapshot; @@ -52,6 +51,7 @@ enum Validity { public int latestMilestoneIndex; public int latestSolidSubtangleMilestoneIndex; + public final int milestoneStartIndex; private final Set analyzedMilestoneCandidates = new HashSet<>(); diff --git a/src/main/java/com/iota/iri/service/API.java b/src/main/java/com/iota/iri/service/API.java index b70f506ca0..7a0d79fd44 100644 --- a/src/main/java/com/iota/iri/service/API.java +++ b/src/main/java/com/iota/iri/service/API.java @@ -278,7 +278,7 @@ private AbstractResponse process(final String requestString, InetSocketAddress s return GetNodeInfoResponse.create(name, IRI.VERSION, Runtime.getRuntime().availableProcessors(), Runtime.getRuntime().freeMemory(), System.getProperty("java.version"), Runtime.getRuntime().maxMemory(), Runtime.getRuntime().totalMemory(), instance.milestone.latestMilestone, instance.milestone.latestMilestoneIndex, - instance.milestone.latestSolidSubtangleMilestone, instance.milestone.latestSolidSubtangleMilestoneIndex, + instance.milestone.latestSolidSubtangleMilestone, instance.milestone.latestSolidSubtangleMilestoneIndex, instance.milestone.milestoneStartIndex, instance.node.howManyNeighbors(), instance.node.queuedTransactionsSize(), System.currentTimeMillis(), instance.tipsViewModel.size(), instance.transactionRequester.numberOfTransactionsToRequest()); @@ -477,7 +477,7 @@ private AbstractResponse checkConsistencyStatement(List transactionsList return CheckConsistency.create(state,info); } - + private double getParameterAsDouble(Map request, String paramName) throws ValidationException { validateParamExists(request, paramName); final double result; diff --git a/src/main/java/com/iota/iri/service/dto/GetNodeInfoResponse.java b/src/main/java/com/iota/iri/service/dto/GetNodeInfoResponse.java index e770812b9c..cff4f05e40 100644 --- a/src/main/java/com/iota/iri/service/dto/GetNodeInfoResponse.java +++ b/src/main/java/com/iota/iri/service/dto/GetNodeInfoResponse.java @@ -18,6 +18,8 @@ public class 
GetNodeInfoResponse extends AbstractResponse { private String latestSolidSubtangleMilestone; private int latestSolidSubtangleMilestoneIndex; + private int milestoneStartIndex; + private int neighbors; private int packetsQueueSize; private long time; @@ -26,7 +28,7 @@ public class GetNodeInfoResponse extends AbstractResponse { public static AbstractResponse create(String appName, String appVersion, int jreAvailableProcessors, long jreFreeMemory, String jreVersion, long maxMemory, long totalMemory, Hash latestMilestone, int latestMilestoneIndex, - Hash latestSolidSubtangleMilestone, int latestSolidSubtangleMilestoneIndex, + Hash latestSolidSubtangleMilestone, int latestSolidSubtangleMilestoneIndex, int milestoneStartIndex, int neighbors, int packetsQueueSize, long currentTimeMillis, int tips, int numberOfTransactionsToRequest) { final GetNodeInfoResponse res = new GetNodeInfoResponse(); @@ -44,6 +46,8 @@ public static AbstractResponse create(String appName, String appVersion, int jre res.latestSolidSubtangleMilestone = latestSolidSubtangleMilestone.toString(); res.latestSolidSubtangleMilestoneIndex = latestSolidSubtangleMilestoneIndex; + res.milestoneStartIndex = milestoneStartIndex; + res.neighbors = neighbors; res.packetsQueueSize = packetsQueueSize; res.time = currentTimeMillis; @@ -96,6 +100,10 @@ public int getLatestSolidSubtangleMilestoneIndex() { return latestSolidSubtangleMilestoneIndex; } + public int getMilestoneStartIndex() { + return milestoneStartIndex; + } + public int getNeighbors() { return neighbors; } diff --git a/src/test/java/com/iota/iri/integration/APIIntegrationTests.java b/src/test/java/com/iota/iri/integration/APIIntegrationTests.java index fa10246f7f..2ee984bc4c 100644 --- a/src/test/java/com/iota/iri/integration/APIIntegrationTests.java +++ b/src/test/java/com/iota/iri/integration/APIIntegrationTests.java @@ -173,6 +173,7 @@ public void shouldTestGetNodeInfo() { body(containsString("jreAvailableProcessors")). 
body(containsString("latestSolidSubtangleMilestone")). body(containsString("latestSolidSubtangleMilestoneIndex")). + body(containsString("milestoneStartIndex")). body(containsString("neighbors")). body(containsString("packetsQueueSize")). body(containsString("time")). @@ -467,4 +468,4 @@ private String getHash(String temp) { return Hash.calculate(Converter.allocatingTritsFromTrytes(temp), 0, TransactionViewModel.TRINARY_SIZE, SpongeFactory.create(SpongeFactory.Mode.CURLP81)).toString(); } -} \ No newline at end of file +} From cab451636d561d11830b32840d7006215b7abe36 Mon Sep 17 00:00:00 2001 From: Giorgio Mandolfo Date: Mon, 7 May 2018 14:08:52 +1000 Subject: [PATCH 18/45] Improve Dockerfile to build against Oracle Java and add more flexibility at runtime. --- DOCKER.md | 62 ++++++++++++++++++++++++++ Dockerfile | 91 +++++++++++++++++++++++++++++++++----- docker/entrypoint.sh | 24 ++++++++++ docker/mvn-entrypoint.sh | 39 ++++++++++++++++ docker/settings-docker.xml | 6 +++ 5 files changed, 212 insertions(+), 10 deletions(-) create mode 100644 DOCKER.md create mode 100644 docker/entrypoint.sh create mode 100644 docker/mvn-entrypoint.sh create mode 100644 docker/settings-docker.xml diff --git a/DOCKER.md b/DOCKER.md new file mode 100644 index 0000000000..f390e0db04 --- /dev/null +++ b/DOCKER.md @@ -0,0 +1,62 @@ +## DOCKER and IRI + +The Dockerfile included in this repo builds a working IRI docker container whilst trying to stay the least opinionated as possible. This allows system administrators the option to deploy and configure IRI based on their own individual circumstances and needs. + +When building IRI via the Dockerfile provided, Docker 17.05 minimum is required, due to the use of Docker build stages. 
During docker build, these are the stages invoked: +- java: installs Oracle Java on top of Ubuntu +- build: installs Maven on top of the java stage and compiles IRI +- final container: copies the IRI jar file using the java stage as base + +The built container assumes the WORKDIR inside the container is /iri/data: this means that the database directory will be written inside that directory by default. If a system administrator wants to retain the database across restarts, it is his/her job to mount a docker volume in the right folder. + +The docker conatiner supports the env variables to configure advanced options. These variables can be set but are not required to run IRI. + +JAVA_OPTIONS: these are the java options to pass right after the java command. It must not contain -Xms nor -Xmx. Defaults to a safe value +JAVA_MIN_MEMORY: the value of -Xms option. Defaults to 2G +JAVA_MAX_MEMORY: the value of -Xmx option. Defaults to 4G +DOCKER_IRI_JAR_PATH: defaults to /iri/target/iri*.jar as pushed by the Dockerfile. This is useful if custom IRI binaries want to be executed and the default path needs to be overridden +DOCKER_IRI_REMOTE_LIMIT_API: defaults to "interruptAttachToTangle, addNeighbors, removeNeighbors, getNeighbors" +DOCKER_IRI_NEIGHBOR_FILE: defaults to an empty value. If set, the entrypoint looks for such file and adds each line as IRI neighbours. Commented lines will be skipped +DOCKER_IRI_MONITORING_API_PORT_ENABLE: defaults to 0. If set to 1, a socat on port 14266 directed to 127.0.0.1:DOCKER_IRI_MONITORING_API_PORT_DESTINATION will be open in order to allow all API calls regardless of the DOCKER_IRI_REMOTE_LIMIT_API setting. This is useful to give access to restricted API calls to local tools and still denying access to restricted API calls to the internet. It is highly recommended to use this option together with docker networks (docker run --net). 
+ +The container entry point is a shell script that performs few additional steps before launching IRI: +- verifies if DOCKER_IRI_MONITORING_API_PORT_ENABLE is set to 1 +- verifies if DOCKER_IRI_NEIGHBOR_FILE is set +- launches IRI with all parameters passed as desired + +It is important to note that other than --neighbors (via the env var DOCKER_IRI_NEIGHBOR_FILE), --remote and --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API", neither the entrypoint nor the Dockerfile are aware of any IRI configuration option. This is to not tie the Dockerfile and its container to a specific set of IRI options. Instead, this contain still allows the use of an INI file or command line options. Please refer to the IRI documentation to learn what are the allowed options at command line and via the INI file. + +At the time of writing, IRI requires -p to be passed either via INI or via command line. The entrypoint of this docker container does not do that for you. + +Here is a systemd unit example you can use with this Docker container. This is just an example and customisation is possible and recommended. In this example the docker network iri must be created and the paths /mnt/iri/conf and /mnt/iri/data are used on the docker host to serve respectively the neighbors file and the data directory. No INI files are used in this example, instead options are passed via command line options, such as --testnet and --zmq-enabled. 
+ +[Unit] +Description=IRI +After=docker.service +Requires=docker.service + +[Service] +TimeoutStartSec=0 +Restart=always +ExecStartPre=-/usr/bin/docker rm %n +ExecStart=/usr/bin/docker run \ +--name %n \ +--hostname iri \ +--net=iri \ +-v /mnt/iri/conf:/iri/conf \ +-v /mnt/iri/data:/iri/data \ +-p 14265:14265 \ +-p 15600:15600/udp \ +-p 14600:14600/udp \ +-e "DOCKER_IRI_NEIGHBOR_FILE=/iri/conf/neighbors" \ +iotaledger/iri:v1.4.2.4 \ +-p 14265 \ +--zmq-enabled \ +--testnet + +ExecStop=/usr/bin/docker stop %n +ExecReload=/usr/bin/docker restart %n + +[Install] +WantedBy=multi-user.target + diff --git a/Dockerfile b/Dockerfile index 3346b552a7..0236e07004 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,87 @@ -FROM maven:3.5-jdk-8 as builder +FROM ubuntu:16.04 as java + +# Install Java +ARG JAVA_VERSION=8u171-1 +RUN \ + apt-get update && \ + apt-get install -y software-properties-common && \ + echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ + add-apt-repository -y ppa:webupd8team/java && \ + apt-get update && \ + apt-get install -y oracle-java8-installer=8u171-1~webupd8~0 && \ + rm -rf /var/lib/apt/lists/* && \ + rm -rf /var/cache/oracle-jdk8-installer + +# Define commonly used JAVA_HOME variable +ENV JAVA_HOME /usr/lib/jvm/java-8-oracle + +# install maven on top of java stage +FROM java as build +ARG MAVEN_VERSION=3.5.3 +ARG USER_HOME_DIR="/root" +ARG SHA=b52956373fab1dd4277926507ab189fb797b3bc51a2a267a193c931fffad8408 +ARG BASE_URL=https://apache.osuosl.org/maven/maven-3/${MAVEN_VERSION}/binaries + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + && rm -rf /var/lib/apt/lists/* + +RUN mkdir -p /usr/share/maven /usr/share/maven/ref \ + && curl -fsSL -o /tmp/apache-maven.tar.gz ${BASE_URL}/apache-maven-${MAVEN_VERSION}-bin.tar.gz \ + && echo "${SHA} /tmp/apache-maven.tar.gz" | sha256sum -c - \ + && tar -xzf /tmp/apache-maven.tar.gz -C /usr/share/maven --strip-components=1 \ + 
&& rm -f /tmp/apache-maven.tar.gz \ + && ln -s /usr/share/maven/bin/mvn /usr/bin/mvn + +ENV MAVEN_HOME /usr/share/maven +ENV MAVEN_CONFIG "$USER_HOME_DIR/.m2" + +COPY docker/mvn-entrypoint.sh /usr/local/bin/mvn-entrypoint.sh +COPY docker/settings-docker.xml /usr/share/maven/ref/ + +VOLUME "$USER_HOME_DIR/.m2" + +ENTRYPOINT ["/usr/local/bin/mvn-entrypoint.sh"] +CMD ["mvn"] + +# install build dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + git \ + && rm -rf /var/lib/apt/lists/* + WORKDIR /iri + COPY . /iri RUN mvn clean package -FROM openjdk:jre-slim -WORKDIR /iri -COPY --from=builder /iri/target/iri-1.4.2.4.jar iri.jar -COPY logback.xml /iri -VOLUME /iri +# execution image +FROM java + +RUN apt-get update && apt-get install -y --no-install-recommends \ + jq curl socat \ + && rm -rf /var/lib/apt/lists/* + +COPY --from=build /iri/target/iri*.jar /iri/target/ +COPY docker/entrypoint.sh / + +# Java related options. Defaults set as below +ENV JAVA_OPTIONS="-XX:+UnlockExperimentalVMOptions -XX:+DisableAttachMechanism -XX:InitiatingHeapOccupancyPercent=60 -XX:G1MaxNewSizePercent=75 -XX:MaxGCPauseMillis=10000 -XX:+UseG1GC" +ENV JAVA_MIN_MEMORY 2G +ENV JAVA_MAX_MEMORY 4G + +# Additional custom variables. See DOCKER.md for details +ENV DOCKER_IRI_JAR_PATH "/iri/target/iri*.jar" +ENV DOCKER_IRI_REMOTE_LIMIT_API "interruptAttachToTangle, addNeighbors, removeNeighbors, getNeighbors" +ENV DOCKER_IRI_NEIGHBOR_FILE "" -EXPOSE 14265 -EXPOSE 14777/udp -EXPOSE 15777 +# Setting this to 1 will have socat exposing 14266 and pointing it on +# localhost. See /entrypoint.sh +# !!! DO NOT DOCKER EXPOSE (-p) 14266 as the remote api settings +# will not be applied on that port !!! 
+# You also have to maintain $DOCKER_IRI_MONITORING_API_PORT_DESTINATION +# based on the actual API port exposed via IRI +ENV DOCKER_IRI_MONITORING_API_PORT_ENABLE 0 +ENV DOCKER_IRI_MONITORING_API_PORT_DESTINATION 14265 -CMD ["/usr/bin/java", "-XX:+DisableAttachMechanism", "-Xmx8g", "-Xms256m", "-Dlogback.configurationFile=/iri/conf/logback.xml", "-Djava.net.preferIPv4Stack=true", "-jar", "iri.jar", "-p", "14265", "-u", "14777", "-t", "15777", "--remote", "--remote-limit-api", "\"addNeighbors, removeNeighbors, getNeighbors\"", "$@"] +WORKDIR /iri/data +ENTRYPOINT [ "/entrypoint.sh" ] diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh new file mode 100644 index 0000000000..b4b1d7da00 --- /dev/null +++ b/docker/entrypoint.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# See iotaledger/iri.git Dockerfile and DOCKER.md for further info + +if [ "${DOCKER_IRI_MONITORING_API_PORT_ENABLE}" == "1" ]; then + nohup socat -lm TCP-LISTEN:14266,fork TCP:127.0.0.1:${DOCKER_IRI_MONITORING_API_PORT_DESTINATION} & +fi + +if [ "${DOCKER_IRI_NEIGHBOR_FILE}" ]; then + DOCKER_IRI_NEIGHBORS_OPTIONS="--neighbors " + for neighbor in $(grep -v \# $DOCKER_IRI_NEIGHBOR_FILE); do + DOCKER_IRI_NEIGHBORS_OPTIONS+=" $neighbor" + done +fi + +exec java \ + $JAVA_OPTIONS \ + -Xms$JAVA_MIN_MEMORY \ + -Xmx$JAVA_MAX_MEMORY \ + -Djava.net.preferIPv4Stack=true \ + -jar $DOCKER_IRI_JAR_PATH \ + --remote --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API" \ + $DOCKER_IRI_REMOTE_OPTIONS \ + $DOCKER_IRI_NEIGHBORS_OPTIONS \ + "$@" diff --git a/docker/mvn-entrypoint.sh b/docker/mvn-entrypoint.sh new file mode 100644 index 0000000000..b80c278868 --- /dev/null +++ b/docker/mvn-entrypoint.sh @@ -0,0 +1,39 @@ +#! /bin/bash -eu + +set -o pipefail + +# Copy files from /usr/share/maven/ref into ${MAVEN_CONFIG} +# So the initial ~/.m2 is set with expected content. 
+# Don't override, as this is just a reference setup +copy_reference_file() { + local root="${1}" + local f="${2%/}" + local logfile="${3}" + local rel="${f/${root}/}" # path relative to /usr/share/maven/ref/ + echo "$f" >> "$logfile" + echo " $f -> $rel" >> "$logfile" + if [[ ! -e ${MAVEN_CONFIG}/${rel} || $f = *.override ]] + then + echo "copy $rel to ${MAVEN_CONFIG}" >> "$logfile" + mkdir -p "${MAVEN_CONFIG}/$(dirname "${rel}")" + cp -r "${f}" "${MAVEN_CONFIG}/${rel}"; + fi; +} + +copy_reference_files() { + local log="$MAVEN_CONFIG/copy_reference_file.log" + + if (touch "${log}" > /dev/null 2>&1) + then + echo "--- Copying files at $(date)" >> "$log" + find /usr/share/maven/ref/ -type f -exec bash -eu -c 'copy_reference_file /usr/share/maven/ref/ "$1" "$2"' _ {} "$log" \; + else + echo "Can not write to ${log}. Wrong volume permissions? Carrying on ..." + fi +} + +export -f copy_reference_file +copy_reference_files +unset MAVEN_CONFIG + +exec "$@" diff --git a/docker/settings-docker.xml b/docker/settings-docker.xml new file mode 100644 index 0000000000..586c587c11 --- /dev/null +++ b/docker/settings-docker.xml @@ -0,0 +1,6 @@ + + /usr/share/maven/ref/repository + From fc7a31a76221027759c28eff5ecc15ca40ed7e7d Mon Sep 17 00:00:00 2001 From: footloosejava Date: Sun, 6 May 2018 22:46:22 -0700 Subject: [PATCH 19/45] autoformat Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> --- .../java/com/iota/iri/hash/PearlDiver.java | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/main/java/com/iota/iri/hash/PearlDiver.java b/src/main/java/com/iota/iri/hash/PearlDiver.java index f7dad4dd95..449ca398e6 100644 --- a/src/main/java/com/iota/iri/hash/PearlDiver.java +++ b/src/main/java/com/iota/iri/hash/PearlDiver.java @@ -1,8 +1,6 @@ package com.iota.iri.hash; -import static com.iota.iri.hash.PearlDiver.State.CANCELLED; -import static com.iota.iri.hash.PearlDiver.State.COMPLETED; -import static 
com.iota.iri.hash.PearlDiver.State.RUNNING; +import static com.iota.iri.hash.PearlDiver.State.*; /** * (c) 2016 Come-from-Beyond @@ -34,7 +32,7 @@ public void cancel() { } public synchronized boolean search(final int[] transactionTrits, final int minWeightMagnitude, - int numberOfThreads) { + int numberOfThreads) { if (transactionTrits.length != TRANSACTION_LENGTH) { throw new RuntimeException( @@ -95,14 +93,16 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe midCurlStateLow[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; midCurlStateHigh[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; - } break; + } + break; case 1: { midCurlStateLow[i] = 0b0000000000000000000000000000000000000000000000000000000000000000L; midCurlStateHigh[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; - } break; + } + break; default: { @@ -139,7 +139,7 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe System.arraycopy(midCurlStateHigh, 0, midCurlStateCopyHigh, 0, CURL_STATE_LENGTH); for (int i = threadIndex; i-- > 0; ) { increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + CURL_HASH_LENGTH / 9, - 162 + (CURL_HASH_LENGTH / 9) * 2); + 162 + (CURL_HASH_LENGTH / 9) * 2); } @@ -149,7 +149,7 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe while (state == RUNNING) { increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + (CURL_HASH_LENGTH / 9) * 2, - CURL_HASH_LENGTH); + CURL_HASH_LENGTH); System.arraycopy(midCurlStateCopyLow, 0, curlStateLow, 0, CURL_STATE_LENGTH); System.arraycopy(midCurlStateCopyHigh, 0, curlStateHigh, 0, CURL_STATE_LENGTH); @@ -214,7 +214,7 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe } private static void transform(final long[] curlStateLow, final long[] curlStateHigh, - final long[] curlScratchpadLow, final long[] curlScratchpadHigh) { + final long[] 
curlScratchpadLow, final long[] curlScratchpadHigh) { int curlScratchpadIndex = 0; for (int round = 0; round < Curl.NUMBER_OF_ROUNDSP81; round++) { @@ -239,7 +239,7 @@ private static void transform(final long[] curlStateLow, final long[] curlStateH } private static void increment(final long[] midCurlStateCopyLow, - final long[] midCurlStateCopyHigh, final int fromIndex, final int toIndex) { + final long[] midCurlStateCopyHigh, final int fromIndex, final int toIndex) { for (int i = fromIndex; i < toIndex; i++) { if (midCurlStateCopyLow[i] == LOW_BITS) { From bef04a77936b08b1df686aeab20b53d85384f000 Mon Sep 17 00:00:00 2001 From: footloosejava Date: Sun, 6 May 2018 23:19:24 -0700 Subject: [PATCH 20/45] rebased to post-snapshot and then merged in changes and new Max Threads formula. Modularized code. - changed a few loops for(;;) to start at the desired index and end --- .../java/com/iota/iri/hash/PearlDiver.java | 287 ++++++++---------- 1 file changed, 129 insertions(+), 158 deletions(-) diff --git a/src/main/java/com/iota/iri/hash/PearlDiver.java b/src/main/java/com/iota/iri/hash/PearlDiver.java index 449ca398e6..02607e1ab9 100644 --- a/src/main/java/com/iota/iri/hash/PearlDiver.java +++ b/src/main/java/com/iota/iri/hash/PearlDiver.java @@ -1,10 +1,10 @@ package com.iota.iri.hash; +import java.util.ArrayList; +import java.util.List; + import static com.iota.iri.hash.PearlDiver.State.*; -/** - * (c) 2016 Come-from-Beyond - */ public class PearlDiver { enum State { @@ -27,13 +27,10 @@ enum State { public void cancel() { synchronized (syncObj) { state = CANCELLED; - syncObj.notifyAll(); } } - public synchronized boolean search(final int[] transactionTrits, final int minWeightMagnitude, - int numberOfThreads) { - + private static void validateParameters(int[] transactionTrits, int minWeightMagnitude) { if (transactionTrits.length != TRANSACTION_LENGTH) { throw new RuntimeException( "Invalid transaction trits length: " + transactionTrits.length); @@ -41,187 +38,162 @@ 
public synchronized boolean search(final int[] transactionTrits, final int minWe if (minWeightMagnitude < 0 || minWeightMagnitude > CURL_HASH_LENGTH) { throw new RuntimeException("Invalid min weight magnitude: " + minWeightMagnitude); } + } + + public synchronized boolean search(final int[] transactionTrits, final int minWeightMagnitude, + int numberOfThreads) { + validateParameters(transactionTrits, minWeightMagnitude); synchronized (syncObj) { state = RUNNING; } - final long[] midCurlStateLow = new long[CURL_STATE_LENGTH], midCurlStateHigh = new long[CURL_STATE_LENGTH]; - - { - for (int i = CURL_HASH_LENGTH; i < CURL_STATE_LENGTH; i++) { - midCurlStateLow[i] = HIGH_BITS; - midCurlStateHigh[i] = HIGH_BITS; - } - - int offset = 0; - final long[] curlScratchpadLow = new long[CURL_STATE_LENGTH], curlScratchpadHigh = new long[CURL_STATE_LENGTH]; - for (int i = (TRANSACTION_LENGTH - CURL_HASH_LENGTH) / CURL_HASH_LENGTH; i-- > 0; ) { - - for (int j = 0; j < CURL_HASH_LENGTH; j++) { + final long[] midCurlStateLow = new long[CURL_STATE_LENGTH]; + final long[] midCurlStateHigh = new long[CURL_STATE_LENGTH]; + initializeMidCurlStates(transactionTrits, midCurlStateLow, midCurlStateHigh); - switch (transactionTrits[offset++]) { - case 0: { - midCurlStateLow[j] = HIGH_BITS; - midCurlStateHigh[j] = HIGH_BITS; - - } - break; - - case 1: { - midCurlStateLow[j] = LOW_BITS; - midCurlStateHigh[j] = HIGH_BITS; - } - break; - - default: { - midCurlStateLow[j] = HIGH_BITS; - midCurlStateHigh[j] = LOW_BITS; - } - } + if (numberOfThreads <= 0) { + numberOfThreads = Math.max(1, Math.floorDiv(numberOfThreads * 8, 10)); + } + List workers = new ArrayList<>(numberOfThreads); + while (numberOfThreads-- > 0) { + Runnable runnable = getRunnable(transactionTrits, minWeightMagnitude, midCurlStateLow, midCurlStateHigh, numberOfThreads); + Thread worker = new Thread(runnable); + workers.add(worker); + worker.setName(this + ":worker-" + numberOfThreads); + worker.setDaemon(true); + worker.start(); + 
} + for (Thread worker : workers) { + try { + worker.join(); + } catch (InterruptedException e) { + synchronized (syncObj) { + state = CANCELLED; } - - transform(midCurlStateLow, midCurlStateHigh, curlScratchpadLow, curlScratchpadHigh); } + } + return state == COMPLETED; + } - for (int i = 0; i < 162; i++) { - - switch (transactionTrits[offset++]) { - - case 0: { - - midCurlStateLow[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; - midCurlStateHigh[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; - - } - break; - - case 1: { - - midCurlStateLow[i] = 0b0000000000000000000000000000000000000000000000000000000000000000L; - midCurlStateHigh[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; - - } - break; - - default: { + private Runnable getRunnable(int[] transactionTrits, int minWeightMagnitude, long[] midCurlStateLow, long[] midCurlStateHigh, int threadIndex) { + return () -> { + final long[] midCurlStateCopyLow = new long[CURL_STATE_LENGTH]; + final long[] midCurlStateCopyHigh = new long[CURL_STATE_LENGTH]; + copy(midCurlStateLow, midCurlStateHigh, midCurlStateCopyLow, midCurlStateCopyHigh); - midCurlStateLow[i] = 0b1111111111111111111111111111111111111111111111111111111111111111L; - midCurlStateHigh[i] = 0b0000000000000000000000000000000000000000000000000000000000000000L; - } - } + for (int i = 0; i < threadIndex; i++) { + increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + CURL_HASH_LENGTH / 9, + 162 + (CURL_HASH_LENGTH / 9) * 2); } - midCurlStateLow[162 + 0] = 0b1101101101101101101101101101101101101101101101101101101101101101L; - midCurlStateHigh[162 + 0] = 0b1011011011011011011011011011011011011011011011011011011011011011L; - midCurlStateLow[162 + 1] = 0b1111000111111000111111000111111000111111000111111000111111000111L; - midCurlStateHigh[162 + 1] = 0b1000111111000111111000111111000111111000111111000111111000111111L; - midCurlStateLow[162 + 2] = 
0b0111111111111111111000000000111111111111111111000000000111111111L; - midCurlStateHigh[162 + 2] = 0b1111111111000000000111111111111111111000000000111111111111111111L; - midCurlStateLow[162 + 3] = 0b1111111111000000000000000000000000000111111111111111111111111111L; - midCurlStateHigh[162 + 3] = 0b0000000000111111111111111111111111111111111111111111111111111111L; + final long[] curlStateLow = new long[CURL_STATE_LENGTH]; + final long[] curlStateHigh = new long[CURL_STATE_LENGTH]; - } + final long[] curlScratchpadLow = new long[CURL_STATE_LENGTH]; + final long[] curlScratchpadHigh = new long[CURL_STATE_LENGTH]; - if (numberOfThreads <= 0) { - numberOfThreads = Math.max(Runtime.getRuntime().availableProcessors() - 1, 1); - } - - Thread[] workers = new Thread[numberOfThreads]; - - while (numberOfThreads-- > 0) { + final int maskStartIndex = CURL_HASH_LENGTH - minWeightMagnitude; + long mask = 0; + while (state == RUNNING && mask == 0) { - final int threadIndex = numberOfThreads; - Thread worker = (new Thread(() -> { + increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + (CURL_HASH_LENGTH / 9) * 2, + CURL_HASH_LENGTH); - final long[] midCurlStateCopyLow = new long[CURL_STATE_LENGTH], midCurlStateCopyHigh = new long[CURL_STATE_LENGTH]; - System.arraycopy(midCurlStateLow, 0, midCurlStateCopyLow, 0, CURL_STATE_LENGTH); - System.arraycopy(midCurlStateHigh, 0, midCurlStateCopyHigh, 0, CURL_STATE_LENGTH); - for (int i = threadIndex; i-- > 0; ) { - increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + CURL_HASH_LENGTH / 9, - 162 + (CURL_HASH_LENGTH / 9) * 2); + copy(midCurlStateCopyLow, midCurlStateCopyHigh, curlStateLow, curlStateHigh); + transform(curlStateLow, curlStateHigh, curlScratchpadLow, curlScratchpadHigh); + mask = HIGH_BITS; + for (int i = maskStartIndex; i < CURL_HASH_LENGTH && mask != 0; i++) { + mask &= ~(curlStateLow[i] ^ curlStateHigh[i]); } - - final long[] curlStateLow = new long[CURL_STATE_LENGTH], curlStateHigh = new long[CURL_STATE_LENGTH]; - 
final long[] curlScratchpadLow = new long[CURL_STATE_LENGTH], curlScratchpadHigh = new long[CURL_STATE_LENGTH]; - long mask, outMask = 1; - while (state == RUNNING) { - - increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + (CURL_HASH_LENGTH / 9) * 2, - CURL_HASH_LENGTH); - - System.arraycopy(midCurlStateCopyLow, 0, curlStateLow, 0, CURL_STATE_LENGTH); - System.arraycopy(midCurlStateCopyHigh, 0, curlStateHigh, 0, CURL_STATE_LENGTH); - transform(curlStateLow, curlStateHigh, curlScratchpadLow, curlScratchpadHigh); - - mask = HIGH_BITS; - for (int i = minWeightMagnitude; i-- > 0; ) { - mask &= ~(curlStateLow[CURL_HASH_LENGTH - 1 - i] ^ curlStateHigh[ - CURL_HASH_LENGTH - 1 - i]); - if (mask == 0) { - break; + } + if (mask != 0) { + synchronized (syncObj) { + if (state == RUNNING) { + state = COMPLETED; + long outMask = 1; + while ((outMask & mask) == 0) { + outMask <<= 1; } - } - if (mask == 0) { - continue; - } - - synchronized (syncObj) { - if (state == RUNNING) { - state = COMPLETED; - while ((outMask & mask) == 0) { - outMask <<= 1; - } - for (int i = 0; i < CURL_HASH_LENGTH; i++) { - transactionTrits[TRANSACTION_LENGTH - CURL_HASH_LENGTH + i] = - (midCurlStateCopyLow[i] & outMask) == 0 ? 1 - : (midCurlStateCopyHigh[i] & outMask) == 0 ? -1 : 0; - } - syncObj.notifyAll(); + for (int i = 0; i < CURL_HASH_LENGTH; i++) { + transactionTrits[TRANSACTION_LENGTH - CURL_HASH_LENGTH + i] = + (midCurlStateCopyLow[i] & outMask) == 0 ? 1 + : (midCurlStateCopyHigh[i] & outMask) == 0 ? 
-1 : 0; } } - break; } - })); - workers[threadIndex] = worker; - worker.start(); + } + }; + } + + private static void copy(long[] srcLow, long[] srcHigh, long[] destLow, long[] destHigh) { + System.arraycopy(srcLow, 0, destLow, 0, CURL_STATE_LENGTH); + System.arraycopy(srcHigh, 0, destHigh, 0, CURL_STATE_LENGTH); + } + + private static void initializeMidCurlStates(int[] transactionTrits, long[] midCurlStateLow, long[] midCurlStateHigh) { + for (int i = CURL_HASH_LENGTH; i < CURL_STATE_LENGTH; i++) { + midCurlStateLow[i] = HIGH_BITS; + midCurlStateHigh[i] = HIGH_BITS; } - try { - synchronized (syncObj) { - if (state == RUNNING) { - syncObj.wait(); + int offset = 0; + final long[] curlScratchpadLow = new long[CURL_STATE_LENGTH]; + final long[] curlScratchpadHigh = new long[CURL_STATE_LENGTH]; + for (int i = (TRANSACTION_LENGTH - CURL_HASH_LENGTH) / CURL_HASH_LENGTH; i-- > 0; ) { + + for (int j = 0; j < CURL_HASH_LENGTH; j++) { + switch (transactionTrits[offset++]) { + case 0: + midCurlStateLow[j] = HIGH_BITS; + midCurlStateHigh[j] = HIGH_BITS; + break; + case 1: + midCurlStateLow[j] = LOW_BITS; + midCurlStateHigh[j] = HIGH_BITS; + break; + default: + midCurlStateLow[j] = HIGH_BITS; + midCurlStateHigh[j] = LOW_BITS; } } - } catch (final InterruptedException e) { - synchronized (syncObj) { - state = CANCELLED; - } + transform(midCurlStateLow, midCurlStateHigh, curlScratchpadLow, curlScratchpadHigh); } - for (Thread worker : workers) { - try { - worker.join(); - } catch (final InterruptedException e) { - synchronized (syncObj) { - state = CANCELLED; - } + for (int i = 0; i < 162; i++) { + switch (transactionTrits[offset++]) { + case 0: + midCurlStateLow[i] = HIGH_BITS; + midCurlStateHigh[i] = HIGH_BITS; + break; + case 1: + midCurlStateLow[i] = LOW_BITS; + midCurlStateHigh[i] = HIGH_BITS; + break; + default: + midCurlStateLow[i] = HIGH_BITS; + midCurlStateHigh[i] = LOW_BITS; } } - return state == COMPLETED; + midCurlStateLow[162 + 0] = 
0b1101101101101101101101101101101101101101101101101101101101101101L; + midCurlStateHigh[162 + 0] = 0b1011011011011011011011011011011011011011011011011011011011011011L; + midCurlStateLow[162 + 1] = 0b1111000111111000111111000111111000111111000111111000111111000111L; + midCurlStateHigh[162 + 1] = 0b1000111111000111111000111111000111111000111111000111111000111111L; + midCurlStateLow[162 + 2] = 0b0111111111111111111000000000111111111111111111000000000111111111L; + midCurlStateHigh[162 + 2] = 0b1111111111000000000111111111111111111000000000111111111111111111L; + midCurlStateLow[162 + 3] = 0b1111111111000000000000000000000000000111111111111111111111111111L; + midCurlStateHigh[162 + 3] = 0b0000000000111111111111111111111111111111111111111111111111111111L; } private static void transform(final long[] curlStateLow, final long[] curlStateHigh, final long[] curlScratchpadLow, final long[] curlScratchpadHigh) { - int curlScratchpadIndex = 0; for (int round = 0; round < Curl.NUMBER_OF_ROUNDSP81; round++) { - System.arraycopy(curlStateLow, 0, curlScratchpadLow, 0, CURL_STATE_LENGTH); - System.arraycopy(curlStateHigh, 0, curlScratchpadHigh, 0, CURL_STATE_LENGTH); + copy(curlStateLow, curlStateHigh, curlScratchpadLow, curlScratchpadHigh); - for (int curlStateIndex = 0; curlStateIndex < CURL_STATE_LENGTH; curlStateIndex++) { + for (int curlStateIndex = 0, curlScratchpadIndex = 0; curlStateIndex < CURL_STATE_LENGTH; curlStateIndex++) { final long alpha = curlScratchpadLow[curlScratchpadIndex]; final long beta = curlScratchpadHigh[curlScratchpadIndex]; if (curlScratchpadIndex < 365) { @@ -238,21 +210,20 @@ private static void transform(final long[] curlStateLow, final long[] curlStateH } } - private static void increment(final long[] midCurlStateCopyLow, - final long[] midCurlStateCopyHigh, final int fromIndex, final int toIndex) { + private static void increment(final long[] midCurlStateCopyLow, final long[] midCurlStateCopyHigh, + final int fromIndex, final int toIndex) { for (int 
i = fromIndex; i < toIndex; i++) { if (midCurlStateCopyLow[i] == LOW_BITS) { midCurlStateCopyLow[i] = HIGH_BITS; midCurlStateCopyHigh[i] = LOW_BITS; + } else if (midCurlStateCopyHigh[i] == LOW_BITS) { + midCurlStateCopyHigh[i] = HIGH_BITS; + break; } else { - if (midCurlStateCopyHigh[i] == LOW_BITS) { - midCurlStateCopyHigh[i] = HIGH_BITS; - } else { - midCurlStateCopyLow[i] = LOW_BITS; - } + midCurlStateCopyLow[i] = LOW_BITS; break; } } } -} +} \ No newline at end of file From a5f666fc916e539e736f42b04fca81ec1199b407 Mon Sep 17 00:00:00 2001 From: footloosejava Date: Mon, 7 May 2018 00:40:34 -0700 Subject: [PATCH 21/45] - removed the word 'Curl' from every state array variable name. - moved copy of mid-state-curl to outside of runnable and used 'clone' instead since the actual parent array is in sight above and clone() is simple and works well for any primitive types. - codacy complained that the POW loop scratchIdx was defined in the loop. I have moved it to the head of the for-loop where it logocally belongs. 
Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> --- .../java/com/iota/iri/hash/PearlDiver.java | 130 +++++++++--------- 1 file changed, 65 insertions(+), 65 deletions(-) diff --git a/src/main/java/com/iota/iri/hash/PearlDiver.java b/src/main/java/com/iota/iri/hash/PearlDiver.java index 02607e1ab9..133ac35fa1 100644 --- a/src/main/java/com/iota/iri/hash/PearlDiver.java +++ b/src/main/java/com/iota/iri/hash/PearlDiver.java @@ -48,16 +48,18 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe state = RUNNING; } - final long[] midCurlStateLow = new long[CURL_STATE_LENGTH]; - final long[] midCurlStateHigh = new long[CURL_STATE_LENGTH]; - initializeMidCurlStates(transactionTrits, midCurlStateLow, midCurlStateHigh); + final long[] midStateLow = new long[CURL_STATE_LENGTH]; + final long[] midStateHigh = new long[CURL_STATE_LENGTH]; + initializeMidCurlStates(transactionTrits, midStateLow, midStateHigh); if (numberOfThreads <= 0) { numberOfThreads = Math.max(1, Math.floorDiv(numberOfThreads * 8, 10)); } List workers = new ArrayList<>(numberOfThreads); while (numberOfThreads-- > 0) { - Runnable runnable = getRunnable(transactionTrits, minWeightMagnitude, midCurlStateLow, midCurlStateHigh, numberOfThreads); + long[] midStateCopyLow = midStateLow.clone(); + long[] midStateCopyHigh = midStateHigh.clone(); + Runnable runnable = getRunnable(numberOfThreads, transactionTrits, minWeightMagnitude, midStateCopyLow, midStateCopyHigh); Thread worker = new Thread(runnable); workers.add(worker); worker.setName(this + ":worker-" + numberOfThreads); @@ -76,36 +78,33 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe return state == COMPLETED; } - private Runnable getRunnable(int[] transactionTrits, int minWeightMagnitude, long[] midCurlStateLow, long[] midCurlStateHigh, int threadIndex) { + private Runnable getRunnable(final int threadIndex, final int[] transactionTrits, final int 
minWeightMagnitude, + final long[] midStateCopyLow, final long[] midStateCopyHigh) { return () -> { - final long[] midCurlStateCopyLow = new long[CURL_STATE_LENGTH]; - final long[] midCurlStateCopyHigh = new long[CURL_STATE_LENGTH]; - copy(midCurlStateLow, midCurlStateHigh, midCurlStateCopyLow, midCurlStateCopyHigh); - for (int i = 0; i < threadIndex; i++) { - increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + CURL_HASH_LENGTH / 9, + increment(midStateCopyLow, midStateCopyHigh, 162 + CURL_HASH_LENGTH / 9, 162 + (CURL_HASH_LENGTH / 9) * 2); } - final long[] curlStateLow = new long[CURL_STATE_LENGTH]; - final long[] curlStateHigh = new long[CURL_STATE_LENGTH]; + final long[] stateLow = new long[CURL_STATE_LENGTH]; + final long[] stateHigh = new long[CURL_STATE_LENGTH]; - final long[] curlScratchpadLow = new long[CURL_STATE_LENGTH]; - final long[] curlScratchpadHigh = new long[CURL_STATE_LENGTH]; + final long[] scratchpadLow = new long[CURL_STATE_LENGTH]; + final long[] scratchpadHigh = new long[CURL_STATE_LENGTH]; final int maskStartIndex = CURL_HASH_LENGTH - minWeightMagnitude; long mask = 0; while (state == RUNNING && mask == 0) { - increment(midCurlStateCopyLow, midCurlStateCopyHigh, 162 + (CURL_HASH_LENGTH / 9) * 2, + increment(midStateCopyLow, midStateCopyHigh, 162 + (CURL_HASH_LENGTH / 9) * 2, CURL_HASH_LENGTH); - copy(midCurlStateCopyLow, midCurlStateCopyHigh, curlStateLow, curlStateHigh); - transform(curlStateLow, curlStateHigh, curlScratchpadLow, curlScratchpadHigh); + copy(midStateCopyLow, midStateCopyHigh, stateLow, stateHigh); + transform(stateLow, stateHigh, scratchpadLow, scratchpadHigh); mask = HIGH_BITS; for (int i = maskStartIndex; i < CURL_HASH_LENGTH && mask != 0; i++) { - mask &= ~(curlStateLow[i] ^ curlStateHigh[i]); + mask &= ~(stateLow[i] ^ stateHigh[i]); } } if (mask != 0) { @@ -118,8 +117,8 @@ private Runnable getRunnable(int[] transactionTrits, int minWeightMagnitude, lon } for (int i = 0; i < CURL_HASH_LENGTH; i++) { 
transactionTrits[TRANSACTION_LENGTH - CURL_HASH_LENGTH + i] = - (midCurlStateCopyLow[i] & outMask) == 0 ? 1 - : (midCurlStateCopyHigh[i] & outMask) == 0 ? -1 : 0; + (midStateCopyLow[i] & outMask) == 0 ? 1 + : (midStateCopyHigh[i] & outMask) == 0 ? -1 : 0; } } } @@ -132,10 +131,10 @@ private static void copy(long[] srcLow, long[] srcHigh, long[] destLow, long[] d System.arraycopy(srcHigh, 0, destHigh, 0, CURL_STATE_LENGTH); } - private static void initializeMidCurlStates(int[] transactionTrits, long[] midCurlStateLow, long[] midCurlStateHigh) { + private static void initializeMidCurlStates(int[] transactionTrits, long[] midStateLow, long[] midStateHigh) { for (int i = CURL_HASH_LENGTH; i < CURL_STATE_LENGTH; i++) { - midCurlStateLow[i] = HIGH_BITS; - midCurlStateHigh[i] = HIGH_BITS; + midStateLow[i] = HIGH_BITS; + midStateHigh[i] = HIGH_BITS; } int offset = 0; @@ -146,82 +145,83 @@ private static void initializeMidCurlStates(int[] transactionTrits, long[] midCu for (int j = 0; j < CURL_HASH_LENGTH; j++) { switch (transactionTrits[offset++]) { case 0: - midCurlStateLow[j] = HIGH_BITS; - midCurlStateHigh[j] = HIGH_BITS; + midStateLow[j] = HIGH_BITS; + midStateHigh[j] = HIGH_BITS; break; case 1: - midCurlStateLow[j] = LOW_BITS; - midCurlStateHigh[j] = HIGH_BITS; + midStateLow[j] = LOW_BITS; + midStateHigh[j] = HIGH_BITS; break; default: - midCurlStateLow[j] = HIGH_BITS; - midCurlStateHigh[j] = LOW_BITS; + midStateLow[j] = HIGH_BITS; + midStateHigh[j] = LOW_BITS; } } - transform(midCurlStateLow, midCurlStateHigh, curlScratchpadLow, curlScratchpadHigh); + transform(midStateLow, midStateHigh, curlScratchpadLow, curlScratchpadHigh); } for (int i = 0; i < 162; i++) { switch (transactionTrits[offset++]) { case 0: - midCurlStateLow[i] = HIGH_BITS; - midCurlStateHigh[i] = HIGH_BITS; + midStateLow[i] = HIGH_BITS; + midStateHigh[i] = HIGH_BITS; break; case 1: - midCurlStateLow[i] = LOW_BITS; - midCurlStateHigh[i] = HIGH_BITS; + midStateLow[i] = LOW_BITS; + midStateHigh[i] = 
HIGH_BITS; break; default: - midCurlStateLow[i] = HIGH_BITS; - midCurlStateHigh[i] = LOW_BITS; + midStateLow[i] = HIGH_BITS; + midStateHigh[i] = LOW_BITS; } } - midCurlStateLow[162 + 0] = 0b1101101101101101101101101101101101101101101101101101101101101101L; - midCurlStateHigh[162 + 0] = 0b1011011011011011011011011011011011011011011011011011011011011011L; - midCurlStateLow[162 + 1] = 0b1111000111111000111111000111111000111111000111111000111111000111L; - midCurlStateHigh[162 + 1] = 0b1000111111000111111000111111000111111000111111000111111000111111L; - midCurlStateLow[162 + 2] = 0b0111111111111111111000000000111111111111111111000000000111111111L; - midCurlStateHigh[162 + 2] = 0b1111111111000000000111111111111111111000000000111111111111111111L; - midCurlStateLow[162 + 3] = 0b1111111111000000000000000000000000000111111111111111111111111111L; - midCurlStateHigh[162 + 3] = 0b0000000000111111111111111111111111111111111111111111111111111111L; + midStateLow[162 + 0] = 0b1101101101101101101101101101101101101101101101101101101101101101L; + midStateHigh[162 + 0] = 0b1011011011011011011011011011011011011011011011011011011011011011L; + midStateLow[162 + 1] = 0b1111000111111000111111000111111000111111000111111000111111000111L; + midStateHigh[162 + 1] = 0b1000111111000111111000111111000111111000111111000111111000111111L; + midStateLow[162 + 2] = 0b0111111111111111111000000000111111111111111111000000000111111111L; + midStateHigh[162 + 2] = 0b1111111111000000000111111111111111111000000000111111111111111111L; + midStateLow[162 + 3] = 0b1111111111000000000000000000000000000111111111111111111111111111L; + midStateHigh[162 + 3] = 0b0000000000111111111111111111111111111111111111111111111111111111L; } - private static void transform(final long[] curlStateLow, final long[] curlStateHigh, - final long[] curlScratchpadLow, final long[] curlScratchpadHigh) { + private static void transform(final long[] stateLow, final long[] stateHigh, + final long[] scratchpadLow, final long[] scratchpadHigh) 
{ for (int round = 0; round < Curl.NUMBER_OF_ROUNDSP81; round++) { - copy(curlStateLow, curlStateHigh, curlScratchpadLow, curlScratchpadHigh); - - for (int curlStateIndex = 0, curlScratchpadIndex = 0; curlStateIndex < CURL_STATE_LENGTH; curlStateIndex++) { - final long alpha = curlScratchpadLow[curlScratchpadIndex]; - final long beta = curlScratchpadHigh[curlScratchpadIndex]; - if (curlScratchpadIndex < 365) { - curlScratchpadIndex += 364; + copy(stateLow, stateHigh, scratchpadLow, scratchpadHigh); + + int scratchpadIndex = 0; + for (int stateIndex = 0; stateIndex < CURL_STATE_LENGTH; stateIndex++) { + final long alpha = scratchpadLow[scratchpadIndex]; + final long beta = scratchpadHigh[scratchpadIndex]; + if (scratchpadIndex < 365) { + scratchpadIndex += 364; } else { - curlScratchpadIndex += -365; + scratchpadIndex += -365; } - final long gamma = curlScratchpadHigh[curlScratchpadIndex]; - final long delta = (alpha | (~gamma)) & (curlScratchpadLow[curlScratchpadIndex] ^ beta); + final long gamma = scratchpadHigh[scratchpadIndex]; + final long delta = (alpha | (~gamma)) & (scratchpadLow[scratchpadIndex] ^ beta); - curlStateLow[curlStateIndex] = ~delta; - curlStateHigh[curlStateIndex] = (alpha ^ gamma) | delta; + stateLow[stateIndex] = ~delta; + stateHigh[stateIndex] = (alpha ^ gamma) | delta; } } } - private static void increment(final long[] midCurlStateCopyLow, final long[] midCurlStateCopyHigh, + private static void increment(final long[] midStateCopyLow, final long[] midStateCopyHigh, final int fromIndex, final int toIndex) { for (int i = fromIndex; i < toIndex; i++) { - if (midCurlStateCopyLow[i] == LOW_BITS) { - midCurlStateCopyLow[i] = HIGH_BITS; - midCurlStateCopyHigh[i] = LOW_BITS; - } else if (midCurlStateCopyHigh[i] == LOW_BITS) { - midCurlStateCopyHigh[i] = HIGH_BITS; + if (midStateCopyLow[i] == LOW_BITS) { + midStateCopyLow[i] = HIGH_BITS; + midStateCopyHigh[i] = LOW_BITS; + } else if (midStateCopyHigh[i] == LOW_BITS) { + midStateCopyHigh[i] = 
HIGH_BITS; break; } else { - midCurlStateCopyLow[i] = LOW_BITS; + midStateCopyLow[i] = LOW_BITS; break; } } From 9919584810ecbd2a15807767cb732579136510c1 Mon Sep 17 00:00:00 2001 From: Giorgio Mandolfo Date: Wed, 9 May 2018 12:26:07 +1000 Subject: [PATCH 22/45] Fixed use of JAVA_VERSION Dockerfile env file. Bumped base image to Ubuntu 18.04. Also added default for DOCKER_IRI_NEIGHBOR_FILE and fixed use of --neighbors to pass multiple neighbors via cmdline --- Dockerfile | 6 +++--- docker/entrypoint.sh | 14 +++++--------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0236e07004..09c9e35879 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:16.04 as java +FROM ubuntu:18.04 as java # Install Java ARG JAVA_VERSION=8u171-1 @@ -8,7 +8,7 @@ RUN \ echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ add-apt-repository -y ppa:webupd8team/java && \ apt-get update && \ - apt-get install -y oracle-java8-installer=8u171-1~webupd8~0 && \ + apt-get install -y oracle-java8-installer=${JAVA_VERSION}~webupd8~0 && \ rm -rf /var/lib/apt/lists/* && \ rm -rf /var/cache/oracle-jdk8-installer @@ -72,7 +72,7 @@ ENV JAVA_MAX_MEMORY 4G # Additional custom variables. See DOCKER.md for details ENV DOCKER_IRI_JAR_PATH "/iri/target/iri*.jar" ENV DOCKER_IRI_REMOTE_LIMIT_API "interruptAttachToTangle, addNeighbors, removeNeighbors, getNeighbors" -ENV DOCKER_IRI_NEIGHBOR_FILE "" +ENV DOCKER_IRI_NEIGHBOR_FILE "/iri/conf/neighbors" # Setting this to 1 will have socat exposing 14266 and pointing it on # localhost. 
See /entrypoint.sh diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index b4b1d7da00..05f59e6cbe 100644 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -1,16 +1,13 @@ #!/bin/bash -# See iotaledger/iri.git Dockerfile and DOCKER.md for further info +# See Dockerfile and DOCKER.md for further info if [ "${DOCKER_IRI_MONITORING_API_PORT_ENABLE}" == "1" ]; then nohup socat -lm TCP-LISTEN:14266,fork TCP:127.0.0.1:${DOCKER_IRI_MONITORING_API_PORT_DESTINATION} & fi -if [ "${DOCKER_IRI_NEIGHBOR_FILE}" ]; then - DOCKER_IRI_NEIGHBORS_OPTIONS="--neighbors " - for neighbor in $(grep -v \# $DOCKER_IRI_NEIGHBOR_FILE); do - DOCKER_IRI_NEIGHBORS_OPTIONS+=" $neighbor" - done -fi +for neighbor in $(grep -v \# $DOCKER_IRI_NEIGHBOR_FILE); do + DOCKER_IRI_NEIGHBORS+=" $neighbor" +done exec java \ $JAVA_OPTIONS \ @@ -19,6 +16,5 @@ exec java \ -Djava.net.preferIPv4Stack=true \ -jar $DOCKER_IRI_JAR_PATH \ --remote --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API" \ - $DOCKER_IRI_REMOTE_OPTIONS \ - $DOCKER_IRI_NEIGHBORS_OPTIONS \ + --neighbors "$DOCKER_IRI_NEIGHBORS" \ "$@" From aef41a6b54a3ef9aa3c82f72d6543f0bb5d6bb49 Mon Sep 17 00:00:00 2001 From: Giorgio Mandolfo Date: Wed, 9 May 2018 16:11:46 +1000 Subject: [PATCH 23/45] Added MAINTAINER directive in Dockerfile. 
Improved naming of docker build stages and added --no-install-recommends to apt-get install commands as suggested by Codacy --- Dockerfile | 16 +++++++--------- docker/entrypoint.sh | 0 2 files changed, 7 insertions(+), 9 deletions(-) mode change 100644 => 100755 docker/entrypoint.sh diff --git a/Dockerfile b/Dockerfile index 09c9e35879..ad9780c844 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,15 @@ -FROM ubuntu:18.04 as java +FROM ubuntu:18.04 as local_stage_java +MAINTAINER giorgio@iota.org # Install Java ARG JAVA_VERSION=8u171-1 RUN \ apt-get update && \ - apt-get install -y software-properties-common && \ + apt-get install -y software-properties-common --no-install-recommends && \ echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \ add-apt-repository -y ppa:webupd8team/java && \ apt-get update && \ - apt-get install -y oracle-java8-installer=${JAVA_VERSION}~webupd8~0 && \ + apt-get install -y oracle-java8-installer=${JAVA_VERSION}~webupd8~0 --no-install-recommends && \ rm -rf /var/lib/apt/lists/* && \ rm -rf /var/cache/oracle-jdk8-installer @@ -16,7 +17,7 @@ RUN \ ENV JAVA_HOME /usr/lib/jvm/java-8-oracle # install maven on top of java stage -FROM java as build +FROM local_stage_java as local_stage_build ARG MAVEN_VERSION=3.5.3 ARG USER_HOME_DIR="/root" ARG SHA=b52956373fab1dd4277926507ab189fb797b3bc51a2a267a193c931fffad8408 @@ -41,9 +42,6 @@ COPY docker/settings-docker.xml /usr/share/maven/ref/ VOLUME "$USER_HOME_DIR/.m2" -ENTRYPOINT ["/usr/local/bin/mvn-entrypoint.sh"] -CMD ["mvn"] - # install build dependencies RUN apt-get update && apt-get install -y --no-install-recommends \ git \ @@ -55,13 +53,13 @@ COPY . 
/iri RUN mvn clean package # execution image -FROM java +FROM local_stage_java RUN apt-get update && apt-get install -y --no-install-recommends \ jq curl socat \ && rm -rf /var/lib/apt/lists/* -COPY --from=build /iri/target/iri*.jar /iri/target/ +COPY --from=local_stage_build /iri/target/iri*.jar /iri/target/ COPY docker/entrypoint.sh / # Java related options. Defaults set as below diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh old mode 100644 new mode 100755 From a2cfa84a9c3d345e1ca2f2a8d873942da0f17567 Mon Sep 17 00:00:00 2001 From: galrogo Date: Wed, 9 May 2018 15:41:38 +0300 Subject: [PATCH 24/45] make travis run integration tests after merge (#747) * make travis run integration tests after merge * use iotaledger repo to clone tests --- .travis.yml | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index bc9b80317c..edba93fa3d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,7 @@ addons: apt: packages: - jq + - libxml2-utils matrix: allow_failures: @@ -22,19 +23,27 @@ matrix: script: #run tests and integration tests + # see https://stackoverflow.com/questions/34405047/how-do-you-merge-into-another-branch-using-travis-with-git-commands?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa + - build_head=$(git rev-parse HEAD) + - git config --replace-all remote.origin.fetch +refs/heads/*:refs/remotes/origin/* + - git fetch origin dev + - git checkout -f dev + - git checkout $build_head + - git merge dev - mvn integration-test - - echo `mvn help:evaluate -Dexpression=project.version | grep -e '^[^\[]' | grep -v 'Downloading'` > iri.version #run jar sanity tests - - git clone https://github.com/alon-e/iri-regression - - cd iri-regression + - VERSION=`echo -e 'setns x=http://maven.apache.org/POM/4.0.0\ncat /x:project/x:version/text()' | xmllint --shell pom.xml | grep -v / | tr -d -` + - echo $VERSION + - git clone https://github.com/iotaledger/iri-regression-tests.git 
+ - cd iri-regression-tests + - git checkout -f master - mkdir iri - cp -rf ../target iri/target - - cp ../iri.version . - - bash run_all_stable_tests.sh `cat iri.version` + - bash run_all_stable_tests.sh $VERSION - cd .. after_success: - #codacy-coverage send report + #codacy-coverage send report. Uses Travis Env variable (CODACY_PROJECT_TOKEN) - test $TRAVIS_PULL_REQUEST = "false" && test $TRAVIS_JDK_VERSION = "oraclejdk8" && wget -O codacy-coverage-reporter-assembly-latest.jar $(curl https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r .assets[0].browser_download_url) - test $TRAVIS_PULL_REQUEST = "false" && test $TRAVIS_JDK_VERSION = "oraclejdk8" && java -jar codacy-coverage-reporter-assembly-latest.jar report -l Java -r target/site/jacoco/jacoco.xml From b683e71e502ed53f7a6c97794ebcbb810acdefdc Mon Sep 17 00:00:00 2001 From: Mathieu Viossat Date: Wed, 9 May 2018 17:21:51 +0200 Subject: [PATCH 25/45] ZMQ: Send transaction hash with trytes (#739) --- src/main/java/com/iota/iri/storage/ZmqPublishProvider.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/iota/iri/storage/ZmqPublishProvider.java b/src/main/java/com/iota/iri/storage/ZmqPublishProvider.java index 0451fd7b4c..14cff92d98 100644 --- a/src/main/java/com/iota/iri/storage/ZmqPublishProvider.java +++ b/src/main/java/com/iota/iri/storage/ZmqPublishProvider.java @@ -95,7 +95,8 @@ private void publishTxTrytes(TransactionViewModel transactionViewModel) { try { txTrytesStringBuilder.append("tx_trytes "); - txTrytesStringBuilder.append(Converter.trytes(transactionViewModel.trits())); + txTrytesStringBuilder.append(Converter.trytes(transactionViewModel.trits())); txTrytesStringBuilder.append(" "); + txTrytesStringBuilder.append(transactionViewModel.getHash()); messageQ.publish(txTrytesStringBuilder.toString()); } catch (Exception e) { From bc6b943368695ab266db31347c1752ba79dca846 Mon Sep 17 00:00:00 2001 From: galrogo Date: Wed, 9 May 2018 
18:23:25 +0300 Subject: [PATCH 26/45] fix null pointer exception when attempting to close RocksDb (#738) * fix null pointer exception when attempting to close RocksDb * Start using IotaIOUtils * Start using IotaIOUtils everywhere IOUtils was used before * add debug log --- src/main/java/com/iota/iri/Snapshot.java | 5 ++-- src/main/java/com/iota/iri/service/API.java | 4 ++-- .../rocksDB/RocksDBPersistenceProvider.java | 9 ++++---- .../java/com/iota/iri/utils/IotaIOUtils.java | 23 +++++++++++++++++++ src/main/java/com/iota/iri/zmq/MessageQ.java | 6 ++--- 5 files changed, 36 insertions(+), 11 deletions(-) create mode 100644 src/main/java/com/iota/iri/utils/IotaIOUtils.java diff --git a/src/main/java/com/iota/iri/Snapshot.java b/src/main/java/com/iota/iri/Snapshot.java index 0049e08048..419084f32a 100644 --- a/src/main/java/com/iota/iri/Snapshot.java +++ b/src/main/java/com/iota/iri/Snapshot.java @@ -2,7 +2,8 @@ import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.model.Hash; -import org.apache.commons.io.IOUtils; + +import com.iota.iri.utils.IotaIOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -91,7 +92,7 @@ private static Map initInitialState(String snapshotFile) { System.exit(-1); } finally { - IOUtils.closeQuietly(reader); + IotaIOUtils.closeQuietly(reader); } return state; } diff --git a/src/main/java/com/iota/iri/service/API.java b/src/main/java/com/iota/iri/service/API.java index 7a0d79fd44..e9869dbad1 100644 --- a/src/main/java/com/iota/iri/service/API.java +++ b/src/main/java/com/iota/iri/service/API.java @@ -17,6 +17,7 @@ import com.iota.iri.network.Neighbor; import com.iota.iri.service.dto.*; import com.iota.iri.utils.Converter; +import com.iota.iri.utils.IotaIOUtils; import com.iota.iri.utils.MapIdentityManager; import io.undertow.Undertow; import io.undertow.security.api.AuthenticationMechanism; @@ -30,7 +31,6 @@ import io.undertow.server.HttpHandler; import io.undertow.server.HttpServerExchange; import 
io.undertow.util.*; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -173,7 +173,7 @@ private void processRequest(final HttpServerExchange exchange) throws IOExceptio exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "application/json"); final long beginningTime = System.currentTimeMillis(); - final String body = IOUtils.toString(cis, StandardCharsets.UTF_8); + final String body = IotaIOUtils.toString(cis, StandardCharsets.UTF_8); final AbstractResponse response; if (!exchange.getRequestHeaders().contains("X-IOTA-API-Version")) { diff --git a/src/main/java/com/iota/iri/storage/rocksDB/RocksDBPersistenceProvider.java b/src/main/java/com/iota/iri/storage/rocksDB/RocksDBPersistenceProvider.java index 8a6e56321e..3c6e4f6a94 100644 --- a/src/main/java/com/iota/iri/storage/rocksDB/RocksDBPersistenceProvider.java +++ b/src/main/java/com/iota/iri/storage/rocksDB/RocksDBPersistenceProvider.java @@ -4,8 +4,8 @@ import com.iota.iri.storage.Indexable; import com.iota.iri.storage.Persistable; import com.iota.iri.storage.PersistenceProvider; +import com.iota.iri.utils.IotaIOUtils; import com.iota.iri.utils.Pair; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.SystemUtils; import org.rocksdb.*; import org.rocksdb.util.SizeUnit; @@ -104,9 +104,9 @@ private void initClassTreeMap() { @Override public void shutdown() { for (final ColumnFamilyHandle columnFamilyHandle : columnFamilyHandles) { - IOUtils.closeQuietly(columnFamilyHandle::close); + IotaIOUtils.closeQuietly(columnFamilyHandle); } - IOUtils.closeQuietly(db::close, options::close, bloomFilter::close); + IotaIOUtils.closeQuietly(db, options, bloomFilter); } @Override @@ -455,7 +455,8 @@ private void initDB(String path, String logPath) { fillModelColumnHandles(); } catch (Exception e) { - IOUtils.closeQuietly(db::close); + log.error("Error while initializing RocksDb", e); + IotaIOUtils.closeQuietly(db); } 
} diff --git a/src/main/java/com/iota/iri/utils/IotaIOUtils.java b/src/main/java/com/iota/iri/utils/IotaIOUtils.java new file mode 100644 index 0000000000..f81fbd8b4f --- /dev/null +++ b/src/main/java/com/iota/iri/utils/IotaIOUtils.java @@ -0,0 +1,23 @@ +package com.iota.iri.utils; + + +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class IotaIOUtils extends IOUtils { + + private static final Logger log = LoggerFactory.getLogger(IotaIOUtils.class); + + public static void closeQuietly(AutoCloseable... autoCloseables) { + for (AutoCloseable it : autoCloseables) { + try { + if (it != null) { + it.close(); + } + } catch (Exception ignored) { + log.debug("Silent exception occured", ignored); + } + } + } +} diff --git a/src/main/java/com/iota/iri/zmq/MessageQ.java b/src/main/java/com/iota/iri/zmq/MessageQ.java index ec3d25ae85..675f312348 100644 --- a/src/main/java/com/iota/iri/zmq/MessageQ.java +++ b/src/main/java/com/iota/iri/zmq/MessageQ.java @@ -1,6 +1,6 @@ package com.iota.iri.zmq; -import org.apache.commons.io.IOUtils; +import com.iota.iri.utils.IotaIOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.zeromq.ZMQ; @@ -52,7 +52,7 @@ public void shutdown() { LOG.error("Publisher service shutdown failed.", e); } - IOUtils.closeQuietly(publisher); - IOUtils.closeQuietly(context); + IotaIOUtils.closeQuietly(publisher); + IotaIOUtils.closeQuietly(context); } } From 9fc302b9b37b365dc174cc9c131f72cd590567a8 Mon Sep 17 00:00:00 2001 From: Giorgio Mandolfo Date: Thu, 10 May 2018 17:08:27 +1000 Subject: [PATCH 27/45] Removed DOCKER_IRI_NEIGHBOR_FILE and hardcoded --neighbors option. Also added attachToTangle to the list of APIs disabled by default. 
Updated docs --- DOCKER.md | 24 +++++++++++++++++------- Dockerfile | 3 +-- docker/entrypoint.sh | 5 ----- 3 files changed, 18 insertions(+), 14 deletions(-) diff --git a/DOCKER.md b/DOCKER.md index f390e0db04..19ea3297d7 100644 --- a/DOCKER.md +++ b/DOCKER.md @@ -1,4 +1,17 @@ -## DOCKER and IRI +## Quickstart + +1. Run the official iotaledger/iri container, passing the mandatory -p option +```docker run iotaledger/iri:v1.4.2.4 -p 14265``` + +This will get your a running IRI with its API listening on port 14265, no neighbours and an empty database. The IRI Docker container by default expects data at /iri/data. Use the `-v` option of the `docker run` command to mount volumes so to have persistent data. You can also pass more command line options to the docker run command and those will be passed to IRI. + +If you want to use a iri.ini file with the docker container, supposing it's stored under /path/to/conf/iri.ini on your docker host, then pass `-v /path/to/conf:/iri/conf` and add -c /iri/conf/iri.ini as docker run arguments. So for example the `docker run` command above would become: + +```docker run -v /path/to/conf:/iri/conf -v /path/to/data:/iri/data iotaledger/iri:v1.4.2.4 -p 14265 -c /iri/conf/iri.ini``` + +Please refer to the IRI documentation for further command line options and iri.ini options. + +## DOCKER and IRI in depth The Dockerfile included in this repo builds a working IRI docker container whilst trying to stay the least opinionated as possible. This allows system administrators the option to deploy and configure IRI based on their own individual circumstances and needs. @@ -15,18 +28,16 @@ JAVA_OPTIONS: these are the java options to pass right after the java command. I JAVA_MIN_MEMORY: the value of -Xms option. Defaults to 2G JAVA_MAX_MEMORY: the value of -Xmx option. Defaults to 4G DOCKER_IRI_JAR_PATH: defaults to /iri/target/iri*.jar as pushed by the Dockerfile. 
This is useful if custom IRI binaries want to be executed and the default path needs to be overridden -DOCKER_IRI_REMOTE_LIMIT_API: defaults to "interruptAttachToTangle, addNeighbors, removeNeighbors, getNeighbors" -DOCKER_IRI_NEIGHBOR_FILE: defaults to an empty value. If set, the entrypoint looks for such file and adds each line as IRI neighbours. Commented lines will be skipped +DOCKER_IRI_REMOTE_LIMIT_API: defaults to "interruptAttachToTangle, attachToTangle, addNeighbors, removeNeighbors, getNeighbors" DOCKER_IRI_MONITORING_API_PORT_ENABLE: defaults to 0. If set to 1, a socat on port 14266 directed to 127.0.0.1:DOCKER_IRI_MONITORING_API_PORT_DESTINATION will be open in order to allow all API calls regardless of the DOCKER_IRI_REMOTE_LIMIT_API setting. This is useful to give access to restricted API calls to local tools and still denying access to restricted API calls to the internet. It is highly recommended to use this option together with docker networks (docker run --net). The container entry point is a shell script that performs few additional steps before launching IRI: - verifies if DOCKER_IRI_MONITORING_API_PORT_ENABLE is set to 1 -- verifies if DOCKER_IRI_NEIGHBOR_FILE is set - launches IRI with all parameters passed as desired -It is important to note that other than --neighbors (via the env var DOCKER_IRI_NEIGHBOR_FILE), --remote and --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API", neither the entrypoint nor the Dockerfile are aware of any IRI configuration option. This is to not tie the Dockerfile and its container to a specific set of IRI options. Instead, this contain still allows the use of an INI file or command line options. Please refer to the IRI documentation to learn what are the allowed options at command line and via the INI file. +It is important to note that other than --remote and --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API", neither the entrypoint nor the Dockerfile are aware of any IRI configuration option. 
This is to not tie the Dockerfile and its container to a specific set of IRI options. Instead, this contain still allows the use of an INI file or command line options. Please refer to the IRI documentation to learn what are the allowed options at command line and via the INI file. -At the time of writing, IRI requires -p to be passed either via INI or via command line. The entrypoint of this docker container does not do that for you. +**At the time of writing, IRI requires -p to be passed either via INI or via command line. The entrypoint of this docker container does not do that for you.** Here is a systemd unit example you can use with this Docker container. This is just an example and customisation is possible and recommended. In this example the docker network iri must be created and the paths /mnt/iri/conf and /mnt/iri/data are used on the docker host to serve respectively the neighbors file and the data directory. No INI files are used in this example, instead options are passed via command line options, such as --testnet and --zmq-enabled. @@ -48,7 +59,6 @@ ExecStart=/usr/bin/docker run \ -p 14265:14265 \ -p 15600:145600/udp \ -p 14600:14600/udp \ --e "DOCKER_IRI_NEIGHBOR_FILE=/iri/conf/neighbors" \ iotaledger/iri:v1.4.2.4 \ -p 14265 \ --zmq-enabled \ diff --git a/Dockerfile b/Dockerfile index ad9780c844..8c84ace5f8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -69,8 +69,7 @@ ENV JAVA_MAX_MEMORY 4G # Additional custom variables. See DOCKER.md for details ENV DOCKER_IRI_JAR_PATH "/iri/target/iri*.jar" -ENV DOCKER_IRI_REMOTE_LIMIT_API "interruptAttachToTangle, addNeighbors, removeNeighbors, getNeighbors" -ENV DOCKER_IRI_NEIGHBOR_FILE "/iri/conf/neighbors" +ENV DOCKER_IRI_REMOTE_LIMIT_API "interruptAttachToTangle, attachToTangle, addNeighbors, removeNeighbors, getNeighbors" # Setting this to 1 will have socat exposing 14266 and pointing it on # localhost. 
See /entrypoint.sh diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 05f59e6cbe..0fbf491c42 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -5,10 +5,6 @@ if [ "${DOCKER_IRI_MONITORING_API_PORT_ENABLE}" == "1" ]; then nohup socat -lm TCP-LISTEN:14266,fork TCP:127.0.0.1:${DOCKER_IRI_MONITORING_API_PORT_DESTINATION} & fi -for neighbor in $(grep -v \# $DOCKER_IRI_NEIGHBOR_FILE); do - DOCKER_IRI_NEIGHBORS+=" $neighbor" -done - exec java \ $JAVA_OPTIONS \ -Xms$JAVA_MIN_MEMORY \ @@ -16,5 +12,4 @@ exec java \ -Djava.net.preferIPv4Stack=true \ -jar $DOCKER_IRI_JAR_PATH \ --remote --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API" \ - --neighbors "$DOCKER_IRI_NEIGHBORS" \ "$@" From edb0315cf172e56f341c07595efb618fb109624b Mon Sep 17 00:00:00 2001 From: Giorgio Mandolfo Date: Thu, 10 May 2018 17:24:17 +1000 Subject: [PATCH 28/45] Cosmetic and typos fixes in DOCKER.md --- DOCKER.md | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/DOCKER.md b/DOCKER.md index 19ea3297d7..2867a81e79 100644 --- a/DOCKER.md +++ b/DOCKER.md @@ -1,6 +1,7 @@ ## Quickstart -1. Run the official iotaledger/iri container, passing the mandatory -p option +Run the official iotaledger/iri container, passing the mandatory -p option: + ```docker run iotaledger/iri:v1.4.2.4 -p 14265``` This will get your a running IRI with its API listening on port 14265, no neighbours and an empty database. The IRI Docker container by default expects data at /iri/data. Use the `-v` option of the `docker run` command to mount volumes so to have persistent data. You can also pass more command line options to the docker run command and those will be passed to IRI. @@ -24,15 +25,15 @@ The built container assumes the WORKDIR inside the container is /iri/data: this The docker conatiner supports the env variables to configure advanced options. These variables can be set but are not required to run IRI. 
-JAVA_OPTIONS: these are the java options to pass right after the java command. It must not contain -Xms nor -Xmx. Defaults to a safe value -JAVA_MIN_MEMORY: the value of -Xms option. Defaults to 2G -JAVA_MAX_MEMORY: the value of -Xmx option. Defaults to 4G -DOCKER_IRI_JAR_PATH: defaults to /iri/target/iri*.jar as pushed by the Dockerfile. This is useful if custom IRI binaries want to be executed and the default path needs to be overridden -DOCKER_IRI_REMOTE_LIMIT_API: defaults to "interruptAttachToTangle, attachToTangle, addNeighbors, removeNeighbors, getNeighbors" -DOCKER_IRI_MONITORING_API_PORT_ENABLE: defaults to 0. If set to 1, a socat on port 14266 directed to 127.0.0.1:DOCKER_IRI_MONITORING_API_PORT_DESTINATION will be open in order to allow all API calls regardless of the DOCKER_IRI_REMOTE_LIMIT_API setting. This is useful to give access to restricted API calls to local tools and still denying access to restricted API calls to the internet. It is highly recommended to use this option together with docker networks (docker run --net). +`JAVA_OPTIONS`: these are the java options to pass right after the java command. It must not contain -Xms nor -Xmx. Defaults to a safe value +`JAVA_MIN_MEMORY`: the value of -Xms option. Defaults to 2G +`JAVA_MAX_MEMORY`: the value of -Xmx option. Defaults to 4G +`DOCKER_IRI_JAR_PATH`: defaults to /iri/target/iri*.jar as pushed by the Dockerfile. This is useful if custom IRI binaries want to be executed and the default path needs to be overridden +`DOCKER_IRI_REMOTE_LIMIT_API`: defaults to "interruptAttachToTangle, attachToTangle, addNeighbors, removeNeighbors, getNeighbors" +`DOCKER_IRI_MONITORING_API_PORT_ENABLE`: defaults to 0. If set to 1, a socat on port 14266 directed to 127.0.0.1:DOCKER_IRI_MONITORING_API_PORT_DESTINATION will be open in order to allow all API calls regardless of the DOCKER_IRI_REMOTE_LIMIT_API setting. 
This is useful to give access to restricted API calls to local tools and still denying access to restricted API calls to the internet. It is highly recommended to use this option together with docker networks (docker run --net). The container entry point is a shell script that performs few additional steps before launching IRI: -- verifies if DOCKER_IRI_MONITORING_API_PORT_ENABLE is set to 1 +- verifies if `DOCKER_IRI_MONITORING_API_PORT_ENABLE` is set to 1 - launches IRI with all parameters passed as desired It is important to note that other than --remote and --remote-limit-api "$DOCKER_IRI_REMOTE_LIMIT_API", neither the entrypoint nor the Dockerfile are aware of any IRI configuration option. This is to not tie the Dockerfile and its container to a specific set of IRI options. Instead, this contain still allows the use of an INI file or command line options. Please refer to the IRI documentation to learn what are the allowed options at command line and via the INI file. @@ -41,6 +42,7 @@ It is important to note that other than --remote and --remote-limit-api "$DOCKER Here is a systemd unit example you can use with this Docker container. This is just an example and customisation is possible and recommended. In this example the docker network iri must be created and the paths /mnt/iri/conf and /mnt/iri/data are used on the docker host to serve respectively the neighbors file and the data directory. No INI files are used in this example, instead options are passed via command line options, such as --testnet and --zmq-enabled. 
+``` [Unit] Description=IRI After=docker.service @@ -57,7 +59,7 @@ ExecStart=/usr/bin/docker run \ -v /mnt/iri/conf:/iri/conf \ -v /mnt/iri/data:/iri/data \ -p 14265:14265 \ --p 15600:145600/udp \ +-p 15600:15600 \ -p 14600:14600/udp \ iotaledger/iri:v1.4.2.4 \ -p 14265 \ @@ -69,4 +71,4 @@ ExecReload=/usr/bin/docker restart %n [Install] WantedBy=multi-user.target - +``` From 6f8d3dceb1bddcfd2be2e86f0ee3224bd77d739b Mon Sep 17 00:00:00 2001 From: Rajiv Shah Date: Mon, 14 May 2018 03:23:54 -0400 Subject: [PATCH 29/45] Update version in README (#754) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 27bb49be73..6e05f6451e 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ It is specially designed for users seeking a fast, efficient and fully-compatibl Running an IRI node also allows light wallet users a node to directly connect to for their own wallet transactions. -* **Latest release:** 1.4.2.2 Release +* **Latest release:** 1.4.2.4 Release * **License:** GPLv3 # How to get started @@ -112,4 +112,4 @@ NEIGHBORS = udp://my.favorite.com:14600 IXI_DIR = ixi DEBUG = false DB_PATH = db -``` \ No newline at end of file +``` From 2036733c3eba8a071f8d9eb3c5f0b6ec47fb54ed Mon Sep 17 00:00:00 2001 From: Johnny-Milkshakes Date: Wed, 16 May 2018 03:50:03 -0400 Subject: [PATCH 30/45] Slight spelling correction :) (#755) --- src/main/java/com/iota/iri/service/TipsManager.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/iota/iri/service/TipsManager.java b/src/main/java/com/iota/iri/service/TipsManager.java index 6229f745b8..192ed5e7f8 100644 --- a/src/main/java/com/iota/iri/service/TipsManager.java +++ b/src/main/java/com/iota/iri/service/TipsManager.java @@ -375,10 +375,10 @@ boolean belowMaxDepth(Hash tip, int depth, Set maxDepthOk) throws Exceptio } //if tip unconfirmed, check if any referenced tx is confirmed below maxDepth Queue nonAnalyzedTransactions = new 
LinkedList<>(Collections.singleton(tip)); - Set analyzedTranscations = new HashSet<>(); + Set analyzedTransactions = new HashSet<>(); Hash hash; while ((hash = nonAnalyzedTransactions.poll()) != null) { - if (analyzedTranscations.add(hash)) { + if (analyzedTransactions.add(hash)) { TransactionViewModel transaction = TransactionViewModel.fromHash(tangle, hash); if (transaction.snapshotIndex() != 0 && transaction.snapshotIndex() < depth) { return true; From 58363da9d9df82b7475d63ba6e20d81ead24c889 Mon Sep 17 00:00:00 2001 From: footloosejava Date: Wed, 16 May 2018 01:05:14 -0700 Subject: [PATCH 31/45] Logging - routing, levels and file locations (#727) * deleted main logback file in root folder Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * autoformat Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * replaced with revised logback as before rebased PR Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * replaced with revised IRI.java as before rebased PR Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * - added trace level back in. - changed speil to lower case to see if it passes travisCI testing - changed preamble to remove first colon before statement. 
Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> * removed line printing levels to user Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> --- logback.xml | 20 - src/main/java/com/iota/iri/IRI.java | 547 +++++++++++++++------------- src/main/resources/logback.xml | 82 ++--- 3 files changed, 328 insertions(+), 321 deletions(-) delete mode 100644 logback.xml diff --git a/logback.xml b/logback.xml deleted file mode 100644 index 9c16413dcc..0000000000 --- a/logback.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n - - - - - - - - - - - - diff --git a/src/main/java/com/iota/iri/IRI.java b/src/main/java/com/iota/iri/IRI.java index e653860c5a..310498dc93 100644 --- a/src/main/java/com/iota/iri/IRI.java +++ b/src/main/java/com/iota/iri/IRI.java @@ -16,314 +16,353 @@ import java.util.Optional; /** - * Main IOTA Reference Implementation starting class + * Main IOTA Reference Implementation starting class. */ public class IRI { - private static final Logger log = LoggerFactory.getLogger(IRI.class); - public static final String MAINNET_NAME = "IRI"; public static final String TESTNET_NAME = "IRI Testnet"; public static final String VERSION = "1.4.2.4"; - public static Iota iota; - public static API api; - public static IXI ixi; - public static Configuration configuration; - private static final String TESTNET_FLAG_REQUIRED = "--testnet flag must be turned on to use "; - - public static void main(final String[] args) throws IOException { - configuration = new Configuration(); - validateParams(configuration, args); - log.info("Welcome to {} {}", configuration.booling(DefaultConfSettings.TESTNET) ? TESTNET_NAME : MAINNET_NAME, VERSION); - iota = new Iota(configuration); - ixi = new IXI(iota); - api = new API(iota, ixi); - shutdownHook(); - - if (configuration.booling(DefaultConfSettings.DEBUG)) { - log.info("You have set the debug flag. 
To enable debug output, you need to uncomment the DEBUG appender in the source tree at iri/src/main/resources/logback.xml and re-package iri.jar"); + + public static void main(String[] args) throws Exception { + // Logging is configured first before any references to Logger or LoggerFactory. + // Any public method or field accessors needed in IRI should be put in IRI and then delegate to IRILauncher. That + // ensures that future code does not need to know about this setup. + configureLogging(); + IRILauncher.main(args); + } + + private static void configureLogging() { + String config = System.getProperty("logback.configurationFile"); + String level = System.getProperty("logging-level", "").toUpperCase(); + switch (level) { + case "OFF": + case "ERROR": + case "WARN": + case "INFO": + case "DEBUG": + case "TRACE": + break; + case "ALL": + level = "TRACE"; + break; + default: + level = "INFO"; + } + System.getProperties().put("logging-level", level); + System.out.println("Logging - property 'logging-level' set to: [" + level + "]"); + if (config != null) { + System.out.println("Logging - alternate logging configuration file specified at: '" + config + "'"); } + } - if (configuration.booling(DefaultConfSettings.EXPORT)) { - File exportDir = new File("export"); - // if the directory does not exist, create it - if (!exportDir.exists()) { - log.info("Create directory 'export'"); - try { - exportDir.mkdir(); - } catch (SecurityException e) { - log.error("Could not create directory", e); - } + private static class IRILauncher { + private static final Logger log = LoggerFactory.getLogger(IRILauncher.class); + + public static Iota iota; + public static API api; + public static IXI ixi; + public static Configuration configuration; + + private static final String TESTNET_FLAG_REQUIRED = "--testnet flag must be turned on to use "; + + public static void main(final String[] args) throws Exception { + + configuration = new Configuration(); + if (!isValidated(configuration, 
args)) { + printUsage(); + return; } - exportDir = new File("export-solid"); - // if the directory does not exist, create it - if (!exportDir.exists()) { - log.info("Create directory 'export-solid'"); - try { - exportDir.mkdir(); - } catch (SecurityException e) { - log.error("Could not create directory", e); + + log.info("Welcome to {} {}", configuration.booling(DefaultConfSettings.TESTNET) ? TESTNET_NAME : MAINNET_NAME, VERSION); + iota = new Iota(configuration); + ixi = new IXI(iota); + api = new API(iota, ixi); + shutdownHook(); + + if (configuration.booling(DefaultConfSettings.DEBUG)) { + log.info("You have set the debug flag. To enable debug output, you need to uncomment the DEBUG appender in the source tree at iri/src/main/resources/logback.xml and re-package iri.jar"); + } + + if (configuration.booling(DefaultConfSettings.EXPORT)) { + File exportDir = new File("export"); + if (!exportDir.exists()) { + log.info("Create directory 'export'"); + try { + exportDir.mkdir(); + } catch (SecurityException e) { + log.error("Could not create directory", e); + } + } + exportDir = new File("export-solid"); + if (!exportDir.exists()) { + log.info("Create directory 'export-solid'"); + try { + exportDir.mkdir(); + } catch (SecurityException e) { + log.error("Could not create directory", e); + } } } - } - try { - iota.init(); - api.init(); - ixi.init(configuration.string(Configuration.DefaultConfSettings.IXI_DIR)); - } catch (final Exception e) { - log.error("Exception during IOTA node initialisation: ", e); - System.exit(-1); + try { + iota.init(); + api.init(); + ixi.init(configuration.string(Configuration.DefaultConfSettings.IXI_DIR)); + log.info("IOTA Node initialised correctly."); + } catch (Exception e) { + log.error("Exception during IOTA node initialisation: ", e); + throw e; + } } - log.info("IOTA Node initialised correctly."); - } - public static void validateParams(final Configuration configuration, final String[] args) throws IOException { + private static 
boolean isValidated(final Configuration configuration, final String[] args) throws IOException { - boolean configurationInit = configuration.init(); + boolean configurationInit = configuration.init(); - if (args == null || (args.length < 2 && !configurationInit)) { - log.error("Invalid arguments list. Provide ini-file 'iota.ini' or API port number (i.e. '-p 14600')."); - printUsage(); - } + if (args == null || (args.length < 2 && !configurationInit)) { + log.error("Invalid arguments list. Provide ini-file 'iota.ini' or API port number (i.e. '-p 14600')."); + return false; + } - final CmdLineParser parser = new CmdLineParser(); - - final Option config = parser.addStringOption('c', "config"); - final Option port = parser.addStringOption('p', "port"); - final Option rportudp = parser.addStringOption('u', "udp-receiver-port"); - final Option rporttcp = parser.addStringOption('t', "tcp-receiver-port"); - final Option debug = parser.addBooleanOption('d', "debug"); - final Option remote = parser.addBooleanOption("remote"); - final Option remoteLimitApi = parser.addStringOption("remote-limit-api"); - final Option remoteAuth = parser.addStringOption("remote-auth"); - final Option neighbors = parser.addStringOption('n', "neighbors"); - final Option export = parser.addBooleanOption("export"); - final Option zmq = parser.addBooleanOption("zmq-enabled"); - final Option help = parser.addBooleanOption('h', "help"); - final Option testnet = parser.addBooleanOption("testnet"); - final Option revalidate = parser.addBooleanOption("revalidate"); - final Option rescan = parser.addBooleanOption("rescan"); - final Option sendLimit = parser.addStringOption("send-limit"); - final Option sync = parser.addBooleanOption("sync"); - final Option dnsResolutionFalse = parser.addBooleanOption("dns-resolution-false"); - final Option maxPeers = parser.addStringOption("max-peers"); - final Option testnetCoordinator = parser.addStringOption("testnet-coordinator"); - final Option disableCooValidation = 
parser.addBooleanOption("testnet-no-coo-validation"); - final Option snapshot = parser.addStringOption("snapshot"); - final Option snapshotSignature = parser.addStringOption("snapshot-sig"); - final Option minimalWeightMagnitude = parser.addIntegerOption("mwm"); - final Option milestoneStartIndex = parser.addIntegerOption("milestone-start"); - final Option milestoneKeys = parser.addIntegerOption("milestone-keys"); - final Option snapshotTime = parser.addLongOption("snapshot-timestamp"); - - - try { - assert args != null; - parser.parse(args); - } catch (CmdLineParser.OptionException e) { - log.error("CLI error: ", e); - printUsage(); - System.exit(2); - } + final CmdLineParser parser = new CmdLineParser(); + + final Option config = parser.addStringOption('c', "config"); + final Option port = parser.addStringOption('p', "port"); + final Option rportudp = parser.addStringOption('u', "udp-receiver-port"); + final Option rporttcp = parser.addStringOption('t', "tcp-receiver-port"); + final Option debug = parser.addBooleanOption('d', "debug"); + final Option remote = parser.addBooleanOption("remote"); + final Option remoteLimitApi = parser.addStringOption("remote-limit-api"); + final Option remoteAuth = parser.addStringOption("remote-auth"); + final Option neighbors = parser.addStringOption('n', "neighbors"); + final Option export = parser.addBooleanOption("export"); + final Option zmq = parser.addBooleanOption("zmq-enabled"); + final Option help = parser.addBooleanOption('h', "help"); + final Option testnet = parser.addBooleanOption("testnet"); + final Option revalidate = parser.addBooleanOption("revalidate"); + final Option rescan = parser.addBooleanOption("rescan"); + final Option sendLimit = parser.addStringOption("send-limit"); + final Option sync = parser.addBooleanOption("sync"); + final Option dnsResolutionFalse = parser.addBooleanOption("dns-resolution-false"); + final Option maxPeers = parser.addStringOption("max-peers"); + final Option testnetCoordinator = 
parser.addStringOption("testnet-coordinator"); + final Option disableCooValidation = parser.addBooleanOption("testnet-no-coo-validation"); + final Option snapshot = parser.addStringOption("snapshot"); + final Option snapshotSignature = parser.addStringOption("snapshot-sig"); + final Option minimalWeightMagnitude = parser.addIntegerOption("mwm"); + final Option milestoneStartIndex = parser.addIntegerOption("milestone-start"); + final Option milestoneKeys = parser.addIntegerOption("milestone-keys"); + final Option snapshotTime = parser.addLongOption("snapshot-timestamp"); - // optional config file path - String confFilePath = parser.getOptionValue(config); - if (confFilePath != null) { - configuration.put(DefaultConfSettings.CONFIG, confFilePath); - configuration.init(); - } - //This block cannot be moved down - final boolean isTestnet = Optional.ofNullable(parser.getOptionValue(testnet)).orElse(Boolean.FALSE) + try { + parser.parse(args); + } catch (CmdLineParser.OptionException e) { + log.error("CLI error: ", e); + throw new IllegalArgumentException("CLI error: " + e, e); + } + + // optional config file path + String confFilePath = parser.getOptionValue(config); + if (confFilePath != null) { + configuration.put(DefaultConfSettings.CONFIG, confFilePath); + configuration.init(); + } + + //This block cannot be moved down + final boolean isTestnet = Optional.ofNullable(parser.getOptionValue(testnet)).orElse(Boolean.FALSE) || configuration.booling(DefaultConfSettings.TESTNET); - if (isTestnet) { - configuration.put(DefaultConfSettings.TESTNET, "true"); - configuration.put(DefaultConfSettings.DB_PATH.name(), "testnetdb"); - configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), "testnetdb.log"); - configuration.put(DefaultConfSettings.COORDINATOR, Configuration.TESTNET_COORDINATOR_ADDRESS); - configuration.put(DefaultConfSettings.SNAPSHOT_FILE, Configuration.TESTNET_SNAPSHOT_FILE); - configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, 
Configuration.TESTNET_MILESTONE_START_INDEX); - configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); - configuration.put(DefaultConfSettings.MWM, Configuration.TESTNET_MWM); - configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, + if (isTestnet) { + configuration.put(DefaultConfSettings.TESTNET, "true"); + configuration.put(DefaultConfSettings.DB_PATH.name(), "testnetdb"); + configuration.put(DefaultConfSettings.DB_LOG_PATH.name(), "testnetdb.log"); + configuration.put(DefaultConfSettings.COORDINATOR, Configuration.TESTNET_COORDINATOR_ADDRESS); + configuration.put(DefaultConfSettings.SNAPSHOT_FILE, Configuration.TESTNET_SNAPSHOT_FILE); + configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, Configuration.TESTNET_MILESTONE_START_INDEX); + configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, ""); + configuration.put(DefaultConfSettings.MWM, Configuration.TESTNET_MWM); + configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, Configuration.TESTNET_NUM_KEYS_IN_MILESTONE); - configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, Configuration.TESTNET_PACKET_SIZE); - configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, Configuration.TESTNET_REQ_HASH_SIZE); - configuration.put(DefaultConfSettings.SNAPSHOT_TIME, Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME); - } + configuration.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE, Configuration.TESTNET_PACKET_SIZE); + configuration.put(DefaultConfSettings.REQUEST_HASH_SIZE, Configuration.TESTNET_REQ_HASH_SIZE); + configuration.put(DefaultConfSettings.SNAPSHOT_TIME, Configuration.TESTNET_GLOBAL_SNAPSHOT_TIME); + } - // mandatory args - String inicport = configuration.getIniValue(DefaultConfSettings.PORT.name()); - final String cport = inicport == null ? parser.getOptionValue(port) : inicport; - if (cport == null) { - log.error("Invalid arguments list. 
Provide at least the PORT in iota.ini or with -p option"); - printUsage(); - } else { - configuration.put(DefaultConfSettings.PORT, cport); - } + // mandatory args + String inicport = configuration.getIniValue(DefaultConfSettings.PORT.name()); + final String cport = inicport == null ? parser.getOptionValue(port) : inicport; + if (cport == null) { + log.error("Invalid arguments list. Provide at least the PORT in iota.ini or with -p option"); + return false; + } else { + configuration.put(DefaultConfSettings.PORT, cport); + } - // optional flags - if (parser.getOptionValue(help) != null) { - printUsage(); - } + // optional flags + if (parser.getOptionValue(help) != null) { + return false; + } - String cns = parser.getOptionValue(neighbors); - if (cns == null) { - log.warn("No neighbor has been specified. Server starting nodeless."); - cns = StringUtils.EMPTY; - } - configuration.put(DefaultConfSettings.NEIGHBORS, cns); + String cns = parser.getOptionValue(neighbors); + if (cns == null) { + log.warn("No neighbor has been specified. 
Server starting nodeless."); + cns = StringUtils.EMPTY; + } + configuration.put(DefaultConfSettings.NEIGHBORS, cns); - final String vremoteapilimit = parser.getOptionValue(remoteLimitApi); - if (vremoteapilimit != null) { - log.debug("The following api calls are not allowed : {} ", vremoteapilimit); - configuration.put(DefaultConfSettings.REMOTE_LIMIT_API, vremoteapilimit); - } + final String vremoteapilimit = parser.getOptionValue(remoteLimitApi); + if (vremoteapilimit != null) { + log.debug("The following api calls are not allowed : {} ", vremoteapilimit); + configuration.put(DefaultConfSettings.REMOTE_LIMIT_API, vremoteapilimit); + } - final String vremoteauth = parser.getOptionValue(remoteAuth); - if (vremoteauth != null) { - log.debug("Remote access requires basic authentication"); - configuration.put(DefaultConfSettings.REMOTE_AUTH, vremoteauth); - } + final String vremoteauth = parser.getOptionValue(remoteAuth); + if (vremoteauth != null) { + log.debug("Remote access requires basic authentication"); + configuration.put(DefaultConfSettings.REMOTE_AUTH, vremoteauth); + } - final String vrportudp = parser.getOptionValue(rportudp); - if (vrportudp != null) { - configuration.put(DefaultConfSettings.UDP_RECEIVER_PORT, vrportudp); - } + final String vrportudp = parser.getOptionValue(rportudp); + if (vrportudp != null) { + configuration.put(DefaultConfSettings.UDP_RECEIVER_PORT, vrportudp); + } - final String vrporttcp = parser.getOptionValue(rporttcp); - if (vrporttcp != null) { - configuration.put(DefaultConfSettings.TCP_RECEIVER_PORT, vrporttcp); - } + final String vrporttcp = parser.getOptionValue(rporttcp); + if (vrporttcp != null) { + configuration.put(DefaultConfSettings.TCP_RECEIVER_PORT, vrporttcp); + } - if (parser.getOptionValue(remote) != null) { - log.info("Remote access enabled. 
Binding API socket to listen any interface."); - configuration.put(DefaultConfSettings.API_HOST, "0.0.0.0"); - } + if (parser.getOptionValue(remote) != null) { + log.info("Remote access enabled. Binding API socket to listen any interface."); + configuration.put(DefaultConfSettings.API_HOST, "0.0.0.0"); + } - if (parser.getOptionValue(export) != null) { - log.info("Export transaction trytes turned on."); - configuration.put(DefaultConfSettings.EXPORT, "true"); - } + if (parser.getOptionValue(export) != null) { + log.info("Export transaction trytes turned on."); + configuration.put(DefaultConfSettings.EXPORT, "true"); + } - if (parser.getOptionValue(zmq) != null) { - configuration.put(DefaultConfSettings.ZMQ_ENABLED, "true"); - } + if (parser.getOptionValue(zmq) != null) { + configuration.put(DefaultConfSettings.ZMQ_ENABLED, "true"); + } - if (Integer.parseInt(cport) < 1024) { - log.warn("Warning: api port value seems too low."); - } + if (Integer.parseInt(cport) < 1024) { + log.warn("Warning: api port value seems too low."); + } - if (parser.getOptionValue(debug) != null) { - configuration.put(DefaultConfSettings.DEBUG, "true"); - log.info(configuration.allSettings()); - StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory()); - } + if (parser.getOptionValue(debug) != null) { + configuration.put(DefaultConfSettings.DEBUG, "true"); + log.info(configuration.allSettings()); + StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory()); + } - final String coordinatorAddress = parser.getOptionValue(testnetCoordinator); - if (coordinatorAddress != null) { - if (isTestnet) { - configuration.put(DefaultConfSettings.COORDINATOR, coordinatorAddress); - } else { - log.warn(TESTNET_FLAG_REQUIRED + testnetCoordinator.longForm()); + final String coordinatorAddress = parser.getOptionValue(testnetCoordinator); + if (coordinatorAddress != null) { + if (isTestnet) { + configuration.put(DefaultConfSettings.COORDINATOR, coordinatorAddress); + } else { + 
log.warn(TESTNET_FLAG_REQUIRED + testnetCoordinator.longForm()); + } } - } - final Boolean noCooValidation = parser.getOptionValue(disableCooValidation); - if (noCooValidation != null) { - if (isTestnet) { - configuration.put(DefaultConfSettings.DONT_VALIDATE_TESTNET_MILESTONE_SIG, noCooValidation.toString()); + final Boolean noCooValidation = parser.getOptionValue(disableCooValidation); + if (noCooValidation != null) { + if (isTestnet) { + configuration.put(DefaultConfSettings.DONT_VALIDATE_TESTNET_MILESTONE_SIG, noCooValidation.toString()); + } else { + log.warn(TESTNET_FLAG_REQUIRED + noCooValidation); + } } - else { - log.warn(TESTNET_FLAG_REQUIRED + noCooValidation); + + //TODO check what happens if string is invalid int + final Integer mwm = parser.getOptionValue(minimalWeightMagnitude); + if (mwm != null) { + configuration.put(DefaultConfSettings.MWM, String.valueOf(mwm)); } - } - //TODO check what happens if string is invalid int - final Integer mwm = parser.getOptionValue(minimalWeightMagnitude); - if (mwm != null) { - configuration.put(DefaultConfSettings.MWM, String.valueOf(mwm)); - } + final String snapshotFile = parser.getOptionValue(snapshot); + if (snapshotFile != null) { + configuration.put(DefaultConfSettings.SNAPSHOT_FILE, snapshotFile); + } - final String snapshotFile = parser.getOptionValue(snapshot); - if (snapshotFile != null) { - configuration.put(DefaultConfSettings.SNAPSHOT_FILE, snapshotFile); - } + final String snapshotSig = parser.getOptionValue(snapshotSignature); + if (snapshotSig != null) { + configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, snapshotSig); + } - final String snapshotSig = parser.getOptionValue(snapshotSignature); - if (snapshotSig != null) { - configuration.put(DefaultConfSettings.SNAPSHOT_SIGNATURE_FILE, snapshotSig); - } + final Integer milestoneStart = parser.getOptionValue(milestoneStartIndex); + if (milestoneStart != null) { + configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, 
String.valueOf(milestoneStart)); + } - final Integer milestoneStart = parser.getOptionValue(milestoneStartIndex); - if (milestoneStart != null) { - configuration.put(DefaultConfSettings.MILESTONE_START_INDEX, String.valueOf(milestoneStart)); - } + final Integer numberOfKeys = parser.getOptionValue(milestoneKeys); + if (numberOfKeys != null) { + configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, String.valueOf(numberOfKeys)); + } - final Integer numberOfKeys = parser.getOptionValue(milestoneKeys); - if (numberOfKeys != null) { - configuration.put(DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE, String.valueOf(numberOfKeys)); - } + final Long snapshotTimestamp = parser.getOptionValue(snapshotTime); + if (snapshotTimestamp != null) { + configuration.put(DefaultConfSettings.SNAPSHOT_TIME, String.valueOf(snapshotTimestamp)); + } - final Long snapshotTimestamp = parser.getOptionValue(snapshotTime); - if (snapshotTimestamp != null) { - configuration.put(DefaultConfSettings.SNAPSHOT_TIME, String.valueOf(snapshotTimestamp)); - } + if (parser.getOptionValue(revalidate) != null) { + configuration.put(DefaultConfSettings.REVALIDATE, "true"); + } - if (parser.getOptionValue(revalidate) != null) { - configuration.put(DefaultConfSettings.REVALIDATE, "true"); - } + if (parser.getOptionValue(rescan) != null) { + configuration.put(DefaultConfSettings.RESCAN_DB, "true"); + } - if (parser.getOptionValue(rescan) != null) { - configuration.put(DefaultConfSettings.RESCAN_DB, "true"); - } + if (parser.getOptionValue(dnsResolutionFalse) != null) { + configuration.put(DefaultConfSettings.DNS_RESOLUTION_ENABLED, "false"); + } - if (parser.getOptionValue(dnsResolutionFalse) != null) { - configuration.put(DefaultConfSettings.DNS_RESOLUTION_ENABLED, "false"); - } + final String vsendLimit = parser.getOptionValue(sendLimit); + if (vsendLimit != null) { + configuration.put(DefaultConfSettings.SEND_LIMIT, vsendLimit); + } - final String vsendLimit = 
parser.getOptionValue(sendLimit); - if (vsendLimit != null) { - configuration.put(DefaultConfSettings.SEND_LIMIT, vsendLimit); + final String vmaxPeers = parser.getOptionValue(maxPeers); + if (vmaxPeers != null) { + configuration.put(DefaultConfSettings.MAX_PEERS, vmaxPeers); + } + return true; + } + + private static void printUsage() { + log.info("Usage: java -jar {}-{}.jar " + + "[{-n,--neighbors} ''] " + + "[{-p,--port} 14265] " + + "[{-c,--config} 'config-file-name'] " + + "[{-u,--udp-receiver-port} 14600] " + + "[{-t,--tcp-receiver-port} 15600] " + + "[{-d,--debug} false] " + + "[{--testnet} false]" + + "[{--remote} false]" + + "[{--remote-auth} string]" + + "[{--remote-limit-api} string]" + , MAINNET_NAME, VERSION); } - final String vmaxPeers = parser.getOptionValue(maxPeers); - if (vmaxPeers != null) { - configuration.put(DefaultConfSettings.MAX_PEERS, vmaxPeers); + private static void shutdownHook() { + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + log.info("Shutting down IOTA node, please hold tight..."); + try { + ixi.shutdown(); + api.shutDown(); + iota.shutdown(); + } catch (Exception e) { + log.error("Exception occurred shutting down IOTA node: ", e); + } + }, "Shutdown Hook")); } } - private static void printUsage() { - log.info("Usage: java -jar {}-{}.jar " + - "[{-n,--neighbors} ''] " + - "[{-p,--port} 14600] " + - "[{-c,--config} 'config-file-name'] " + - "[{-u,--udp-receiver-port} 14600] " + - "[{-t,--tcp-receiver-port} 15600] " + - "[{-d,--debug} false] " + - "[{--testnet} false]" + - "[{--remote} false]" + - "[{--remote-auth} string]" + - "[{--remote-limit-api} string]" - , MAINNET_NAME, VERSION); - System.exit(0); - } - - private static void shutdownHook() { - Runtime.getRuntime().addShutdownHook(new Thread(() -> { - - log.info("Shutting down IOTA node, please hold tight..."); - try { - ixi.shutdown(); - api.shutDown(); - iota.shutdown(); - } catch (final Exception e) { - log.error("Exception occurred shutting down IOTA node: ", e); 
- } - }, "Shutdown Hook")); + public static boolean validateParams(Configuration configuration, String[] args) throws IOException { + return IRILauncher.isValidated(configuration, args); } -} +} \ No newline at end of file diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml index fc14cbd5b8..0ce840ac49 100644 --- a/src/main/resources/logback.xml +++ b/src/main/resources/logback.xml @@ -1,61 +1,49 @@ - + - - - - INFO + + System.out + + TRACE ACCEPT - DENY + NEUTRAL - - System.out - - %d{MM/dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - + + %d{MM/dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + - - + + System.err + ERROR ACCEPT DENY - System.err - - %d{MM/dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - - - - - - - - - - - + + %d{MM/dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + - - + + + + + \ No newline at end of file From 8f999f7c9b5a36c5d7d8a9912145d1fae1605bc6 Mon Sep 17 00:00:00 2001 From: footloosejava Date: Thu, 17 May 2018 00:46:48 -0700 Subject: [PATCH 32/45] Inserted check for number of available processors. 
(#758) ``` int available = Runtime.getRuntime().availableProcessors(); numberOfThreads = Math.max(1, Math.floorDiv(available * 8, 10)); ``` Signed-off-by: footloosejava <32090281+footloosejava@users.noreply.github.com> --- src/main/java/com/iota/iri/hash/PearlDiver.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/iota/iri/hash/PearlDiver.java b/src/main/java/com/iota/iri/hash/PearlDiver.java index 133ac35fa1..6ff37fde86 100644 --- a/src/main/java/com/iota/iri/hash/PearlDiver.java +++ b/src/main/java/com/iota/iri/hash/PearlDiver.java @@ -53,7 +53,8 @@ public synchronized boolean search(final int[] transactionTrits, final int minWe initializeMidCurlStates(transactionTrits, midStateLow, midStateHigh); if (numberOfThreads <= 0) { - numberOfThreads = Math.max(1, Math.floorDiv(numberOfThreads * 8, 10)); + int available = Runtime.getRuntime().availableProcessors(); + numberOfThreads = Math.max(1, Math.floorDiv(available * 8, 10)); } List workers = new ArrayList<>(numberOfThreads); while (numberOfThreads-- > 0) { From 5a2768d834092ddda4a58f35b2f0c5c67e769d16 Mon Sep 17 00:00:00 2001 From: Gal Rogozinski Date: Sun, 27 May 2018 16:43:35 +0300 Subject: [PATCH 33/45] travis run mvn on info level (#765) --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index edba93fa3d..56825e9999 100644 --- a/.travis.yml +++ b/.travis.yml @@ -30,7 +30,7 @@ script: - git checkout -f dev - git checkout $build_head - git merge dev - - mvn integration-test + - mvn integration-test -Dlogging-level=INFO #run jar sanity tests - VERSION=`echo -e 'setns x=http://maven.apache.org/POM/4.0.0\ncat /x:project/x:version/text()' | xmllint --shell pom.xml | grep -v / | tr -d -` - echo $VERSION @@ -55,7 +55,7 @@ deploy: file: target/*.jar* skip_cleanup: true before_deploy: openssl aes-256-cbc -K $encrypted_5a15fa813cca_key -iv $encrypted_5a15fa813cca_iv -in codesigning.asc.enc -out codesigning.asc -d && gpg 
--fast-import codesigning.asc - deploy: mvn package -P build-extras + deploy: mvn package -P build-extras -Dlogging-level=INFO on: tags: true repo: iotaledger/iri From 8ca079d30f63f79cd5101e7439f69abb55e281f6 Mon Sep 17 00:00:00 2001 From: Thibault Martinez Date: Wed, 30 May 2018 12:58:03 +0200 Subject: [PATCH 34/45] Remove trailing space in getBalances tips body param (#776) --- src/main/java/com/iota/iri/service/API.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/com/iota/iri/service/API.java b/src/main/java/com/iota/iri/service/API.java index e9869dbad1..b49559907f 100644 --- a/src/main/java/com/iota/iri/service/API.java +++ b/src/main/java/com/iota/iri/service/API.java @@ -255,7 +255,7 @@ private AbstractResponse process(final String requestString, InetSocketAddress s case "getBalances": { final List addresses = getParameterAsList(request,"addresses", HASH_SIZE); final List tips = request.containsKey("tips") ? - getParameterAsList(request,"tips ", HASH_SIZE): + getParameterAsList(request,"tips", HASH_SIZE): null; final int threshold = getParameterAsInt(request, "threshold"); return getBalancesStatement(addresses, tips, threshold); @@ -1177,4 +1177,3 @@ private synchronized void storeMessageStatement(final String address, final Stri broadcastTransactionStatement(powResult); } } - From f575913967e0635f0d2907967a6945fdb5b47365 Mon Sep 17 00:00:00 2001 From: Alon Elmaliah Date: Wed, 30 May 2018 14:34:50 +0300 Subject: [PATCH 35/45] fix MAX_TIMESTAMP_VALUE to actually represent "max positive 27-trits value" (#777) --- src/main/java/com/iota/iri/service/API.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iota/iri/service/API.java b/src/main/java/com/iota/iri/service/API.java index b49559907f..52760a9e4a 100644 --- a/src/main/java/com/iota/iri/service/API.java +++ b/src/main/java/com/iota/iri/service/API.java @@ -74,7 +74,7 @@ public class API { private final static int HASH_SIZE = 81; 
private final static int TRYTES_SIZE = 2673; - private final static long MAX_TIMESTAMP_VALUE = (3^27 - 1) / 2; + private final static long MAX_TIMESTAMP_VALUE = (long) (Math.pow(3, 27) - 1) / 2; // max positive 27-trits value private final int minRandomWalks; private final int maxRandomWalks; From 1979128ebee727c03cb58062fa96b96ef7ce1f7f Mon Sep 17 00:00:00 2001 From: alon-e Date: Thu, 31 May 2018 18:25:52 +0300 Subject: [PATCH 36/45] Implementation and testing of EntryPointSelector --- pom.xml | 8 +++ .../tipselection/EntryPointSelector.java | 27 ++++++++ .../impl/EntryPointSelectorImpl.java | 40 ++++++++++++ .../impl/EntryPointSelectorImplTest.java | 63 +++++++++++++++++++ 4 files changed, 138 insertions(+) create mode 100644 src/main/java/com/iota/iri/service/tipselection/EntryPointSelector.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImpl.java create mode 100644 src/test/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImplTest.java diff --git a/pom.xml b/pom.xml index 162201ce3d..6353698d92 100644 --- a/pom.xml +++ b/pom.xml @@ -144,6 +144,14 @@ test + + + org.mockito + mockito-all + 1.10.19 + test + + uk.co.froot.maven.enforcer digest-enforcer-rules diff --git a/src/main/java/com/iota/iri/service/tipselection/EntryPointSelector.java b/src/main/java/com/iota/iri/service/tipselection/EntryPointSelector.java new file mode 100644 index 0000000000..5afcee7ec2 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/EntryPointSelector.java @@ -0,0 +1,27 @@ +package com.iota.iri.service.tipselection; + +import com.iota.iri.model.Hash; + +/** + * Selects an entryPoint for tip selection. + *

+ * this point is used as the starting point where + * the particle starts the random walk. + *

+ */ + +public interface EntryPointSelector { + + /** + *get an entryPoint for tip selection + * + *Uses depth to determine the entry point for + *the random walk. + * + * @param depth Depth, in milestones. a notion of how deep to search for a good starting point. + * @return Entry point for walk method + * @throws Exception If DB fails to retrieve transactions + */ + Hash getEntryPoint(int depth)throws Exception; + +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImpl.java b/src/main/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImpl.java new file mode 100644 index 0000000000..d1837c6728 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImpl.java @@ -0,0 +1,40 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.Milestone; +import com.iota.iri.controllers.MilestoneViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.service.tipselection.EntryPointSelector; +import com.iota.iri.storage.Tangle; + +/** + * Implementation of EntryPointSelector that given a depth N, returns a N-deep milestone. + * Meaning milestone(latestSolid - depth) + * Used to as a starting point for the random walk. 
+ */ +public class EntryPointSelectorImpl implements EntryPointSelector { + + private final Tangle tangle; + private final Milestone milestone; + private final boolean testnet; + private final int milestoneStartIndex; + + public EntryPointSelectorImpl(Tangle tangle, Milestone milestone, boolean testnet, int milestoneStartIndex) { + this.tangle = tangle; + this.milestone = milestone; + + this.testnet = testnet; + this.milestoneStartIndex = milestoneStartIndex; + } + + @Override + public Hash getEntryPoint(int depth) throws Exception { + int milestoneIndex = Math.max(milestone.latestSolidSubtangleMilestoneIndex - depth - 1, 0); + MilestoneViewModel milestoneViewModel = + MilestoneViewModel.findClosestNextMilestone(tangle, milestoneIndex, testnet, milestoneStartIndex); + if (milestoneViewModel != null && milestoneViewModel.getHash() != null) { + return milestoneViewModel.getHash(); + } + + return milestone.latestSolidSubtangleMilestone; + } +} diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImplTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImplTest.java new file mode 100644 index 0000000000..dc5808cd0e --- /dev/null +++ b/src/test/java/com/iota/iri/service/tipselection/impl/EntryPointSelectorImplTest.java @@ -0,0 +1,63 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.Milestone; +import com.iota.iri.hash.SpongeFactory; +import com.iota.iri.model.Hash; +import com.iota.iri.model.IntegerIndex; +import com.iota.iri.service.tipselection.EntryPointSelector; +import com.iota.iri.storage.Indexable; +import com.iota.iri.storage.Persistable; +import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.Pair; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.runners.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class EntryPointSelectorImplTest { + + 
@Mock + private Milestone milestone; + @Mock + private Tangle tangle; + + @Test + public void testEntryPointWithTangleData() throws Exception { + Hash milestoneHash = Hash.calculate(SpongeFactory.Mode.CURLP81, new int[]{1}); + mockTangleBehavior(milestoneHash); + mockMilestoneTrackerBehavior(0, Hash.NULL_HASH); + + EntryPointSelector entryPointSelector = new EntryPointSelectorImpl(tangle, milestone, false, 0); + Hash entryPoint = entryPointSelector.getEntryPoint(10); + + Assert.assertEquals("The entry point should be the milestone in the Tangle", milestoneHash, entryPoint); + } + + @Test + public void testEntryPointWithoutTangleData() throws Exception { + mockMilestoneTrackerBehavior(0, Hash.NULL_HASH); + + EntryPointSelector entryPointSelector = new EntryPointSelectorImpl(tangle, milestone, false, 0); + Hash entryPoint = entryPointSelector.getEntryPoint(10); + + Assert.assertEquals("The entry point should be the last tracked solid milestone", Hash.NULL_HASH, entryPoint); + } + + + private void mockMilestoneTrackerBehavior(int latestSolidSubtangleMilestoneIndex, Hash latestSolidSubtangleMilestone) { + milestone.latestSolidSubtangleMilestoneIndex = latestSolidSubtangleMilestoneIndex; + milestone.latestSolidSubtangleMilestone = latestSolidSubtangleMilestone; + } + + private void mockTangleBehavior(Hash milestoneModelHash) throws Exception { + com.iota.iri.model.Milestone milestoneModel = new com.iota.iri.model.Milestone(); + milestoneModel.index = new IntegerIndex(0); + milestoneModel.hash = milestoneModelHash; + Pair indexMilestoneModel = new Pair<>(new IntegerIndex(0), milestoneModel); + Mockito.when(tangle.getFirst(com.iota.iri.model.Milestone.class, IntegerIndex.class)) + .thenReturn(indexMilestoneModel); + } +} \ No newline at end of file From 0c3dc70144c465a8bbc0e868c914dbef5813f6d9 Mon Sep 17 00:00:00 2001 From: alon-e Date: Thu, 31 May 2018 18:55:43 +0300 Subject: [PATCH 37/45] Implementation and testing of RatingCalculator --- 
src/main/java/com/iota/iri/model/Hash.java | 2 +- src/main/java/com/iota/iri/model/HashId.java | 13 + .../java/com/iota/iri/model/HashPrefix.java | 61 +++++ .../tipselection/RatingCalculator.java | 26 ++ .../impl/CumulativeWeightCalculator.java | 161 +++++++++++++ .../service/tipselection/impl/RatingOne.java | 49 ++++ .../impl/TransformingBoundedHashSet.java | 40 +++ .../collections/impl/TransformingMap.java | 80 ++++++ .../collections/interfaces/UnIterableMap.java | 63 +++++ .../impl/CumulativeWeightCalculatorTest.java} | 227 +++++++++--------- .../tipselection/impl/RatingOneTest.java | 75 ++++++ 11 files changed, 681 insertions(+), 116 deletions(-) create mode 100644 src/main/java/com/iota/iri/model/HashId.java create mode 100644 src/main/java/com/iota/iri/model/HashPrefix.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/RatingCalculator.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/RatingOne.java create mode 100644 src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java create mode 100644 src/main/java/com/iota/iri/utils/collections/impl/TransformingMap.java create mode 100644 src/main/java/com/iota/iri/utils/collections/interfaces/UnIterableMap.java rename src/test/java/com/iota/iri/service/{TipsManagerTest.java => tipselection/impl/CumulativeWeightCalculatorTest.java} (62%) create mode 100644 src/test/java/com/iota/iri/service/tipselection/impl/RatingOneTest.java diff --git a/src/main/java/com/iota/iri/model/Hash.java b/src/main/java/com/iota/iri/model/Hash.java index 9988ad078b..4b0559f78a 100644 --- a/src/main/java/com/iota/iri/model/Hash.java +++ b/src/main/java/com/iota/iri/model/Hash.java @@ -11,7 +11,7 @@ import java.util.Arrays; import java.util.Objects; -public final class Hash implements Serializable, Indexable { +public final class Hash implements Serializable, 
Indexable, HashId { public static final int SIZE_IN_TRITS = 243; public static final int SIZE_IN_BYTES = 49; diff --git a/src/main/java/com/iota/iri/model/HashId.java b/src/main/java/com/iota/iri/model/HashId.java new file mode 100644 index 0000000000..7a3d75e12a --- /dev/null +++ b/src/main/java/com/iota/iri/model/HashId.java @@ -0,0 +1,13 @@ +package com.iota.iri.model; + +/** + * Represents an ID of a transaction, address or bundle + */ +public interface HashId { + + /** + * + * @return the bytes of the Hash Id + */ + byte[] bytes(); +} diff --git a/src/main/java/com/iota/iri/model/HashPrefix.java b/src/main/java/com/iota/iri/model/HashPrefix.java new file mode 100644 index 0000000000..8eeb83c93e --- /dev/null +++ b/src/main/java/com/iota/iri/model/HashPrefix.java @@ -0,0 +1,61 @@ +package com.iota.iri.model; + +import com.iota.iri.hash.Curl; +import com.iota.iri.utils.Converter; + +import java.util.Arrays; + +public final class HashPrefix implements HashId { + public static final int PREFIX_LENGTH = 44; + private final byte[] bytes; + + public static HashPrefix createPrefix(HashId hashId) { + if (hashId == null) { + return null; + } + if (hashId instanceof HashPrefix) { + return (HashPrefix) hashId; + } + + byte[] bytes = hashId.bytes(); + bytes = Arrays.copyOf(bytes, PREFIX_LENGTH); + return new HashPrefix(bytes); + } + + private HashPrefix(byte[] bytes) { + this.bytes = bytes; + } + + @Override + public byte[] bytes() { + return bytes; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + HashPrefix that = (HashPrefix) o; + return Arrays.equals(bytes, that.bytes); + } + + @Override + public int hashCode() { + return Arrays.hashCode(bytes); + } + + @Override + public String toString() { + return trytes(bytes); + } + + private static String trytes(byte[] bytes) { + int[] dest = new int[Curl.HASH_LENGTH]; + Converter.getTrits(bytes, dest); + return 
Converter.trytes(dest); + } +} diff --git a/src/main/java/com/iota/iri/service/tipselection/RatingCalculator.java b/src/main/java/com/iota/iri/service/tipselection/RatingCalculator.java new file mode 100644 index 0000000000..6d35bd829e --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/RatingCalculator.java @@ -0,0 +1,26 @@ +package com.iota.iri.service.tipselection; + +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; + +/** + * Calculates the rating for a sub graph + */ +@FunctionalInterface +public interface RatingCalculator { + + /** + * Rating calculator + *

+ * Calculates the rating of all the transactions that reference + * a given entry point. + *

+ * + * @param entryPoint Transaction hash of a selected entry point. + * @return Map + * @throws Exception If DB fails to retrieve transactions + */ + + UnIterableMap calculate(Hash entryPoint) throws Exception; +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java b/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java new file mode 100644 index 0000000000..ee0ff86556 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java @@ -0,0 +1,161 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.controllers.ApproveeViewModel; +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.model.HashPrefix; +import com.iota.iri.service.tipselection.RatingCalculator; +import com.iota.iri.utils.collections.impl.TransformingBoundedHashSet; +import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.collections.impl.TransformingMap; +import com.iota.iri.utils.collections.interfaces.BoundedSet; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.SetUtils; +import org.apache.commons.lang3.ObjectUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * Implementation of RatingCalculator that gives the cumulative for each transaction referencing entryPoint. + * Used to create a weighted random walks. 
+ * + * @see https://github.com/alongalky/iota-docs/blob/master/cumulative.md + */ +public class CumulativeWeightCalculator implements RatingCalculator{ + + private static final Logger log = LoggerFactory.getLogger(CumulativeWeightCalculator.class); + public static final int MAX_ANCESTORS_SIZE = 5000; + + public final Tangle tangle; + + public CumulativeWeightCalculator(Tangle tangle) { + this.tangle = tangle; + } + + @Override + public UnIterableMap calculate(Hash entryPoint) throws Exception { + log.debug("Start calculating cw starting with tx hash {}", entryPoint); + + LinkedHashSet txHashesToRate = sortTransactionsInTopologicalOrder(entryPoint); + return calculateCwInOrder(txHashesToRate); + } + + //Uses DFS algorithm to sort + private LinkedHashSet sortTransactionsInTopologicalOrder(Hash startTx) throws Exception { + LinkedHashSet sortedTxs = new LinkedHashSet<>(); + Deque stack = new ArrayDeque<>(); + Map> txToDirectApprovers = new HashMap<>(); + + stack.push(startTx); + while (CollectionUtils.isNotEmpty(stack)) { + Hash txHash = stack.peek(); + if (!sortedTxs.contains(txHash)) { + Collection appHashes = getTxDirectApproversHashes(txHash, txToDirectApprovers); + if (CollectionUtils.isNotEmpty(appHashes)) { + Hash txApp = getAndRemoveApprover(appHashes); + stack.push(txApp); + continue; + } + } + else { + stack.pop(); + continue; + } + sortedTxs.add(txHash); + } + + return sortedTxs; + } + + private Hash getAndRemoveApprover(Collection appHashes) { + Iterator hashIterator = appHashes.iterator(); + Hash txApp = hashIterator.next(); + hashIterator.remove(); + return txApp; + } + + private Collection getTxDirectApproversHashes(Hash txHash, Map> txToDirectApprovers) + throws Exception { + Collection txApprovers = txToDirectApprovers.get(txHash); + if (txApprovers == null) { + ApproveeViewModel approvers = ApproveeViewModel.load(tangle, txHash); + Collection appHashes = CollectionUtils.emptyIfNull(approvers.getHashes()); + txApprovers = new 
HashSet<>(appHashes.size()); + for (Hash appHash : appHashes) { + //if not genesis (the tx that confirms itself) + if (ObjectUtils.notEqual(Hash.NULL_HASH, appHash)) { + txApprovers.add(appHash); + } + } + txToDirectApprovers.put(txHash, txApprovers); + } + return txApprovers; + } + + //must specify using LinkedHashSet since Java has no interface that guarantees uniqueness and insertion order + private UnIterableMap calculateCwInOrder(LinkedHashSet txsToRate) throws Exception { + UnIterableMap> txHashToApprovers = createTxHashToApproversPrefixMap(); + UnIterableMap txHashToCumulativeWeight = createTxHashToCumulativeWeightMap(txsToRate.size()); + + Iterator txHashIterator = txsToRate.iterator(); + while (txHashIterator.hasNext()) { + Hash txHash = txHashIterator.next(); + txHashToCumulativeWeight = updateCw(txHashToApprovers, txHashToCumulativeWeight, txHash); + txHashToApprovers = updateApproversAndReleaseMemory(txHashToApprovers, txHash); + txHashIterator.remove(); + } + return txHashToCumulativeWeight; + } + + + private UnIterableMap> updateApproversAndReleaseMemory(UnIterableMap> txHashToApprovers, Hash txHash) throws Exception { + Set approvers = SetUtils.emptyIfNull(txHashToApprovers.get(txHash)); + + TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); + Hash trunkHash = transactionViewModel.getTrunkTransactionHash(); + Hash branchHash = transactionViewModel.getBranchTransactionHash(); + + Set trunkApprovers = createApprovers(txHashToApprovers, txHash, approvers, trunkHash); + txHashToApprovers.put(trunkHash, trunkApprovers); + Set branchApprovers = createApprovers(txHashToApprovers, txHash, approvers, branchHash); + txHashToApprovers.put(branchHash, branchApprovers); + + txHashToApprovers.remove(txHash); + + return txHashToApprovers; + } + + private Set createApprovers(UnIterableMap> txHashToApprovers, HashId txHash, + Set approvers, HashId trunkHash) { + approvers = createTransformingBoundedSet(approvers); + 
approvers.addAll(CollectionUtils.emptyIfNull(txHashToApprovers.get(trunkHash))); + approvers.add(txHash); + return approvers; + } + + private static UnIterableMap updateCw( + UnIterableMap> txHashToApprovers, UnIterableMap txToCumulativeWeight, + Hash txHash) { + Set approvers = txHashToApprovers.get(txHash); + int weight = CollectionUtils.emptyIfNull(approvers).size() + 1; + txToCumulativeWeight.put(txHash, weight); + return txToCumulativeWeight; + } + + private static UnIterableMap> createTxHashToApproversPrefixMap() { + return new TransformingMap<>(HashPrefix::createPrefix, null); + } + + private static UnIterableMap createTxHashToCumulativeWeightMap(int size) { + return new TransformingMap<>(size, HashPrefix::createPrefix, null); + } + + private static BoundedSet createTransformingBoundedSet(Collection c) { + return new TransformingBoundedHashSet<>(c, MAX_ANCESTORS_SIZE, HashPrefix::createPrefix); + } +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/RatingOne.java b/src/main/java/com/iota/iri/service/tipselection/impl/RatingOne.java new file mode 100644 index 0000000000..dd46c0f37a --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/RatingOne.java @@ -0,0 +1,49 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.controllers.ApproveeViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.service.tipselection.RatingCalculator; +import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.collections.impl.TransformingMap; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; + +import java.util.*; + +/** + * Implementation of RatingCalculator that gives a uniform rating of 1 to each transaction. + * Used to create uniform random walks. 
+ */ +public class RatingOne implements RatingCalculator { + + private final Tangle tangle; + + public RatingOne(Tangle tangle) { + this.tangle = tangle; + } + + public UnIterableMap calculate(Hash entryPoint) throws Exception { + UnIterableMap rating = new TransformingMap<>(null, null); + + Queue queue = new LinkedList<>(); + queue.add(entryPoint); + rating.put(entryPoint, 1); + + Hash hash; + + //traverse all transactions that reference entryPoint + while ((hash = queue.poll()) != null) { + Set approvers = ApproveeViewModel.load(tangle, hash).getHashes(); + for (Hash tx : approvers) { + if (!rating.containsKey(tx)) { + //add to rating w/ value "1" + rating.put(tx, 1); + queue.add(tx); + } + } + } + return rating; + } + + +} diff --git a/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java b/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java new file mode 100644 index 0000000000..7548c21b07 --- /dev/null +++ b/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java @@ -0,0 +1,40 @@ +package com.iota.iri.utils.collections.impl; + +import java.util.Collection; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; + + +public class TransformingBoundedHashSet extends BoundedHashSet{ + + private final UnaryOperator transformer; + + public TransformingBoundedHashSet(int maxSize, UnaryOperator transformer) { + super(maxSize); + this.transformer = transformer; + } + + public TransformingBoundedHashSet(Collection c, int maxSize, UnaryOperator transformer) { + super(maxSize); + this.transformer = transformer; + this.addAll(c); + } + + @Override + public boolean add(E e) { + if (!isFull()) { + e = transformer.apply(e); + } + return super.add(e); + } + + @Override + public boolean addAll(Collection c) { + if (!isFull()) { + c = c.stream() + .map(el -> transformer.apply(el)) + .collect(Collectors.toSet()); + } + return super.addAll(c); + } +} diff --git 
a/src/main/java/com/iota/iri/utils/collections/impl/TransformingMap.java b/src/main/java/com/iota/iri/utils/collections/impl/TransformingMap.java new file mode 100644 index 0000000000..6e141cb790 --- /dev/null +++ b/src/main/java/com/iota/iri/utils/collections/impl/TransformingMap.java @@ -0,0 +1,80 @@ +package com.iota.iri.utils.collections.impl; + +import com.iota.iri.utils.collections.interfaces.UnIterableMap; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.function.UnaryOperator; + +/** + * A map that performs unary operations on key-value pairs that are inserted into it. + * + * @param key type + * @param value type + */ +public class TransformingMap implements UnIterableMap { + private Map delegateMap; + private UnaryOperator keyOptimizer; + private UnaryOperator valueTransformer; + + public TransformingMap(UnaryOperator keyOptimizer, UnaryOperator valueTransformer) { + this(16, keyOptimizer, valueTransformer); + } + + public TransformingMap(int initialCapacity, UnaryOperator keyOptimizer, UnaryOperator valueTransformer) { + this.keyOptimizer = keyOptimizer == null ? UnaryOperator.identity() : keyOptimizer; + this.valueTransformer = valueTransformer == null ? 
UnaryOperator.identity() : valueTransformer; + + this.delegateMap = new HashMap<>(initialCapacity); + } + + @Override + public int size() { + return delegateMap.size(); + } + + @Override + public boolean isEmpty() { + return delegateMap.isEmpty(); + } + + @Override + public boolean containsKey(K key) { + K newKey = keyOptimizer.apply(key); + return delegateMap.containsKey(newKey); + } + + @Override + public boolean containsValue(V value) { + return delegateMap.containsValue(value); + } + + @Override + public V get(K key) { + K newKey = keyOptimizer.apply(key); + return delegateMap.get(newKey); + } + + @Override + public V put(K key, V value) { + key = keyOptimizer.apply(key); + value = valueTransformer.apply(value); + return delegateMap.put(key, value); + } + + @Override + public V remove(K key) { + return delegateMap.remove(key); + } + + @Override + public void clear() { + delegateMap.clear(); + } + + @Override + public Collection values() { + return delegateMap.values(); + } +} diff --git a/src/main/java/com/iota/iri/utils/collections/interfaces/UnIterableMap.java b/src/main/java/com/iota/iri/utils/collections/interfaces/UnIterableMap.java new file mode 100644 index 0000000000..26eb6d5131 --- /dev/null +++ b/src/main/java/com/iota/iri/utils/collections/interfaces/UnIterableMap.java @@ -0,0 +1,63 @@ +package com.iota.iri.utils.collections.interfaces; + +import java.util.Collection; +import java.util.Map; + + +/** + * Similar to {@link Map} but hides key retrieval functionality. + * Thus one can't iterate over key or entries. 
+ * Implementing class may transform keys to perform memory operations + * + * @param The key type + * @param The value type + */ +public interface UnIterableMap { + + + /** + * {See {@link Map#size()}} + */ + int size(); + + /** + * {See {@link Map#isEmpty()}} + */ + boolean isEmpty(); + + /** + * {See {@link Map#containsKey(Object)}} + */ + boolean containsKey(K key); + + /** + * {See {@link Map#containsValue(Object)}} + */ + boolean containsValue(V value); + + /** + * + * {See {@link Map#get}} + */ + V get(K key); + + /** + * {See {@link Map#put} + */ + V put(K key, V value); + + /** + * {See {@link Map#keySet()}} + */ + V remove(K key); + + /** + * {See {@link Map#clear()}} + */ + void clear(); + + /** + * {See {@link Map#values} + */ + Collection values(); +} diff --git a/src/test/java/com/iota/iri/service/TipsManagerTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculatorTest.java similarity index 62% rename from src/test/java/com/iota/iri/service/TipsManagerTest.java rename to src/test/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculatorTest.java index cbde8167eb..ed4ca2ab75 100644 --- a/src/test/java/com/iota/iri/service/TipsManagerTest.java +++ b/src/test/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculatorTest.java @@ -1,18 +1,13 @@ -package com.iota.iri.service; - -import com.iota.iri.LedgerValidator; -import com.iota.iri.Milestone; -import com.iota.iri.Snapshot; -import com.iota.iri.TransactionValidator; -import com.iota.iri.conf.Configuration; -import com.iota.iri.controllers.TipsViewModel; +package com.iota.iri.service.tipselection.impl; + + +import com.iota.iri.controllers.ApproveeViewModel; import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.model.Hash; -import com.iota.iri.network.TransactionRequester; +import com.iota.iri.model.HashId; import com.iota.iri.storage.Tangle; import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; -import 
com.iota.iri.utils.IotaUtils; -import com.iota.iri.zmq.MessageQ; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -21,39 +16,24 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.nio.Buffer; import java.util.*; -import static com.iota.iri.controllers.TransactionViewModelTest.getRandomTransactionHash; -import static com.iota.iri.controllers.TransactionViewModelTest.getRandomTransactionTrits; -import static com.iota.iri.controllers.TransactionViewModelTest.getRandomTransactionWithTrunkAndBranch; - -/** - * Created by paul on 4/27/17. - */ -public class TipsManagerTest { +import static com.iota.iri.controllers.TransactionViewModelTest.*; +public class CumulativeWeightCalculatorTest { private static final TemporaryFolder dbFolder = new TemporaryFolder(); private static final TemporaryFolder logFolder = new TemporaryFolder(); private static final String TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT = "tx%d cumulative weight is not as expected"; private static Tangle tangle; - private static TipsManager tipsManager; + private static CumulativeWeightCalculator cumulativeWeightCalculator; private final Logger log = LoggerFactory.getLogger(this.getClass()); - @Test - public void capSum() { - long a = 0, b, max = Long.MAX_VALUE / 2; - for (b = 0; b < max; b += max / 100) { - a = TipsManager.capSum(a, b, max); - Assert.assertTrue("a should never go above max", a <= max); - } - } - @AfterClass public static void tearDown() throws Exception { tangle.shutdown(); dbFolder.delete(); + logFolder.delete(); } @BeforeClass @@ -64,20 +44,7 @@ public static void setUp() throws Exception { tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder .getRoot().getAbsolutePath(), 1000)); tangle.init(); - TipsViewModel tipsViewModel = new TipsViewModel(); - MessageQ messageQ = new MessageQ(0, null, 1, false); - 
TransactionRequester transactionRequester = new TransactionRequester(tangle, messageQ); - TransactionValidator transactionValidator = new TransactionValidator(tangle, tipsViewModel, transactionRequester, - messageQ, Long.parseLong(Configuration.GLOBAL_SNAPSHOT_TIME)); - int milestoneStartIndex = Integer.parseInt(Configuration.MAINNET_MILESTONE_START_INDEX); - int numOfKeysInMilestone = Integer.parseInt(Configuration.MAINNET_NUM_KEYS_IN_MILESTONE); - Milestone milestone = new Milestone(tangle, Hash.NULL_HASH, Snapshot.init( - Configuration.MAINNET_SNAPSHOT_FILE, Configuration.MAINNET_SNAPSHOT_SIG_FILE, false).clone(), - transactionValidator, false, messageQ, numOfKeysInMilestone, - milestoneStartIndex, true); - LedgerValidator ledgerValidator = new LedgerValidator(tangle, milestone, transactionRequester, messageQ); - tipsManager = new TipsManager(tangle, ledgerValidator, transactionValidator, tipsViewModel, milestone, - 15, messageQ, false, milestoneStartIndex); + cumulativeWeightCalculator = new CumulativeWeightCalculator(tangle); } @Test @@ -97,19 +64,18 @@ public void testCalculateCumulativeWeight() throws Exception { transaction2.store(tangle); transaction3.store(tangle); transaction4.store(tangle); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), - transaction.getHash(), false, new HashSet<>()); + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(transaction.getHash()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 1, txToCw.get(IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 1, txToCw.get(transaction4.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 2, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 2, txToCw.get(transaction3.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 
3, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 3, txToCw.get(transaction2.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 4, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 4, txToCw.get(transaction1.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 5, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 5, txToCw.get(transaction.getHash()).intValue()); } @Test @@ -126,22 +92,22 @@ public void testCalculateCumulativeWeightDiamond() throws Exception { transaction1.store(tangle); transaction2.store(tangle); transaction3.store(tangle); + log.debug("printing transaction in diamond shape \n {} \n{} {}\n {}", transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash()); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), - transaction.getHash(), false, new HashSet<>()); + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(transaction.getHash()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 1, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)) + 1, txToCw.get(transaction3.getHash()) .intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 2, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)) + 2, txToCw.get(transaction1.getHash()) .intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 2, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)) + 2, txToCw.get(transaction2.getHash()) .intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 4, 
txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 4, txToCw.get(transaction.getHash()).intValue()); } @Test @@ -165,24 +131,23 @@ public void testCalculateCumulativeWeightLinear() throws Exception { log.info(String.format("Linear ordered hashes from tip %.4s, %.4s, %.4s, %.4s, %.4s", transaction4.getHash(), transaction3.getHash(), transaction2.getHash(), transaction1.getHash(), transaction.getHash())); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), - transaction.getHash(), false, new HashSet<>()); + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(transaction.getHash()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 1, txToCw.get(IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 1, txToCw.get(transaction4.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 2, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 2, txToCw.get(transaction3.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 3, txToCw.get(transaction2.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 4, txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 4, txToCw.get(transaction1.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 5, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 5, txToCw.get(transaction.getHash()).intValue()); } @Test - public void testCalculateCumulativeWeightAlon() throws Exception { + public void testCalculateCumulativeWeight2() throws 
Exception { TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4, transaction5, transaction6; transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); @@ -211,23 +176,22 @@ public void testCalculateCumulativeWeightAlon() throws Exception { transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash(), transaction4, transaction5, transaction6); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), - transaction.getHash(), false, new HashSet<>()); + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(transaction.getHash()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 6), - 1, txToCw.get(IotaUtils.getSubHash(transaction6.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 1, txToCw.get(transaction6.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 5), - 2, txToCw.get(IotaUtils.getSubHash(transaction5.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 2, txToCw.get(transaction5.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 4), - 2, txToCw.get(IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 2, txToCw.get(transaction4.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), - 3, txToCw.get(IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 3, txToCw.get(transaction3.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), - 3, txToCw.get(IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 3, txToCw.get(transaction2.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), - 1, 
txToCw.get(IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 1, txToCw.get(transaction1.getHash()).intValue()); Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), - 7, txToCw.get(IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + 7, txToCw.get(transaction.getHash()).intValue()); } @Test @@ -248,60 +212,96 @@ public void cwCalculationSameAsLegacy() throws Exception { transactionViewModel.getTrunkTransactionHash(), transactionViewModel.getBranchTransactionHash())); } - Map> ratings = new HashMap<>(); + Map> ratings = new HashMap<>(); updateApproversRecursively(hashes[0], ratings, new HashSet<>()); - Map txToCw = tipsManager.calculateCumulativeWeight(new HashSet<>(), - hashes[0], false, new HashSet<>()); + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(hashes[0]); Assert.assertEquals("missing txs from new calculation", ratings.size(), txToCw.size()); ratings.forEach((hash, weight) -> { log.debug(String.format("tx %.4s has expected weight of %d", hash, weight.size())); Assert.assertEquals( "new calculation weight is not as expected for hash " + hash, - weight.size(), txToCw.get(IotaUtils.getSubHash(hash, TipsManager.SUBHASH_LENGTH)) + weight.size(), txToCw.get(hash) .intValue()); }); } @Test - public void testCalculateCommulativeWeightWithLeftBehind() throws Exception { + public void testTangleWithCircle() throws Exception { + TransactionViewModel transaction; + Hash randomTransactionHash = getRandomTransactionHash(); + transaction = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(randomTransactionHash, randomTransactionHash), randomTransactionHash); + + transaction.store(tangle); + + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(transaction.getHash()); + Assert.assertEquals("There should be only one tx in the map", 1, txToCw.size()); + Assert.assertEquals("The circle raised the weight", 1, 
txToCw.get(randomTransactionHash).intValue()); + } + + @Test + public void testTangleWithCircle2() throws Exception { TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + Hash randomTransactionHash2 = getRandomTransactionHash(); + transaction = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + randomTransactionHash2, randomTransactionHash2), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction1.getHash(), transaction1.getHash()), randomTransactionHash2); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + + cumulativeWeightCalculator.calculate(transaction.getHash()); + //No infinite loop (which will probably result in an overflow exception) means test has passed + } + + @Test + public void testCollsionsInDiamondTangle() throws Exception { + TransactionViewModel transaction, transaction1, transaction2, transaction3; transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); - transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), - transaction.getHash()), getRandomTransactionHash()); - transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), - transaction.getHash()), getRandomTransactionHash()); - transaction4 = new 
TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction3.getHash(), - transaction1.getHash()), getRandomTransactionHash()); - Set approvedHashes = new HashSet<>(); + Hash transactionHash2 = getHashWithSimilarPrefix(transaction1); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), transactionHash2); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), + transaction2.getHash()), getRandomTransactionHash()); transaction.store(tangle); transaction1.store(tangle); - approvedHashes.add(transaction2.getHash()); transaction2.store(tangle); - approvedHashes.add(transaction3.getHash()); transaction3.store(tangle); - transaction4.store(tangle); - Map cumulativeWeight = tipsManager.calculateCumulativeWeight(approvedHashes, - transaction.getHash(), true, new HashSet<>()); - - log.info(cumulativeWeight.toString()); - String msg = "Cumulative weight is wrong for tx"; - Assert.assertEquals(msg + 4, 1, cumulativeWeight.get( - IotaUtils.getSubHash(transaction4.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); - Assert.assertEquals(msg + 3, 1, cumulativeWeight.get( - IotaUtils.getSubHash(transaction3.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); - Assert.assertEquals(msg + 2, 1, cumulativeWeight.get( - IotaUtils.getSubHash(transaction2.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); - Assert.assertEquals(msg + 1, 2, cumulativeWeight.get( - IotaUtils.getSubHash(transaction1.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); - Assert.assertEquals(msg + 0, 3, cumulativeWeight.get( - IotaUtils.getSubHash(transaction.getHash(), TipsManager.SUBHASH_LENGTH)).intValue()); + log.debug("printing transaction in diamond shape \n {} \n{} {}\n {}", + transaction.getHash(), transaction1.getHash(), transaction2.getHash(), transaction3.getHash()); + UnIterableMap txToCw = cumulativeWeightCalculator.calculate(transaction.getHash()); + + 
Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 3), + 1, txToCw.get(transaction3.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 1), + 2, txToCw.get(transaction1.getHash()).intValue()); + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 2), + 2, txToCw.get(transaction2.getHash()).intValue()); + //expected to not count 1 of the parents due to collision + Assert.assertEquals(String.format(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, 0), + 3, txToCw.get(transaction.getHash()).intValue()); + } + + private Hash getHashWithSimilarPrefix(TransactionViewModel transaction1) { + Hash transactionHash1 = transaction1.getHash(); + byte[] bytes = transactionHash1.bytes(); + bytes = Arrays.copyOf(bytes, bytes.length); + Arrays.fill(bytes, bytes.length-5, bytes.length-1, (byte)1); + return new Hash(bytes); } - // @Test + + // @Test //To be removed once CI tests are ready public void testUpdateRatingsTime() throws Exception { int max = 100001; @@ -324,24 +324,21 @@ private long ratingTime(int size) throws Exception { new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(hashes[i - random.nextInt(i) - 1], hashes[i - random.nextInt(i) - 1]), hashes[i]).store(tangle); } - Map ratings = new HashMap<>(); long start = System.currentTimeMillis(); -// tipsManager.serialUpdateRatings(new Snapshot(Snapshot.initialSnapshot), hashes[0], ratings, new HashSet<>() -// , null); - tipsManager.calculateCumulativeWeight(new HashSet<>(), hashes[0], false, new HashSet<>()); + + cumulativeWeightCalculator.calculate(hashes[0]); long time = System.currentTimeMillis() - start; System.out.println(time); return time; } //Simple recursive algorithm that maps each tx hash to its approvers' hashes - private static Set updateApproversRecursively(Hash txHash, Map> txToApprovers, - Set analyzedTips) throws Exception { - Set approvers; + private static Set 
updateApproversRecursively(Hash txHash, Map> txToApprovers, + Set analyzedTips) throws Exception { + Set approvers; if (analyzedTips.add(txHash)) { - TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); approvers = new HashSet<>(Collections.singleton(txHash)); - Set approverHashes = transactionViewModel.getApprovers(tangle).getHashes(); + Set approverHashes = ApproveeViewModel.load(tangle, txHash).getHashes(); for (Hash approver : approverHashes) { approvers.addAll(updateApproversRecursively(approver, txToApprovers, analyzedTips)); } @@ -355,4 +352,4 @@ private static Set updateApproversRecursively(Hash txHash, Map rate = rating.calculate(transaction.getHash()); + + Assert.assertEquals(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, + 1, rate.get(transaction4.getHash()).intValue()); + Assert.assertEquals(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, + 1, rate.get(transaction3.getHash()).intValue()); + Assert.assertEquals(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, + 1, rate.get(transaction2.getHash()).intValue()); + Assert.assertEquals(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, + 1, rate.get(transaction1.getHash()).intValue()); + Assert.assertEquals(TX_CUMULATIVE_WEIGHT_IS_NOT_AS_EXPECTED_FORMAT, + 1, rate.get(transaction.getHash()).intValue()); + } +} \ No newline at end of file From 0d203efac2c2bec1d7d17441d44840b6e3529e5a Mon Sep 17 00:00:00 2001 From: alon-e Date: Thu, 31 May 2018 18:56:34 +0300 Subject: [PATCH 38/45] Implementation and testing of Walker --- .../java/com/iota/iri/conf/Configuration.java | 5 +- .../iri/service/tipselection/TailFinder.java | 27 ++ .../service/tipselection/WalkValidator.java | 23 ++ .../iota/iri/service/tipselection/Walker.java | 29 ++ .../tipselection/impl/TailFinderImpl.java | 48 ++++ .../tipselection/impl/WalkValidatorImpl.java | 107 +++++++ .../tipselection/impl/WalkerAlpha.java | 146 ++++++++++ .../com/iota/iri/TransactionTestUtils.java | 37 +++ 
.../controllers/TransactionViewModelTest.java | 25 ++ .../tipselection/impl/TailFinderImplTest.java | 92 ++++++ .../impl/WalkValidatorImplTest.java | 88 ++++++ .../tipselection/impl/WalkerAlphaTest.java | 268 ++++++++++++++++++ 12 files changed, 894 insertions(+), 1 deletion(-) create mode 100644 src/main/java/com/iota/iri/service/tipselection/TailFinder.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/WalkValidator.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/Walker.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/TailFinderImpl.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java create mode 100644 src/test/java/com/iota/iri/TransactionTestUtils.java create mode 100644 src/test/java/com/iota/iri/service/tipselection/impl/TailFinderImplTest.java create mode 100644 src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java create mode 100644 src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java diff --git a/src/main/java/com/iota/iri/conf/Configuration.java b/src/main/java/com/iota/iri/conf/Configuration.java index 80d4a1c60b..9aa9e5cb52 100644 --- a/src/main/java/com/iota/iri/conf/Configuration.java +++ b/src/main/java/com/iota/iri/conf/Configuration.java @@ -103,7 +103,8 @@ public enum DefaultConfSettings { NUMBER_OF_KEYS_IN_A_MILESTONE, TRANSACTION_PACKET_SIZE, REQUEST_HASH_SIZE, - SNAPSHOT_TIME + SNAPSHOT_TIME, + TIPSELECTION_ALPHA } @@ -168,6 +169,8 @@ public enum DefaultConfSettings { conf.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE.name(), PACKET_SIZE); conf.put(DefaultConfSettings.REQUEST_HASH_SIZE.name(), REQ_HASH_SIZE); conf.put(DefaultConfSettings.SNAPSHOT_TIME.name(), GLOBAL_SNAPSHOT_TIME); + conf.put(DefaultConfSettings.TIPSELECTION_ALPHA.name(), "0.1"); + } public boolean init() throws IOException 
{ diff --git a/src/main/java/com/iota/iri/service/tipselection/TailFinder.java b/src/main/java/com/iota/iri/service/tipselection/TailFinder.java new file mode 100644 index 0000000000..9c654e7dbb --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/TailFinder.java @@ -0,0 +1,27 @@ +package com.iota.iri.service.tipselection; + +import com.iota.iri.model.Hash; + +import java.util.Optional; + +/** + * Finds the tail of a bundle + */ + +@FunctionalInterface +public interface TailFinder { + /** + *Method for finding tails of bundles + * + *

+ * This method is used to find a tail (current_index=0) of a bundle, + * given any transaction hash in the bundle. + *

+ * + * @param hash The transaction hash of any transaction in the bundle. + * @return Hash of the tail transaction. + * @throws Exception If DB fails to retrieve transactions + */ + Optional findTail(Hash hash) throws Exception; + +} diff --git a/src/main/java/com/iota/iri/service/tipselection/WalkValidator.java b/src/main/java/com/iota/iri/service/tipselection/WalkValidator.java new file mode 100644 index 0000000000..595dd36354 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/WalkValidator.java @@ -0,0 +1,23 @@ +package com.iota.iri.service.tipselection; + +import com.iota.iri.model.Hash; + +/** + * Validates consistency of tails. + */ +@FunctionalInterface +public interface WalkValidator { + + /** + * Validation + *

+ * Checks if a given transaction is a valid tail. + *

+ * + * @param transactionHash Transaction hash to validate consistency of. + * @return True iff tail is valid. + * @throws Exception If Validation fails to execute + */ + boolean isValid(Hash transactionHash) throws Exception; + +} diff --git a/src/main/java/com/iota/iri/service/tipselection/Walker.java b/src/main/java/com/iota/iri/service/tipselection/Walker.java new file mode 100644 index 0000000000..d882a8ca4b --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/Walker.java @@ -0,0 +1,29 @@ +package com.iota.iri.service.tipselection; + +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; + +/** + * Walks the tangle from an entry point towards tips + * + */ + +public interface Walker { + + /** + * Walk algorithm + *

+ * Starts from given entry point to select valid transactions to be used + * as tips. It will output a valid transaction as a tip. + *

+ * + * @param entryPoint Transaction hash to start walk from. + * @param ratings Map of ratings for each transaction that references entryPoint. + * @param walkValidator Used to validate consistency of tails. + * @return Transaction hash of tip. + * @throws Exception If DB fails to retrieve transactions + */ + Hash walk(Hash entryPoint, UnIterableMap ratings, WalkValidator walkValidator) throws Exception; + +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/TailFinderImpl.java b/src/main/java/com/iota/iri/service/tipselection/impl/TailFinderImpl.java new file mode 100644 index 0000000000..042c46dfd6 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/TailFinderImpl.java @@ -0,0 +1,48 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.service.tipselection.TailFinder; +import com.iota.iri.storage.Tangle; + +import java.util.Optional; +import java.util.Set; + +/** + * Implementation of TailFinder that given a transaction hash finds the tail of the associated bundle. 
+ * + */ +public class TailFinderImpl implements TailFinder { + + private final Tangle tangle; + + public TailFinderImpl(Tangle tangle) { + this.tangle = tangle; + } + + @Override + public Optional findTail(Hash hash) throws Exception { + TransactionViewModel tx = TransactionViewModel.fromHash(tangle, hash); + final Hash bundleHash = tx.getBundleHash(); + long index = tx.getCurrentIndex(); + while (index-- > 0 && bundleHash.equals(tx.getBundleHash())) { + Set approvees = tx.getApprovers(tangle).getHashes(); + boolean foundApprovee = false; + for (Hash approvee : approvees) { + TransactionViewModel nextTx = TransactionViewModel.fromHash(tangle, approvee); + if (nextTx.getCurrentIndex() == index && bundleHash.equals(nextTx.getBundleHash())) { + tx = nextTx; + foundApprovee = true; + break; + } + } + if (!foundApprovee) { + break; + } + } + if (tx.getCurrentIndex() == 0) { + return Optional.of(tx.getHash()); + } + return Optional.empty(); + } +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java b/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java new file mode 100644 index 0000000000..5206341860 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java @@ -0,0 +1,107 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.LedgerValidator; +import com.iota.iri.TransactionValidator; +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.Milestone; +import com.iota.iri.service.tipselection.WalkValidator; +import com.iota.iri.storage.Tangle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * Implementation of WalkValidator that checks consistency of the ledger as part of validity checks. + * + * A transaction is only valid if: + *
    + *
  1. it is a tail + *
  2. all the history of the transaction is present (is solid) + *
  3. it does not reference an old unconfirmed transaction (not belowMaxDepth) + *
  4. the ledger is still consistent if the transaction is added + * (balances of all addresses are correct and all signatures are valid) + *
+ */ +public class WalkValidatorImpl implements WalkValidator { + + private final Tangle tangle; + private final Logger log = LoggerFactory.getLogger(WalkValidator.class); + private final LedgerValidator ledgerValidator; + private final TransactionValidator transactionValidator; + private final Milestone milestone; + + private final int maxDepth; + + private Set maxDepthOkMemoization; + private Map myDiff; + private Set myApprovedHashes; + + public WalkValidatorImpl(Tangle tangle, LedgerValidator ledgerValidator, TransactionValidator transactionValidator, + Milestone milestone, int maxDepth) { + this.tangle = tangle; + this.ledgerValidator = ledgerValidator; + this.transactionValidator = transactionValidator; + this.milestone = milestone; + this.maxDepth = maxDepth; + + maxDepthOkMemoization = new HashSet<>(); + myDiff = new HashMap<>(); + myApprovedHashes = new HashSet<>(); + } + + @Override + public boolean isValid(Hash transactionHash) throws Exception { + + TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, transactionHash); + + if (transactionViewModel.getType() == TransactionViewModel.PREFILLED_SLOT) { + log.debug("Validation failed: {} is missing in db", transactionHash); + return false; + } else if (transactionViewModel.getCurrentIndex() != 0) { + log.debug("Validation failed: {} not a tail", transactionHash); + return false; + } else if (!transactionValidator.checkSolidity(transactionViewModel.getHash(), false)) { + log.debug("Validation failed: {} is not solid", transactionHash); + return false; + } else if (belowMaxDepth(transactionViewModel.getHash(), milestone.latestSolidSubtangleMilestoneIndex - maxDepth)) { + log.debug("Validation failed: {} is below max depth", transactionHash); + return false; + } else if (!ledgerValidator.updateDiff(myApprovedHashes, myDiff, transactionViewModel.getHash())) { + log.debug("Validation failed: {} is not consistent", transactionHash); + return false; + } + return true; + } + + private 
boolean belowMaxDepth(Hash tip, int depth) throws Exception { + //if tip is confirmed stop + if (TransactionViewModel.fromHash(tangle, tip).snapshotIndex() >= depth) { + return false; + } + //if tip unconfirmed, check if any referenced tx is confirmed below maxDepth + Queue nonAnalyzedTransactions = new LinkedList<>(Collections.singleton(tip)); + Set analyzedTransactions = new HashSet<>(); + Hash hash; + while ((hash = nonAnalyzedTransactions.poll()) != null) { + if (analyzedTransactions.add(hash)) { + TransactionViewModel transaction = TransactionViewModel.fromHash(tangle, hash); + if (transaction.snapshotIndex() != 0 && transaction.snapshotIndex() < depth) { + return true; + } + if (transaction.snapshotIndex() == 0) { + if (maxDepthOkMemoization.contains(hash)) { + //log.info("Memoization!"); + } + else { + nonAnalyzedTransactions.offer(transaction.getTrunkTransactionHash()); + nonAnalyzedTransactions.offer(transaction.getBranchTransactionHash()); + } + } + } + } + maxDepthOkMemoization.add(tip); + return false; + } +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java b/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java new file mode 100644 index 0000000000..0c7a9baa68 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java @@ -0,0 +1,146 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.controllers.ApproveeViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.service.tipselection.TailFinder; +import com.iota.iri.service.tipselection.WalkValidator; +import com.iota.iri.service.tipselection.Walker; +import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; +import com.iota.iri.zmq.MessageQ; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * Implementation of Walker that performs a weighted 
random walk + * with e^(alpha*Hy) as the transition function. + * + */ +public class WalkerAlpha implements Walker { + + private double alpha; + private final Random random; + + private final Tangle tangle; + private final MessageQ messageQ; + private final Logger log = LoggerFactory.getLogger(Walker.class); + + private final TailFinder tailFinder; + + public WalkerAlpha(double alpha, Random random, Tangle tangle, MessageQ messageQ, TailFinder tailFinder) { + this.alpha = alpha; + this.random = random; + this.tangle = tangle; + this.messageQ = messageQ; + this.tailFinder = tailFinder; + } + + public double getAlpha() { + return alpha; + } + + public void setAlpha(double alpha) { + this.alpha = alpha; + } + + @Override + public Hash walk(Hash entryPoint, UnIterableMap ratings, WalkValidator walkValidator) throws Exception { + if (!walkValidator.isValid(entryPoint)) { + throw new RuntimeException("entry point failed consistency check: " + entryPoint.toString()); + } + + Optional nextStep; + Deque traversedTails = new LinkedList<>(); + traversedTails.add(entryPoint); + + //Walk + do { + nextStep = selectApprover(traversedTails.getLast(), ratings, walkValidator); + nextStep.ifPresent(traversedTails::add); + } while (nextStep.isPresent()); + + log.debug("{} tails traversed to find tip", traversedTails.size()); + messageQ.publish("mctn %d", traversedTails.size()); + + return traversedTails.getLast(); + } + + private Optional selectApprover(Hash tailHash, UnIterableMap ratings, WalkValidator walkValidator) throws Exception { + Set approvers = getApprovers(tailHash); + return findNextValidTail(ratings, approvers, walkValidator); + } + + private Set getApprovers(Hash tailHash) throws Exception { + ApproveeViewModel approveeViewModel = ApproveeViewModel.load(tangle, tailHash); + return approveeViewModel.getHashes(); + } + + private Optional findNextValidTail(UnIterableMap ratings, Set approvers, WalkValidator walkValidator) throws Exception { + Optional nextTailHash = 
Optional.empty(); + + //select next tail to step to + while (!nextTailHash.isPresent()) { + Optional nextTxHash = select(ratings, approvers); + if (!nextTxHash.isPresent()) { + //no existing approver = tip + return Optional.empty(); + } + + nextTailHash = findTailIfValid(nextTxHash.get(), walkValidator); + approvers.remove(nextTxHash.get()); + //if next tail is not valid, re-select while removing it from approvers set + } + + return nextTailHash; + } + + private Optional select(UnIterableMap ratings, Set approversSet) { + + //filter based on tangle state when starting the walk + List approvers = approversSet.stream().filter(ratings::containsKey).collect(Collectors.toList()); + + //After filtering, if no approvers are available, it's a tip. + if (approvers.size() == 0) { + return Optional.empty(); + } + + //calculate the probabilities + List walkRatings = approvers.stream().map(ratings::get).collect(Collectors.toList()); + + Integer maxRating = walkRatings.stream().max(Integer::compareTo).orElse(0); + //walkRatings.stream().reduce(0, Integer::max); + + //transition probability function (normalize ratings based on Hmax) + List normalizedWalkRatings = walkRatings.stream().map(w -> w - maxRating).collect(Collectors.toList()); + List weights = normalizedWalkRatings.stream().map(w -> Math.exp(alpha * w)).collect(Collectors.toList()); + + //select the next transaction + Double weightsSum = weights.stream().reduce(0.0, Double::sum); + double target = random.nextDouble() * weightsSum; + + int approverIndex; + for (approverIndex = 0; approverIndex < weights.size() - 1; approverIndex++) { + target -= weights.get(approverIndex); + if (target <= 0) { + break; + } + } + + return Optional.of(approvers.get(approverIndex)); + } + + private Optional findTailIfValid(Hash transactionHash, WalkValidator validator) throws Exception { + Optional tailHash = tailFinder.findTail(transactionHash); + if (tailHash.isPresent()) { + if (validator.isValid(tailHash.get())) { + return tailHash; + } 
+ } + + return Optional.empty(); + } +} diff --git a/src/test/java/com/iota/iri/TransactionTestUtils.java b/src/test/java/com/iota/iri/TransactionTestUtils.java new file mode 100644 index 0000000000..4ccca6d9a9 --- /dev/null +++ b/src/test/java/com/iota/iri/TransactionTestUtils.java @@ -0,0 +1,37 @@ +package com.iota.iri; + +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.controllers.TransactionViewModelTest; +import com.iota.iri.model.Hash; +import com.iota.iri.utils.Converter; + +public class TransactionTestUtils { + + public static void setCurrentIndex(TransactionViewModel tx, long currentIndex) { + Converter.copyTrits(currentIndex, tx.trits(), TransactionViewModel.CURRENT_INDEX_TRINARY_OFFSET, + TransactionViewModel.CURRENT_INDEX_TRINARY_SIZE); + } + + public static void setLastIndex(TransactionViewModel tx, long lastIndex) { + Converter.copyTrits(lastIndex, tx.trits(), TransactionViewModel.LAST_INDEX_TRINARY_OFFSET, + TransactionViewModel.LAST_INDEX_TRINARY_SIZE); + } + + public static TransactionViewModel createBundleHead(int index) { + TransactionViewModel tx = new TransactionViewModel(TransactionViewModelTest.getRandomTransactionTrits(), TransactionViewModelTest.getRandomTransactionHash()); + setLastIndex(tx, index); + setCurrentIndex(tx, index); + return tx; + } + + public static TransactionViewModel createTransactionWithTrunkBundleHash(TransactionViewModel trunkTx, Hash branchHash) { + TransactionViewModel tx = new TransactionViewModel( + TransactionViewModelTest.getRandomTransactionWithTrunkAndBranch(trunkTx.getHash(), branchHash), + TransactionViewModelTest.getRandomTransactionHash()); + setCurrentIndex(tx, trunkTx.getCurrentIndex() - 1); + setLastIndex(tx, trunkTx.lastIndex()); + System.arraycopy(trunkTx.trits(), TransactionViewModel.BUNDLE_TRINARY_OFFSET, tx.trits(), + TransactionViewModel.BUNDLE_TRINARY_OFFSET, TransactionViewModel.BUNDLE_TRINARY_SIZE); + return tx; + } +} diff --git 
a/src/test/java/com/iota/iri/controllers/TransactionViewModelTest.java b/src/test/java/com/iota/iri/controllers/TransactionViewModelTest.java index 3267eed62e..b8904bb9ff 100644 --- a/src/test/java/com/iota/iri/controllers/TransactionViewModelTest.java +++ b/src/test/java/com/iota/iri/controllers/TransactionViewModelTest.java @@ -1,6 +1,7 @@ package com.iota.iri.controllers; import com.iota.iri.conf.Configuration; +import com.iota.iri.hash.Sponge; import com.iota.iri.hash.SpongeFactory; import com.iota.iri.model.Hash; import com.iota.iri.model.Transaction; @@ -408,6 +409,30 @@ public static int[] getRandomTransactionWithTrunkAndBranch(Hash trunk, Hash bran TransactionViewModel.BRANCH_TRANSACTION_TRINARY_SIZE); return trits; } + public static int[] getRandomTransactionWithTrunkAndBranchValidBundle(Hash trunk, Hash branch) { + int[] trits = getRandomTransactionTrits(); + System.arraycopy(trunk.trits(), 0, trits, TransactionViewModel.TRUNK_TRANSACTION_TRINARY_OFFSET, + TransactionViewModel.TRUNK_TRANSACTION_TRINARY_SIZE); + System.arraycopy(branch.trits(), 0, trits, TransactionViewModel.BRANCH_TRANSACTION_TRINARY_OFFSET, + TransactionViewModel.BRANCH_TRANSACTION_TRINARY_SIZE); + System.arraycopy(Hash.NULL_HASH.trits(), 0, trits, TransactionViewModel.CURRENT_INDEX_TRINARY_OFFSET, + TransactionViewModel.CURRENT_INDEX_TRINARY_SIZE); + System.arraycopy(Hash.NULL_HASH.trits(), 0, trits, TransactionViewModel.LAST_INDEX_TRINARY_OFFSET, + TransactionViewModel.LAST_INDEX_TRINARY_SIZE); + System.arraycopy(Hash.NULL_HASH.trits(), 0, trits, TransactionViewModel.VALUE_TRINARY_OFFSET, + TransactionViewModel.VALUE_TRINARY_SIZE); + + final Sponge curlInstance = SpongeFactory.create(SpongeFactory.Mode.KERL); + final int[] bundleHashTrits = new int[TransactionViewModel.BUNDLE_TRINARY_SIZE]; + curlInstance.reset(); + curlInstance.absorb(trits, TransactionViewModel.ESSENCE_TRINARY_OFFSET, TransactionViewModel.ESSENCE_TRINARY_SIZE); + curlInstance.squeeze(bundleHashTrits, 0, 
bundleHashTrits.length); + + System.arraycopy(bundleHashTrits, 0, trits, TransactionViewModel.BUNDLE_TRINARY_OFFSET, + TransactionViewModel.BUNDLE_TRINARY_SIZE); + + return trits; + } public static int[] getRandomTransactionTrits() { return Arrays.stream(new int[TransactionViewModel.TRINARY_SIZE]).map(i -> seed.nextInt(3)-1).toArray(); } diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/TailFinderImplTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/TailFinderImplTest.java new file mode 100644 index 0000000000..ff5d99e33c --- /dev/null +++ b/src/test/java/com/iota/iri/service/tipselection/impl/TailFinderImplTest.java @@ -0,0 +1,92 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.TransactionTestUtils; +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.controllers.TransactionViewModelTest; +import com.iota.iri.model.Hash; +import com.iota.iri.storage.Tangle; +import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.util.Optional; + +public class TailFinderImplTest { + + private static final TemporaryFolder dbFolder = new TemporaryFolder(); + private static final TemporaryFolder logFolder = new TemporaryFolder(); + private static Tangle tangle; + private TailFinderImpl tailFinder; + + public TailFinderImplTest() { + tailFinder = new TailFinderImpl(tangle); + } + + @AfterClass + public static void tearDown() throws Exception { + tangle.shutdown(); + dbFolder.delete(); + logFolder.delete(); + } + + @BeforeClass + public static void setUp() throws Exception { + tangle = new Tangle(); + dbFolder.create(); + logFolder.create(); + tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder + .getRoot().getAbsolutePath(), 1000)); + tangle.init(); + } + + @Test + public void 
findTailTest() throws Exception { + TransactionViewModel txa = new TransactionViewModel(TransactionViewModelTest.getRandomTransactionTrits(), TransactionViewModelTest.getRandomTransactionHash()); + txa.store(tangle); + + TransactionViewModel tx2 = TransactionTestUtils.createBundleHead(2); + tx2.store(tangle); + + TransactionViewModel tx1 = TransactionTestUtils.createTransactionWithTrunkBundleHash(tx2, txa.getHash()); + tx1.store(tangle); + + TransactionViewModel tx0 = TransactionTestUtils.createTransactionWithTrunkBundleHash(tx1, txa.getHash()); + tx0.store(tangle); + + //negative index - make sure we stop at 0 + TransactionViewModel txNeg = TransactionTestUtils.createTransactionWithTrunkBundleHash(tx0, txa.getHash()); + txNeg.store(tangle); + + TransactionViewModel txLateTail = TransactionTestUtils.createTransactionWithTrunkBundleHash(tx1, txa.getHash()); + txLateTail.store(tangle); + + Optional tail = tailFinder.findTail(tx2.getHash()); + Assert.assertTrue("no tail was found", tail.isPresent()); + Assert.assertEquals("Expected tail not found", tx0.getHash(), tail.get()); + } + + + @Test + public void findMissingTailTest() throws Exception { + TransactionViewModel txa = new TransactionViewModel(TransactionViewModelTest.getRandomTransactionTrits(), + TransactionViewModelTest.getRandomTransactionHash()); + txa.store(tangle); + + TransactionViewModel tx2 = TransactionTestUtils.createBundleHead(2); + tx2.store(tangle); + + TransactionViewModel tx1 = TransactionTestUtils.createTransactionWithTrunkBundleHash(tx2, txa.getHash()); + tx1.store(tangle); + + TransactionViewModel tx0 = new TransactionViewModel(TransactionViewModelTest + .getRandomTransactionWithTrunkAndBranch(tx1.getHash(), tx2.getHash()), + TransactionViewModelTest.getRandomTransactionHash()); + tx0.store(tangle); + + Optional tail = tailFinder.findTail(tx2.getHash()); + Assert.assertFalse("tail was found, but should me missing", tail.isPresent()); + } +} \ No newline at end of file diff --git 
a/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java new file mode 100644 index 0000000000..d3dc9dde0b --- /dev/null +++ b/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java @@ -0,0 +1,88 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.LedgerValidator; +import com.iota.iri.Milestone; +import com.iota.iri.Snapshot; +import com.iota.iri.TransactionValidator; +import com.iota.iri.conf.Configuration; +import com.iota.iri.controllers.TipsViewModel; +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.network.TransactionRequester; +import com.iota.iri.service.tipselection.WalkValidator; +import com.iota.iri.storage.Tangle; +import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; +import com.iota.iri.zmq.MessageQ; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static com.iota.iri.controllers.TransactionViewModelTest.*; + +public class WalkValidatorImplTest { + + private static final TemporaryFolder dbFolder = new TemporaryFolder(); + private static final TemporaryFolder logFolder = new TemporaryFolder(); + private static Tangle tangle; + private static WalkValidator walkValidator; + private final Logger log = LoggerFactory.getLogger(this.getClass()); + + @AfterClass + public static void tearDown() throws Exception { + tangle.shutdown(); + dbFolder.delete(); + } + + @BeforeClass + public static void setUp() throws Exception { + tangle = new Tangle(); + dbFolder.create(); + logFolder.create(); + tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder + .getRoot().getAbsolutePath(), 1000)); + tangle.init(); + MessageQ 
messageQ = new MessageQ(0, null, 1, false); + + TipsViewModel tipsViewModel = new TipsViewModel(); + TransactionRequester transactionRequester = new TransactionRequester(tangle, messageQ); + TransactionValidator transactionValidator = new TransactionValidator(tangle, tipsViewModel, transactionRequester, + messageQ, Long.parseLong(Configuration.GLOBAL_SNAPSHOT_TIME)); + int milestoneStartIndex = Integer.parseInt(Configuration.MAINNET_MILESTONE_START_INDEX); + int numOfKeysInMilestone = Integer.parseInt(Configuration.MAINNET_NUM_KEYS_IN_MILESTONE); + Milestone milestone = new Milestone(tangle, Hash.NULL_HASH, Snapshot.init( + Configuration.MAINNET_SNAPSHOT_FILE, Configuration.MAINNET_SNAPSHOT_SIG_FILE, false).clone(), + transactionValidator, false, messageQ, numOfKeysInMilestone, + milestoneStartIndex, true); + LedgerValidator ledgerValidator = new LedgerValidator(tangle, milestone, transactionRequester, messageQ); + + walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestone, 15); + } + + + @Test + public void shouldPassValidation() throws Exception { + //build a small tangle - 1,2,3,4 point to transaction + TransactionViewModel transaction; + transaction = new TransactionViewModel(getRandomTransactionWithTrunkAndBranchValidBundle(Hash.NULL_HASH, + Hash.NULL_HASH), getRandomTransactionHash()); + transaction.store(tangle); + + Assert.assertTrue(walkValidator.isValid(transaction.getHash())); + } + + @Test + public void shouldFailValidation() throws Exception { + //build a small tangle - 1,2,3,4 point to transaction + TransactionViewModel transaction; + transaction = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(Hash.NULL_HASH, + Hash.NULL_HASH), getRandomTransactionHash()); + transaction.store(tangle); + + Assert.assertFalse(walkValidator.isValid(transaction.getHash())); + } + +} \ No newline at end of file diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java 
b/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java new file mode 100644 index 0000000000..bdc358f6cb --- /dev/null +++ b/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java @@ -0,0 +1,268 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.service.tipselection.RatingCalculator; +import com.iota.iri.storage.Tangle; +import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; +import com.iota.iri.zmq.MessageQ; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.Random; + +import static com.iota.iri.controllers.TransactionViewModelTest.*; + +public class WalkerAlphaTest { + private static final TemporaryFolder dbFolder = new TemporaryFolder(); + private static final TemporaryFolder logFolder = new TemporaryFolder(); + private static Tangle tangle; + private static WalkerAlpha walker; + private final Logger log = LoggerFactory.getLogger(this.getClass()); + + @AfterClass + public static void tearDown() throws Exception { + tangle.shutdown(); + dbFolder.delete(); + } + + @BeforeClass + public static void setUp() throws Exception { + tangle = new Tangle(); + dbFolder.create(); + logFolder.create(); + tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder + .getRoot().getAbsolutePath(), 1000)); + tangle.init(); + MessageQ messageQ = new MessageQ(0, null, 1, false); + + walker = new WalkerAlpha(1, new Random(1), tangle, messageQ, (Optional::of)); + } + + + @Test + public void testWalkEndsOnlyInRating() throws 
Exception { + //build a small tangle - 1,2,3,4 point to transaction + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + + //calculate rating + RatingCalculator ratingCalculator = new RatingOne(tangle); + UnIterableMap rating = ratingCalculator.calculate(transaction.getHash()); + + //add 4 after the rating was calculated + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction4.store(tangle); + + for (int i=0; i < 100; i++) { + //select + Hash tip = walker.walk(transaction.getHash(), rating, (o -> true)); + + Assert.assertTrue(tip != null); + //log.info("selected tip: " + tip.toString()); + Assert.assertTrue(!transaction4.getHash().equals(tip)); + } + } + + @Test + public void showWalkDistributionAlphaHalf() throws Exception { + + //build a small tangle - 1,2,3,4 point to transaction + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); 
+ transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + + //calculate rating + RatingCalculator ratingCalculator = new RatingOne(tangle); + UnIterableMap rating = ratingCalculator.calculate(transaction.getHash()); + //set a higher rate for transaction2 + rating.put(transaction2.getHash(), 10); + + Map counters = new HashMap<>(rating.size()); + int iterations = 100; + + walker.setAlpha(0.3); + for (int i=0; i < iterations; i++) { + //select + Hash tip = walker.walk(transaction.getHash(), rating, (o -> true)); + + Assert.assertNotNull(tip); + counters.put(tip, 1 + counters.getOrDefault(tip, 0)); + } + + for (Map.Entry entry : counters.entrySet()) { + log.info(entry.getKey().toString() + " : " + entry.getValue()); + } + + Assert.assertTrue(counters.get(transaction2.getHash()) > iterations / 2); + } + + @Test + public void showWalkDistributionAlphaZero() throws Exception { + + //build a small tangle - 1,2,3,4 point to transaction + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + + 
transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + + //calculate rating + RatingCalculator ratingCalculator = new RatingOne(tangle); + UnIterableMap rating = ratingCalculator.calculate(transaction.getHash()); + //set a higher rate for transaction2 + rating.put(transaction2.getHash(), 10); + + //add 4 after the rating was calculated + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction4.store(tangle); + + Map counters = new HashMap<>(rating.size()); + int iterations = 100; + + walker.setAlpha(0); + for (int i=0; i < iterations; i++) { + //select + Hash tip = walker.walk(transaction.getHash(), rating, (o -> true)); + + Assert.assertNotNull(tip); + counters.put(tip, 1 + counters.getOrDefault(tip, 0)); + } + + for (Map.Entry entry : counters.entrySet()) { + log.info(entry.getKey().toString() + " : " + entry.getValue()); + } + + Assert.assertTrue(counters.get(transaction1.getHash()) > iterations / 6); + } + + @Test + public void testWalk() throws Exception { + //build a small tangle + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + transaction = new TransactionViewModel(getRandomTransactionTrits(), Hash.NULL_HASH); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), + transaction1.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), + transaction1.getHash()), getRandomTransactionHash()); + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction2.getHash(), + transaction3.getHash()), 
getRandomTransactionHash()); + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + transaction4.store(tangle); + + //calculate rating + RatingCalculator ratingCalculator = new RatingOne(tangle); + UnIterableMap rating = ratingCalculator.calculate(transaction.getHash()); + + //reach the tips + Hash tip = walker.walk(transaction.getHash(), rating, (o -> true)); + + log.info("selected tip: " + tip.toString()); + Assert.assertEquals(tip, transaction4.getHash()); + } + + @Test + public void testWalkDiamond() throws Exception { + //build a small tangle + TransactionViewModel transaction, transaction1, transaction2, transaction3; + transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), + transaction.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction1.getHash(), + transaction2.getHash()), getRandomTransactionHash()); + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + + //calculate rating + RatingCalculator ratingCalculator = new RatingOne(tangle); + UnIterableMap rating = ratingCalculator.calculate(transaction.getHash()); + + //reach the tips + Hash tip = walker.walk(transaction.getHash(), rating, (o -> true)); + + log.info("selected tip: " + tip.toString()); + Assert.assertEquals(tip, transaction3.getHash()); + } + + @Test + public void testWalkChain() throws Exception { + //build a small tangle + TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + transaction = new TransactionViewModel(getRandomTransactionTrits(), 
getRandomTransactionHash()); + transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); + transaction2 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction1.getHash(), transaction1.getHash()), getRandomTransactionHash()); + transaction3 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction2.getHash(), transaction2.getHash()), getRandomTransactionHash()); + transaction4 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch( + transaction3.getHash(), transaction3.getHash()), getRandomTransactionHash()); + transaction.store(tangle); + transaction1.store(tangle); + transaction2.store(tangle); + transaction3.store(tangle); + transaction4.store(tangle); + + //calculate rating + RatingCalculator ratingCalculator = new RatingOne(tangle); + UnIterableMap rating = ratingCalculator.calculate(transaction.getHash()); + + //reach the tips + Hash tip = walker.walk(transaction.getHash(), rating, (o -> true)); + + log.info("selected tip: " + tip.toString()); + Assert.assertEquals(tip, transaction4.getHash()); + } + +} + From 07c0d44cd10c19d06b672ef0c96bb9400d76811d Mon Sep 17 00:00:00 2001 From: alon-e Date: Thu, 31 May 2018 18:57:05 +0300 Subject: [PATCH 39/45] Implementation and testing of TipSelector --- .../iri/service/tipselection/TipSelector.java | 30 +++++ .../tipselection/impl/TipSelectorImpl.java | 123 ++++++++++++++++++ 2 files changed, 153 insertions(+) create mode 100644 src/main/java/com/iota/iri/service/tipselection/TipSelector.java create mode 100644 src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java diff --git a/src/main/java/com/iota/iri/service/tipselection/TipSelector.java b/src/main/java/com/iota/iri/service/tipselection/TipSelector.java new file mode 100644 index 0000000000..4561a25926 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/TipSelector.java @@ 
-0,0 +1,30 @@ +package com.iota.iri.service.tipselection; + +import com.iota.iri.model.Hash; +import java.util.List; +import java.util.Optional; + +/** + * Selects tips to be approved + */ + + +public interface TipSelector { + + /** + * Method for finding tips + * + *

+ * This method is used to find tips for approval given a depth, + * if reference is present then tips will also reference this transaction. + *

+ * + * @param depth The depth that the transactions will be found from. + * @param reference An optional transaction hash to be referenced by tips. + * @return Transactions to approve + * @throws Exception If DB fails to retrieve transactions + */ + List getTransactionsToApprove(int depth, Optional reference) throws Exception; + + int getMaxDepth(); +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java b/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java new file mode 100644 index 0000000000..c34e41b9c1 --- /dev/null +++ b/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java @@ -0,0 +1,123 @@ +package com.iota.iri.service.tipselection.impl; + +import com.iota.iri.LedgerValidator; +import com.iota.iri.Milestone; +import com.iota.iri.TransactionValidator; +import com.iota.iri.model.Hash; +import com.iota.iri.model.HashId; +import com.iota.iri.service.tipselection.*; +import com.iota.iri.storage.Tangle; +import com.iota.iri.utils.collections.interfaces.UnIterableMap; +import com.iota.iri.zmq.MessageQ; + +import java.security.SecureRandom; +import java.util.*; + +/** + * Implementation of TipSelector that selects 2 tips, + * based on cumulative weights and transition function alpha. 
+ * + */ +public class TipSelectorImpl implements TipSelector { + + public static final String REFERENCE_TRANSACTION_TOO_OLD = "reference transaction is too old"; + public static final String TIPS_NOT_CONSISTENT = "inconsistent tips pair selected"; + + private final EntryPointSelector entryPointSelector; + private final RatingCalculator ratingCalculator; + private final Walker walker; + + private final int maxDepth; + private final LedgerValidator ledgerValidator; + private final TransactionValidator transactionValidator; + private final Tangle tangle; + private final Milestone milestone; + + @Override + public int getMaxDepth() { + return maxDepth; + } + + public TipSelectorImpl(Tangle tangle, + LedgerValidator ledgerValidator, + TransactionValidator transactionValidator, + Milestone milestone, + int maxDepth, + MessageQ messageQ, + boolean testnet, + int milestoneStartIndex, + double alpha) { + + this.entryPointSelector = new EntryPointSelectorImpl(tangle, milestone, testnet, milestoneStartIndex); + this.ratingCalculator = new CumulativeWeightCalculator(tangle); + + this.walker = new WalkerAlpha(alpha, new SecureRandom(), tangle, messageQ, new TailFinderImpl(tangle)); + + //used by walkValidator + this.maxDepth = maxDepth; + this.ledgerValidator = ledgerValidator; + this.transactionValidator = transactionValidator; + this.tangle = tangle; + this.milestone = milestone; + } + + /** + * Implementation of getTransactionsToApprove + * + * General process: + *
    + *
  1. Preparation: select entryPoint and calculate rating for all referencing transactions + *
  2. 1st Random Walk: starting from entryPoint. + *
  3. 2nd Random Walk: if reference exists and is in the rating calculation, start from reference, + * otherwise start again from entryPoint. + *
  4. Validate: check that both tips are not contradicting. + *
+ * @param depth The depth that the transactions will be found from. + * @param reference An optional transaction hash to be referenced by tips. + * @return Transactions to approve + * @throws Exception If DB fails to retrieve transactions + */ + @Override + public List getTransactionsToApprove(int depth, Optional reference) throws Exception { + try { + milestone.latestSnapshot.rwlock.readLock().lock(); + + //preparation + Hash entryPoint = entryPointSelector.getEntryPoint(depth); + UnIterableMap rating = ratingCalculator.calculate(entryPoint); + + //random walk + List tips = new LinkedList<>(); + WalkValidator walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestone, + maxDepth); + Hash tip = walker.walk(entryPoint, rating, walkValidator); + tips.add(tip); + + if (reference.isPresent()) { + checkReference(reference.get(), rating); + entryPoint = reference.get(); + } + + //passing the same walkValidator means that the walks will be consistent with each other + tip = walker.walk(entryPoint, rating, walkValidator); + tips.add(tip); + + //validate + if (!ledgerValidator.checkConsistency(tips)) { + throw new RuntimeException(TIPS_NOT_CONSISTENT); + } + + return tips; + } finally { + milestone.latestSnapshot.rwlock.readLock().unlock(); + } + } + + private void checkReference(HashId reference, UnIterableMap rating) { + if (!rating.containsKey(reference)) { + throw new RuntimeException(REFERENCE_TRANSACTION_TOO_OLD); + } + } + + +} From 841f484beda313bebb947f97f170a7186b0139e2 Mon Sep 17 00:00:00 2001 From: galrogo Date: Sun, 3 Jun 2018 12:49:32 +0300 Subject: [PATCH 40/45] Codacy fixes --- .../impl/CumulativeWeightCalculator.java | 12 ++++++------ .../service/tipselection/impl/TipSelectorImpl.java | 10 +++++----- .../tipselection/impl/WalkValidatorImpl.java | 5 +---- .../iri/service/tipselection/impl/WalkerAlpha.java | 7 ++----- .../impl/TransformingBoundedHashSet.java | 13 ++++++++----- 
.../service/tipselection/impl/RatingOneTest.java | 1 - .../tipselection/impl/WalkValidatorImplTest.java | 1 - .../service/tipselection/impl/WalkerAlphaTest.java | 2 +- 8 files changed, 23 insertions(+), 28 deletions(-) diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java b/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java index ee0ff86556..92bb854234 100644 --- a/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java +++ b/src/main/java/com/iota/iri/service/tipselection/impl/CumulativeWeightCalculator.java @@ -28,7 +28,7 @@ public class CumulativeWeightCalculator implements RatingCalculator{ private static final Logger log = LoggerFactory.getLogger(CumulativeWeightCalculator.class); - public static final int MAX_ANCESTORS_SIZE = 5000; + public static final int MAX_FUTURE_SET_SIZE = 5000; public final Tangle tangle; @@ -132,10 +132,10 @@ private UnIterableMap> updateApproversAndReleaseMemory(UnIte private Set createApprovers(UnIterableMap> txHashToApprovers, HashId txHash, Set approvers, HashId trunkHash) { - approvers = createTransformingBoundedSet(approvers); - approvers.addAll(CollectionUtils.emptyIfNull(txHashToApprovers.get(trunkHash))); - approvers.add(txHash); - return approvers; + Set approverSet = createTransformingBoundedSet(approvers); + approverSet.addAll(CollectionUtils.emptyIfNull(txHashToApprovers.get(trunkHash))); + approverSet.add(txHash); + return approverSet; } private static UnIterableMap updateCw( @@ -156,6 +156,6 @@ private static UnIterableMap createTxHashToCumulativeWeightMap( } private static BoundedSet createTransformingBoundedSet(Collection c) { - return new TransformingBoundedHashSet<>(c, MAX_ANCESTORS_SIZE, HashPrefix::createPrefix); + return new TransformingBoundedHashSet<>(c, MAX_FUTURE_SET_SIZE, HashPrefix::createPrefix); } } diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java 
b/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java index c34e41b9c1..0cbc76980b 100644 --- a/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java +++ b/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java @@ -10,6 +10,7 @@ import com.iota.iri.utils.collections.interfaces.UnIterableMap; import com.iota.iri.zmq.MessageQ; +import java.security.InvalidAlgorithmParameterException; import java.security.SecureRandom; import java.util.*; @@ -104,7 +105,7 @@ public List getTransactionsToApprove(int depth, Optional reference) //validate if (!ledgerValidator.checkConsistency(tips)) { - throw new RuntimeException(TIPS_NOT_CONSISTENT); + throw new IllegalStateException(TIPS_NOT_CONSISTENT); } return tips; @@ -113,11 +114,10 @@ public List getTransactionsToApprove(int depth, Optional reference) } } - private void checkReference(HashId reference, UnIterableMap rating) { + private void checkReference(HashId reference, UnIterableMap rating) + throws InvalidAlgorithmParameterException { if (!rating.containsKey(reference)) { - throw new RuntimeException(REFERENCE_TRANSACTION_TOO_OLD); + throw new InvalidAlgorithmParameterException(REFERENCE_TRANSACTION_TOO_OLD); } } - - } diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java b/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java index 5206341860..d17f9452b4 100644 --- a/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java +++ b/src/main/java/com/iota/iri/service/tipselection/impl/WalkValidatorImpl.java @@ -91,10 +91,7 @@ private boolean belowMaxDepth(Hash tip, int depth) throws Exception { return true; } if (transaction.snapshotIndex() == 0) { - if (maxDepthOkMemoization.contains(hash)) { - //log.info("Memoization!"); - } - else { + if (!maxDepthOkMemoization.contains(hash)) { nonAnalyzedTransactions.offer(transaction.getTrunkTransactionHash()); 
nonAnalyzedTransactions.offer(transaction.getBranchTransactionHash()); } diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java b/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java index 0c7a9baa68..55580c96d1 100644 --- a/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java +++ b/src/main/java/com/iota/iri/service/tipselection/impl/WalkerAlpha.java @@ -50,7 +50,7 @@ public void setAlpha(double alpha) { @Override public Hash walk(Hash entryPoint, UnIterableMap ratings, WalkValidator walkValidator) throws Exception { if (!walkValidator.isValid(entryPoint)) { - throw new RuntimeException("entry point failed consistency check: " + entryPoint.toString()); + throw new IllegalStateException("entry point failed consistency check: " + entryPoint.toString()); } Optional nextStep; @@ -135,12 +135,9 @@ private Optional select(UnIterableMap ratings, Set private Optional findTailIfValid(Hash transactionHash, WalkValidator validator) throws Exception { Optional tailHash = tailFinder.findTail(transactionHash); - if (tailHash.isPresent()) { - if (validator.isValid(tailHash.get())) { + if (tailHash.isPresent() && validator.isValid(tailHash.get())) { return tailHash; - } } - return Optional.empty(); } } diff --git a/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java b/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java index 7548c21b07..964417f35e 100644 --- a/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java +++ b/src/main/java/com/iota/iri/utils/collections/impl/TransformingBoundedHashSet.java @@ -22,19 +22,22 @@ public TransformingBoundedHashSet(Collection c, int maxSize, UnaryOperator @Override public boolean add(E e) { - if (!isFull()) { - e = transformer.apply(e); + if (isFull()) { + return false; } - return super.add(e); + + E el = transformer.apply(e); + return super.add(el); } @Override public boolean addAll(Collection 
c) { + Collection col = c; if (!isFull()) { - c = c.stream() + col = c.stream() .map(el -> transformer.apply(el)) .collect(Collectors.toSet()); } - return super.addAll(c); + return super.addAll(col); } } diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/RatingOneTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/RatingOneTest.java index 8b01487131..8c6cd2981a 100644 --- a/src/test/java/com/iota/iri/service/tipselection/impl/RatingOneTest.java +++ b/src/test/java/com/iota/iri/service/tipselection/impl/RatingOneTest.java @@ -23,7 +23,6 @@ public class RatingOneTest { "tx%d cumulative weight is not as expected"; private static Tangle tangle; private static RatingCalculator rating; - private final Logger log = LoggerFactory.getLogger(this.getClass()); @AfterClass public static void tearDown() throws Exception { diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java index d3dc9dde0b..d7ff43ce72 100644 --- a/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java +++ b/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java @@ -29,7 +29,6 @@ public class WalkValidatorImplTest { private static final TemporaryFolder logFolder = new TemporaryFolder(); private static Tangle tangle; private static WalkValidator walkValidator; - private final Logger log = LoggerFactory.getLogger(this.getClass()); @AfterClass public static void tearDown() throws Exception { diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java index bdc358f6cb..ef557def06 100644 --- a/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java +++ b/src/test/java/com/iota/iri/service/tipselection/impl/WalkerAlphaTest.java @@ -90,7 +90,7 @@ public void testWalkEndsOnlyInRating() throws Exception { 
public void showWalkDistributionAlphaHalf() throws Exception { //build a small tangle - 1,2,3,4 point to transaction - TransactionViewModel transaction, transaction1, transaction2, transaction3, transaction4; + TransactionViewModel transaction, transaction1, transaction2, transaction3; transaction = new TransactionViewModel(getRandomTransactionTrits(), getRandomTransactionHash()); transaction1 = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(transaction.getHash(), transaction.getHash()), getRandomTransactionHash()); From cbf7ebc762f5cc999734ba9e43cf8a22d5b21737 Mon Sep 17 00:00:00 2001 From: Gal Rogozinski Date: Mon, 4 Jun 2018 14:03:13 +0300 Subject: [PATCH 41/45] walker validator impl test (#782) --- .../impl/WalkValidatorImplTest.java | 138 +++++++++++++----- 1 file changed, 98 insertions(+), 40 deletions(-) diff --git a/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java b/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java index d7ff43ce72..6199f4ee46 100644 --- a/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java +++ b/src/test/java/com/iota/iri/service/tipselection/impl/WalkValidatorImplTest.java @@ -2,33 +2,37 @@ import com.iota.iri.LedgerValidator; import com.iota.iri.Milestone; -import com.iota.iri.Snapshot; +import com.iota.iri.TransactionTestUtils; import com.iota.iri.TransactionValidator; -import com.iota.iri.conf.Configuration; -import com.iota.iri.controllers.TipsViewModel; import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.model.Hash; -import com.iota.iri.network.TransactionRequester; -import com.iota.iri.service.tipselection.WalkValidator; import com.iota.iri.storage.Tangle; import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; -import com.iota.iri.zmq.MessageQ; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.rules.TemporaryFolder; -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.runners.MockitoJUnitRunner; -import static com.iota.iri.controllers.TransactionViewModelTest.*; +import java.util.HashMap; +import java.util.HashSet; +@RunWith(MockitoJUnitRunner.class) public class WalkValidatorImplTest { private static final TemporaryFolder dbFolder = new TemporaryFolder(); private static final TemporaryFolder logFolder = new TemporaryFolder(); private static Tangle tangle; - private static WalkValidator walkValidator; + @Mock + private LedgerValidator ledgerValidator; + @Mock + private TransactionValidator transactionValidator; + @Mock + private Milestone milestoneTracker; @AfterClass public static void tearDown() throws Exception { @@ -44,44 +48,98 @@ public static void setUp() throws Exception { tangle.addPersistenceProvider(new RocksDBPersistenceProvider(dbFolder.getRoot().getAbsolutePath(), logFolder .getRoot().getAbsolutePath(), 1000)); tangle.init(); - MessageQ messageQ = new MessageQ(0, null, 1, false); - - TipsViewModel tipsViewModel = new TipsViewModel(); - TransactionRequester transactionRequester = new TransactionRequester(tangle, messageQ); - TransactionValidator transactionValidator = new TransactionValidator(tangle, tipsViewModel, transactionRequester, - messageQ, Long.parseLong(Configuration.GLOBAL_SNAPSHOT_TIME)); - int milestoneStartIndex = Integer.parseInt(Configuration.MAINNET_MILESTONE_START_INDEX); - int numOfKeysInMilestone = Integer.parseInt(Configuration.MAINNET_NUM_KEYS_IN_MILESTONE); - Milestone milestone = new Milestone(tangle, Hash.NULL_HASH, Snapshot.init( - Configuration.MAINNET_SNAPSHOT_FILE, Configuration.MAINNET_SNAPSHOT_SIG_FILE, false).clone(), - transactionValidator, false, messageQ, numOfKeysInMilestone, - milestoneStartIndex, true); - LedgerValidator ledgerValidator = new LedgerValidator(tangle, milestone, transactionRequester, messageQ); - - 
walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestone, 15); } - @Test public void shouldPassValidation() throws Exception { - //build a small tangle - 1,2,3,4 point to transaction - TransactionViewModel transaction; - transaction = new TransactionViewModel(getRandomTransactionWithTrunkAndBranchValidBundle(Hash.NULL_HASH, - Hash.NULL_HASH), getRandomTransactionHash()); - transaction.store(tangle); + TransactionViewModel tx = TransactionTestUtils.createBundleHead(0); + tx.store(tangle); + Hash hash = tx.getHash(); + Mockito.when(transactionValidator.checkSolidity(hash, false)) + .thenReturn(true); + Mockito.when(ledgerValidator.updateDiff(new HashSet<>(), new HashMap<>(), hash)) + .thenReturn(true); + milestoneTracker.latestSolidSubtangleMilestoneIndex = Integer.MAX_VALUE; + + WalkValidatorImpl walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestoneTracker, 15); + Assert.assertTrue("Validation failed", walkValidator.isValid(hash)); + } + + @Test + public void failOnTxType() throws Exception { + TransactionViewModel tx = TransactionTestUtils.createBundleHead(0); + tx.store(tangle); + Hash hash = tx.getTrunkTransactionHash(); + Mockito.when(transactionValidator.checkSolidity(hash, false)) + .thenReturn(true); + Mockito.when(ledgerValidator.updateDiff(new HashSet<>(), new HashMap<>(), hash)) + .thenReturn(true); + milestoneTracker.latestSolidSubtangleMilestoneIndex = Integer.MAX_VALUE; + + WalkValidatorImpl walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestoneTracker, 15); + Assert.assertFalse("Validation succeded but should have failed since tx is missing", walkValidator.isValid(hash)); + } + + @Test + public void failOnTxIndex() throws Exception { + TransactionViewModel tx = TransactionTestUtils.createBundleHead(2); + tx.store(tangle); + Hash hash = tx.getHash(); + Mockito.when(transactionValidator.checkSolidity(hash, false)) + .thenReturn(true); + 
Mockito.when(ledgerValidator.updateDiff(new HashSet<>(), new HashMap<>(), hash)) + .thenReturn(true); + milestoneTracker.latestSolidSubtangleMilestoneIndex = Integer.MAX_VALUE; + + WalkValidatorImpl walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestoneTracker, 15); + Assert.assertFalse("Validation succeded but should have failed since we are not on a tail", walkValidator.isValid(hash)); + } + + @Test + public void failOnSolid() throws Exception { + TransactionViewModel tx = TransactionTestUtils.createBundleHead(0); + tx.store(tangle); + Hash hash = tx.getHash(); + Mockito.when(transactionValidator.checkSolidity(hash, false)) + .thenReturn(false); + Mockito.when(ledgerValidator.updateDiff(new HashSet<>(), new HashMap<>(), hash)) + .thenReturn(true); + milestoneTracker.latestSolidSubtangleMilestoneIndex = Integer.MAX_VALUE; - Assert.assertTrue(walkValidator.isValid(transaction.getHash())); + WalkValidatorImpl walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestoneTracker, 15); + Assert.assertFalse("Validation succeded but should have failed since tx is not solid", + walkValidator.isValid(hash)); } @Test - public void shouldFailValidation() throws Exception { - //build a small tangle - 1,2,3,4 point to transaction - TransactionViewModel transaction; - transaction = new TransactionViewModel(getRandomTransactionWithTrunkAndBranch(Hash.NULL_HASH, - Hash.NULL_HASH), getRandomTransactionHash()); - transaction.store(tangle); - - Assert.assertFalse(walkValidator.isValid(transaction.getHash())); + public void failOnBelowMaxDepth() throws Exception { + TransactionViewModel tx = TransactionTestUtils.createBundleHead(0); + tx.store(tangle); + tx.setSnapshot(tangle, 2); + Hash hash = tx.getHash(); + Mockito.when(transactionValidator.checkSolidity(hash, false)) + .thenReturn(true); + Mockito.when(ledgerValidator.updateDiff(new HashSet<>(), new HashMap<>(), hash)) + .thenReturn(true); + 
milestoneTracker.latestSolidSubtangleMilestoneIndex = Integer.MAX_VALUE; + WalkValidatorImpl walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestoneTracker, 15); + Assert.assertFalse("Validation succeded but should have failed tx is below max depth", + walkValidator.isValid(hash)); } + @Test + public void failOnInconsistency() throws Exception { + TransactionViewModel tx = TransactionTestUtils.createBundleHead(0); + tx.store(tangle); + Hash hash = tx.getHash(); + Mockito.when(transactionValidator.checkSolidity(hash, false)) + .thenReturn(true); + Mockito.when(ledgerValidator.updateDiff(new HashSet<>(), new HashMap<>(), hash)) + .thenReturn(false); + milestoneTracker.latestSolidSubtangleMilestoneIndex = Integer.MAX_VALUE; + + WalkValidatorImpl walkValidator = new WalkValidatorImpl(tangle, ledgerValidator, transactionValidator, milestoneTracker, 15); + Assert.assertFalse("Validation succeded but should have failed due to inconsistent ledger state", + walkValidator.isValid(hash)); + } } \ No newline at end of file From afbf8b29b1df79307e3fc399c373721a882d7ed6 Mon Sep 17 00:00:00 2001 From: Alon Elmaliah Date: Tue, 12 Jun 2018 17:21:50 +0300 Subject: [PATCH 42/45] Integration of new tip selection and cleanup of tipsManager (#785) * Integration of new tip selection and cleanup of tipsManager * inject dependencies to tipselector appropriately * added missing TipsSolidifier file * Avoid throwing raw exception types. Remove final. 
--- src/main/java/com/iota/iri/Iota.java | 30 +- src/main/java/com/iota/iri/service/API.java | 93 +--- .../com/iota/iri/service/TipsManager.java | 519 ------------------ .../com/iota/iri/service/TipsSolidifier.java | 76 +++ .../tipselection/impl/TipSelectorImpl.java | 15 +- .../iri/integration/NodeIntegrationTests.java | 12 +- 6 files changed, 139 insertions(+), 606 deletions(-) delete mode 100644 src/main/java/com/iota/iri/service/TipsManager.java create mode 100644 src/main/java/com/iota/iri/service/TipsSolidifier.java diff --git a/src/main/java/com/iota/iri/Iota.java b/src/main/java/com/iota/iri/Iota.java index fc316fb780..63048dee77 100644 --- a/src/main/java/com/iota/iri/Iota.java +++ b/src/main/java/com/iota/iri/Iota.java @@ -1,14 +1,17 @@ package com.iota.iri; import com.iota.iri.conf.Configuration; -import com.iota.iri.controllers.*; +import com.iota.iri.controllers.TipsViewModel; +import com.iota.iri.controllers.TransactionViewModel; import com.iota.iri.hash.SpongeFactory; import com.iota.iri.model.Hash; import com.iota.iri.network.Node; import com.iota.iri.network.TransactionRequester; import com.iota.iri.network.UDPReceiver; import com.iota.iri.network.replicator.Replicator; -import com.iota.iri.service.TipsManager; +import com.iota.iri.service.TipsSolidifier; +import com.iota.iri.service.tipselection.*; +import com.iota.iri.service.tipselection.impl.*; import com.iota.iri.storage.*; import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider; import com.iota.iri.utils.Pair; @@ -19,6 +22,7 @@ import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.SecureRandom; import java.util.List; /** @@ -31,7 +35,7 @@ public class Iota { public final Milestone milestone; public final Tangle tangle; public final TransactionValidator transactionValidator; - public final TipsManager tipsManager; + public final TipsSolidifier tipsSolidifier; public final TransactionRequester transactionRequester; public final Node node; public final 
UDPReceiver udpReceiver; @@ -40,6 +44,7 @@ public class Iota { public final Hash coordinator; public final TipsViewModel tipsViewModel; public final MessageQ messageQ; + public final TipSelector tipsSelector; public final boolean testnet; public final int maxPeers; @@ -60,6 +65,8 @@ public Iota(Configuration configuration) throws IOException { long snapshotTimestamp = configuration.longNum(Configuration.DefaultConfSettings.SNAPSHOT_TIME); int milestoneStartIndex = configuration.integer(Configuration.DefaultConfSettings.MILESTONE_START_INDEX); int numKeysMilestone = configuration.integer(Configuration.DefaultConfSettings.NUMBER_OF_KEYS_IN_A_MILESTONE); + double alpha = configuration.doubling(Configuration.DefaultConfSettings.TIPSELECTION_ALPHA.name()); + boolean dontValidateMilestoneSig = configuration.booling(Configuration.DefaultConfSettings .DONT_VALIDATE_TESTNET_MILESTONE_SIG); int transactionPacketSize = configuration.integer(Configuration.DefaultConfSettings.TRANSACTION_PACKET_SIZE); @@ -93,8 +100,8 @@ public Iota(Configuration configuration) throws IOException { replicator = new Replicator(node, tcpPort, maxPeers, testnet, transactionPacketSize); udpReceiver = new UDPReceiver(udpPort, node, configuration.integer(Configuration.DefaultConfSettings.TRANSACTION_PACKET_SIZE)); ledgerValidator = new LedgerValidator(tangle, milestone, transactionRequester, messageQ); - tipsManager = new TipsManager(tangle, ledgerValidator, transactionValidator, tipsViewModel, milestone, - maxTipSearchDepth, messageQ, testnet, milestoneStartIndex); + tipsSolidifier = new TipsSolidifier(tangle, transactionValidator, tipsViewModel); + tipsSelector = createTipSelector(milestoneStartIndex, alpha); } public void init() throws Exception { @@ -113,7 +120,7 @@ public void init() throws Exception { } milestone.init(SpongeFactory.Mode.CURLP27, ledgerValidator, revalidate); transactionValidator.init(testnet, configuration.integer(Configuration.DefaultConfSettings.MWM)); - tipsManager.init(); + 
tipsSolidifier.init(); transactionRequester.init(configuration.doubling(Configuration.DefaultConfSettings.P_REMOVE_REQUEST.name())); udpReceiver.init(); replicator.init(); @@ -147,7 +154,7 @@ private void rescan_db() throws Exception { public void shutdown() throws Exception { milestone.shutDown(); - tipsManager.shutdown(); + tipsSolidifier.shutdown(); node.shutdown(); udpReceiver.shutdown(); replicator.shutdown(); @@ -190,4 +197,13 @@ private void initializeTangle() { tangle.addPersistenceProvider(new ZmqPublishProvider(messageQ)); } } + + private TipSelector createTipSelector(int milestoneStartIndex, double alpha) { + EntryPointSelector entryPointSelector = new EntryPointSelectorImpl(tangle, milestone, testnet, milestoneStartIndex); + RatingCalculator ratingCalculator = new CumulativeWeightCalculator(tangle); + TailFinder tailFinder = new TailFinderImpl(tangle); + Walker walker = new WalkerAlpha(alpha, new SecureRandom(), tangle, messageQ, tailFinder); + return new TipSelectorImpl(tangle, ledgerValidator, transactionValidator, entryPointSelector, ratingCalculator, + walker, milestone, maxTipSearchDepth); + } } diff --git a/src/main/java/com/iota/iri/service/API.java b/src/main/java/com/iota/iri/service/API.java index 52760a9e4a..c796e8c24c 100644 --- a/src/main/java/com/iota/iri/service/API.java +++ b/src/main/java/com/iota/iri/service/API.java @@ -43,7 +43,6 @@ import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.security.SecureRandom; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; @@ -287,26 +286,18 @@ private AbstractResponse process(final String requestString, InetSocketAddress s return getTipsStatement(); } case "getTransactionsToApprove": { - if (invalidSubtangleStatus()) { - return ErrorResponse - .create("This operations cannot be executed: The subtangle has not been updated yet."); - } - - final String reference = 
request.containsKey("reference") ? getParameterAsStringAndValidate(request,"reference", HASH_SIZE) : null; + final Optional reference = request.containsKey("reference") ? + Optional.of(new Hash (getParameterAsStringAndValidate(request,"reference", HASH_SIZE))) + : Optional.empty(); final int depth = getParameterAsInt(request, "depth"); - if(depth < 0 || (reference == null && depth == 0)) { + if (depth < 0 || depth > instance.tipsSelector.getMaxDepth()) { return ErrorResponse.create("Invalid depth input"); } - int numWalks = request.containsKey("numWalks") ? getParameterAsInt(request,"numWalks") : 1; - if(numWalks < minRandomWalks) { - numWalks = minRandomWalks; - } + try { - final Hash[] tips = getTransactionToApproveStatement(depth, reference, numWalks); - if(tips == null) { - return ErrorResponse.create("The subtangle is not solid"); - } - return GetTransactionsToApproveResponse.create(tips[0], tips[1]); + List tips = getTransactionToApproveStatement(depth, reference); + return GetTransactionsToApproveResponse.create(tips.get(0), tips.get(1)); + } catch (RuntimeException e) { log.info("Tip selection failed: " + e.getLocalizedMessage()); return ErrorResponse.create(e.getLocalizedMessage()); @@ -587,57 +578,29 @@ public static void incEllapsedTime_getTxToApprove(long ellapsedTime) { ellapsedTime_getTxToApprove += ellapsedTime; } - public synchronized Hash[] getTransactionToApproveStatement(int depth, final String reference, final int numWalks) throws Exception { - int tipsToApprove = 2; - Hash[] tips = new Hash[tipsToApprove]; - final SecureRandom random = new SecureRandom(); - final int randomWalkCount = numWalks > maxRandomWalks || numWalks < 1 ? 
maxRandomWalks:numWalks; - Hash referenceHash = null; - int maxDepth = instance.tipsManager.getMaxDepth(); - if (depth > maxDepth) { - depth = maxDepth; + public synchronized List getTransactionToApproveStatement(int depth, Optional reference) throws Exception { + + if (invalidSubtangleStatus()) { + throw new IllegalStateException("This operations cannot be executed: The subtangle has not been updated yet."); } - if(reference != null) { - referenceHash = new Hash(reference); - if (!TransactionViewModel.exists(instance.tangle, referenceHash)) { - throw new RuntimeException(REFERENCE_TRANSACTION_NOT_FOUND); - } else { - TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(instance.tangle, referenceHash); - if (transactionViewModel.snapshotIndex() != 0 - && transactionViewModel.snapshotIndex() < instance.milestone.latestSolidSubtangleMilestoneIndex - depth) { - throw new RuntimeException(REFERENCE_TRANSACTION_TOO_OLD); - } - } + + List tips = instance.tipsSelector.getTransactionsToApprove(depth, reference); + + if (log.isDebugEnabled()) { + gatherStatisticsOnTipSelection(); } - instance.milestone.latestSnapshot.rwlock.readLock().lock(); - try { - Set visitedHashes = new HashSet<>(); - Map diff = new HashMap<>(); - for (int i = 0; i < tipsToApprove; i++) { - tips[i] = instance.tipsManager.transactionToApprove(visitedHashes, diff, referenceHash, tips[0], depth, randomWalkCount, random); - //update world view, so next tips selected will be inter-consistent - if (tips[i] == null || !instance.ledgerValidator.updateDiff(visitedHashes, diff, tips[i])) { - return null; - } - } - API.incCounter_getTxToApprove(); - if ((getCounter_getTxToApprove() % 100) == 0) { - String sb = "Last 100 getTxToApprove consumed " + - API.getEllapsedTime_getTxToApprove() / 1000000000L + - " seconds processing time."; - log.info(sb); - counter_getTxToApprove = 0; - ellapsedTime_getTxToApprove = 0L; - } + return tips; + } - if 
(instance.ledgerValidator.checkConsistency(Arrays.asList(tips))) { - return tips; - } - } finally { - instance.milestone.latestSnapshot.rwlock.readLock().unlock(); + private void gatherStatisticsOnTipSelection() { + API.incCounter_getTxToApprove(); + if ((getCounter_getTxToApprove() % 100) == 0) { + String sb = "Last 100 getTxToApprove consumed " + API.getEllapsedTime_getTxToApprove() / 1000000000L + " seconds processing time."; + log.debug(sb); + counter_getTxToApprove = 0; + ellapsedTime_getTxToApprove = 0L; } - throw new RuntimeException("inconsistent tips pair selected"); } private synchronized AbstractResponse getTipsStatement() throws Exception { @@ -1113,7 +1076,7 @@ public void shutDown() { //only available on testnet private synchronized void storeMessageStatement(final String address, final String message) throws Exception { - final Hash[] txToApprove = getTransactionToApproveStatement(3, null, 5); + final List txToApprove = getTransactionToApproveStatement(3, Optional.empty()); final int txMessageSize = TransactionViewModel.SIGNATURE_MESSAGE_FRAGMENT_TRINARY_SIZE / 3; @@ -1173,7 +1136,7 @@ private synchronized void storeMessageStatement(final String address, final Stri transactions = transactions.stream().map(tx -> StringUtils.rightPad(tx + bundleHash, TRYTES_SIZE, '9')).collect(Collectors.toList()); // do pow - List powResult = attachToTangleStatement(txToApprove[0], txToApprove[1], 9, transactions); + List powResult = attachToTangleStatement(txToApprove.get(0), txToApprove.get(1), 9, transactions); broadcastTransactionStatement(powResult); } } diff --git a/src/main/java/com/iota/iri/service/TipsManager.java b/src/main/java/com/iota/iri/service/TipsManager.java deleted file mode 100644 index a9e9206048..0000000000 --- a/src/main/java/com/iota/iri/service/TipsManager.java +++ /dev/null @@ -1,519 +0,0 @@ -package com.iota.iri.service; - -import com.iota.iri.LedgerValidator; -import com.iota.iri.Milestone; -import com.iota.iri.TransactionValidator; -import 
com.iota.iri.controllers.ApproveeViewModel; -import com.iota.iri.controllers.MilestoneViewModel; -import com.iota.iri.controllers.TipsViewModel; -import com.iota.iri.controllers.TransactionViewModel; -import com.iota.iri.model.Hash; -import com.iota.iri.storage.Tangle; -import com.iota.iri.utils.IotaUtils; -import com.iota.iri.utils.SafeUtils; -import com.iota.iri.utils.collections.impl.BoundedHashSet; -import com.iota.iri.utils.collections.interfaces.BoundedSet; -import com.iota.iri.zmq.MessageQ; -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.collections4.SetUtils; -import org.apache.commons.lang3.ObjectUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.Buffer; -import java.nio.ByteBuffer; -import java.util.*; - -public class TipsManager { - - public static final int MAX_ANCESTORS_SIZE = 1000; - - private final Logger log = LoggerFactory.getLogger(TipsManager.class); - private final Tangle tangle; - private final TipsViewModel tipsViewModel; - private final Milestone milestone; - private final LedgerValidator ledgerValidator; - private final TransactionValidator transactionValidator; - private final MessageQ messageQ; - private final boolean testnet; - private final int milestoneStartIndex; - - public static final int SUBHASH_LENGTH = 16; - private int RATING_THRESHOLD = 75; // Must be in [0..100] range - private boolean shuttingDown = false; - private int RESCAN_TX_TO_REQUEST_INTERVAL = 750; - private final int maxDepth; - private Thread solidityRescanHandle; - - public void setRATING_THRESHOLD(int value) { - if (value < 0) { - value = 0; - } - if (value > 100) { - value = 100; - } - RATING_THRESHOLD = value; - } - - public TipsManager(final Tangle tangle, - final LedgerValidator ledgerValidator, - final TransactionValidator transactionValidator, - final TipsViewModel tipsViewModel, - final Milestone milestone, - final int maxDepth, - final MessageQ messageQ, - final boolean testnet, - final 
int milestoneStartIndex) { - this.tangle = tangle; - this.ledgerValidator = ledgerValidator; - this.transactionValidator = transactionValidator; - this.tipsViewModel = tipsViewModel; - this.milestone = milestone; - this.maxDepth = maxDepth; - this.messageQ = messageQ; - this.testnet = testnet; - this.milestoneStartIndex = milestoneStartIndex; - } - - public void init() { - solidityRescanHandle = new Thread(() -> { - - while (!shuttingDown) { - try { - scanTipsForSolidity(); - } catch (Exception e) { - log.error("Error during solidity scan : {}", e); - } - try { - Thread.sleep(RESCAN_TX_TO_REQUEST_INTERVAL); - } catch (InterruptedException e) { - log.error("Solidity rescan interrupted."); - } - } - }, "Tip Solidity Rescan"); - solidityRescanHandle.start(); - } - - private void scanTipsForSolidity() throws Exception { - int size = tipsViewModel.nonSolidSize(); - if (size != 0) { - Hash hash = tipsViewModel.getRandomNonSolidTipHash(); - boolean isTip = true; - if (hash != null && TransactionViewModel.fromHash(tangle, hash).getApprovers(tangle).size() != 0) { - tipsViewModel.removeTipHash(hash); - isTip = false; - } - if (hash != null && isTip && transactionValidator.checkSolidity(hash, false)) { - //if(hash != null && TransactionViewModel.fromHash(hash).isSolid() && isTip) { - tipsViewModel.setSolid(hash); - } - } - } - - public void shutdown() throws InterruptedException { - shuttingDown = true; - try { - if (solidityRescanHandle != null && solidityRescanHandle.isAlive()) { - solidityRescanHandle.join(); - } - } catch (Exception e) { - log.error("Error in shutdown", e); - } - - } - - Hash transactionToApprove(final Set visitedHashes, final Map diff, final Hash reference, final Hash extraTip, int depth, final int iterations, Random seed) throws Exception { - - long startTime = System.nanoTime(); - if (depth > maxDepth) { - depth = maxDepth; - } - - if (milestone.latestSolidSubtangleMilestoneIndex > milestoneStartIndex || - milestone.latestMilestoneIndex == 
milestoneStartIndex) { - - Set analyzedTips = new HashSet<>(); - Set maxDepthOk = new HashSet<>(); - try { - Hash tip = entryPoint(reference, extraTip, depth); - Map cumulativeWeights = calculateCumulativeWeight(visitedHashes, tip, - extraTip != null, new HashSet<>()); - analyzedTips.clear(); - if (ledgerValidator.updateDiff(visitedHashes, diff, tip)) { - return markovChainMonteCarlo(visitedHashes, diff, tip, extraTip, cumulativeWeights, iterations, milestone.latestSolidSubtangleMilestoneIndex - depth * 2, maxDepthOk, seed); - } else { - throw new RuntimeException("starting tip failed consistency check: " + tip.toString()); - } - } catch (Exception e) { - e.printStackTrace(); - log.error("Encountered error: " + e.getLocalizedMessage()); - throw e; - } finally { - API.incEllapsedTime_getTxToApprove(System.nanoTime() - startTime); - } - } - return null; - } - - Hash entryPoint(final Hash reference, final Hash extraTip, final int depth) throws Exception { - - if (extraTip == null) { - //trunk - return reference != null ? 
reference : milestone.latestSolidSubtangleMilestone; - } - - //branch (extraTip) - int milestoneIndex = Math.max(milestone.latestSolidSubtangleMilestoneIndex - depth - 1, 0); - MilestoneViewModel milestoneViewModel = - MilestoneViewModel.findClosestNextMilestone(tangle, milestoneIndex, testnet, milestoneStartIndex); - if (milestoneViewModel != null && milestoneViewModel.getHash() != null) { - return milestoneViewModel.getHash(); - } - - return milestone.latestSolidSubtangleMilestone; - } - - Hash markovChainMonteCarlo(final Set visitedHashes, final Map diff, Hash tip, Hash extraTip, Map cumulativeWeight, - int iterations, int maxDepth, Set maxDepthOk, Random seed) throws Exception { - Map monteCarloIntegrations = new HashMap<>(); - Hash tail; - for(int i = iterations; i-- > 0; ) { - tail = randomWalk(visitedHashes, diff, tip, extraTip, cumulativeWeight, - maxDepth, maxDepthOk, seed); - if(monteCarloIntegrations.containsKey(tail)) { - monteCarloIntegrations.put(tail, monteCarloIntegrations.get(tail) + 1); - } - else { - monteCarloIntegrations.put(tail, 1); - } - } - return monteCarloIntegrations.entrySet().stream().reduce((a, b) -> { - if (a.getValue() > b.getValue()) { - return a; - } - else if (a.getValue() < b.getValue()) { - return b; - } - else if (seed.nextBoolean()) { - return a; - } - else { - return b; - } - }).map(Map.Entry::getKey).orElse(null); - } - - /** - * Performs a walk from {@code start} until you reach a tip or {@code extraTip}. The path depends of the values - * of transaction weights given in {@code cumulativeWeights}. If a tx weight is missing, then calculate it on - * the fly. - * - * @param visitedHashes hashes of transactions that were validated and their weights can be disregarded when we have - * {@code extraTip} is not {@code null}. - * @param diff map of address to change in balance since last snapshot. - * @param start hash of the transaction that starts the walk. - * @param extraTip an extra ending point for the walk. 
If not null the walk will ignore the weights of - * {@code visitedHashes}. - * @param cumulativeWeights maps transaction hashes to weights. Missing data is computed by this method. - * @param maxDepth the transactions we are traversing may not be below this depth measured in number of snapshots. - * @param maxDepthOk transaction hashes that we know are not below {@code maxDepth} - * @param rnd generates random doubles to make the walk less deterministic - * @return a tip's hash - * @throws Exception - */ - Hash randomWalk(final Set visitedHashes, final Map diff, final Hash start, final Hash extraTip, final Map cumulativeWeights, final int maxDepth, final Set maxDepthOk, Random rnd) throws Exception { - Hash tip = start, tail = tip; - Hash[] tips; - Set tipSet; - Set analyzedTips = new HashSet<>(); - int traversedTails = 0; - TransactionViewModel transactionViewModel; - int approverIndex; - double ratingWeight; - double[] walkRatings; - Map myDiff = new HashMap<>(diff); - Set myApprovedHashes = new HashSet<>(visitedHashes); - - while (tip != null) { - transactionViewModel = TransactionViewModel.fromHash(tangle, tip); - tipSet = transactionViewModel.getApprovers(tangle).getHashes(); - if (transactionViewModel.getCurrentIndex() == 0) { - if (transactionViewModel.getType() == TransactionViewModel.PREFILLED_SLOT) { - log.info("Reason to stop: transactionViewModel == null"); - messageQ.publish("rtsn %s", transactionViewModel.getHash()); - break; - } - else if (!transactionValidator.checkSolidity(transactionViewModel.getHash(), false)) { - log.info("Reason to stop: !checkSolidity"); - messageQ.publish("rtss %s", transactionViewModel.getHash()); - break; - } - else if (belowMaxDepth(transactionViewModel.getHash(), maxDepth, maxDepthOk)) { - log.info("Reason to stop: belowMaxDepth"); - break; - } - else if (!ledgerValidator.updateDiff(myApprovedHashes, myDiff, transactionViewModel.getHash())) { - log.info("Reason to stop: !LedgerValidator"); - messageQ.publish("rtsv %s", 
transactionViewModel.getHash()); - break; - } - else if (transactionViewModel.getHash().equals(extraTip)) { - log.info("Reason to stop: transactionViewModel==extraTip"); - messageQ.publish("rtsd %s", transactionViewModel.getHash()); - break; - } - // set the tail here! - tail = tip; - traversedTails++; - } - if (tipSet.size() == 0) { - log.info("Reason to stop: TransactionViewModel is a tip"); - messageQ.publish("rtst %s", tip); - break; - } - else if (tipSet.size() == 1) { - Iterator hashIterator = tipSet.iterator(); - if (hashIterator.hasNext()) { - tip = hashIterator.next(); - } - else { - tip = null; - } - } - else { - // walk to the next approver - tips = tipSet.toArray(new Hash[tipSet.size()]); - if (!cumulativeWeights.containsKey(IotaUtils.getSubHash(tip, SUBHASH_LENGTH))) { - cumulativeWeights.putAll(calculateCumulativeWeight(myApprovedHashes, tip, extraTip != null, - analyzedTips)); - analyzedTips.clear(); - } - - walkRatings = new double[tips.length]; - double maxRating = 0; - ByteBuffer subHash = IotaUtils.getSubHash(tip, SUBHASH_LENGTH); - long tipRating = cumulativeWeights.get(subHash); - for (int i = 0; i < tips.length; i++) { - subHash = IotaUtils.getSubHash(tip, SUBHASH_LENGTH); - //transition probability = ((Hx-Hy)^-3)/maxRating - walkRatings[i] = Math.pow(tipRating - cumulativeWeights.getOrDefault(subHash,0), -3); - maxRating += walkRatings[i]; - } - ratingWeight = rnd.nextDouble() * maxRating; - for (approverIndex = tips.length; approverIndex-- > 1; ) { - ratingWeight -= walkRatings[approverIndex]; - if (ratingWeight <= 0) { - break; - } - } - tip = tips[approverIndex]; - if (transactionViewModel.getHash().equals(tip)) { - log.info("Reason to stop: transactionViewModel==itself"); - messageQ.publish("rtsl %s", transactionViewModel.getHash()); - break; - } - } - } - log.info("Tx traversed to find tip: " + traversedTails); - messageQ.publish("mctn %d", traversedTails); - return tail; - } - - static long capSum(long a, long b, long max) { - if (a + b 
< 0 || a + b > max) { - return max; - } - return a + b; - } - - /** - * Updates the cumulative weight of txs. - * A cumulative weight of each tx is 1 + the number of ancestors it has. - * - * See https://github.com/alongalky/iota-docs/blob/master/cumulative.md - * - * - * @param myApprovedHashes the current hashes of the snapshot at the time of calculation - * @param currentTxHash the transaction from where the analysis starts - * @param confirmLeftBehind if true attempt to give more weight to previously - * unconfirmed txs - * @throws Exception if there is a problem accessing the db - */ - Map calculateCumulativeWeight(Set myApprovedHashes, Hash currentTxHash, boolean confirmLeftBehind, - Set analyzedTips) throws Exception { - log.info("Start calculating cw starting with tx hash {}", currentTxHash); - log.debug("Start topological sort"); - long start = System.currentTimeMillis(); - LinkedHashSet txHashesToRate = sortTransactionsInTopologicalOrder(currentTxHash); - log.debug("Subtangle size: {}", txHashesToRate.size()); - log.debug("Topological sort done. 
Start traversing on txs in order and calculate weight"); - Map cumulativeWeights = calculateCwInOrder(txHashesToRate, myApprovedHashes, confirmLeftBehind, - analyzedTips); - log.debug("Cumulative weights calculation done in {} ms", System.currentTimeMillis() - start); - return cumulativeWeights; - } - - private LinkedHashSet sortTransactionsInTopologicalOrder(Hash startTx) throws Exception { - LinkedHashSet sortedTxs = new LinkedHashSet<>(); - Set temporary = new HashSet<>(); - Deque stack = new ArrayDeque<>(); - Map> txToDirectApprovers = new HashMap<>(); - - stack.push(startTx); - while (CollectionUtils.isNotEmpty(stack)) { - Hash txHash = stack.peek(); - if (!sortedTxs.contains(txHash)) { - Collection appHashes = getTxDirectApproversHashes(txHash, txToDirectApprovers); - if (CollectionUtils.isNotEmpty(appHashes)) { - Hash txApp = getAndRemoveApprover(appHashes); - if (!temporary.add(txApp)) { - throw new IllegalStateException("A circle or a collision was found in a subtangle on hash: " - + txApp); - } - stack.push(txApp); - continue; - } - } - else { - txHash = stack.pop(); - temporary.remove(txHash); - continue; - } - sortedTxs.add(txHash); - } - - return sortedTxs; - } - - private Hash getAndRemoveApprover(Collection appHashes) { - Iterator hashIterator = appHashes.iterator(); - Hash txApp = hashIterator.next(); - hashIterator.remove(); - return txApp; - } - - private Collection getTxDirectApproversHashes(Hash txHash, - Map> txToDirectApprovers) throws Exception { - Collection txApprovers = txToDirectApprovers.get(txHash); - if (txApprovers == null) { - ApproveeViewModel approvers = TransactionViewModel.fromHash(tangle, txHash).getApprovers(tangle); - Collection appHashes = CollectionUtils.emptyIfNull(approvers.getHashes()); - txApprovers = new HashSet<>(appHashes.size()); - for (Hash appHash : appHashes) { - //if not genesis (the tx that confirms itself) - if (ObjectUtils.notEqual(Hash.NULL_HASH, appHash)) { - txApprovers.add(appHash); - } - } - 
txToDirectApprovers.put(txHash, txApprovers); - } - return txApprovers; - } - - //must specify using LinkedHashSet since Java has no interface that guarantees uniqueness and insertion order - private Map calculateCwInOrder(LinkedHashSet txsToRate, - Set myApprovedHashes, boolean confirmLeftBehind, Set analyzedTips) throws Exception { - Map> txSubHashToApprovers = new HashMap<>(); - Map txSubHashToCumulativeWeight = new HashMap<>(); - - Iterator txHashIterator = txsToRate.iterator(); - while (txHashIterator.hasNext()) { - Hash txHash = txHashIterator.next(); - if (analyzedTips.add(txHash)) { - txSubHashToCumulativeWeight = updateCw(txSubHashToApprovers, txSubHashToCumulativeWeight, txHash, - myApprovedHashes, confirmLeftBehind); - } - txSubHashToApprovers = updateApproversAndReleaseMemory(txSubHashToApprovers, txHash, myApprovedHashes, - confirmLeftBehind); - txHashIterator.remove(); - } - - return txSubHashToCumulativeWeight; - } - - - private Map> updateApproversAndReleaseMemory( - Map> txSubHashToApprovers, - Hash txHash, Set myApprovedHashes, boolean confirmLeftBehind) throws Exception { - ByteBuffer txSubHash = IotaUtils.getSubHash(txHash, SUBHASH_LENGTH); - BoundedSet approvers = - new BoundedHashSet<>(SetUtils.emptyIfNull(txSubHashToApprovers.get(txSubHash)), MAX_ANCESTORS_SIZE); - - if (shouldIncludeTransaction(txHash, myApprovedHashes, confirmLeftBehind)) { - approvers.add(txSubHash); - } - - TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash); - Hash trunkHash = transactionViewModel.getTrunkTransactionHash(); - Buffer trunkSubHash = IotaUtils.getSubHash(trunkHash, SUBHASH_LENGTH); - Hash branchHash = transactionViewModel.getBranchTransactionHash(); - Buffer branchSubHash = IotaUtils.getSubHash(branchHash, SUBHASH_LENGTH); - if (!approvers.isFull()) { - Set trunkApprovers = new BoundedHashSet<>(approvers, MAX_ANCESTORS_SIZE); - trunkApprovers.addAll(CollectionUtils.emptyIfNull(txSubHashToApprovers.get(trunkSubHash))); - 
Set branchApprovers = new BoundedHashSet<>(approvers, MAX_ANCESTORS_SIZE); - branchApprovers.addAll(CollectionUtils.emptyIfNull(txSubHashToApprovers.get(branchSubHash))); - txSubHashToApprovers.put(trunkSubHash, trunkApprovers); - txSubHashToApprovers.put(branchSubHash, branchApprovers); - } - else { - txSubHashToApprovers.put(trunkSubHash, approvers); - txSubHashToApprovers.put(branchSubHash, approvers); - } - txSubHashToApprovers.remove(txSubHash); - - return txSubHashToApprovers; - } - - private static boolean shouldIncludeTransaction(Hash txHash, Set myApprovedSubHashes, - boolean confirmLeftBehind) { - return !confirmLeftBehind || !SafeUtils.isContaining(myApprovedSubHashes, txHash); - } - - private Map updateCw(Map> txSubHashToApprovers, - Map txToCumulativeWeight, Hash txHash, - Set myApprovedHashes, boolean confirmLeftBehind) { - ByteBuffer txSubHash = IotaUtils.getSubHash(txHash, SUBHASH_LENGTH); - Set approvers = txSubHashToApprovers.get(txSubHash); - int weight = CollectionUtils.emptyIfNull(approvers).size(); - if (shouldIncludeTransaction(txHash, myApprovedHashes, confirmLeftBehind)) { - ++weight; - } - txToCumulativeWeight.put(txSubHash, weight); - return txToCumulativeWeight; - } - - public int getMaxDepth() { - return maxDepth; - } - - boolean belowMaxDepth(Hash tip, int depth, Set maxDepthOk) throws Exception { - //if tip is confirmed stop - if (TransactionViewModel.fromHash(tangle, tip).snapshotIndex() >= depth) { - return false; - } - //if tip unconfirmed, check if any referenced tx is confirmed below maxDepth - Queue nonAnalyzedTransactions = new LinkedList<>(Collections.singleton(tip)); - Set analyzedTransactions = new HashSet<>(); - Hash hash; - while ((hash = nonAnalyzedTransactions.poll()) != null) { - if (analyzedTransactions.add(hash)) { - TransactionViewModel transaction = TransactionViewModel.fromHash(tangle, hash); - if (transaction.snapshotIndex() != 0 && transaction.snapshotIndex() < depth) { - return true; - } - if 
(transaction.snapshotIndex() == 0) { - if (maxDepthOk.contains(hash)) { - //log.info("Memoization!"); - } - else { - nonAnalyzedTransactions.offer(transaction.getTrunkTransactionHash()); - nonAnalyzedTransactions.offer(transaction.getBranchTransactionHash()); - } - } - } - } - maxDepthOk.add(tip); - return false; - } -} diff --git a/src/main/java/com/iota/iri/service/TipsSolidifier.java b/src/main/java/com/iota/iri/service/TipsSolidifier.java new file mode 100644 index 0000000000..010d21fbba --- /dev/null +++ b/src/main/java/com/iota/iri/service/TipsSolidifier.java @@ -0,0 +1,76 @@ +package com.iota.iri.service; + +import com.iota.iri.TransactionValidator; +import com.iota.iri.controllers.TipsViewModel; +import com.iota.iri.controllers.TransactionViewModel; +import com.iota.iri.model.Hash; +import com.iota.iri.storage.Tangle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TipsSolidifier { + + private final Logger log = LoggerFactory.getLogger(TipsSolidifier.class); + private final Tangle tangle; + private final TipsViewModel tipsViewModel; + private final TransactionValidator transactionValidator; + + private boolean shuttingDown = false; + private int RESCAN_TX_TO_REQUEST_INTERVAL = 750; + private Thread solidityRescanHandle; + + public TipsSolidifier(final Tangle tangle, + final TransactionValidator transactionValidator, + final TipsViewModel tipsViewModel) { + this.tangle = tangle; + this.transactionValidator = transactionValidator; + this.tipsViewModel = tipsViewModel; + } + + public void init() { + solidityRescanHandle = new Thread(() -> { + + while (!shuttingDown) { + try { + scanTipsForSolidity(); + } catch (Exception e) { + log.error("Error during solidity scan : {}", e); + } + try { + Thread.sleep(RESCAN_TX_TO_REQUEST_INTERVAL); + } catch (InterruptedException e) { + log.error("Solidity rescan interrupted."); + } + } + }, "Tip Solidity Rescan"); + solidityRescanHandle.start(); + } + + private void scanTipsForSolidity() throws 
Exception { + int size = tipsViewModel.nonSolidSize(); + if (size != 0) { + Hash hash = tipsViewModel.getRandomNonSolidTipHash(); + boolean isTip = true; + if (hash != null && TransactionViewModel.fromHash(tangle, hash).getApprovers(tangle).size() != 0) { + tipsViewModel.removeTipHash(hash); + isTip = false; + } + if (hash != null && isTip && transactionValidator.checkSolidity(hash, false)) { + //if(hash != null && TransactionViewModel.fromHash(hash).isSolid() && isTip) { + tipsViewModel.setSolid(hash); + } + } + } + + public void shutdown() { + shuttingDown = true; + try { + if (solidityRescanHandle != null && solidityRescanHandle.isAlive()) { + solidityRescanHandle.join(); + } + } catch (Exception e) { + log.error("Error in shutdown", e); + } + + } +} diff --git a/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java b/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java index 0cbc76980b..d7e4e66977 100644 --- a/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java +++ b/src/main/java/com/iota/iri/service/tipselection/impl/TipSelectorImpl.java @@ -42,17 +42,16 @@ public int getMaxDepth() { public TipSelectorImpl(Tangle tangle, LedgerValidator ledgerValidator, TransactionValidator transactionValidator, + EntryPointSelector entryPointSelector, + RatingCalculator ratingCalculator, + Walker walkerAlpha, Milestone milestone, - int maxDepth, - MessageQ messageQ, - boolean testnet, - int milestoneStartIndex, - double alpha) { + int maxDepth) { - this.entryPointSelector = new EntryPointSelectorImpl(tangle, milestone, testnet, milestoneStartIndex); - this.ratingCalculator = new CumulativeWeightCalculator(tangle); + this.entryPointSelector = entryPointSelector; + this.ratingCalculator = ratingCalculator; - this.walker = new WalkerAlpha(alpha, new SecureRandom(), tangle, messageQ, new TailFinderImpl(tangle)); + this.walker = walkerAlpha; //used by walkValidator this.maxDepth = maxDepth; diff --git 
a/src/test/java/com/iota/iri/integration/NodeIntegrationTests.java b/src/test/java/com/iota/iri/integration/NodeIntegrationTests.java index 3f27ce53a4..10d06c5694 100644 --- a/src/test/java/com/iota/iri/integration/NodeIntegrationTests.java +++ b/src/test/java/com/iota/iri/integration/NodeIntegrationTests.java @@ -17,9 +17,7 @@ import org.junit.Before; import org.junit.rules.TemporaryFolder; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -117,7 +115,7 @@ Runnable spawnCoordinator(API api, long spacing) { return () -> { long index = 0; try { - newMilestone(api, new Hash[]{Hash.NULL_HASH, Hash.NULL_HASH}, index++); + newMilestone(api, Arrays.asList(Hash.NULL_HASH, Hash.NULL_HASH), index++); } catch (Exception e) { e.printStackTrace(); } @@ -137,10 +135,10 @@ Runnable spawnCoordinator(API api, long spacing) { } private void sendMilestone(API api, long index) throws Exception { - newMilestone(api, api.getTransactionToApproveStatement(10, null, 1), index); + newMilestone(api, api.getTransactionToApproveStatement(10, Optional.empty()), index); } - private void newMilestone(API api, Hash[] tips, long index) throws Exception { + private void newMilestone(API api, List tips, long index) throws Exception { List transactions = new ArrayList<>(); transactions.add(new int[TRINARY_SIZE]); Converter.copyTrits(index, transactions.get(0), OBSOLETE_TAG_TRINARY_OFFSET, OBSOLETE_TAG_TRINARY_SIZE); @@ -148,7 +146,7 @@ private void newMilestone(API api, Hash[] tips, long index) throws Exception { Hash coordinator = new Hash(Configuration.TESTNET_COORDINATOR_ADDRESS); System.arraycopy(coordinator.trits(), 0, transactions.get(0), ADDRESS_TRINARY_OFFSET, ADDRESS_TRINARY_SIZE); setBundleHash(transactions, null); - List elements = api.attachToTangleStatement(tips[0], tips[1], 13, 
transactions.stream().map(Converter::trytes).collect(Collectors.toList())); + List elements = api.attachToTangleStatement(tips.get(0), tips.get(0), 13, transactions.stream().map(Converter::trytes).collect(Collectors.toList())); api.storeTransactionStatement(elements); api.broadcastTransactionStatement(elements); } From 1e0d340d5f0d4c3c6311f5dfe98cd502ce648629 Mon Sep 17 00:00:00 2001 From: Gal Rogozinski Date: Wed, 13 Jun 2018 11:04:29 +0300 Subject: [PATCH 43/45] CI - download testnet Db files from S3 (#806) --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index 56825e9999..acbb3c45ce 100644 --- a/.travis.yml +++ b/.travis.yml @@ -37,6 +37,8 @@ script: - git clone https://github.com/iotaledger/iri-regression-tests.git - cd iri-regression-tests - git checkout -f master + - curl -LO https://s3.eu-central-1.amazonaws.com/iotaledger-dbfiles/dev/testnet_files.tgz + - tar -xzf testnet_files.tgz - mkdir iri - cp -rf ../target iri/target - bash run_all_stable_tests.sh $VERSION From 50ab0357357d73a86e78904af5f6e21f3d6e0bd2 Mon Sep 17 00:00:00 2001 From: Alon Elmaliah Date: Mon, 18 Jun 2018 17:09:17 +0300 Subject: [PATCH 44/45] tip selection: set default alpha value to 0.001 (#817) --- src/main/java/com/iota/iri/conf/Configuration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/iota/iri/conf/Configuration.java b/src/main/java/com/iota/iri/conf/Configuration.java index 9aa9e5cb52..63c94c973f 100644 --- a/src/main/java/com/iota/iri/conf/Configuration.java +++ b/src/main/java/com/iota/iri/conf/Configuration.java @@ -169,7 +169,7 @@ public enum DefaultConfSettings { conf.put(DefaultConfSettings.TRANSACTION_PACKET_SIZE.name(), PACKET_SIZE); conf.put(DefaultConfSettings.REQUEST_HASH_SIZE.name(), REQ_HASH_SIZE); conf.put(DefaultConfSettings.SNAPSHOT_TIME.name(), GLOBAL_SNAPSHOT_TIME); - conf.put(DefaultConfSettings.TIPSELECTION_ALPHA.name(), "0.1"); + 
conf.put(DefaultConfSettings.TIPSELECTION_ALPHA.name(), "0.001"); } From dda05dd31fc2d09af550df02dd02a80a9d5d836f Mon Sep 17 00:00:00 2001 From: alon-e Date: Wed, 20 Jun 2018 12:30:33 +0300 Subject: [PATCH 45/45] version bump v1.5.0 --- DOCKER.md | 6 +++--- README.md | 3 +-- changelog.txt | 15 +++++++++++++++ pom.xml | 2 +- src/main/java/com/iota/iri/IRI.java | 2 +- 5 files changed, 21 insertions(+), 7 deletions(-) diff --git a/DOCKER.md b/DOCKER.md index 2867a81e79..53ec517fc2 100644 --- a/DOCKER.md +++ b/DOCKER.md @@ -2,13 +2,13 @@ Run the official iotaledger/iri container, passing the mandatory -p option: -```docker run iotaledger/iri:v1.4.2.4 -p 14265``` +```docker run iotaledger/iri:v1.5.0 -p 14265``` This will get your a running IRI with its API listening on port 14265, no neighbours and an empty database. The IRI Docker container by default expects data at /iri/data. Use the `-v` option of the `docker run` command to mount volumes so to have persistent data. You can also pass more command line options to the docker run command and those will be passed to IRI. If you want to use a iri.ini file with the docker container, supposing it's stored under /path/to/conf/iri.ini on your docker host, then pass `-v /path/to/conf:/iri/conf` and add -c /iri/conf/iri.ini as docker run arguments. So for example the `docker run` command above would become: -```docker run -v /path/to/conf:/iri/conf -v /path/to/data:/iri/data iotaledger/iri:v1.4.2.4 -p 14265 -c /iri/conf/iri.ini``` +```docker run -v /path/to/conf:/iri/conf -v /path/to/data:/iri/data iotaledger/iri:v1.5.0 -p 14265 -c /iri/conf/iri.ini``` Please refer to the IRI documentation for further command line options and iri.ini options. 
@@ -61,7 +61,7 @@ ExecStart=/usr/bin/docker run \ -p 14265:14265 \ -p 15600:15600 \ -p 14600:14600/udp \ -iotaledger/iri:v1.4.2.4 \ +iotaledger/iri:v1.5.0 \ -p 14265 \ --zmq-enabled \ --testnet diff --git a/README.md b/README.md index 6e05f6451e..5ec45fa694 100644 --- a/README.md +++ b/README.md @@ -16,8 +16,7 @@ It is specially designed for users seeking a fast, efficient and fully-compatibl Running an IRI node also allows light wallet users a node to directly connect to for their own wallet transactions. -* **Latest release:** 1.4.2.4 Release -* **License:** GPLv3 +-* **License:** GPLv3 # How to get started diff --git a/changelog.txt b/changelog.txt index b9aa3a31d1..df2bb12c41 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,3 +1,18 @@ +1.5.0 + - Rework of the Tip Selection algorithm (#778) + - Validate the alpha value (#817) + - TipSelection: update API reference (#773) + - Inserted check for number of available processors. (#758) + - Improved Docker support (#744) + - Faster PearlDiver (PoW) (#733) + - Kerl hashing speed improvement (#628) + - Logging routing rework (#727) + + Minor changes and fixes + - Fixed `attachmentTimestampUpperBound` value (#777) + - Fixed `getBalances` `tips` parameter parsing (#776) + - Added hash to `tx_trytes` ZMQ topic (#739) + 1.4.2.4 - Tag indexing (#728) - TCP networking issues fixed (#592) diff --git a/pom.xml b/pom.xml index 6353698d92..e1a6cc1e5e 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ com.iota iri - 1.4.2.4 + 1.5.0 IRI IOTA Reference Implementation diff --git a/src/main/java/com/iota/iri/IRI.java b/src/main/java/com/iota/iri/IRI.java index 310498dc93..23ad86fcdc 100644 --- a/src/main/java/com/iota/iri/IRI.java +++ b/src/main/java/com/iota/iri/IRI.java @@ -22,7 +22,7 @@ public class IRI { public static final String MAINNET_NAME = "IRI"; public static final String TESTNET_NAME = "IRI Testnet"; - public static final String VERSION = "1.4.2.4"; + public static final String VERSION = "1.5.0"; public static void 
main(String[] args) throws Exception { // Logging is configured first before any references to Logger or LoggerFactory.