@@ -32,9 +32,9 @@ class ChunkedAdjacencyListsTest {
 
     @Test
     void shouldWriteSingleTargetList() {
-        var adjacencyLists = ChunkedAdjacencyLists.of(0, 0);
+        var adjacencyLists = ChunkedAdjacencyLists.of(0, 0);
 
-        var input = new long[]{ 42L, 1337L, 5L};
+        var input = new long[]{42L, 1337L, 5L};
         adjacencyLists.add(0, input, 0, 3, 3);
 
         var expectedTargets = new long[]{42L, 1337L, 5L};
@@ -73,26 +73,26 @@ void shouldWriteMultipleTimesIntoTargetList() {
     void shouldWriteWithProperties() {
         var adjacencyLists = ChunkedAdjacencyLists.of(2, 0);
 
-        var input = new long[]{ 42L, 1337L, 5L, 6L};
-        var properties = new long[][]{ {42L, 1337L, 5L, 6L}, {8L, 8L, 8L, 8L}};
+        var input = new long[]{42L, 1337L, 5L, 6L};
+        var properties = new long[][]{{42L, 1337L, 5L, 6L}, {8L, 8L, 8L, 8L}};
         adjacencyLists.add(0, input, properties, 0, 4, 4);
 
         adjacencyLists.consume((nodeId, targets, actualProperties, position, length) -> assertThat(actualProperties)
-            .hasDimensions(2, 7)
-            .contains(new long[]{42L, 1337L, 5L, 6L, 0L, 0L, 0L}, Index.atIndex(0))
-            .contains(new long[]{8L, 8L, 8L, 8L, 0L, 0L, 0L}, Index.atIndex(1)));
+            .hasDimensions(2, 4)
+            .contains(new long[]{42L, 1337L, 5L, 6L}, Index.atIndex(0))
+            .contains(new long[]{8L, 8L, 8L, 8L}, Index.atIndex(1)));
     }
 
     @Test
     void shouldAllowConsumptionOfAllElements() {
         var adjacencyLists = ChunkedAdjacencyLists.of(0, 0);
 
-        adjacencyLists.add(1, new long[]{ 42L, 1337L, 5L}, 0, 3, 3);
-        adjacencyLists.add(8, new long[]{ 1L, 2L}, 0, 2, 2);
+        adjacencyLists.add(1, new long[]{42L, 1337L, 5L}, 0, 3, 3);
+        adjacencyLists.add(8, new long[]{1L, 2L}, 0, 2, 2);
 
         // Skip 2 pages
         var largeIndex = 3 * 4096 + 1;
-        adjacencyLists.add(largeIndex, new long[]{ 42L, 42L}, 0, 2, 2);
+        adjacencyLists.add(largeIndex, new long[]{42L, 42L}, 0, 2, 2);
 
         adjacencyLists.consume((id, targets, properties, compressedBytesSize, compressedTargets) -> {
             assertThat(properties).isNull();
@@ -160,9 +160,8 @@ void addWithPreAggregatedWeights() {
             assertThat(actualTargets).containsExactly(expectedTargets);
 
             assertThat(actualProperties)
-                // there is an additional entry, because we increase the buffers in size
-                .hasDimensions(1, 6)
-                .contains(new long[]{3L, 3L, 4L, 0L, 0L, 0L}, Index.atIndex(0));
+                // there is an additional entry, because we double the buffers in size
+                .hasDimensions(1, 4).contains(new long[]{3L, 3L, 4L, 0L}, Index.atIndex(0));
         });
     }
 }
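The updated expectations reflect the comment in the last hunk: property buffers grow by doubling, so three written values land in a buffer of length four with one trailing zero. Below is a minimal sketch of that growth rule only; the class DoublingBuffer, its grow method, and the starting capacity of 2 are illustrative assumptions, not code from ChunkedAdjacencyLists.

import java.util.Arrays;

final class DoublingBuffer {
    // Assumed starting capacity for illustration; the real initial size is not shown in the diff.
    private static final int INITIAL_CAPACITY = 2;

    // Double the capacity until it can hold `required` entries, then copy over.
    static long[] grow(long[] buffer, int required) {
        int capacity = Math.max(buffer.length, INITIAL_CAPACITY);
        while (capacity < required) {
            capacity <<= 1;
        }
        return capacity == buffer.length ? buffer : Arrays.copyOf(buffer, capacity);
    }

    public static void main(String[] args) {
        long[] values = grow(new long[0], 3);
        values[0] = 3L;
        values[1] = 3L;
        values[2] = 4L;
        // Three entries requested, capacity doubled to 4, leaving one trailing 0L,
        // which matches the expected {3L, 3L, 4L, 0L} row asserted in the test above.
        System.out.println(Arrays.toString(values)); // [3, 3, 4, 0]
    }
}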