forked from GitHub-Mirror/riotX-android
Timeline : fix merging issues
@@ -4,7 +4,11 @@ import com.zhuinden.monarchy.Monarchy
 import im.vector.matrix.android.InstrumentedTest
 import im.vector.matrix.android.api.session.events.model.Event
 import im.vector.matrix.android.api.session.events.model.EventType
-import im.vector.matrix.android.internal.database.helper.*
+import im.vector.matrix.android.internal.database.helper.add
+import im.vector.matrix.android.internal.database.helper.addAll
+import im.vector.matrix.android.internal.database.helper.isUnlinked
+import im.vector.matrix.android.internal.database.helper.lastStateIndex
+import im.vector.matrix.android.internal.database.helper.merge
 import im.vector.matrix.android.internal.database.model.ChunkEntity
 import im.vector.matrix.android.internal.session.room.timeline.PaginationDirection
 import io.realm.Realm
@@ -102,7 +106,7 @@ internal class ChunkEntityTest : InstrumentedTest {
             val chunk2: ChunkEntity = realm.createObject()
             chunk1.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS)
             chunk2.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS)
-            chunk1.merge(chunk2, PaginationDirection.BACKWARDS)
+            chunk1.merge("roomId", chunk2, PaginationDirection.BACKWARDS)
             chunk1.events.size shouldEqual 60
         }
     }
@@ -118,7 +122,7 @@ internal class ChunkEntityTest : InstrumentedTest {
             chunk2.isLast = false
             chunk1.addAll("roomId", eventsForChunk1, PaginationDirection.FORWARDS)
             chunk2.addAll("roomId", eventsForChunk2, PaginationDirection.BACKWARDS)
-            chunk1.merge(chunk2, PaginationDirection.BACKWARDS)
+            chunk1.merge("roomId", chunk2, PaginationDirection.BACKWARDS)
             chunk1.events.size shouldEqual 40
             chunk1.isLast.shouldBeTrue()
         }
@@ -131,7 +135,7 @@ internal class ChunkEntityTest : InstrumentedTest {
             val chunk2: ChunkEntity = realm.createObject()
             chunk1.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
             chunk2.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = false)
-            chunk1.merge(chunk2, PaginationDirection.BACKWARDS)
+            chunk1.merge("roomId", chunk2, PaginationDirection.BACKWARDS)
             chunk1.isUnlinked().shouldBeFalse()
         }
     }
@@ -143,7 +147,7 @@ internal class ChunkEntityTest : InstrumentedTest {
             val chunk2: ChunkEntity = realm.createObject()
             chunk1.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
             chunk2.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
-            chunk1.merge(chunk2, PaginationDirection.BACKWARDS)
+            chunk1.merge("roomId", chunk2, PaginationDirection.BACKWARDS)
             chunk1.isUnlinked().shouldBeTrue()
         }
     }
@@ -157,7 +161,7 @@ internal class ChunkEntityTest : InstrumentedTest {
             chunk1.prevToken = prevToken
             chunk1.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
             chunk2.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
-            chunk1.merge(chunk2, PaginationDirection.FORWARDS)
+            chunk1.merge("roomId", chunk2, PaginationDirection.FORWARDS)
             chunk1.prevToken shouldEqual prevToken
         }
     }
@@ -171,7 +175,7 @@ internal class ChunkEntityTest : InstrumentedTest {
             chunk1.nextToken = nextToken
             chunk1.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
             chunk2.addAll("roomId", createFakeListOfEvents(30), PaginationDirection.BACKWARDS, isUnlinked = true)
-            chunk1.merge(chunk2, PaginationDirection.BACKWARDS)
+            chunk1.merge("roomId", chunk2, PaginationDirection.BACKWARDS)
             chunk1.nextToken shouldEqual nextToken
         }
     }
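Every hunk above makes the same change: the ChunkEntity merge helper now takes the room id as an explicit first argument, merge("roomId", chunkToMerge, direction) instead of merge(chunkToMerge, direction), and the wildcard import of the database helper package is replaced by explicit imports of add, addAll, isUnlinked, lastStateIndex and merge. Below is a minimal, self-contained Kotlin sketch of that new call shape only; the types and the merge body are simplified stand-ins for illustration, not the SDK's Realm-backed entities or its actual merge logic.

// Minimal sketch of the updated call shape; these types are simplified
// stand-ins for illustration, not the SDK's Realm-backed entities.
enum class PaginationDirection { FORWARDS, BACKWARDS }

class ChunkEntity(val roomId: String, val events: MutableList<String> = mutableListOf())

// Hypothetical extension mirroring the new signature exercised by the tests:
// the room id is passed explicitly as the first argument.
fun ChunkEntity.merge(roomId: String, chunkToMerge: ChunkEntity, direction: PaginationDirection) {
    // Assumed behaviour for this sketch only: refuse cross-room merges,
    // then append or prepend the other chunk's events.
    require(roomId == this.roomId && roomId == chunkToMerge.roomId) {
        "Chunks from different rooms cannot be merged"
    }
    when (direction) {
        PaginationDirection.BACKWARDS -> events.addAll(chunkToMerge.events)    // append at the end
        PaginationDirection.FORWARDS  -> events.addAll(0, chunkToMerge.events) // insert at the front
    }
}

fun main() {
    val chunk1 = ChunkEntity("roomId", mutableListOf("eventA", "eventB"))
    val chunk2 = ChunkEntity("roomId", mutableListOf("eventC"))
    // New call shape, as used throughout the updated test file.
    chunk1.merge("roomId", chunk2, PaginationDirection.BACKWARDS)
    check(chunk1.events.size == 3)
}

The check in main() mirrors the style of the assertions in the tests above (for example chunk1.events.size shouldEqual 60), just without the test framework.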