package se.wollan.tolr
import se.wollan.datascope.*
import se.wollan.time.HLCTimestamp
internal interface LogRecordRepo {
val insertChannel: FanoutReceiveChannel<List<LogRecord>>
/** get one page of records strictly later than (not including) the given node-timestamp, filtered by type */
suspend fun listByTypeLaterThan(pattern: RecordTypePattern, since: NodeTimestamp): LogPage
suspend fun listLatestPerNodeTimestamps(): Map<NodeId, HLCTimestamp>
suspend fun contains(nodeTimestamp: NodeTimestamp): Boolean
/**
* Zero means no rows.
* RowId increases in strictly ascending order on inserts, since rows are never deleted.
* More info: https://sqlite.org/autoinc.html
*/
suspend fun getLargestRowId(): Long
/** get one page of records with timestamps later than the given timestamp for each nodeId */
suspend fun listLaterThanNodeTimestamps(timestamps: Map<NodeId, HLCTimestamp>): List<LogRecord>
suspend fun insert(records: List<LogRecord>)
suspend fun insertIfMissing(records: List<LogRecord>): Int
suspend fun update(records: List<LogRecord>)
}
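// Illustrative only: a minimal sketch of how a caller might drain the log with listByTypeLaterThan,
// assuming LogPage.next is null once the final page has been returned (which is how toLogPage below
// builds it). The extension function name and its parameters are hypothetical, not part of this API.
internal suspend fun LogRecordRepo.collectAllLaterThan(pattern: RecordTypePattern, start: NodeTimestamp): List<LogRecord> {
    val all = mutableListOf<LogRecord>()
    var cursor: NodeTimestamp? = start
    while (cursor != null) {
        val page = listByTypeLaterThan(pattern, cursor) // one page, strictly after the cursor
        all += page.records
        cursor = page.next                              // null -> no more pages
    }
    return all
}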
internal class SqliteRecordRepo(
private val db: SqliteDatabase,
private val configurationProvider: ConfigurationProvider) : LogRecordRepo {
private val _insertChannel = FanoutChannel<List<LogRecord>>()
override val insertChannel: FanoutReceiveChannel<List<LogRecord>> = _insertChannel
// TODO: skip internal types when returning data to the consuming project; applies to this and listByType.
// Only implement this once there are actual internal types to begin with, so skip for now!
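// How the query below pages (a reading of the SQL, not an authoritative spec): the inner SELECT computes a
// running byte count with SUM(size) OVER (ORDER BY timestamp, nodeId) and subtracts the row's own size, so
// "total" is the number of bytes before each row; the outer WHERE keeps every row whose preceding bytes still
// fit within the configured page size, which always admits at least the first matching row even when that row
// alone is larger than a page. The row-value comparison (timestamp, nodeId) > (@timestamp, @nodeId) is the
// exclusive cursor.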
override suspend fun listByTypeLaterThan(pattern: RecordTypePattern, since: NodeTimestamp): LogPage =
db.execRead("""SELECT rowid, $COLS, total FROM (
SELECT rowid, $COLS, SUM(size) OVER (ORDER BY timestamp, nodeId) - size AS total
FROM tolr_records
WHERE (timestamp, nodeId) > (@timestamp, @nodeId) ${pattern.whereClause}
) WHERE total <= ${pageSize()}""",
listOf("@timestamp" to since.timestamp.time, "@nodeId" to since.nodeId.value).appendPatternParam(pattern))
.toLogPage()
override suspend fun listLatestPerNodeTimestamps(): Map<NodeId, HLCTimestamp> =
db.execRead("SELECT nodeId, MAX(timestamp) AS timestamp FROM tolr_records GROUP BY nodeId")
.associate { NodeId(it.getString("nodeId")) to HLCTimestamp(it.getLong("timestamp")) }
override suspend fun contains(nodeTimestamp: NodeTimestamp): Boolean =
db.execRead("SELECT 1 FROM tolr_records WHERE timestamp = @timestamp AND nodeId = @nodeId LIMIT 1",
listOf("@timestamp" to nodeTimestamp.timestamp.time, "@nodeId" to nodeTimestamp.nodeId.value)).isNotEmpty()
override suspend fun getLargestRowId(): Long =
db.execRead("SELECT IFNULL(MAX(rowid), 0) FROM tolr_records").singleLong()
override suspend fun listLaterThanNodeTimestamps(timestamps: Map<NodeId, HLCTimestamp>): List<LogRecord> {
if (timestamps.isEmpty())
return emptyList()
val unionAllSql = List(timestamps.size, ::filterOnNodeIdAndLaterThanSql).joinToString(" UNION ALL ")
val sql = """SELECT rowid, $COLS_EXCEPT_SIZE FROM (
SELECT rowid, $COLS_EXCEPT_SIZE, SUM(size) OVER (ORDER BY timestamp, nodeId) AS total FROM ($unionAllSql)
) WHERE total <= ${pageSize()}"""
val params = timestamps.entries
.flatMapIndexed { i, (n, ts) -> listOf("@timestamp$i" to ts.time, "@nodeId$i" to n.value) }
return db.execRead(sql, params).map { it.toDomainModel() }
}
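// Note on the fanout sends in insert and insertIfMissing below: the send is registered as a post-commit hook,
// so insertChannel subscribers presumably only ever observe records once the surrounding transaction has
// committed (an inference from addPostCommitHook, not a documented guarantee).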
override suspend fun insert(records: List<LogRecord>) = db.write {
db.execBatchWrite("INSERT INTO tolr_records ($COLS) VALUES ($PARAMS)",
PARAM_LIST, records.asSequence().map { it.toParamValues() })
db.addPostCommitHook { _insertChannel.send(records) }
}
override suspend fun insertIfMissing(records: List<LogRecord>): Int = db.write {
val preRowId = getLargestRowId()
val affected = db.execBatchWrite("INSERT OR IGNORE INTO tolr_records ($COLS) VALUES ($PARAMS)",
PARAM_LIST, records.asSequence().map { it.toParamValues() })
if (affected < records.size) {
// this is an edge case where we sync with multiple remotes at the same time with the same records
// -> we need to figure out which records were actually inserted
val insertedRecords = db.execRead("SELECT ROWID, $COLS_EXCEPT_SIZE FROM tolr_records WHERE rowid > @preRowId",
listOf("@preRowId" to preRowId)).map { it.toDomainModel() }
db.addPostCommitHook { _insertChannel.send(insertedRecords) }
} else {
db.addPostCommitHook { _insertChannel.send(records) }
}
affected
}
override suspend fun update(records: List<LogRecord>): Unit = db.write {
val affected = db.execBatchWrite(
"UPDATE tolr_records SET type = @type, payload = @payload, size = @size WHERE timestamp = @timestamp AND nodeId = @nodeId",
PARAM_LIST, records.asSequence().map { it.toParamValues() })
check(affected == records.size) { "could not find ${records.size - affected} of ${records.size} records to update" }
}
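// Worked example for the paging cut below (illustrative numbers): with pageSize = 100 and a result of three
// rows of size 40 each (running "total" of preceding bytes 0, 40, 80), the last row gives 40 + 80 = 120 > 100,
// so hasNext is true; that row is dropped, two records are returned and `next` points at the second one.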
private suspend fun List<SqliteRow>.toLogPage(): LogPage {
val lastRow = lastOrNull()
val hasNext = lastRow != null && lastRow.getInt(COL_SIZE) + lastRow.getInt("total") > pageSize().value
if (!hasNext)
return LogPage(next = null, records = map { it.toDomainModel() })
if (size == 1) {
// hasNext with a single row means that row alone exceeds pageSize -> let's return it and tell the client to keep looping
val result = this[0].toDomainModel()
return LogPage(next = result.nodeTimestamp, records = listOf(result))
}
// size >= 2: the last row breaks the page size, meaning there is at least one more row in the result set
val result = dropLast(1).map { it.toDomainModel() }
return LogPage(next = result.last().nodeTimestamp, records = result)
}
private suspend fun pageSize(): PageSize = configurationProvider.getConfiguration().pageSize
}
internal class SkipEmptyWritesRecordRepoDecorator(private val inner: LogRecordRepo) : LogRecordRepo by inner {
override suspend fun insert(records: List<LogRecord>) {
if (records.isNotEmpty())
inner.insert(records)
}
override suspend fun insertIfMissing(records: List<LogRecord>): Int =
if (records.isNotEmpty()) inner.insertIfMissing(records) else 0
override suspend fun update(records: List<LogRecord>) {
if (records.isNotEmpty())
inner.update(records)
}
}
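// Illustrative only: one way the two implementations might be wired together, with the skip-empty decorator
// in front so that empty batches never open a write transaction or emit a fanout event. The factory function
// name is hypothetical, not part of this file's API.
internal fun createLogRecordRepo(db: SqliteDatabase, configurationProvider: ConfigurationProvider): LogRecordRepo =
    SkipEmptyWritesRecordRepoDecorator(SqliteRecordRepo(db, configurationProvider))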
private const val COL_ROWID = "rowid" // manually include this when reading data!
private const val COL_TIMESTAMP = "timestamp"
private const val COL_NODE_ID = "nodeId"
private const val COL_TYPE = "type"
private const val COL_PAYLOAD = "payload"
private const val COL_SIZE = "size"
private val COL_LIST: List<String> = listOf(COL_TIMESTAMP, COL_NODE_ID, COL_TYPE, COL_PAYLOAD, COL_SIZE)
private val COLS: String = COL_LIST.joinToString()
private val COLS_EXCEPT_SIZE: String = COL_LIST.filterNot { it == COL_SIZE }.joinToString()
private val PARAMS: String = COL_LIST.joinToString { "@$it" }
private val PARAM_LIST: List<String> = COL_LIST.map { "@$it" }
private const val PARAM_PATTERN = "@pattern"
private fun SqliteRow.toDomainModel() = LogRecord(
timestamp = HLCTimestamp(getLong(COL_TIMESTAMP)),
nodeId = NodeId(getString(COL_NODE_ID)),
type = RecordType(getString(COL_TYPE)),
payload = RecordPayload(getString(COL_PAYLOAD)),
rowId = getLong(COL_ROWID)
)
private fun LogRecord.toParamPairs(): List<Pair<String, Any>> = listOf(
"@$COL_TIMESTAMP" to timestamp.time,
"@$COL_NODE_ID" to nodeId.value,
"@$COL_TYPE" to type.value,
"@$COL_PAYLOAD" to payload.value,
"@$COL_SIZE" to size
)
private fun LogRecord.toParamValues(): List<Any> = listOf(
timestamp.time,
nodeId.value,
type.value,
payload.value,
size
)
private fun filterOnNodeIdAndLaterThanSql(paramIdx: Int) =
"SELECT rowid, $COLS FROM tolr_records WHERE timestamp > @timestamp$paramIdx AND nodeId = @nodeId$paramIdx"
private val RecordTypePattern.whereClause get() = when {
isMatchAll() -> ""
hasWildcard() -> "AND type GLOB $PARAM_PATTERN"
else -> "AND type = $PARAM_PATTERN"
}
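// For example (illustrative, assuming SQLite GLOB semantics where '*' matches any sequence of characters):
// a match-all pattern adds no filter at all, a wildcard pattern such as "user.*" is matched with
// `AND type GLOB @pattern`, and a literal pattern such as "user.created" with `AND type = @pattern`.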
private fun List<Pair<String, Any>>.appendPatternParam(pattern: RecordTypePattern) = when {
pattern.isMatchAll() -> this
else -> this + (PARAM_PATTERN to pattern.value)
}