forked from projectmoon/tenebrous-dicebot
Clean up migration to make it more readable
parent a53ce85f38
commit 3cec676b2f
@@ -1,11 +1,12 @@
 use crate::db::errors::{DataError, MigrationError};
+use crate::db::variables;
 use crate::db::Database;
 use phf::phf_map;
 
 pub(super) type DataMigration = fn(&Database) -> Result<(), DataError>;
 
 static MIGRATIONS: phf::Map<u32, DataMigration> = phf_map! {
-    1u32 => super::variables::migrations::migration1,
+    1u32 => variables::migrations::add_room_user_variable_count,
 };
 
 pub fn get_migrations(versions: &[u32]) -> Result<Vec<DataMigration>, MigrationError> {
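Note on the registry above: MIGRATIONS is a compile-time phf map from schema version to migration function, and get_migrations (whose body lies outside this diff) presumably resolves requested versions against it. A minimal, self-contained sketch of that lookup pattern follows; the Migration type, REGISTRY, lookup, example_migration, and the String error are placeholder names, not the project's API.

// Minimal sketch of consulting a version-keyed phf registry.
// All names here are placeholders; the real get_migrations body
// is not shown in this diff.
use phf::phf_map;

type Migration = fn() -> Result<(), String>;

fn example_migration() -> Result<(), String> {
    Ok(()) // stand-in for a real migration
}

static REGISTRY: phf::Map<u32, Migration> = phf_map! {
    1u32 => example_migration,
};

fn lookup(versions: &[u32]) -> Result<Vec<Migration>, String> {
    versions
        .iter()
        .map(|version| {
            // Missing versions become an error instead of being skipped.
            REGISTRY
                .get(version)
                .copied()
                .ok_or_else(|| format!("no migration registered for version {}", version))
        })
        .collect()
}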
@@ -3,19 +3,29 @@ use crate::db::errors::{DataError, MigrationError};
 use crate::db::Database;
 use byteorder::LittleEndian;
 use sled::transaction::TransactionError;
-use sled::Batch;
+use sled::{Batch, IVec};
 use zerocopy::byteorder::U32;
 use zerocopy::AsBytes;
 
-//TODO we will make this set variable count properly.
-pub(in crate::db) fn migration1(db: &Database) -> Result<(), DataError> {
-    let tree = &db.variables.0;
-    let prefix = variables_space_prefix("");
+//Not to be confused with the super::RoomAndUser delineator.
+#[derive(PartialEq, Eq, std::hash::Hash)]
+struct RoomAndUser {
+    room_id: String,
+    username: String,
+}
 
-    //Extract a vec of tuples, consisting of room id + username.
-    let results: Vec<(String, String)> = tree
-        .scan_prefix(prefix)
-        .map(|entry| {
+/// Create a version 0 user variable key.
+fn v0_variable_key(info: &RoomAndUser, variable_name: &str) -> Vec<u8> {
+    let mut key = vec![];
+    key.extend_from_slice(info.room_id.as_bytes());
+    key.extend_from_slice(info.username.as_bytes());
+    key.extend_from_slice(variable_name.as_bytes());
+    key
+}
+
+fn map_value_to_room_and_user(
+    entry: sled::Result<(IVec, IVec)>,
+) -> Result<RoomAndUser, MigrationError> {
     if let Ok((key, _)) = entry {
         let keys: Vec<Result<&str, _>> = key
             .split(|&b| b == 0xff)
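Note on map_value_to_room_and_user above: it expects each key to split on 0xff into a leading prefix segment followed by room id, username, and variable name. A self-contained sketch of that parsing, using a made-up prefix and ids purely for illustration:

// Sketch of the 0xff-delimited key parsing relied on above.
// The prefix and ids below are illustrative, not real data.
fn main() {
    // Build a sample key: prefix / room id / username / variable name,
    // joined by the 0xff delimiter byte.
    let key: Vec<u8> = [
        &b"variables"[..], // stand-in for the real space prefix
        &b"!room:example.org"[..],
        &b"@user:example.org"[..],
        &b"variable_count"[..],
    ]
    .join(&0xffu8);

    // Split back into segments and decode each as UTF-8.
    let parts: Vec<Result<&str, _>> = key
        .split(|&b| b == 0xff)
        .map(std::str::from_utf8)
        .collect();

    // Ignore the prefix segment, keep room id, username, variable name.
    if let &[_, Ok(room_id), Ok(username), Ok(variable)] = parts.as_slice() {
        println!("room: {}, user: {}, variable: {}", room_id, username, variable);
    }
}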
@@ -23,7 +33,10 @@ pub(in crate::db) fn migration1(db: &Database) -> Result<(), DataError> {
             .collect();
 
         if let &[_, Ok(room_id), Ok(username), Ok(_variable)] = keys.as_slice() {
-            Ok((room_id.to_owned(), username.to_owned()))
+            Ok(RoomAndUser {
+                room_id: room_id.to_owned(),
+                username: username.to_owned(),
+            })
         } else {
             Err(MigrationError::MigrationFailed(
                 "a key violates utf8 schema".to_string(),
@@ -34,10 +47,19 @@ pub(in crate::db) fn migration1(db: &Database) -> Result<(), DataError> {
             "encountered unexpected key".to_string(),
         ))
     }
-        })
+}
+
+pub(in crate::db) fn add_room_user_variable_count(db: &Database) -> Result<(), DataError> {
+    let tree = &db.variables.0;
+    let prefix = variables_space_prefix("");
+
+    //Extract a vec of tuples, consisting of room id + username.
+    let results: Vec<RoomAndUser> = tree
+        .scan_prefix(prefix)
+        .map(map_value_to_room_and_user)
         .collect::<Result<Vec<_>, MigrationError>>()?;
 
-    let counts: HashMap<(String, String), u32> =
+    let counts: HashMap<RoomAndUser, u32> =
         results
             .into_iter()
             .fold(HashMap::new(), |mut count_map, room_and_user| {
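Note on the counts fold above: its closure body falls in context this diff does not show, so the counting step below is an assumption. The sketch uses a local RoomAndUser stand-in with the same Hash/Eq derives to show how one entry per (room, user) record would be tallied.

// Rough sketch of the counting fold; the actual closure body is not shown
// in this diff, so the entry()/or_insert() step is an assumption.
use std::collections::HashMap;

#[derive(PartialEq, Eq, Hash)]
struct RoomAndUser {
    room_id: String,
    username: String,
}

fn count_user_variables(results: Vec<RoomAndUser>) -> HashMap<RoomAndUser, u32> {
    results
        .into_iter()
        .fold(HashMap::new(), |mut count_map, room_and_user| {
            // Each scanned record contributes one variable to its (room, user) count.
            *count_map.entry(room_and_user).or_insert(0) += 1;
            count_map
        })
}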
@@ -47,15 +69,19 @@ pub(in crate::db) fn migration1(db: &Database) -> Result<(), DataError> {
             });
 
     //Start a transaction on the variables tree.
-    //Delete the old variable_count variable if exists.
-    //Add variable count according to new schema.
     let tx_result: Result<_, TransactionError<DataError>> = db.variables.0.transaction(|tx_vars| {
         let batch = counts.iter().fold(Batch::default(), |mut batch, entry| {
-            let key =
-                variables_space_key(RoomAndUser(&(entry.0).0, &(entry.0).1), VARIABLE_COUNT_KEY);
+            let (info, count) = entry;
 
-            let db_value: U32<LittleEndian> = U32::new(*entry.1);
+            //Add variable count according to new schema.
+            let delineator = super::RoomAndUser(&info.room_id, &info.username);
+            let key = variables_space_key(delineator, VARIABLE_COUNT_KEY);
+            let db_value: U32<LittleEndian> = U32::new(*count);
             batch.insert(key, db_value.as_bytes());
+
+            //Delete the old variable_count variable if exists.
+            let old_key = v0_variable_key(&info, "variable_count");
+            batch.remove(old_key);
             batch
         });
 
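Note on the transaction above: the rest of the tx_vars closure is cut off by this diff. In sled, a Batch built this way is typically applied with apply_batch on the transactional tree before the closure returns Ok; the sketch below shows that shape with placeholder key/value data and is an assumed completion, not the commit's actual code.

// Sketch of finishing a sled transaction by applying a Batch.
// The key and value are placeholders; the real closure body is not
// shown in this diff.
use sled::transaction::TransactionError;
use sled::Batch;

fn apply_example_batch(tree: &sled::Tree) -> Result<(), TransactionError<()>> {
    tree.transaction(|tx_vars| {
        let mut batch = Batch::default();
        batch.insert(&b"example-key"[..], &b"example-value"[..]);

        // apply_batch runs the whole batch atomically within the transaction;
        // its UnabortableTransactionError converts into the closure's error via `?`.
        tx_vars.apply_batch(&batch)?;
        Ok(())
    })
}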