diff --git a/.circleci/config.yml b/.circleci/config.yml index 03f3b40c9..404edd16e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,6 +17,7 @@ workflows: - package_cw3 - package_cw20 - package_cw721 + - package_storage_plus - lint - wasm-build deploy: @@ -569,6 +570,38 @@ jobs: - target key: cargocache-wasm-rust:1.44.1-{{ checksum "~/project/Cargo.lock" }} + + package_storage_plus: + docker: + - image: rust:1.44.1 + working_directory: ~/project/packages/storage-plus + steps: + - checkout: + path: ~/project + - run: + name: Version information + command: rustc --version; cargo --version; rustup --version; rustup target list --installed + - restore_cache: + keys: + - cargocache-v2-storage-plus:1.44.1-{{ checksum "~/project/Cargo.lock" }} + - run: + name: Build library for native target + command: cargo build --locked + - run: + name: Run unit tests + command: cargo test --locked + - run: + name: Build library for native target (with iterator) + command: cargo build --locked --features iterator + - run: + name: Run unit tests (with iterator) + command: cargo test --locked --features iterator + - save_cache: + paths: + - /usr/local/cargo/registry + - target + key: cargocache-v2-storage-plus:1.44.1-{{ checksum "~/project/Cargo.lock" }} + # This job roughly follows the instructions from https://circleci.com/blog/publishing-to-github-releases-via-circleci/ build_and_upload_contracts: docker: diff --git a/Cargo.lock b/Cargo.lock index 5fc4b47a7..8432c3dcf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -113,6 +113,15 @@ dependencies = [ "serde", ] +[[package]] +name = "cw-storage-plus" +version = "0.2.2" +dependencies = [ + "cosmwasm-std", + "schemars", + "serde", +] + [[package]] name = "cw0" version = "0.2.3" diff --git a/packages/storage-plus/Cargo.toml b/packages/storage-plus/Cargo.toml new file mode 100644 index 000000000..fa982d681 --- /dev/null +++ b/packages/storage-plus/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "cw-storage-plus" +version = "0.2.2" +authors = ["Ethan Frey "] +edition = "2018" +description = "Enhanced/experimental storage engines" +license = "Apache-2.0" +repository = "https://github.com/CosmWasm/cosmwasm-plus" +homepage = "https://cosmwasm.com" +documentation = "https://docs.cosmwasm.com" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +iterator = ["cosmwasm-std/iterator"] + +[dependencies] +cosmwasm-std = { version = "0.11.0" } +schemars = "0.7" +serde = { version = "1.0.103", default-features = false, features = ["derive"] } diff --git a/packages/storage-plus/NOTICE b/packages/storage-plus/NOTICE new file mode 100644 index 000000000..838a67f47 --- /dev/null +++ b/packages/storage-plus/NOTICE @@ -0,0 +1,14 @@ +CW-Storage-Plus: Enhanced/experimental storage engines for CosmWasm +Copyright (C) 2020 Confio OÜ + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/packages/storage-plus/README.md b/packages/storage-plus/README.md new file mode 100644 index 000000000..16272f60d --- /dev/null +++ b/packages/storage-plus/README.md @@ -0,0 +1,298 @@ +# CW-Storage-Plus: Enhanced/experimental storage engines for CosmWasm + +The ideas in here are based on the `cosmwasm-storage` crate. However, +after much usage, we decided a complete rewrite could allow us to add +more powerful and easy-to-use interfaces. Here are those interfaces. + +**Status: experimental** + +You currently should not be using this crate outside of the `cosmwasm-plus` +repo. This is a first draft of many types. We will update the status +after they have been used more heavily and the interfaces have stabilized. + +The ideas/desired functionality in here should be more or less final; +it is just the form used to express them that will keep changing. + +## Usage Overview + +We introduce two main classes to provide a productive abstraction +on top of `cosmwasm_std::Storage`. The first is `Item`, which is +a typed wrapper around one database key, providing some helper functions +for interacting with it without dealing with raw bytes. The second is `Map`, +which allows you to store multiple typed objects under a prefix, +indexed by a simple (`&[u8]`) or compound (e.g. `(&[u8], &[u8])`) primary key. + +These correspond to the concepts represented in `cosmwasm_storage` by +`Singleton` and `Bucket`, but with a re-designed API and implementation +that require less typing for developers and less gas usage in the contracts. + +## Item + +The usage of an [`Item`](./src/item.rs) is pretty straight-forward. +You simply provide the proper type, as well as a database key not +used by any other item. Then it will provide you with a nice interface +to interact with such data. + +If you are coming from using `Singleton`, the biggest change is that +we no longer store `Storage` inside, meaning we don't need read and write +variants of the object, just one type. Furthermore, we use `const fn` +to create the `Item`, allowing it to be defined as a global compile-time +constant rather than a function that must be constructed each time, +which saves gas as well as typing.
+ +Example Usage: + +```rust +#[derive(Serialize, Deserialize, PartialEq, Debug)] +struct Config { + pub owner: String, + pub max_tokens: i32, +} + +// note const constructor rather than 2 functions with Singleton +const CONFIG: Item<Config> = Item::new(b"config"); + +fn demo() -> StdResult<()> { + let mut store = MockStorage::new(); + + // may_load returns Option<T>, so None if data is missing + // load returns T, and Err(StdError::NotFound{}) if data is missing + let empty = CONFIG.may_load(&store)?; + assert_eq!(None, empty); + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg)?; + let loaded = CONFIG.load(&store)?; + assert_eq!(cfg, loaded); + + // update an item with a closure (includes read and write) + // returns the newly saved value + let output = CONFIG.update(&mut store, |mut c| -> StdResult<_> { + c.max_tokens *= 2; + Ok(c) + })?; + assert_eq!(2468, output.max_tokens); + + // you can error in an update and nothing is saved + let failed = CONFIG.update(&mut store, |_| -> StdResult<_> { + Err(StdError::generic_err("failure mode")) + }); + assert!(failed.is_err()); + + // loading data will show the first update was saved + let loaded = CONFIG.load(&store)?; + let expected = Config { + owner: "admin".to_string(), + max_tokens: 2468, + }; + assert_eq!(expected, loaded); + + // we can remove data as well + CONFIG.remove(&mut store); + let empty = CONFIG.may_load(&store)?; + assert_eq!(None, empty); + + Ok(()) +} +``` + +## Map + +The usage of a [`Map`](./src/map.rs) is a little more complex, but +is still pretty straight-forward. You can imagine it as a storage-backed +`BTreeMap`, allowing key-value lookups with typed values. In addition, +we support not only simple binary keys (`&[u8]`), but also tuples, which are +combined into composite keys. This allows us to store allowances under +composite keys, e.g. `(owner, spender)`, and look up the allowance for that pair. + +Beyond direct lookups, we have a super power not found in Ethereum - +iteration. That's right, you can list all items in a `Map`, or only +part of them. We can efficiently allow pagination over these items as +well, starting after where the last query ended, at very low gas cost. +This requires the `iterator` feature to be enabled in `cw-storage-plus` +(which automatically enables it in `cosmwasm-std` as well). A short sketch +of such range queries follows below. + +If you are coming from using `Bucket`, the biggest change is that +we no longer store `Storage` inside, meaning we don't need read and write +variants of the object, just one type. Furthermore, we use `const fn` +to create the `Map`, allowing it to be defined as a global compile-time +constant rather than a function that must be constructed each time, +which saves gas as well as typing. In addition, the composite indexes +(tuples) are more ergonomic and more expressive of intention, and the range +interface has been improved.
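
To make the iteration support concrete, here is a minimal sketch of what range queries look like. It assumes the `iterator` feature is enabled, and it reuses the `PEOPLE` and `ALLOWANCE` maps and the `Data` struct defined in the examples below; the calls mirror the range tests in [`map.rs`](./src/map.rs).

```rust
use cosmwasm_std::testing::MockStorage;
use cosmwasm_std::{Order, StdResult};

fn demo_range() -> StdResult<()> {
    let mut store = MockStorage::new();
    PEOPLE.save(&mut store, b"john", &Data { name: "John".to_string(), age: 32 })?;
    PEOPLE.save(&mut store, b"jim", &Data { name: "Jim".to_string(), age: 44 })?;

    // list every entry under the `people` namespace, in ascending key order;
    // each entry is a (raw key, deserialized value) pair
    let all: StdResult<Vec<(Vec<u8>, Data)>> =
        PEOPLE.range(&store, None, None, Order::Ascending).collect();
    assert_eq!(2, all?.len());

    // the same idea works on part of a composite-key map:
    // everything one owner has stored in ALLOWANCE, via a prefix
    ALLOWANCE.save(&mut store, (b"owner", b"spender"), &777)?;
    let by_owner: StdResult<Vec<(Vec<u8>, u64)>> = ALLOWANCE
        .prefix(b"owner")
        .range(&store, None, None, Order::Ascending)
        .collect();
    // the namespace and owner prefix are stripped from the returned keys
    assert_eq!(vec![(b"spender".to_vec(), 777)], by_owner?);

    Ok(())
}
```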
+ +Here is an example with normal (simple) keys: + +```rust +#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] +struct Data { + pub name: String, + pub age: i32, +} + +const PEOPLE: Map<&[u8], Data> = Map::new(b"people"); + +fn demo() -> StdResult<()> { + let mut store = MockStorage::new(); + let data = Data { + name: "John".to_string(), + age: 32, + }; + + // load and save with extra key argument + let empty = PEOPLE.may_load(&store, b"john")?; + assert_eq!(None, empty); + PEOPLE.save(&mut store, b"john", &data)?; + let loaded = PEOPLE.load(&store, b"john")?; + assert_eq!(data, loaded); + + // nothing on another key + let missing = PEOPLE.may_load(&store, b"jack")?; + assert_eq!(None, missing); + + // update function for new or existing keys + let birthday = |d: Option<Data>| -> StdResult<Data> { + match d { + Some(one) => Ok(Data { + name: one.name, + age: one.age + 1, + }), + None => Ok(Data { + name: "Newborn".to_string(), + age: 0, + }), + } + }; + + let old_john = PEOPLE.update(&mut store, b"john", birthday)?; + assert_eq!(33, old_john.age); + assert_eq!("John", old_john.name.as_str()); + + let new_jack = PEOPLE.update(&mut store, b"jack", birthday)?; + assert_eq!(0, new_jack.age); + assert_eq!("Newborn", new_jack.name.as_str()); + + // update also changes the store + assert_eq!(old_john, PEOPLE.load(&store, b"john")?); + assert_eq!(new_jack, PEOPLE.load(&store, b"jack")?); + + // removing leaves us empty + PEOPLE.remove(&mut store, b"john"); + let empty = PEOPLE.may_load(&store, b"john")?; + assert_eq!(None, empty); + + Ok(()) +} +``` + +### Composite Keys + +There are times when we want to use multiple items as a key, for example, when +storing allowances based on account owner and spender. We could try to manually +concatenate them before calling, but that can lead to overlap, and is a bit +low-level for us. Also, by explicitly separating the keys, we can easily provide +helpers to do range queries over a prefix, such as "show me all allowances for +one owner" (first part of the composite key). Just like you'd expect from your +favorite database. + +Here is how we use it with composite keys. Just define a tuple as the key and use it +everywhere you used a byte slice above. + +```rust +// Note the tuple for the primary key. We support one slice, or a 2- or 3-tuple. +// Adding longer tuples is quite easy but unlikely to be needed. +const ALLOWANCE: Map<(&[u8], &[u8]), u64> = Map::new(b"allow"); + +fn demo() -> StdResult<()> { + let mut store = MockStorage::new(); + + // save and load on a composite key + let empty = ALLOWANCE.may_load(&store, (b"owner", b"spender"))?; + assert_eq!(None, empty); + ALLOWANCE.save(&mut store, (b"owner", b"spender"), &777)?; + let loaded = ALLOWANCE.load(&store, (b"owner", b"spender"))?; + assert_eq!(777, loaded); + + // doesn't appear under other key (even if a concat would be the same) + let different = ALLOWANCE.may_load(&store, (b"owners", b"pender")).unwrap(); + assert_eq!(None, different); + + // simple update + ALLOWANCE.update(&mut store, (b"owner", b"spender"), |v| { + Ok(v.unwrap_or_default() + 222) + })?; + let loaded = ALLOWANCE.load(&store, (b"owner", b"spender"))?; + assert_eq!(999, loaded); + + Ok(()) +} +``` + +### Path + +Under the hood, we create a `Path` from the `Map` when accessing a key. +`PEOPLE.load(&store, b"jack") == PEOPLE.key(b"jack").load(&store)`. +`Map.key()` returns a `Path`, which has the same interface as `Item`, +reusing the calculated path to this key.
+ +For simple keys, this is just a bit less typing and a bit less gas if you +use the same key for many calls. However, for composite keys, like +`(b"owner", b"spender")`, it is **much** less typing, and it is highly recommended +anywhere you will use a composite key even twice: + +```rust +#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] +struct Data { + pub name: String, + pub age: i32, +} + +const PEOPLE: Map<&[u8], Data> = Map::new(b"people"); +const ALLOWANCE: Map<(&[u8], &[u8]), u64> = Map::new(b"allow"); + +fn demo() -> StdResult<()> { + let mut store = MockStorage::new(); + let data = Data { + name: "John".to_string(), + age: 32, + }; + + // create a Path one time to use below + let john = PEOPLE.key(b"john"); + + // Use this just like an Item above + let empty = john.may_load(&store)?; + assert_eq!(None, empty); + john.save(&mut store, &data)?; + let loaded = john.load(&store)?; + assert_eq!(data, loaded); + john.remove(&mut store); + let empty = john.may_load(&store)?; + assert_eq!(None, empty); + + // Same for composite keys, just use both parts in key(). + // Notice how much less verbose this is than the above example. + let allow = ALLOWANCE.key((b"owner", b"spender")); + allow.save(&mut store, &1234)?; + let loaded = allow.load(&store)?; + assert_eq!(1234, loaded); + allow.update(&mut store, |x| Ok(x.unwrap_or_default() * 2))?; + let loaded = allow.load(&store)?; + assert_eq!(2468, loaded); + + Ok(()) +} +``` + +### Prefix + +**TODO** + +## Indexed Map + +TODO: we are working on a version of a map that manages multiple +secondary indexes transparently. That work is coming soon. \ No newline at end of file diff --git a/packages/storage-plus/src/helpers.rs b/packages/storage-plus/src/helpers.rs new file mode 100644 index 000000000..8e7c5a2c4 --- /dev/null +++ b/packages/storage-plus/src/helpers.rs @@ -0,0 +1,141 @@ +//! This module is an implementation of a namespacing scheme described +//! in https://github.com/webmaster128/key-namespacing#length-prefixed-keys +//! +//! Everything in this file is only responsible for building such keys +//! and is in no way specific to any kind of storage.
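+//! +//! As a quick illustration of the layout these helpers produce (based on `encode_length` and `nested_namespaces_with_key` below): a key built from the namespace "allow", the sub-namespace "owner", and the raw key "spender" is stored as `\x00\x05allow\x00\x05ownerspender`, i.e. each namespace is prefixed with its 2-byte big-endian length, and the raw key is appended unprefixed at the end.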
+ +use serde::de::DeserializeOwned; +use std::any::type_name; + +use cosmwasm_std::{from_slice, StdError, StdResult}; + +/// may_deserialize parses json bytes from storage (Option), returning Ok(None) if no data present +/// +/// value is an odd type, but this is meant to be easy to use with output from storage.get (Option>) +/// and value.map(|s| s.as_slice()) seems trickier than &value +pub(crate) fn may_deserialize( + value: &Option>, +) -> StdResult> { + match value { + Some(vec) => Ok(Some(from_slice(&vec)?)), + None => Ok(None), + } +} + +/// must_deserialize parses json bytes from storage (Option), returning NotFound error if no data present +pub(crate) fn must_deserialize(value: &Option>) -> StdResult { + match value { + Some(vec) => from_slice(&vec), + None => Err(StdError::not_found(type_name::())), + } +} + +/// Customization of namespaces_with_key for when +/// there are multiple sets we do not want to combine just to call this +pub(crate) fn nested_namespaces_with_key( + top_names: &[&[u8]], + sub_names: &[&[u8]], + key: &[u8], +) -> Vec { + let mut size = key.len(); + for &namespace in top_names { + size += namespace.len() + 2; + } + for &namespace in sub_names { + size += namespace.len() + 2; + } + + let mut out = Vec::with_capacity(size); + for &namespace in top_names { + out.extend_from_slice(&encode_length(namespace)); + out.extend_from_slice(namespace); + } + for &namespace in sub_names { + out.extend_from_slice(&encode_length(namespace)); + out.extend_from_slice(namespace); + } + out.extend_from_slice(key); + out +} + +/// Encodes the length of a given namespace as a 2 byte big endian encoded integer +pub(crate) fn encode_length(namespace: &[u8]) -> [u8; 2] { + if namespace.len() > 0xFFFF { + panic!("only supports namespaces up to length 0xFFFF") + } + let length_bytes = (namespace.len() as u32).to_be_bytes(); + [length_bytes[2], length_bytes[3]] +} + +#[cfg(test)] +mod test { + use super::*; + use cosmwasm_std::{to_vec, StdError}; + use serde::{Deserialize, Serialize}; + + #[derive(Serialize, Deserialize, PartialEq, Debug)] + struct Person { + pub name: String, + pub age: i32, + } + + #[test] + fn encode_length_works() { + assert_eq!(encode_length(b""), *b"\x00\x00"); + assert_eq!(encode_length(b"a"), *b"\x00\x01"); + assert_eq!(encode_length(b"aa"), *b"\x00\x02"); + assert_eq!(encode_length(b"aaa"), *b"\x00\x03"); + assert_eq!(encode_length(&vec![1; 255]), *b"\x00\xff"); + assert_eq!(encode_length(&vec![1; 256]), *b"\x01\x00"); + assert_eq!(encode_length(&vec![1; 12345]), *b"\x30\x39"); + assert_eq!(encode_length(&vec![1; 65535]), *b"\xff\xff"); + } + + #[test] + #[should_panic(expected = "only supports namespaces up to length 0xFFFF")] + fn encode_length_panics_for_large_values() { + encode_length(&vec![1; 65536]); + } + + #[test] + fn may_deserialize_handles_some() { + let person = Person { + name: "Maria".to_string(), + age: 42, + }; + let value = to_vec(&person).unwrap(); + + let may_parse: Option = may_deserialize(&Some(value)).unwrap(); + assert_eq!(may_parse, Some(person)); + } + + #[test] + fn may_deserialize_handles_none() { + let may_parse = may_deserialize::(&None).unwrap(); + assert_eq!(may_parse, None); + } + + #[test] + fn must_deserialize_handles_some() { + let person = Person { + name: "Maria".to_string(), + age: 42, + }; + let value = to_vec(&person).unwrap(); + let loaded = Some(value); + + let parsed: Person = must_deserialize(&loaded).unwrap(); + assert_eq!(parsed, person); + } + + #[test] + fn must_deserialize_handles_none() { + let parsed = 
must_deserialize::<Person>(&None); + match parsed.unwrap_err() { + StdError::NotFound { kind, .. } => { + assert_eq!(kind, "cw_storage_plus::helpers::test::Person") + } + e => panic!("Unexpected error {}", e), + } + } +} diff --git a/packages/storage-plus/src/item.rs b/packages/storage-plus/src/item.rs new file mode 100644 index 000000000..5d55c1285 --- /dev/null +++ b/packages/storage-plus/src/item.rs @@ -0,0 +1,290 @@ +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::marker::PhantomData; + +use cosmwasm_std::{to_vec, StdError, StdResult, Storage}; + +use crate::helpers::{may_deserialize, must_deserialize}; + +/// Item stores one typed item at the given key. +/// This is an analog of Singleton. +/// It functions just like Path but doesn't use a Vec and thus has a const fn constructor. +pub struct Item<'a, T> { + // this is the full key - no need to length-prefix it, we only store one item + storage_key: &'a [u8], + // see https://doc.rust-lang.org/std/marker/struct.PhantomData.html#unused-type-parameters for why this is needed + data_type: PhantomData<T>, +} + +impl<'a, T> Item<'a, T> { + pub const fn new(storage_key: &'a [u8]) -> Self { + Item { + storage_key, + data_type: PhantomData, + } + } +} + +impl<'a, T> Item<'a, T> +where + T: Serialize + DeserializeOwned, +{ + /// save will serialize the model and store it, returns an error on serialization issues + pub fn save<S: Storage>(&self, store: &mut S, data: &T) -> StdResult<()> { + store.set(self.storage_key, &to_vec(data)?); + Ok(()) + } + + pub fn remove<S: Storage>(&self, store: &mut S) { + store.remove(self.storage_key); + } + + /// load will return an error if no data is set at the given key, or on parse error + pub fn load<S: Storage>(&self, store: &S) -> StdResult<T> { + let value = store.get(self.storage_key); + must_deserialize(&value) + } + + /// may_load will parse the data stored at the key if present, returns Ok(None) if no data there. + /// returns an error on issues parsing + pub fn may_load<S: Storage>(&self, store: &S) -> StdResult<Option<T>> { + let value = store.get(self.storage_key); + may_deserialize(&value) + } + + /// Loads the data, performs the specified action, and stores the result + /// in the database. This is shorthand for some common sequences, which may be useful. + /// + /// Note that unlike `Map::update`, this requires the data to already exist: the current + /// value is loaded (erroring with `StdError::NotFound` if unset) and passed to `action`.
+ pub fn update(&mut self, store: &mut S, action: A) -> Result + where + A: FnOnce(T) -> Result, + E: From, + S: Storage, + { + let input = self.load(store)?; + let output = action(input)?; + self.save(store, &output)?; + Ok(output) + } +} + +#[cfg(test)] +mod test { + use super::*; + use cosmwasm_std::testing::MockStorage; + use serde::{Deserialize, Serialize}; + + use cosmwasm_std::StdError; + + #[derive(Serialize, Deserialize, PartialEq, Debug)] + struct Config { + pub owner: String, + pub max_tokens: i32, + } + + // note const constructor rather than 2 funcs with Singleton + const CONFIG: Item = Item::new(b"config"); + + #[test] + fn save_and_load() { + let mut store = MockStorage::new(); + + assert!(CONFIG.load(&store).is_err()); + assert_eq!(CONFIG.may_load(&store).unwrap(), None); + + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + + assert_eq!(cfg, CONFIG.load(&store).unwrap()); + } + + #[test] + fn remove_works() { + let mut store = MockStorage::new(); + + // store data + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + assert_eq!(cfg, CONFIG.load(&store).unwrap()); + + // remove it and loads None + CONFIG.remove(&mut store); + assert_eq!(None, CONFIG.may_load(&store).unwrap()); + + // safe to remove 2 times + CONFIG.remove(&mut store); + assert_eq!(None, CONFIG.may_load(&store).unwrap()); + } + + #[test] + fn isolated_reads() { + let mut store = MockStorage::new(); + + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + + let reader = Item::::new(b"config"); + assert_eq!(cfg, reader.load(&store).unwrap()); + + let other_reader = Item::::new(b"config2"); + assert_eq!(other_reader.may_load(&store).unwrap(), None); + } + + #[test] + fn update_success() { + let mut store = MockStorage::new(); + + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + + let output = CONFIG.update(&mut store, |mut c| -> StdResult<_> { + c.max_tokens *= 2; + Ok(c) + }); + let expected = Config { + owner: "admin".to_string(), + max_tokens: 2468, + }; + assert_eq!(output.unwrap(), expected); + assert_eq!(CONFIG.load(&store).unwrap(), expected); + } + + #[test] + fn update_can_change_variable_from_outer_scope() { + let mut store = MockStorage::new(); + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + + let mut old_max_tokens = 0i32; + CONFIG + .update(&mut store, |mut c| -> StdResult<_> { + old_max_tokens = c.max_tokens; + c.max_tokens *= 2; + Ok(c) + }) + .unwrap(); + assert_eq!(old_max_tokens, 1234); + } + + #[test] + fn update_does_not_change_data_on_error() { + let mut store = MockStorage::new(); + + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + + let output = CONFIG.update(&mut store, &|_c| Err(StdError::underflow(4, 7))); + match output.unwrap_err() { + StdError::Underflow { .. 
} => {} + err => panic!("Unexpected error: {:?}", err), + } + assert_eq!(CONFIG.load(&store).unwrap(), cfg); + } + + #[test] + fn update_supports_custom_errors() { + #[derive(Debug)] + enum MyError { + Std(StdError), + Foo, + } + + impl From for MyError { + fn from(original: StdError) -> MyError { + MyError::Std(original) + } + } + + let mut store = MockStorage::new(); + + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg).unwrap(); + + let res = CONFIG.update(&mut store, |mut c| { + if c.max_tokens > 5000 { + return Err(MyError::Foo); + } + if c.max_tokens > 20 { + return Err(StdError::generic_err("broken stuff").into()); // Uses Into to convert StdError to MyError + } + if c.max_tokens > 10 { + to_vec(&c)?; // Uses From to convert StdError to MyError + } + c.max_tokens += 20; + Ok(c) + }); + match res.unwrap_err() { + MyError::Std(StdError::GenericErr { .. }) => {} + err => panic!("Unexpected error: {:?}", err), + } + assert_eq!(CONFIG.load(&store).unwrap(), cfg); + } + + #[test] + fn readme_works() -> StdResult<()> { + let mut store = MockStorage::new(); + + // may_load returns Option, so None if data is missing + // load returns T and Err(StdError::NotFound{}) if data is missing + let empty = CONFIG.may_load(&store)?; + assert_eq!(None, empty); + let cfg = Config { + owner: "admin".to_string(), + max_tokens: 1234, + }; + CONFIG.save(&mut store, &cfg)?; + let loaded = CONFIG.load(&store)?; + assert_eq!(cfg, loaded); + + // update an item with a closure (includes read and write) + // returns the newly saved value + let output = CONFIG.update(&mut store, |mut c| -> StdResult<_> { + c.max_tokens *= 2; + Ok(c) + })?; + assert_eq!(2468, output.max_tokens); + + // you can error in an update and nothing is saved + let failed = CONFIG.update(&mut store, |_| -> StdResult<_> { + Err(StdError::generic_err("failure mode")) + }); + assert!(failed.is_err()); + + // loading data will show the first update was saved + let loaded = CONFIG.load(&store)?; + let expected = Config { + owner: "admin".to_string(), + max_tokens: 2468, + }; + assert_eq!(expected, loaded); + + // we can remove data as well + CONFIG.remove(&mut store); + let empty = CONFIG.may_load(&store)?; + assert_eq!(None, empty); + + Ok(()) + } +} diff --git a/packages/storage-plus/src/iter_helpers.rs b/packages/storage-plus/src/iter_helpers.rs new file mode 100644 index 000000000..72ccb8449 --- /dev/null +++ b/packages/storage-plus/src/iter_helpers.rs @@ -0,0 +1,253 @@ +#![cfg(feature = "iterator")] + +use serde::de::DeserializeOwned; + +use cosmwasm_std::{from_slice, StdResult}; +use cosmwasm_std::{Order, Storage, KV}; + +use crate::helpers::encode_length; + +pub(crate) fn deserialize_kv(kv: KV) -> StdResult> { + let (k, v) = kv; + let t = from_slice::(&v)?; + Ok((k, t)) +} + +/// Calculates the raw key prefix for a given namespace as documented +/// in https://github.com/webmaster128/key-namespacing#length-prefixed-keys +pub(crate) fn to_length_prefixed(namespace: &[u8]) -> Vec { + let mut out = Vec::with_capacity(namespace.len() + 2); + out.extend_from_slice(&encode_length(namespace)); + out.extend_from_slice(namespace); + out +} + +pub(crate) fn range_with_prefix<'a, S: Storage>( + storage: &'a S, + namespace: &[u8], + start: Option<&[u8]>, + end: Option<&[u8]>, + order: Order, +) -> Box + 'a> { + // prepare start, end with prefix + let start = match start { + Some(s) => concat(namespace, s), + None => namespace.to_vec(), + }; + let end = match end { + Some(e) => concat(namespace, 
e), + // end is updating last byte by one + None => namespace_upper_bound(namespace), + }; + + // get iterator from storage + let base_iterator = storage.range(Some(&start), Some(&end), order); + + // make a copy for the closure to handle lifetimes safely + let prefix = namespace.to_vec(); + let mapped = base_iterator.map(move |(k, v)| (trim(&prefix, &k), v)); + Box::new(mapped) +} + +#[inline] +fn trim(namespace: &[u8], key: &[u8]) -> Vec { + key[namespace.len()..].to_vec() +} + +#[inline] +fn concat(namespace: &[u8], key: &[u8]) -> Vec { + let mut k = namespace.to_vec(); + k.extend_from_slice(key); + k +} + +/// Returns a new vec of same length and last byte incremented by one +/// If last bytes are 255, we handle overflow up the chain. +/// If all bytes are 255, this returns wrong data - but that is never possible as a namespace +fn namespace_upper_bound(input: &[u8]) -> Vec { + let mut copy = input.to_vec(); + // zero out all trailing 255, increment first that is not such + for i in (0..input.len()).rev() { + if copy[i] == 255 { + copy[i] = 0; + } else { + copy[i] += 1; + break; + } + } + copy +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn to_length_prefixed_works() { + assert_eq!(to_length_prefixed(b""), b"\x00\x00"); + assert_eq!(to_length_prefixed(b"a"), b"\x00\x01a"); + assert_eq!(to_length_prefixed(b"ab"), b"\x00\x02ab"); + assert_eq!(to_length_prefixed(b"abc"), b"\x00\x03abc"); + } + + #[test] + fn to_length_prefixed_works_for_long_prefix() { + let long_namespace1 = vec![0; 256]; + let prefix1 = to_length_prefixed(&long_namespace1); + assert_eq!(prefix1.len(), 256 + 2); + assert_eq!(&prefix1[0..2], b"\x01\x00"); + + let long_namespace2 = vec![0; 30000]; + let prefix2 = to_length_prefixed(&long_namespace2); + assert_eq!(prefix2.len(), 30000 + 2); + assert_eq!(&prefix2[0..2], b"\x75\x30"); + + let long_namespace3 = vec![0; 0xFFFF]; + let prefix3 = to_length_prefixed(&long_namespace3); + assert_eq!(prefix3.len(), 0xFFFF + 2); + assert_eq!(&prefix3[0..2], b"\xFF\xFF"); + } + + #[test] + #[should_panic(expected = "only supports namespaces up to length 0xFFFF")] + fn to_length_prefixed_panics_for_too_long_prefix() { + let limit = 0xFFFF; + let long_namespace = vec![0; limit + 1]; + to_length_prefixed(&long_namespace); + } + + #[test] + fn to_length_prefixed_calculates_capacity_correctly() { + // Those tests cannot guarantee the required capacity was calculated correctly before + // the vector allocation but increase the likelyhood of a proper implementation. 
+ + let key = to_length_prefixed(b""); + assert_eq!(key.capacity(), key.len()); + + let key = to_length_prefixed(b"h"); + assert_eq!(key.capacity(), key.len()); + + let key = to_length_prefixed(b"hij"); + assert_eq!(key.capacity(), key.len()); + } +} + +// currently disabled tests as they require a bunch of legacy non-sense +// TODO: enable +#[cfg(test)] +#[cfg(not(feature = "iterator"))] +mod namespace_test { + use super::*; + use cosmwasm_std::testing::MockStorage; + + #[test] + fn test_range() { + let mut storage = MockStorage::new(); + let prefix = to_length_prefixed(b"foo"); + let other_prefix = to_length_prefixed(b"food"); + + // set some values in this range + set_with_prefix(&mut storage, &prefix, b"bar", b"none"); + set_with_prefix(&mut storage, &prefix, b"snowy", b"day"); + + // set some values outside this range + set_with_prefix(&mut storage, &other_prefix, b"moon", b"buggy"); + + // ensure we get proper result from prefixed_range iterator + let mut iter = range_with_prefix(&storage, &prefix, None, None, Order::Descending); + let first = iter.next().unwrap(); + assert_eq!(first, (b"snowy".to_vec(), b"day".to_vec())); + let second = iter.next().unwrap(); + assert_eq!(second, (b"bar".to_vec(), b"none".to_vec())); + assert!(iter.next().is_none()); + + // ensure we get raw result from base range + let iter = storage.range(None, None, Order::Ascending); + assert_eq!(3, iter.count()); + + // foo comes first + let mut iter = storage.range(None, None, Order::Ascending); + let first = iter.next().unwrap(); + let expected_key = concat(&prefix, b"bar"); + assert_eq!(first, (expected_key, b"none".to_vec())); + } + + #[test] + fn test_range_with_prefix_wrapover() { + let mut storage = MockStorage::new(); + // if we don't properly wrap over there will be issues here (note 255+1 is used to calculate end) + let prefix = to_length_prefixed(b"f\xff\xff"); + let other_prefix = to_length_prefixed(b"f\xff\x44"); + + // set some values in this range + set_with_prefix(&mut storage, &prefix, b"bar", b"none"); + set_with_prefix(&mut storage, &prefix, b"snowy", b"day"); + + // set some values outside this range + set_with_prefix(&mut storage, &other_prefix, b"moon", b"buggy"); + + // ensure we get proper result from prefixed_range iterator + let iter = range_with_prefix(&storage, &prefix, None, None, Order::Descending); + let elements: Vec = iter.collect(); + assert_eq!( + elements, + vec![ + (b"snowy".to_vec(), b"day".to_vec()), + (b"bar".to_vec(), b"none".to_vec()), + ] + ); + } + + #[test] + fn test_range_with_start_end_set() { + let mut storage = MockStorage::new(); + // if we don't properly wrap over there will be issues here (note 255+1 is used to calculate end) + let prefix = to_length_prefixed(b"f\xff\xff"); + let other_prefix = to_length_prefixed(b"f\xff\x44"); + + // set some values in this range + set_with_prefix(&mut storage, &prefix, b"bar", b"none"); + set_with_prefix(&mut storage, &prefix, b"snowy", b"day"); + + // set some values outside this range + set_with_prefix(&mut storage, &other_prefix, b"moon", b"buggy"); + + // make sure start and end are applied properly + let res: Vec = + range_with_prefix(&storage, &prefix, Some(b"b"), Some(b"c"), Order::Ascending) + .collect(); + assert_eq!(res.len(), 1); + assert_eq!(res[0], (b"bar".to_vec(), b"none".to_vec())); + + // make sure start and end are applied properly + let res: Vec = range_with_prefix( + &storage, + &prefix, + Some(b"bas"), + Some(b"sno"), + Order::Ascending, + ) + .collect(); + assert_eq!(res.len(), 0); + + let res: Vec = + 
range_with_prefix(&storage, &prefix, Some(b"ant"), None, Order::Ascending).collect(); + assert_eq!(res.len(), 2); + assert_eq!(res[0], (b"bar".to_vec(), b"none".to_vec())); + assert_eq!(res[1], (b"snowy".to_vec(), b"day".to_vec())); + } + + #[test] + fn test_namespace_upper_bound() { + assert_eq!(namespace_upper_bound(b"bob"), b"boc".to_vec()); + assert_eq!(namespace_upper_bound(b"fo\xfe"), b"fo\xff".to_vec()); + assert_eq!(namespace_upper_bound(b"fo\xff"), b"fp\x00".to_vec()); + // multiple \xff roll over + assert_eq!( + namespace_upper_bound(b"fo\xff\xff\xff"), + b"fp\x00\x00\x00".to_vec() + ); + // \xff not at the end are ignored + assert_eq!(namespace_upper_bound(b"\xffabc"), b"\xffabd".to_vec()); + } +} diff --git a/packages/storage-plus/src/keys.rs b/packages/storage-plus/src/keys.rs new file mode 100644 index 000000000..03f2d6a4b --- /dev/null +++ b/packages/storage-plus/src/keys.rs @@ -0,0 +1,54 @@ +pub trait PrimaryKey<'a> { + type Prefix: Prefixer<'a>; + + /// returns a slice of key steps, which can be optionally combined + fn key(&self) -> Vec<&'a [u8]>; +} + +impl<'a> PrimaryKey<'a> for &'a [u8] { + type Prefix = (); + + fn key(&self) -> Vec<&'a [u8]> { + // this is simple, we don't add more prefixes + vec![self] + } +} + +impl<'a> PrimaryKey<'a> for (&'a [u8], &'a [u8]) { + type Prefix = &'a [u8]; + + fn key(&self) -> Vec<&'a [u8]> { + vec![self.0, self.1] + } +} + +impl<'a> PrimaryKey<'a> for (&'a [u8], &'a [u8], &'a [u8]) { + type Prefix = (&'a [u8], &'a [u8]); + + fn key(&self) -> Vec<&'a [u8]> { + vec![self.0, self.1, self.2] + } +} + +pub trait Prefixer<'a> { + /// returns 0 or more namespaces that should length-prefixed and concatenated for range searches + fn prefix(&self) -> Vec<&'a [u8]>; +} + +impl<'a> Prefixer<'a> for () { + fn prefix(&self) -> Vec<&'a [u8]> { + vec![] + } +} + +impl<'a> Prefixer<'a> for &'a [u8] { + fn prefix(&self) -> Vec<&'a [u8]> { + vec![self] + } +} + +impl<'a> Prefixer<'a> for (&'a [u8], &'a [u8]) { + fn prefix(&self) -> Vec<&'a [u8]> { + vec![self.0, self.1] + } +} diff --git a/packages/storage-plus/src/legacy_helpers.rs b/packages/storage-plus/src/legacy_helpers.rs new file mode 100644 index 000000000..5e81c0767 --- /dev/null +++ b/packages/storage-plus/src/legacy_helpers.rs @@ -0,0 +1,159 @@ +// This code is intentionally included not in lib.rs +// Most of it will be deleted. But maybe we want to borrow some chunks, so keeping them here. 
+ +/// Calculates the raw key prefix for a given nested namespace +/// as documented in https://github.com/webmaster128/key-namespacing#nesting +pub(crate) fn to_length_prefixed_nested(namespaces: &[&[u8]]) -> Vec { + let mut size = 0; + for &namespace in namespaces { + size += namespace.len() + 2; + } + + let mut out = Vec::with_capacity(size); + for &namespace in namespaces { + out.extend_from_slice(&encode_length(namespace)); + out.extend_from_slice(namespace); + } + out +} + +pub(crate) fn length_prefixed_with_key(namespace: &[u8], key: &[u8]) -> Vec { + let mut out = Vec::with_capacity(namespace.len() + 2 + key.len()); + out.extend_from_slice(&encode_length(namespace)); + out.extend_from_slice(namespace); + out.extend_from_slice(key); + out +} + +/// This is equivalent concat(to_length_prefixed_nested(namespaces), key) +/// But more efficient when the intermediate namespaces often must be recalculated +pub(crate) fn namespaces_with_key(namespaces: &[&[u8]], key: &[u8]) -> Vec { + let mut size = key.len(); + for &namespace in namespaces { + size += namespace.len() + 2; + } + + let mut out = Vec::with_capacity(size); + for &namespace in namespaces { + out.extend_from_slice(&encode_length(namespace)); + out.extend_from_slice(namespace); + } + out.extend_from_slice(key); + out +} + +// pub(crate) fn decode_length(prefix: [u8; 2]) -> usize { +pub(crate) fn decode_length(prefix: &[u8]) -> usize { + // TODO: enforce exactly 2 bytes somehow, but usable with slices + (prefix[0] as usize) * 256 + (prefix[1] as usize) +} + +pub(crate) fn get_with_prefix( + storage: &S, + namespace: &[u8], + key: &[u8], +) -> Option> { + storage.get(&concat(namespace, key)) +} + +pub(crate) fn set_with_prefix( + storage: &mut S, + namespace: &[u8], + key: &[u8], + value: &[u8], +) { + storage.set(&concat(namespace, key), value); +} + +pub(crate) fn remove_with_prefix(storage: &mut S, namespace: &[u8], key: &[u8]) { + storage.remove(&concat(namespace, key)); +} + +#[cfg(test)] +mod legacy_test { + use super::*; + use crate::helpers::*; + use cosmwasm_std::testing::MockStorage; + + #[test] + fn to_length_prefixed_nested_works() { + assert_eq!(to_length_prefixed_nested(&[]), b""); + assert_eq!(to_length_prefixed_nested(&[b""]), b"\x00\x00"); + assert_eq!(to_length_prefixed_nested(&[b"", b""]), b"\x00\x00\x00\x00"); + + assert_eq!(to_length_prefixed_nested(&[b"a"]), b"\x00\x01a"); + assert_eq!( + to_length_prefixed_nested(&[b"a", b"ab"]), + b"\x00\x01a\x00\x02ab" + ); + assert_eq!( + to_length_prefixed_nested(&[b"a", b"ab", b"abc"]), + b"\x00\x01a\x00\x02ab\x00\x03abc" + ); + } + + #[test] + fn to_length_prefixed_nested_allows_many_long_namespaces() { + // The 0xFFFF limit is for each namespace, not for the combination of them + + let long_namespace1 = vec![0xaa; 0xFFFD]; + let long_namespace2 = vec![0xbb; 0xFFFE]; + let long_namespace3 = vec![0xcc; 0xFFFF]; + + let prefix = + to_length_prefixed_nested(&[&long_namespace1, &long_namespace2, &long_namespace3]); + assert_eq!(&prefix[0..2], b"\xFF\xFD"); + assert_eq!(&prefix[2..(2 + 0xFFFD)], long_namespace1.as_slice()); + assert_eq!(&prefix[(2 + 0xFFFD)..(2 + 0xFFFD + 2)], b"\xFF\xFe"); + assert_eq!( + &prefix[(2 + 0xFFFD + 2)..(2 + 0xFFFD + 2 + 0xFFFE)], + long_namespace2.as_slice() + ); + assert_eq!( + &prefix[(2 + 0xFFFD + 2 + 0xFFFE)..(2 + 0xFFFD + 2 + 0xFFFE + 2)], + b"\xFF\xFf" + ); + assert_eq!( + &prefix[(2 + 0xFFFD + 2 + 0xFFFE + 2)..(2 + 0xFFFD + 2 + 0xFFFE + 2 + 0xFFFF)], + long_namespace3.as_slice() + ); + } + + #[test] + fn 
to_length_prefixed_nested_calculates_capacity_correctly() { + // Those tests cannot guarantee the required capacity was calculated correctly before + // the vector allocation but increase the likelyhood of a proper implementation. + + let key = to_length_prefixed_nested(&[]); + assert_eq!(key.capacity(), key.len()); + + let key = to_length_prefixed_nested(&[b""]); + assert_eq!(key.capacity(), key.len()); + + let key = to_length_prefixed_nested(&[b"a"]); + assert_eq!(key.capacity(), key.len()); + + let key = to_length_prefixed_nested(&[b"a", b"bc"]); + assert_eq!(key.capacity(), key.len()); + + let key = to_length_prefixed_nested(&[b"a", b"bc", b"def"]); + assert_eq!(key.capacity(), key.len()); + } + + + #[test] + fn prefix_get_set() { + let mut storage = MockStorage::new(); + let prefix = to_length_prefixed(b"foo"); + + set_with_prefix(&mut storage, &prefix, b"bar", b"gotcha"); + let rfoo = get_with_prefix(&storage, &prefix, b"bar"); + assert_eq!(rfoo, Some(b"gotcha".to_vec())); + + // no collisions with other prefixes + let other_prefix = to_length_prefixed(b"fo"); + let collision = get_with_prefix(&storage, &other_prefix, b"obar"); + assert_eq!(collision, None); + } + +} \ No newline at end of file diff --git a/packages/storage-plus/src/lib.rs b/packages/storage-plus/src/lib.rs new file mode 100644 index 000000000..c032464ea --- /dev/null +++ b/packages/storage-plus/src/lib.rs @@ -0,0 +1,14 @@ +mod helpers; +mod item; +mod iter_helpers; +mod keys; +mod map; +mod path; +mod prefix; + +pub use item::Item; +pub use keys::{Prefixer, PrimaryKey}; +pub use map::Map; +pub use path::Path; +#[cfg(feature = "iterator")] +pub use prefix::Prefix; diff --git a/packages/storage-plus/src/map.rs b/packages/storage-plus/src/map.rs new file mode 100644 index 000000000..46a2bc4aa --- /dev/null +++ b/packages/storage-plus/src/map.rs @@ -0,0 +1,362 @@ +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::marker::PhantomData; + +use crate::keys::PrimaryKey; +use crate::path::Path; +#[cfg(feature = "iterator")] +use crate::{Prefix, Prefixer}; +use cosmwasm_std::{StdError, StdResult, Storage}; + +pub struct Map<'a, K, T> { + namespace: &'a [u8], + // see https://doc.rust-lang.org/std/marker/struct.PhantomData.html#unused-type-parameters for why this is needed + key_type: PhantomData, + data_type: PhantomData, +} + +impl<'a, K, T> Map<'a, K, T> { + pub const fn new(namespace: &'a [u8]) -> Self { + Map { + namespace, + data_type: PhantomData, + key_type: PhantomData, + } + } +} + +impl<'a, K, T> Map<'a, K, T> +where + T: Serialize + DeserializeOwned, + K: PrimaryKey<'a>, +{ + pub fn key(&self, k: K) -> Path { + Path::new(self.namespace, &k.key()) + } + + #[cfg(feature = "iterator")] + pub fn prefix(&self, p: K::Prefix) -> Prefix { + Prefix::new(self.namespace, &p.prefix()) + } + + pub fn save(&self, store: &mut S, k: K, data: &T) -> StdResult<()> { + self.key(k).save(store, data) + } + + pub fn remove(&self, store: &mut S, k: K) { + self.key(k).remove(store) + } + + /// load will return an error if no data is set at the given key, or on parse error + pub fn load(&self, store: &S, k: K) -> StdResult { + self.key(k).load(store) + } + + /// may_load will parse the data stored at the key if present, returns Ok(None) if no data there. + /// returns an error on issues parsing + pub fn may_load(&self, store: &S, k: K) -> StdResult> { + self.key(k).may_load(store) + } + + /// Loads the data, perform the specified action, and store the result + /// in the database. 
This is shorthand for some common sequences, which may be useful. + /// + /// If the data exists, `action(Some(value))` is called. Otherwise `action(None)` is called. + pub fn update(&mut self, store: &mut S, k: K, action: A) -> Result + where + A: FnOnce(Option) -> Result, + E: From, + S: Storage, + { + self.key(k).update(store, action) + } +} + +/// short-cut for simple keys, rather than .prefix(()).range(...) +#[cfg(feature = "iterator")] +impl<'a, T> Map<'a, &'a [u8], T> +where + T: Serialize + DeserializeOwned, +{ + // I would prefer not to copy code from Prefix, but no other way + // with lifetimes (create Prefix inside function and return ref = no no) + pub fn range<'c, S: Storage>( + &'c self, + store: &'c S, + start: Option<&[u8]>, + end: Option<&[u8]>, + order: cosmwasm_std::Order, + ) -> Box>> + 'c> { + // put the imports here, so we don't have to feature flag them above + use crate::iter_helpers::{deserialize_kv, range_with_prefix, to_length_prefixed}; + + let prefix = to_length_prefixed(self.namespace); + let mapped = range_with_prefix(store, &prefix, start, end, order).map(deserialize_kv::); + Box::new(mapped) + } +} + +#[cfg(test)] +mod test { + use super::*; + use serde::{Deserialize, Serialize}; + + use cosmwasm_std::testing::MockStorage; + #[cfg(feature = "iterator")] + use cosmwasm_std::{Order, StdResult}; + + #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] + struct Data { + pub name: String, + pub age: i32, + } + + const PEOPLE: Map<&[u8], Data> = Map::new(b"people"); + + const ALLOWANCE: Map<(&[u8], &[u8]), u64> = Map::new(b"allow"); + + #[test] + fn create_path() { + let path = PEOPLE.key(b"john"); + let key = path.storage_key; + // this should be prefixed(people) || john + assert_eq!("people".len() + "john".len() + 2, key.len()); + assert_eq!(b"people".to_vec().as_slice(), &key[2..8]); + assert_eq!(b"john".to_vec().as_slice(), &key[8..]); + + let path = ALLOWANCE.key((b"john", b"maria")); + let key = path.storage_key; + // this should be prefixed(allow) || prefixed(john) || maria + assert_eq!("allow".len() + "john".len() + "maria".len() + 4, key.len()); + assert_eq!(b"allow".to_vec().as_slice(), &key[2..7]); + assert_eq!(b"john".to_vec().as_slice(), &key[9..13]); + assert_eq!(b"maria".to_vec().as_slice(), &key[13..]); + } + + #[test] + fn save_and_load() { + let mut store = MockStorage::new(); + + // save and load on one key + let john = PEOPLE.key(b"john"); + let data = Data { + name: "John".to_string(), + age: 32, + }; + assert_eq!(None, john.may_load(&store).unwrap()); + john.save(&mut store, &data).unwrap(); + assert_eq!(data, john.load(&store).unwrap()); + + // nothing on another key + assert_eq!(None, PEOPLE.may_load(&store, b"jack").unwrap()); + + // same named path gets the data + assert_eq!(data, PEOPLE.load(&store, b"john").unwrap()); + + // removing leaves us empty + john.remove(&mut store); + assert_eq!(None, john.may_load(&store).unwrap()); + } + + #[test] + fn composite_keys() { + let mut store = MockStorage::new(); + + // save and load on a composite key + let allow = ALLOWANCE.key((b"owner", b"spender")); + assert_eq!(None, allow.may_load(&store).unwrap()); + allow.save(&mut store, &1234).unwrap(); + assert_eq!(1234, allow.load(&store).unwrap()); + + // not under other key + let different = ALLOWANCE.may_load(&store, (b"owners", b"pender")).unwrap(); + assert_eq!(None, different); + + // matches under a proper copy + let same = ALLOWANCE.load(&store, (b"owner", b"spender")).unwrap(); + assert_eq!(1234, same); + } + + #[test] + #[cfg(feature 
= "iterator")] + fn range_simple_key() { + let mut store = MockStorage::new(); + + // save and load on two keys + let data = Data { + name: "John".to_string(), + age: 32, + }; + PEOPLE.save(&mut store, b"john", &data).unwrap(); + + let data2 = Data { + name: "Jim".to_string(), + age: 44, + }; + PEOPLE.save(&mut store, b"jim", &data2).unwrap(); + + // let's try to iterate! + let all: StdResult> = PEOPLE.range(&store, None, None, Order::Ascending).collect(); + let all = all.unwrap(); + assert_eq!(2, all.len()); + assert_eq!( + all, + vec![(b"jim".to_vec(), data2), (b"john".to_vec(), data)] + ); + } + + #[test] + #[cfg(feature = "iterator")] + fn range_composite_key() { + let mut store = MockStorage::new(); + + // save and load on three keys, one under different owner + ALLOWANCE + .save(&mut store, (b"owner", b"spender"), &1000) + .unwrap(); + ALLOWANCE + .save(&mut store, (b"owner", b"spender2"), &3000) + .unwrap(); + ALLOWANCE + .save(&mut store, (b"owner2", b"spender"), &5000) + .unwrap(); + + // let's try to iterate! + let all: StdResult> = ALLOWANCE + .prefix(b"owner") + .range(&store, None, None, Order::Ascending) + .collect(); + let all = all.unwrap(); + assert_eq!(2, all.len()); + assert_eq!( + all, + vec![(b"spender".to_vec(), 1000), (b"spender2".to_vec(), 3000)] + ); + } + + #[test] + fn basic_update() { + let mut store = MockStorage::new(); + + let add_ten = |a: Option| -> StdResult<_> { Ok(a.unwrap_or_default() + 10) }; + + // save and load on three keys, one under different owner + let key: (&[u8], &[u8]) = (b"owner", b"spender"); + ALLOWANCE.update(&mut store, key, add_ten).unwrap(); + let twenty = ALLOWANCE.update(&mut store, key, add_ten).unwrap(); + assert_eq!(20, twenty); + let loaded = ALLOWANCE.load(&store, key).unwrap(); + assert_eq!(20, loaded); + } + + #[test] + fn readme_works() -> StdResult<()> { + let mut store = MockStorage::new(); + let data = Data { + name: "John".to_string(), + age: 32, + }; + + // load and save with extra key argument + let empty = PEOPLE.may_load(&store, b"john")?; + assert_eq!(None, empty); + PEOPLE.save(&mut store, b"john", &data)?; + let loaded = PEOPLE.load(&store, b"john")?; + assert_eq!(data, loaded); + + // nothing on another key + let missing = PEOPLE.may_load(&store, b"jack")?; + assert_eq!(None, missing); + + // update function for new or existing keys + let birthday = |d: Option| -> StdResult { + match d { + Some(one) => Ok(Data { + name: one.name, + age: one.age + 1, + }), + None => Ok(Data { + name: "Newborn".to_string(), + age: 0, + }), + } + }; + + let old_john = PEOPLE.update(&mut store, b"john", birthday)?; + assert_eq!(33, old_john.age); + assert_eq!("John", old_john.name.as_str()); + + let new_jack = PEOPLE.update(&mut store, b"jack", birthday)?; + assert_eq!(0, new_jack.age); + assert_eq!("Newborn", new_jack.name.as_str()); + + // update also changes the store + assert_eq!(old_john, PEOPLE.load(&store, b"john")?); + assert_eq!(new_jack, PEOPLE.load(&store, b"jack")?); + + // removing leaves us empty + PEOPLE.remove(&mut store, b"john"); + let empty = PEOPLE.may_load(&store, b"john")?; + assert_eq!(None, empty); + + Ok(()) + } + + #[test] + fn readme_works_composite_keys() -> StdResult<()> { + let mut store = MockStorage::new(); + + // save and load on a composite key + let empty = ALLOWANCE.may_load(&store, (b"owner", b"spender"))?; + assert_eq!(None, empty); + ALLOWANCE.save(&mut store, (b"owner", b"spender"), &777)?; + let loaded = ALLOWANCE.load(&store, (b"owner", b"spender"))?; + assert_eq!(777, loaded); + + // doesn't 
appear under other key (even if a concat would be the same) + let different = ALLOWANCE.may_load(&store, (b"owners", b"pender")).unwrap(); + assert_eq!(None, different); + + // simple update + ALLOWANCE.update(&mut store, (b"owner", b"spender"), |v| { + Ok(v.unwrap_or_default() + 222) + })?; + let loaded = ALLOWANCE.load(&store, (b"owner", b"spender"))?; + assert_eq!(999, loaded); + + Ok(()) + } + + #[test] + fn readme_works_with_path() -> StdResult<()> { + let mut store = MockStorage::new(); + let data = Data { + name: "John".to_string(), + age: 32, + }; + + // create a Path one time to use below + let john = PEOPLE.key(b"john"); + + // Use this just like an Item above + let empty = john.may_load(&store)?; + assert_eq!(None, empty); + john.save(&mut store, &data)?; + let loaded = john.load(&store)?; + assert_eq!(data, loaded); + john.remove(&mut store); + let empty = john.may_load(&store)?; + assert_eq!(None, empty); + + // same for composite keys, just use both parts in key() + let allow = ALLOWANCE.key((b"owner", b"spender")); + allow.save(&mut store, &1234)?; + let loaded = allow.load(&store)?; + assert_eq!(1234, loaded); + allow.update(&mut store, |x| Ok(x.unwrap_or_default() * 2))?; + let loaded = allow.load(&store)?; + assert_eq!(2468, loaded); + + Ok(()) + } +} diff --git a/packages/storage-plus/src/path.rs b/packages/storage-plus/src/path.rs new file mode 100644 index 000000000..f813625cd --- /dev/null +++ b/packages/storage-plus/src/path.rs @@ -0,0 +1,70 @@ +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::marker::PhantomData; + +use crate::helpers::{may_deserialize, must_deserialize, nested_namespaces_with_key}; +use cosmwasm_std::{to_vec, StdError, StdResult, Storage}; + +pub struct Path +where + T: Serialize + DeserializeOwned, +{ + /// all namespaces prefixes and concatenated with the key + pub(crate) storage_key: Vec, + // see https://doc.rust-lang.org/std/marker/struct.PhantomData.html#unused-type-parameters for why this is needed + data: PhantomData, +} + +impl Path +where + T: Serialize + DeserializeOwned, +{ + pub fn new(namespace: &[u8], keys: &[&[u8]]) -> Self { + let l = keys.len(); + // FIXME: make this more efficient + let storage_key = nested_namespaces_with_key(&[namespace], &keys[0..l - 1], keys[l - 1]); + Path { + storage_key, + data: PhantomData, + } + } + + /// save will serialize the model and store, returns an error on serialization issues + pub fn save(&self, store: &mut S, data: &T) -> StdResult<()> { + store.set(&self.storage_key, &to_vec(data)?); + Ok(()) + } + + pub fn remove(&self, store: &mut S) { + store.remove(&self.storage_key); + } + + /// load will return an error if no data is set at the given key, or on parse error + pub fn load(&self, store: &S) -> StdResult { + let value = store.get(&self.storage_key); + must_deserialize(&value) + } + + /// may_load will parse the data stored at the key if present, returns Ok(None) if no data there. + /// returns an error on issues parsing + pub fn may_load(&self, store: &S) -> StdResult> { + let value = store.get(&self.storage_key); + may_deserialize(&value) + } + + /// Loads the data, perform the specified action, and store the result + /// in the database. This is shorthand for some common sequences, which may be useful. + /// + /// If the data exists, `action(Some(value))` is called. Otherwise `action(None)` is called. 
+ pub fn update(&self, store: &mut S, action: A) -> Result + where + A: FnOnce(Option) -> Result, + E: From, + S: Storage, + { + let input = self.may_load(store)?; + let output = action(input)?; + self.save(store, &output)?; + Ok(output) + } +} diff --git a/packages/storage-plus/src/prefix.rs b/packages/storage-plus/src/prefix.rs new file mode 100644 index 000000000..5e93d1f56 --- /dev/null +++ b/packages/storage-plus/src/prefix.rs @@ -0,0 +1,45 @@ +#![cfg(feature = "iterator")] +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::marker::PhantomData; + +use crate::helpers::nested_namespaces_with_key; +use crate::iter_helpers::{deserialize_kv, range_with_prefix}; +use cosmwasm_std::{Order, StdResult, Storage, KV}; + +pub struct Prefix +where + T: Serialize + DeserializeOwned, +{ + /// all namespaces prefixes and concatenated with the key + pub(crate) storage_prefix: Vec, + // see https://doc.rust-lang.org/std/marker/struct.PhantomData.html#unused-type-parameters for why this is needed + data: PhantomData, +} + +impl Prefix +where + T: Serialize + DeserializeOwned, +{ + pub fn new(top_name: &[u8], sub_names: &[&[u8]]) -> Self { + // FIXME: we can use a custom function here, probably make this cleaner + let storage_prefix = nested_namespaces_with_key(&[top_name], sub_names, b""); + Prefix { + storage_prefix, + data: PhantomData, + } + } + + // TODO: parse out composite key prefix??? + pub fn range<'a, S: Storage>( + &'a self, + store: &'a S, + start: Option<&[u8]>, + end: Option<&[u8]>, + order: Order, + ) -> Box>> + 'a> { + let mapped = range_with_prefix(store, &self.storage_prefix, start, end, order) + .map(deserialize_kv::); + Box::new(mapped) + } +} diff --git a/scripts/publish.sh b/scripts/publish.sh index 176484be5..3f8a4f662 100755 --- a/scripts/publish.sh +++ b/scripts/publish.sh @@ -3,7 +3,7 @@ set -o errexit -o nounset -o pipefail command -v shellcheck > /dev/null && shellcheck "$0" # these are imported by other packages -BASE_PACKAGES="cw0" +BASE_PACKAGES="cw0 storage-plus" ALL_PACKAGES="cw1 cw2 cw3 cw20 cw721" # these are imported by other contracts