Can we make the structs JSONToken & JSONTokens Public?
dvaerum opened this issue · 2 comments
I was wondering whether there is a reason why these two structs and their methods (e.g. load_from_file) are not public?
Lines 185 to 351 in 78b79cf
/// A single stored token. | |
#[derive(Debug, Clone)] | |
struct JSONToken { | |
scopes: Vec<String>, | |
token: TokenInfo, | |
hash: ScopeHash, | |
filter: ScopeFilter, | |
} | |
impl<'de> Deserialize<'de> for JSONToken { | |
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> | |
where | |
D: serde::Deserializer<'de>, | |
{ | |
#[derive(Deserialize)] | |
struct RawJSONToken { | |
scopes: Vec<String>, | |
token: TokenInfo, | |
} | |
let RawJSONToken { scopes, token } = RawJSONToken::deserialize(deserializer)?; | |
let ScopeSet { hash, filter, .. } = ScopeSet::from(&scopes); | |
Ok(JSONToken { | |
scopes, | |
token, | |
hash, | |
filter, | |
}) | |
} | |
} | |
impl Serialize for JSONToken { | |
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> | |
where | |
S: serde::Serializer, | |
{ | |
#[derive(Serialize)] | |
struct RawJSONToken<'a> { | |
scopes: &'a [String], | |
token: &'a TokenInfo, | |
} | |
RawJSONToken { | |
scopes: &self.scopes, | |
token: &self.token, | |
} | |
.serialize(serializer) | |
} | |
} | |
/// List of tokens in a JSON object
#[derive(Debug, Clone)]
pub(crate) struct JSONTokens {
    // Tokens keyed by the hash of their scope set, enabling O(1)
    // exact-scope lookup in `get`.
    token_map: HashMap<ScopeHash, JSONToken>,
}
impl Serialize for JSONTokens {
    /// Serializes the stored tokens as a flat sequence; the map keys are
    /// derivable from each token's scopes and are not persisted.
    ///
    /// NOTE(review): `HashMap` iteration order is unspecified, so the
    /// order of tokens in the output may vary between runs — confirm no
    /// consumer relies on a stable ordering.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.collect_seq(self.token_map.values())
    }
}
impl<'de> Deserialize<'de> for JSONTokens { | |
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> | |
where | |
D: serde::Deserializer<'de>, | |
{ | |
struct V; | |
impl<'de> serde::de::Visitor<'de> for V { | |
type Value = JSONTokens; | |
// Format a message stating what data this Visitor expects to receive. | |
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { | |
formatter.write_str("a sequence of JSONToken's") | |
} | |
fn visit_seq<M>(self, mut access: M) -> Result<Self::Value, M::Error> | |
where | |
M: serde::de::SeqAccess<'de>, | |
{ | |
let mut token_map = HashMap::with_capacity(access.size_hint().unwrap_or(0)); | |
while let Some(json_token) = access.next_element::<JSONToken>()? { | |
token_map.insert(json_token.hash, json_token); | |
} | |
Ok(JSONTokens { token_map }) | |
} | |
} | |
// Instantiate our Visitor and ask the Deserializer to drive | |
// it over the input data. | |
deserializer.deserialize_seq(V) | |
} | |
} | |
impl JSONTokens { | |
pub(crate) fn new() -> Self { | |
JSONTokens { | |
token_map: HashMap::new(), | |
} | |
} | |
async fn load_from_file(filename: &Path) -> Result<Self, io::Error> { | |
let contents = tokio::fs::read(filename).await?; | |
serde_json::from_slice(&contents).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) | |
} | |
fn get<T>( | |
&self, | |
ScopeSet { | |
hash, | |
filter, | |
scopes, | |
}: ScopeSet<T>, | |
) -> Option<TokenInfo> | |
where | |
T: AsRef<str>, | |
{ | |
if let Some(json_token) = self.token_map.get(&hash) { | |
return Some(json_token.token.clone()); | |
} | |
let requested_scopes_are_subset_of = |other_scopes: &[String]| { | |
scopes | |
.iter() | |
.all(|s| other_scopes.iter().any(|t| t.as_str() == s.as_ref())) | |
}; | |
// No exact match for the scopes provided. Search for any tokens that | |
// exist for a superset of the scopes requested. | |
self.token_map | |
.values() | |
.filter(|json_token| filter.is_subset_of(json_token.filter) == FilterResponse::Maybe) | |
.find(|v: &&JSONToken| requested_scopes_are_subset_of(&v.scopes)) | |
.map(|t: &JSONToken| t.token.clone()) | |
} | |
fn set<T>( | |
&mut self, | |
ScopeSet { | |
hash, | |
filter, | |
scopes, | |
}: ScopeSet<T>, | |
token: TokenInfo, | |
) -> Result<(), io::Error> | |
where | |
T: AsRef<str>, | |
{ | |
use std::collections::hash_map::Entry; | |
match self.token_map.entry(hash) { | |
Entry::Occupied(mut entry) => { | |
entry.get_mut().token = token; | |
} | |
Entry::Vacant(entry) => { | |
let json_token = JSONToken { | |
scopes: scopes.iter().map(|x| x.as_ref().to_owned()).collect(), | |
token, | |
hash, | |
filter, | |
}; | |
entry.insert(json_token); | |
} | |
} | |
Ok(()) | |
} | |
} |
From the way I read the code, I could reuse these to write my own TokenStorage implementation while keeping the same JSON token format as the one used by this library, without having to re-implement the code.
Lines 116 to 124 in 78b79cf
/// Pluggable storage backend for tokens, allowing custom persistence
/// strategies to be supplied by library users.
#[async_trait]
pub trait TokenStorage: Send + Sync {
    /// Store a token for the given set of scopes so that it can be retrieved later by get()
    /// TokenInfo can be serialized with serde.
    async fn set(&self, scopes: &[&str], token: TokenInfo) -> anyhow::Result<()>;
    /// Retrieve a token stored by set for the given set of scopes
    async fn get(&self, scopes: &[&str]) -> Option<TokenInfo>;
}
Is there something I am missing or not understanding?
I think the canonical way of writing your own storage backend so far is to implement the TokenStorage
trait. If you think that using the library's implementation of JSON serialization would help you write a custom storage type, I'd be open to making these public — just send a PR.
Awesome, I will make a pull request 👍