Diffstat
 filamento/Cargo.toml                        |   5
 filamento/migrations/20240113011930_luz.sql |   7
 filamento/src/caps.rs                       | 184
 filamento/src/db.rs                         |  26
 filamento/src/disco.rs                      |  20
 filamento/src/error.rs                      |  33
 filamento/src/lib.rs                        |   3
 filamento/src/logic/online.rs               |   1
 filamento/src/logic/process_stanza.rs       |   2
 9 files changed, 270 insertions(+), 11 deletions(-)
diff --git a/filamento/Cargo.toml b/filamento/Cargo.toml
index e9be687..3530ab9 100644
--- a/filamento/Cargo.toml
+++ b/filamento/Cargo.toml
@@ -8,13 +8,16 @@ futures = "0.3.31"
 lampada = { version = "0.1.0", path = "../lampada" }
 tokio = "1.42.0"
 thiserror = "2.0.11"
-stanza = { version = "0.1.0", path = "../stanza", features = ["rfc_6121", "xep_0203", "xep_0030", "xep_0060", "xep_0172"] }
+stanza = { version = "0.1.0", path = "../stanza", features = ["rfc_6121", "xep_0203", "xep_0030", "xep_0060", "xep_0172", "xep_0390", "xep_0128"] }
 sqlx = { version = "0.8.3", features = ["sqlite", "runtime-tokio", "uuid", "chrono"] }
 # TODO: re-export jid?
 jid = { version = "0.1.0", path = "../jid", features = ["sqlx"] }
 uuid = { version = "1.13.1", features = ["v4"] }
 tracing = "0.1.41"
 chrono = "0.4.40"
+sha2 = "0.10.8"
+sha3 = "0.10.8"
+base64 = "0.22.1"
 
 [dev-dependencies]
 tracing-subscriber = "0.3.19"
diff --git a/filamento/migrations/20240113011930_luz.sql b/filamento/migrations/20240113011930_luz.sql
index 3b56664..c2b5a97 100644
--- a/filamento/migrations/20240113011930_luz.sql
+++ b/filamento/migrations/20240113011930_luz.sql
@@ -129,3 +129,10 @@ create table cached_status (
 );
 
 insert into cached_status (id) values (0);
+
+create table capability_hash_nodes (
+    node text primary key not null,
+    timestamp text not null,
+    -- TODO: normalization
+    capabilities text not null
+);
diff --git a/filamento/src/caps.rs b/filamento/src/caps.rs
new file mode 100644
index 0000000..c87e48a
--- /dev/null
+++ b/filamento/src/caps.rs
@@ -0,0 +1,184 @@
+use std::str::FromStr;
+
+use base64::{Engine, prelude::BASE64_STANDARD};
+use sha2::{Digest, Sha256};
+use sha3::Sha3_256;
+use stanza::{
+    xep_0030::info,
+    xep_0300::{self, Algo, Hash},
+    xep_0390::C,
+};
+
+use crate::{
+    disco::{Identity, Info, identity::Category},
+    error::{CapsDecodeError, HashNodeConversionError},
+};
+
+pub fn caps(query: info::Query) -> C {
+    let mut string = String::new();
+
+    // features string
+    let mut features = Vec::new();
+    for feature in query.features {
+        let mut string = String::new();
+        string.push_str(&feature.var);
+        string.push('\x1f');
+        features.push(string);
+    }
+    features.sort();
+    let features_string = features.concat();
+    string.push_str(&features_string);
+    string.push('\x1c');
+
+    // identities string
+    let mut identities = Vec::new();
+    for identity in query.identities {
+        let mut string = String::new();
+        string.push_str(&identity.category);
+        string.push('\x1f');
+        string.push_str(&identity.r#type);
+        string.push('\x1f');
+        string.push_str(&identity.lang.unwrap_or_default());
+        string.push('\x1f');
+        string.push_str(&identity.name.unwrap_or_default());
+        string.push('\x1f');
+        string.push('\x1e');
+        identities.push(string);
+    }
+    identities.sort();
+    let identities_string = identities.concat();
+    string.push_str(&identities_string);
+    string.push('\x1c');
+
+    // extensions string
+    let mut extensions = Vec::new();
+    for extension in query.extensions {
+        let mut string = String::new();
+        let mut fields = Vec::new();
+        for field in extension.fields {
+            let mut string = String::new();
+            string.push_str(&field.var.unwrap_or_default());
+            string.push('\x1f');
+            let mut values = Vec::new();
+            for value in field.values {
+                let mut string = String::new();
+                string.push_str(&value.0);
+                string.push('\x1f');
+                values.push(string);
+            }
+            values.sort();
+            let values_string = values.concat();
+            string.push_str(&values_string);
+            string.push('\x1e');
+            fields.push(string);
+        }
+        fields.sort();
+        let fields_string = fields.concat();
+        string.push_str(&fields_string);
+        string.push('\x1d');
+        extensions.push(string);
+    }
+    extensions.sort();
+    let extensions_string = extensions.concat();
+    string.push_str(&extensions_string);
+    string.push('\x1c');
+
+    let mut sha256 = Sha256::new();
+
+    sha256.update(&string);
+
+    let result = sha256.finalize();
+    let sha256_result = BASE64_STANDARD.encode(result);
+
+    let mut sha3_256 = Sha3_256::new();
+
+    sha3_256.update(string);
+
+    let result = sha3_256.finalize();
+    let sha3_256_result = BASE64_STANDARD.encode(result);
+
+    C(vec![
+        Hash {
+            algo: Algo::SHA256,
+            hash: sha256_result,
+        },
+        Hash {
+            algo: Algo::SHA3256,
+            hash: sha3_256_result,
+        },
+    ])
+}
+
+/// takes a base64 encoded cached caps string and converts it into a disco info result
+pub fn info(info: String) -> Result<Info, CapsDecodeError> {
+    let info = String::from_utf8(BASE64_STANDARD.decode(info)?)?;
+
+    let mut strings = info.split_terminator('\x1c');
+
+    let features_string = strings.next().ok_or(CapsDecodeError::MissingFeatures)?;
+    let mut features = Vec::new();
+    for feature in features_string.split_terminator('\x1f') {
+        features.push(feature.to_owned());
+    }
+
+    let identities_string = strings.next().ok_or(CapsDecodeError::MissingIdentities)?;
+    let mut identities = Vec::new();
+    for identity in identities_string.split_terminator('\x1e') {
+        let mut identity_string = identity.split_terminator('\x1f');
+        let category = identity_string
+            .next()
+            .ok_or(CapsDecodeError::MissingIdentityCategory)?;
+        let r#type = identity_string
+            .next()
+            .ok_or(CapsDecodeError::MissingIdentityType)?;
+        let _ = identity_string
+            .next()
+            .ok_or(CapsDecodeError::MissingIdentityLang)?;
+        let name = identity_string
+            .next()
+            .ok_or(CapsDecodeError::MissingIdentityName)?;
+        let name = if name.is_empty() {
+            None
+        } else {
+            Some(name.to_string())
+        };
+
+        let category = Category::from_category_and_type(category, r#type);
+        identities.push(Identity { name, category })
+    }
+
+    // TODO: service discovery extensions
+
+    Ok(Info {
+        node: None,
+        features,
+        identities,
+    })
+}
+
+pub fn hash_to_node(hash: xep_0300::Hash) -> String {
+    let mut string = String::from("urn:xmpp:caps#");
+    string.push_str(&hash.algo.to_string());
+    string.push('.');
+    string.push_str(&hash.hash);
+    string
+}
+
+pub fn node_to_hash(node: String) -> Result<Hash, HashNodeConversionError> {
+    let string = node
+        .strip_prefix("urn:xmpp:caps#")
+        .ok_or(HashNodeConversionError::NoPrefix)?;
+    let (algo, hash) = string
+        .rsplit_once('.')
+        .ok_or(HashNodeConversionError::MissingPeriod)?;
+    Ok(Hash {
+        algo: Algo::from_str(algo).unwrap(),
+        hash: hash.to_string(),
+    })
+}
+
+static CLIENT_INFO: Info = Info {
+    node: None,
+    features: vec![],
+    identities: vec![],
+};
diff --git a/filamento/src/db.rs b/filamento/src/db.rs
index f92bfb2..c19f16c 100644
--- a/filamento/src/db.rs
+++ b/filamento/src/db.rs
@@ -1,5 +1,6 @@
 use std::{collections::HashSet, path::Path};
 
+use chrono::Utc;
 use jid::JID;
 use sqlx::{SqlitePool, migrate};
 use uuid::Uuid;
@@ -560,4 +561,29 @@ impl Db {
             .await?;
         Ok(())
     }
+
+    pub(crate) async fn read_capabilities(&self, node: &str) -> Result<String, Error> {
+        #[derive(sqlx::FromRow)]
+        struct Row {
+            capabilities: String,
+        }
+        let row: Row =
+            sqlx::query_as("select capabilities from capability_hash_nodes where node = ?")
+                .bind(node)
+                .fetch_one(&self.db)
+                .await?;
+        Ok(row.capabilities)
+    }
+
+    pub(crate) async fn upsert_capabilities(
+        &self,
+        node: &str,
+        capabilities: &str,
+    ) -> Result<(), Error> {
+        let now = Utc::now();
+        sqlx::query!(
+            "insert into capability_hash_nodes (node, timestamp, capabilities) values (?, ?, ?) on conflict do update set timestamp = ?, capabilities = ?", node, now, capabilities, now, capabilities
+        ).execute(&self.db).await?;
+        Ok(())
+    }
 }
diff --git a/filamento/src/disco.rs b/filamento/src/disco.rs
index cc48215..580f647 100644
--- a/filamento/src/disco.rs
+++ b/filamento/src/disco.rs
@@ -6,9 +6,9 @@ pub use identity::Identity;
 
 #[derive(Debug, Clone)]
 pub struct Info {
-    node: Option<String>,
-    features: Vec<Feature>,
-    identities: Vec<Identity>,
+    pub node: Option<String>,
+    pub features: Vec<String>,
+    pub identities: Vec<Identity>,
 }
 
 impl From<info::Query> for Info {
@@ -16,7 +16,7 @@ impl From<info::Query> for Info {
         let features = value
             .features
             .into_iter()
-            .map(|feature| feature.into())
+            .map(|feature| feature.var)
             .collect();
         let identities = value
             .identities
@@ -37,7 +37,7 @@ impl From<Info> for info::Query {
         let features = value
             .features
             .into_iter()
-            .map(|feature| feature.into())
+            .map(|feature| info::Feature { var: feature })
             .collect();
         let identities = value
             .identities
@@ -49,6 +49,7 @@ impl From<Info> for info::Query {
             node: value.node,
             features,
             identities,
+            extensions: Vec::new(),
         }
     }
 }
@@ -108,7 +109,7 @@ impl From<Item> for items::Item {
     }
 }
 
-mod feature {
+pub mod feature {
     use stanza::xep_0030::info;
 
     // https://xmpp.org/registrar/disco-features.html
@@ -1061,13 +1062,13 @@ mod feature {
     }
 }
 
-mod identity {
+pub mod identity {
     use stanza::xep_0030::info;
 
     #[derive(Debug, Clone)]
     pub struct Identity {
-        name: Option<String>,
-        category: Category,
+        pub name: Option<String>,
+        pub category: Category,
     }
 
     impl From<info::Identity> for Identity {
@@ -1086,6 +1087,7 @@ mod identity {
                 category: value.category.to_string(),
                 name: value.name,
                 r#type: value.category.r#type(),
+                lang: None,
             }
         }
     }
diff --git a/filamento/src/error.rs b/filamento/src/error.rs
index 9ecc330..1dd4f47 100644
--- a/filamento/src/error.rs
+++ b/filamento/src/error.rs
@@ -1,4 +1,4 @@
-use std::sync::Arc;
+use std::{string::FromUtf8Error, sync::Arc};
 
 use jid::JID;
 use lampada::error::{ConnectionError, ReadError, WriteError};
@@ -227,3 +227,34 @@ pub enum NickError {
     #[error("disconnected")]
     Disconnected,
 }
+
+#[derive(Debug, Error, Clone)]
+pub enum CapsDecodeError {
+    #[error("base64 decode: {0}")]
+    Base64Decode(#[from] base64::DecodeError),
+    #[error("utf8: {0}")]
+    UTF8(#[from] FromUtf8Error),
+    #[error("missing features")]
+    MissingFeatures,
+    #[error("missing identities")]
+    MissingIdentities,
+    #[error("missing identity category")]
+    MissingIdentityCategory,
+    #[error("missing identity type")]
+    MissingIdentityType,
+    #[error("missing identity language")]
+    MissingIdentityLang,
+    #[error("missing identity name")]
+    MissingIdentityName,
+}
+
+#[derive(Debug, Error, Clone)]
+pub enum HashNodeConversionError {
+    #[error("no prefix")]
+    NoPrefix,
+    #[error("missing period")]
+    MissingPeriod,
+}
+
+// #[derive(Debug, Error, Clone)]
+// pub enum CapsError {}
diff --git a/filamento/src/lib.rs b/filamento/src/lib.rs
index 6118f75..c44edca 100644
--- a/filamento/src/lib.rs
+++ b/filamento/src/lib.rs
@@ -35,6 +35,7 @@ use tracing::{debug, info};
 use user::User;
 use uuid::Uuid;
 
+pub mod caps;
 pub mod chat;
 pub mod db;
 pub mod disco;
@@ -122,6 +123,8 @@ pub enum Command {
     },
     /// change user nickname
     ChangeNick(String, oneshot::Sender<Result<(), NickError>>),
+    // /// get capability node
+    // GetCaps(String, oneshot::Sender<Result<Info, CapsError>>),
 }
 
 #[derive(Debug, Clone)]
diff --git a/filamento/src/logic/online.rs b/filamento/src/logic/online.rs
index d32f527..b069f59 100644
--- a/filamento/src/logic/online.rs
+++ b/filamento/src/logic/online.rs
@@ -557,6 +557,7 @@ pub async fn handle_disco_info(
             node,
             features: Vec::new(),
             identities: Vec::new(),
+            extensions: Vec::new(),
         })),
         errors: Vec::new(),
     };
diff --git a/filamento/src/logic/process_stanza.rs b/filamento/src/logic/process_stanza.rs
index 2f6644e..e9787a9 100644
--- a/filamento/src/logic/process_stanza.rs
+++ b/filamento/src/logic/process_stanza.rs
@@ -232,7 +232,9 @@ pub async fn recv_iq(
                                 category: "client".to_string(),
                                 name: Some("filamento".to_string()),
                                 r#type: "pc".to_string(),
+                                lang: None,
                             }],
+                            extensions: Vec::new(),
                         };
                         let iq = Iq {
                             from: Some(connection.jid().clone()),
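
For orientation, a rough usage sketch of how the new caps module fits together (not part of the commit). The `info::Query`, `info::Feature`, and `info::Identity` field names are taken from the diff above; the surrounding function and the example feature/identity values are illustrative assumptions.

```rust
// Sketch only — not part of the commit. Struct field names come from the diff
// above; the concrete feature/identity values are made up for illustration.
use stanza::xep_0030::info;

fn example_caps_roundtrip() {
    // A minimal disco#info result describing this client.
    let query = info::Query {
        node: None,
        features: vec![info::Feature {
            var: "http://jabber.org/protocol/disco#info".to_string(),
        }],
        identities: vec![info::Identity {
            category: "client".to_string(),
            name: Some("filamento".to_string()),
            r#type: "pc".to_string(),
            lang: None,
        }],
        extensions: Vec::new(),
    };

    // Hash the query into a XEP-0390 <c/> element carrying SHA-256 and SHA3-256 hashes.
    let c = filamento::caps::caps(query);

    // Each hash maps to a cache node and node_to_hash reverses the mapping.
    for hash in c.0 {
        let node = filamento::caps::hash_to_node(hash);
        let _parsed = filamento::caps::node_to_hash(node);
    }
}
```

Per `hash_to_node` above, each cache node takes the form `urn:xmpp:caps#<algo>.<base64 hash>`, which is what the new `capability_hash_nodes` table is keyed on.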
