feat: add support for NIP-09

NIP-09 defines deletion requests: kind 5 events whose e tags list previously published events that clients and relays should stop displaying.

2024-10-08 16:30:26 +07:00
parent d2b5ae0507
commit c40762cc04
13 changed files with 270 additions and 436 deletions

View File

@@ -325,8 +325,8 @@ pub async fn login(
.reconcile(
Filter::new()
.authors(authors.clone())
.kinds(vec![Kind::Metadata, Kind::ContactList])
.limit(authors.len() * 10),
.kinds(vec![Kind::Metadata, Kind::ContactList, Kind::EventDeletion])
.limit(authors.len() * 20),
NegentropyOptions::default(),
)
.await
@@ -409,8 +409,13 @@ pub async fn login(
.reconcile(
Filter::new()
.authors(trusted)
.kinds(vec![Kind::Metadata, Kind::TextNote, Kind::Repost])
.limit(20000),
.kinds(vec![
Kind::Metadata,
Kind::TextNote,
Kind::Repost,
Kind::EventDeletion,
])
.limit(30000),
NegentropyOptions::default(),
)
.await
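
Note: Kind::EventDeletion is the kind 5 deletion-request event from NIP-09; syncing these during login lets the client honor deletions straight from local storage. As a minimal sketch using the same nostr-sdk APIs seen in this file, a standalone query for just the deletion events might look like this (fetch_deletions is a hypothetical helper, not part of this commit):

use nostr_sdk::prelude::*;

// Hypothetical helper: pull only the NIP-09 deletion requests (kind 5)
// published by a set of authors from the local database.
async fn fetch_deletions(
    client: &Client,
    authors: Vec<PublicKey>,
) -> Result<Vec<Event>, String> {
    let filter = Filter::new()
        .authors(authors)
        .kind(Kind::EventDeletion); // kind 5, per NIP-09
    client
        .database()
        .query(vec![filter])
        .await
        .map_err(|e| e.to_string())
}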

View File

@@ -5,7 +5,7 @@ use specta::Type;
use std::{str::FromStr, time::Duration};
use tauri::State;
use crate::common::{create_event_tags, filter_converstation, parse_event, Meta};
use crate::common::{create_tags, parse_event, process_event, Meta};
use crate::{Nostr, DEFAULT_DIFFICULTY, FETCH_LIMIT};
#[derive(Debug, Clone, Serialize, Type)]
@@ -215,22 +215,7 @@ pub async fn get_all_events_by_author(
.get_events_of(vec![filter], EventSource::Database)
.await
{
Ok(events) => {
let fils = filter_converstation(events);
let futures = fils.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
let rich_events = join_all(futures).await;
Ok(rich_events)
}
Ok(events) => Ok(process_event(client, events).await),
Err(err) => Err(err.to_string()),
}
}
@@ -264,23 +249,7 @@ pub async fn get_all_events_by_authors(
.get_events_of(vec![filter], EventSource::Database)
.await
{
Ok(events) => {
let fils = filter_converstation(events);
let futures = fils.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
let rich_events = join_all(futures).await;
Ok(rich_events)
}
Ok(events) => Ok(process_event(client, events).await),
Err(err) => Err(err.to_string()),
}
}
@@ -312,22 +281,7 @@ pub async fn get_all_events_by_hashtags(
)
.await
{
Ok(events) => {
let fils = filter_converstation(events);
let futures = fils.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
let rich_events = join_all(futures).await;
Ok(rich_events)
}
Ok(events) => Ok(process_event(client, events).await),
Err(err) => Err(err.to_string()),
}
}
@@ -351,22 +305,7 @@ pub async fn get_local_events(
.until(as_of);
match client.database().query(vec![filter]).await {
Ok(events) => {
let fils = filter_converstation(events);
let futures = fils.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
let rich_events = join_all(futures).await;
Ok(rich_events)
}
Ok(events) => Ok(process_event(client, events).await),
Err(err) => Err(err.to_string()),
}
}
@@ -396,22 +335,7 @@ pub async fn get_global_events(
)
.await
{
Ok(events) => {
let fils = filter_converstation(events);
let futures = fils.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
let rich_events = join_all(futures).await;
Ok(rich_events)
}
Ok(events) => Ok(process_event(client, events).await),
Err(err) => Err(err.to_string()),
}
}
@@ -427,7 +351,7 @@ pub async fn publish(
let client = &state.client;
// Create tags from content
let mut tags = create_event_tags(&content);
let mut tags = create_tags(&content);
// Add client tag
// TODO: allow user config this setting
@@ -464,10 +388,8 @@ pub async fn reply(
let client = &state.client;
let database = client.database();
// Create tags from content
let mut tags = create_event_tags(&content);
let reply_id = EventId::parse(&to).map_err(|err| err.to_string())?;
let mut tags = create_tags(&content);
match database.query(vec![Filter::new().id(reply_id)]).await {
Ok(events) => {
@@ -651,23 +573,7 @@ pub async fn search(
)
.await
{
Ok(events) => {
let fils = filter_converstation(events);
let futures = fils.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
let rich_events = join_all(futures).await;
Ok(rich_events)
}
Ok(events) => Ok(process_event(client, events).await),
Err(e) => Err(e.to_string()),
}
}
@@ -689,3 +595,16 @@ pub async fn is_deleted_event(id: String, state: State<'_, Nostr>) -> Result<bool, String> {
Err(e) => Err(e.to_string()),
}
}
#[tauri::command]
#[specta::specta]
pub async fn request_delete(id: String, state: State<'_, Nostr>) -> Result<(), String> {
let client = &state.client;
let event_id = EventId::from_str(&id).map_err(|err| err.to_string())?;
let builder = EventBuilder::delete(vec![event_id]);
match client.send_event_builder(builder).await {
Ok(_) => Ok(()),
Err(e) => Err(e.to_string()),
}
}
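
For reference, a sketch of what the builder above produces, using throwaway generated keys and a placeholder id (this assumes EventBuilder::to_event and EventId::all_zeros are available in the nostr-sdk version in use, and runs inside a function returning Result<_, String>):

// Sketch (throwaway keys, placeholder id): EventBuilder::delete yields a
// kind 5 event whose e tags carry the ids of the events to be deleted.
let keys = Keys::generate();
let target = EventId::all_zeros(); // placeholder; a real id comes from the note to delete
let deletion = EventBuilder::delete(vec![target])
    .to_event(&keys)
    .map_err(|e| e.to_string())?;
assert_eq!(deletion.kind, Kind::EventDeletion); // kind 5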

View File

@@ -1,3 +1,4 @@
use futures::future::join_all;
use linkify::LinkFinder;
use nostr_sdk::prelude::*;
use reqwest::Client as ReqClient;
@@ -7,7 +8,7 @@ use std::collections::HashSet;
use std::str::FromStr;
use std::time::Duration;
use crate::Settings;
use crate::RichEvent;
#[derive(Debug, Clone, Serialize, Type)]
pub struct Meta {
@@ -18,6 +19,7 @@ pub struct Meta {
pub hashtags: Vec<String>,
}
const IMAGES: [&str; 7] = ["jpg", "jpeg", "gif", "png", "webp", "avif", "tiff"];
const NOSTR_EVENTS: [&str; 10] = [
"@nevent1",
"@note1",
@@ -42,135 +44,12 @@ const NOSTR_MENTIONS: [&str; 10] = [
"Nostr:nprofile1",
"Nostr:naddr1",
];
const IMAGES: [&str; 7] = ["jpg", "jpeg", "gif", "png", "webp", "avif", "tiff"];
pub fn get_latest_event(events: &[Event]) -> Option<&Event> {
events.iter().next()
}
pub fn filter_converstation(events: Vec<Event>) -> Vec<Event> {
events
.into_iter()
.filter_map(|ev| {
if ev.kind == Kind::TextNote {
let tags: Vec<&str> = ev
.tags
.iter()
.filter(|t| {
t.kind() == TagKind::SingleLetter(SingleLetterTag::lowercase(Alphabet::E))
})
.filter_map(|t| t.content())
.collect();
if tags.is_empty() {
Some(ev)
} else {
None
}
} else {
Some(ev)
}
})
.collect::<Vec<Event>>()
}
pub fn dedup_event(events: &[Event]) -> Vec<Event> {
let mut seen_ids = HashSet::new();
events
.iter()
.filter(|&event| {
let e = TagKind::SingleLetter(SingleLetterTag::lowercase(Alphabet::E));
let e_tags: Vec<&Tag> = event.tags.iter().filter(|el| el.kind() == e).collect();
let ids: Vec<&str> = e_tags.iter().filter_map(|tag| tag.content()).collect();
let is_dup = ids.iter().any(|id| seen_ids.contains(*id));
for id in &ids {
seen_ids.insert(*id);
}
!is_dup
})
.cloned()
.collect()
}
pub async fn parse_event(content: &str) -> Meta {
let mut finder = LinkFinder::new();
finder.url_must_have_scheme(false);
// Get urls
let urls: Vec<_> = finder.links(content).collect();
// Get words
let words: Vec<_> = content.split_whitespace().collect();
let hashtags = words
.iter()
.filter(|&&word| word.starts_with('#'))
.map(|&s| s.to_string())
.collect::<Vec<_>>();
let events = words
.iter()
.filter(|&&word| NOSTR_EVENTS.iter().any(|&el| word.starts_with(el)))
.map(|&s| s.to_string())
.collect::<Vec<_>>();
let mentions = words
.iter()
.filter(|&&word| NOSTR_MENTIONS.iter().any(|&el| word.starts_with(el)))
.map(|&s| s.to_string())
.collect::<Vec<_>>();
let mut images = Vec::new();
let mut text = content.to_string();
if !urls.is_empty() {
let client = ReqClient::new();
for url in urls {
let url_str = url.as_str();
if let Ok(parsed_url) = Url::from_str(url_str) {
if let Some(ext) = parsed_url
.path_segments()
.and_then(|segments| segments.last().and_then(|s| s.split('.').last()))
{
if IMAGES.contains(&ext) {
text = text.replace(url_str, "");
images.push(url_str.to_string());
// Process the next item.
continue;
}
}
// Check the content type of URL via HEAD request
if let Ok(res) = client.head(url_str).send().await {
if let Some(content_type) = res.headers().get("Content-Type") {
if content_type.to_str().unwrap_or("").starts_with("image") {
text = text.replace(url_str, "");
images.push(url_str.to_string());
// Process the next item.
continue;
}
}
}
}
}
}
// Clean up the resulting content string to remove extra spaces
let cleaned_text = text.trim().to_string();
Meta {
content: cleaned_text,
events,
mentions,
hashtags,
images,
}
}
pub fn create_event_tags(content: &str) -> Vec<Tag> {
pub fn create_tags(content: &str) -> Vec<Tag> {
let mut tags: Vec<Tag> = vec![];
let mut tag_set: HashSet<String> = HashSet::new();
@@ -251,6 +130,72 @@ pub fn create_event_tags(content: &str) -> Vec<Tag> {
tags
}
pub async fn process_event(client: &Client, events: Vec<Event>) -> Vec<RichEvent> {
// Keep only root TextNotes: drop any note that carries reply or root tags
let events: Vec<Event> = events
.into_iter()
.filter_map(|ev| {
if ev.kind == Kind::TextNote {
let tags = ev
.tags
.iter()
.filter(|t| t.is_reply() || t.is_root())
.filter_map(|t| t.content())
.collect::<Vec<_>>();
if tags.is_empty() {
Some(ev)
} else {
None
}
} else {
Some(ev)
}
})
.collect();
// Fetch NIP-09 deletion requests that reference these event ids
let ids: Vec<EventId> = events.iter().map(|ev| ev.id).collect();
let filter = Filter::new().events(ids).kind(Kind::EventDeletion);
let mut final_events: Vec<Event> = events.clone();
if let Ok(requests) = client.database().query(vec![filter]).await {
if !requests.is_empty() {
let ids: Vec<&str> = requests
.iter()
.flat_map(|ev| ev.get_tags_content(TagKind::e()))
.collect();
// Drop events whose ids appear in a deletion request
final_events = events
.into_iter()
.filter_map(|ev| {
if ids.iter().any(|&i| i == ev.id.to_hex()) {
None
} else {
Some(ev)
}
})
.collect();
}
};
// Convert raw events into rich events
let futures = final_events.iter().map(|ev| async move {
let raw = ev.as_json();
let parsed = if ev.kind == Kind::TextNote {
Some(parse_event(&ev.content).await)
} else {
None
};
RichEvent { raw, parsed }
});
join_all(futures).await
}
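
One caveat worth noting: NIP-09 only honors a deletion request signed by the author of the events it references, while the filter above matches deletion events by id alone. A stricter sketch (same Filter API as above; not part of this commit) would pin the authors as well, though a complete check still has to compare each deletion's pubkey against its target's author:

// Stricter variant (sketch): restrict deletion requests to the authors of
// the events being checked, since NIP-09 only honors self-deletions.
let ids: Vec<EventId> = events.iter().map(|ev| ev.id).collect();
let authors: Vec<PublicKey> = events.iter().map(|ev| ev.pubkey).collect();
let filter = Filter::new()
    .events(ids)
    .authors(authors)
    .kind(Kind::EventDeletion);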
pub async fn init_nip65(client: &Client, public_key: &str) {
let author = PublicKey::from_str(public_key).unwrap();
let filter = Filter::new().author(author).kind(Kind::RelayList).limit(1);
@@ -286,73 +231,78 @@ pub async fn init_nip65(client: &Client, public_key: &str) {
}
}
pub async fn get_user_settings(client: &Client) -> Result<Settings, String> {
    let ident = "lume_v4:settings";
    let signer = client
        .signer()
        .await
        .map_err(|e| format!("Failed to get signer: {:?}", e))?;
    let public_key = signer
        .public_key()
        .await
        .map_err(|e| format!("Failed to get public key: {:?}", e))?;

    let filter = Filter::new()
        .author(public_key)
        .kind(Kind::ApplicationSpecificData)
        .identifier(ident)
        .limit(1);

    match client
        .get_events_of(
            vec![filter],
            EventSource::both(Some(Duration::from_secs(5))),
        )
        .await
    {
        Ok(events) => {
            if let Some(event) = events.first() {
                match signer.nip44_decrypt(&public_key, &event.content).await {
                    Ok(decrypted) => match serde_json::from_str(&decrypted) {
                        Ok(parsed) => Ok(parsed),
                        Err(_) => Err("Could not parse settings payload".into()),
                    },
                    Err(e) => Err(format!("Failed to decrypt settings content: {:?}", e)),
                }
            } else {
                Err("Settings not found.".into())
            }
        }
        Err(e) => Err(format!(
            "Failed to get events for ApplicationSpecificData: {:?}",
            e
        )),
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_parse_event() {
        let content = "Check this image: https://example.com/image.jpg #cool @npub1";
        let meta = parse_event(content).await;
        assert_eq!(meta.content, "Check this image: #cool @npub1");
        assert_eq!(meta.images, vec!["https://example.com/image.jpg"]);
        assert_eq!(meta.hashtags, vec!["#cool"]);
        assert_eq!(meta.mentions, vec!["@npub1"]);
    }

    #[tokio::test]
    async fn test_parse_video() {
        let content = "Check this video: https://example.com/video.mp4 #cool @npub1";
        let meta = parse_event(content).await;
        assert_eq!(meta.content, "Check this video: #cool @npub1");
        assert_eq!(meta.images, Vec::<String>::new());
        assert_eq!(meta.hashtags, vec!["#cool"]);
        assert_eq!(meta.mentions, vec!["@npub1"]);
    }
}

pub async fn parse_event(content: &str) -> Meta {
    let mut finder = LinkFinder::new();
    finder.url_must_have_scheme(false);

    // Get urls
    let urls: Vec<_> = finder.links(content).collect();
    // Get words
    let words: Vec<_> = content.split_whitespace().collect();

    let hashtags = words
        .iter()
        .filter(|&&word| word.starts_with('#'))
        .map(|&s| s.to_string())
        .collect::<Vec<_>>();
    let events = words
        .iter()
        .filter(|&&word| NOSTR_EVENTS.iter().any(|&el| word.starts_with(el)))
        .map(|&s| s.to_string())
        .collect::<Vec<_>>();
    let mentions = words
        .iter()
        .filter(|&&word| NOSTR_MENTIONS.iter().any(|&el| word.starts_with(el)))
        .map(|&s| s.to_string())
        .collect::<Vec<_>>();

    let mut images = Vec::new();
    let mut text = content.to_string();

    if !urls.is_empty() {
        let client = ReqClient::new();

        for url in urls {
            let url_str = url.as_str();

            if let Ok(parsed_url) = Url::from_str(url_str) {
                if let Some(ext) = parsed_url
                    .path_segments()
                    .and_then(|segments| segments.last().and_then(|s| s.split('.').last()))
                {
                    if IMAGES.contains(&ext) {
                        text = text.replace(url_str, "");
                        images.push(url_str.to_string());
                        // Process the next item.
                        continue;
                    }
                }

                // Check the content type of URL via HEAD request
                if let Ok(res) = client.head(url_str).send().await {
                    if let Some(content_type) = res.headers().get("Content-Type") {
                        if content_type.to_str().unwrap_or("").starts_with("image") {
                            text = text.replace(url_str, "");
                            images.push(url_str.to_string());
                            // Process the next item.
                            continue;
                        }
                    }
                }
            }
        }
    }

    // Clean up the resulting content string to remove extra spaces
    let cleaned_text = text.trim().to_string();

    Meta {
        content: cleaned_text,
        events,
        mentions,
        hashtags,
        images,
    }
}
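
The extension check above misses image URLs that have no file extension, which is why parse_event falls back to a HEAD request. The same fallback in isolation (a sketch; is_image_url is a hypothetical helper built on the reqwest API already imported in this file):

// Sketch: classify a URL as an image via a HEAD request, avoiding a full
// download; falls back to false on any network or header error.
async fn is_image_url(url: &str) -> bool {
    let client = ReqClient::new();
    match client.head(url).send().await {
        Ok(res) => res
            .headers()
            .get("Content-Type")
            .and_then(|v| v.to_str().ok())
            .map(|ct| ct.starts_with("image"))
            .unwrap_or(false),
        Err(_) => false,
    }
}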

View File

@@ -143,6 +143,7 @@ fn main() {
get_local_events,
get_global_events,
is_deleted_event,
request_delete,
search,
publish,
reply,