refactor the parser
Some checks failed
Rust / build (ubuntu-latest, stable) (push) Failing after 1m56s
Rust / build (macos-latest, stable) (push) Has been cancelled
Rust / build (windows-latest, stable) (push) Has been cancelled

This commit is contained in:
2026-03-03 15:03:23 +07:00
parent 29c7a82d1d
commit e73d4e3da8
7 changed files with 240 additions and 17 deletions

1
Cargo.lock generated
View File

@@ -1243,6 +1243,7 @@ name = "common"
version = "1.0.0-beta"
dependencies = [
"anyhow",
"bech32",
"chrono",
"dirs 5.0.1",
"futures",

View File

@@ -1,6 +1,7 @@
use std::hash::Hash;
use std::ops::Range;
use common::EventUtils;
use common::{EventUtils, NostrParser};
use nostr_sdk::prelude::*;
/// New message.
@@ -91,6 +92,18 @@ impl PartialOrd for Message {
}
}
#[derive(Debug, Clone)]
pub struct Mention {
pub public_key: PublicKey,
pub range: Range<usize>,
}
impl Mention {
pub fn new(public_key: PublicKey, range: Range<usize>) -> Self {
Self { public_key, range }
}
}
/// Rendered message.
#[derive(Debug, Clone)]
pub struct RenderedMessage {
@@ -102,7 +115,7 @@ pub struct RenderedMessage {
/// Message created time as unix timestamp
pub created_at: Timestamp,
/// List of mentioned public keys in the message
pub mentions: Vec<PublicKey>,
pub mentions: Vec<Mention>,
/// List of event of the message this message is a reply to
pub replies_to: Vec<EventId>,
}
@@ -184,20 +197,17 @@ impl Hash for RenderedMessage {
}
/// Extracts all mentions (public keys) from a content string.
fn extract_mentions(content: &str) -> Vec<PublicKey> {
fn extract_mentions(content: &str) -> Vec<Mention> {
let parser = NostrParser::new();
let tokens = parser.parse(content);
tokens
.filter_map(|token| match token {
Token::Nostr(nip21) => match nip21 {
Nip21::Pubkey(pubkey) => Some(pubkey),
Nip21::Profile(profile) => Some(profile.public_key),
_ => None,
},
.filter_map(|token| match token.value {
Nip21::Pubkey(public_key) => Some(Mention::new(public_key, token.range)),
Nip21::Profile(profile) => Some(Mention::new(profile.public_key, token.range)),
_ => None,
})
.collect::<Vec<_>>()
.collect()
}
/// Extracts all reply (ids) from the event tags.

View File

@@ -702,7 +702,7 @@ impl ChatPanel {
let text = self
.rendered_texts_by_id
.entry(rendered.id)
.or_insert_with(|| RenderedText::new(&rendered.content, &[]))
.or_insert_with(|| RenderedText::new(&rendered.content, &rendered.mentions))
.element(ix.into(), window, cx);
self.render_text_message(ix, rendered, text, cx)

View File

@@ -1,6 +1,7 @@
use std::ops::Range;
use std::sync::Arc;
use chat::Mention;
use common::RangeExt;
use gpui::{
AnyElement, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText,
@@ -24,11 +25,6 @@ impl From<HighlightStyle> for Highlight {
}
}
#[derive(Debug)]
pub struct Mention {
pub range: Range<usize>,
}
#[derive(Default)]
pub struct RenderedText {
pub text: SharedString,
@@ -96,7 +92,10 @@ impl RenderedText {
}
}
Highlight::Mention => HighlightStyle {
font_weight: Some(FontWeight::BOLD),
underline: Some(UnderlineStyle {
thickness: 1.0.into(),
..Default::default()
}),
..Default::default()
},
Highlight::Highlight(highlight) => *highlight,

View File

@@ -19,3 +19,4 @@ log.workspace = true
dirs = "5.0"
qrcode = "0.14.1"
bech32 = "0.11.1"

View File

@@ -1,11 +1,13 @@
pub use debounced_delay::*;
pub use display::*;
pub use event::*;
pub use parser::*;
pub use paths::*;
pub use range::*;
mod debounced_delay;
mod display;
mod event;
mod parser;
mod paths;
mod range;

210
crates/common/src/parser.rs Normal file
View File

@@ -0,0 +1,210 @@
use std::ops::Range;
use nostr::prelude::*;
/// ASCII byte that separates the human-readable part from the data part of a
/// bech32 string (the `1` in e.g. `npub1…`).
const BECH32_SEPARATOR: u8 = b'1';

/// NIP-21 URI scheme prefix, including the trailing colon.
const SCHEME_WITH_COLON: &str = "nostr:";
/// Nostr parsed token with its range in the original text
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Token {
    /// The parsed NIP-21 URI
    ///
    /// <https://github.com/nostr-protocol/nips/blob/master/21.md>
    pub value: Nip21,
    /// The range of this token in the original text, expressed in byte
    /// offsets (suitable for slicing the source `&str`).
    pub range: Range<usize>,
}
/// Byte span of a candidate `nostr:` URI found by the low-level scanner.
///
/// A `Match` is only a lexical candidate; it may still fail NIP-21 parsing.
#[derive(Debug, Clone, Copy)]
struct Match {
    /// Start byte offset (inclusive) in the scanned text.
    start: usize,
    /// End byte offset (exclusive) in the scanned text.
    end: usize,
}
/// Nostr parser
///
/// A stateless, zero-sized parser that extracts NIP-21 (`nostr:…`) URIs
/// embedded in arbitrary text.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct NostrParser;
impl Default for NostrParser {
fn default() -> Self {
Self::new()
}
}
impl NostrParser {
/// Create new parser
pub const fn new() -> Self {
Self
}
/// Parse text
pub fn parse<'a>(&self, text: &'a str) -> NostrParserIter<'a> {
NostrParserIter::new(text)
}
}
/// Low-level scanner that walks a string's bytes and yields the spans of
/// candidate `nostr:<bech32>` substrings.
struct FindMatches<'a> {
    /// The text being scanned, as raw bytes.
    bytes: &'a [u8],
    /// Current scan position (byte offset into `bytes`).
    pos: usize,
}
impl<'a> FindMatches<'a> {
    /// Create a scanner positioned at the start of `text`.
    fn new(text: &'a str) -> Self {
        Self {
            bytes: text.as_bytes(),
            pos: 0,
        }
    }

    /// Try to recognize a `nostr:<bech32>` candidate starting exactly at
    /// `self.pos`.
    ///
    /// On success, advances `self.pos` past the candidate and returns its
    /// byte span; on failure, leaves `self.pos` unchanged so the caller can
    /// advance one byte and retry.
    ///
    /// This is only a lexical pre-filter (scheme + bech32-looking alphabet).
    /// Whether the candidate is actually a valid NIP-21 URI is decided later
    /// by `Nip21::parse`.
    fn try_parse_nostr_uri(&mut self) -> Option<Match> {
        let start = self.pos;
        let bytes = self.bytes;
        let len = bytes.len();

        // Not enough bytes left to hold the "nostr:" scheme.
        // (`start < len` is guaranteed by the caller's loop condition, so
        // the subtraction cannot underflow.)
        if len - start < SCHEME_WITH_COLON.len() {
            return None;
        }

        // Check for "nostr:" prefix (case-insensitive)
        let scheme_prefix = &bytes[start..start + SCHEME_WITH_COLON.len()];
        if !scheme_prefix.eq_ignore_ascii_case(SCHEME_WITH_COLON.as_bytes()) {
            return None;
        }

        // Skip the scheme
        let pos = start + SCHEME_WITH_COLON.len();

        // Consume the bech32-looking payload: ASCII alphanumerics, noting
        // whether a '1' separator was seen. Note '1' is itself alphanumeric,
        // so any '1' after the first is consumed by the alphanumeric branch.
        let mut has_separator = false;
        let mut end = pos;

        while end < len {
            let byte = bytes[end];

            // First '1' encountered counts as the bech32 separator.
            if byte == BECH32_SEPARATOR && !has_separator {
                has_separator = true;
                end += 1;
                continue;
            }

            // Any byte outside the ASCII-alphanumeric set ends the candidate.
            if !byte.is_ascii_alphanumeric() {
                break;
            }

            end += 1;
        }

        // Require a separator and at least two characters after the scheme.
        // NOTE(review): this does NOT guarantee a character *after* the
        // separator (e.g. "nostr:a1" passes); such malformed candidates are
        // rejected downstream by `Nip21::parse`.
        if !has_separator || end <= pos + 1 {
            return None;
        }

        // Update position
        self.pos = end;

        Some(Match { start, end })
    }
}
impl Iterator for FindMatches<'_> {
    type Item = Match;

    /// Scan forward until a candidate `nostr:` URI is found, or return
    /// `None` once the input is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if self.pos >= self.bytes.len() {
                return None;
            }

            match self.try_parse_nostr_uri() {
                Some(found) => return Some(found),
                // Nothing starts at this offset: advance one byte and retry.
                None => self.pos += 1,
            }
        }
    }
}
/// Outcome of processing a candidate match.
enum HandleMatch {
    /// The candidate parsed into a valid NIP-21 token.
    Token(Token),
    /// The candidate was not a valid NIP-21 URI; the iterator should skip it
    /// and continue with the next candidate.
    Recursion,
}
/// Iterator returned by [`NostrParser::parse`].
///
/// Lazily yields a [`Token`] for every valid NIP-21 URI found in the text.
pub struct NostrParserIter<'a> {
    /// The original text
    text: &'a str,
    /// Scanner producing candidate `nostr:` spans
    matches: FindMatches<'a>,
    /// A match deferred to a later iteration (set when plain text precedes
    /// the match)
    pending_match: Option<Match>,
    /// Last match end index
    last_match_end: usize,
}
impl<'a> NostrParserIter<'a> {
    /// Build an iterator over `text` with fresh scanner state.
    fn new(text: &'a str) -> Self {
        Self {
            matches: FindMatches::new(text),
            text,
            pending_match: None,
            last_match_end: 0,
        }
    }

    /// Attempt to turn a candidate span into a [`Token`].
    ///
    /// Records the candidate's end offset as the new `last_match_end`, then
    /// runs the NIP-21 parser over the matched substring. Spans that are not
    /// valid NIP-21 URIs yield [`HandleMatch::Recursion`] so the caller keeps
    /// scanning.
    fn handle_match(&mut self, mat: Match) -> HandleMatch {
        self.last_match_end = mat.end;

        let candidate: &str = &self.text[mat.start..mat.end];

        if let Ok(uri) = Nip21::parse(candidate) {
            HandleMatch::Token(Token {
                value: uri,
                range: mat.start..mat.end,
            })
        } else {
            // Invalid nostr URI: skip this candidate.
            HandleMatch::Recursion
        }
    }
}
impl<'a> Iterator for NostrParserIter<'a> {
    type Item = Token;

    /// Yield the next valid NIP-21 token.
    ///
    /// Implemented as a loop rather than self-recursion: Rust does not
    /// guarantee tail-call elimination, so recursing on every skipped
    /// candidate could overflow the stack on inputs containing many invalid
    /// `nostr:`-like substrings.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Handle a match deferred from a previous iteration first.
            if let Some(pending) = self.pending_match.take() {
                if let HandleMatch::Token(token) = self.handle_match(pending) {
                    return Some(token);
                }
                // Invalid URI: fall through and look for the next candidate.
                continue;
            }

            // Fetch the next candidate; stop when the scanner is exhausted.
            let mat = match self.matches.next() {
                Some(mat) => mat,
                None => return None,
            };

            if mat.start > self.last_match_end {
                // Plain text precedes this match. Defer the match so it is
                // processed through the pending path on the next loop turn,
                // and skip over the intervening text.
                self.pending_match = Some(mat);
                self.last_match_end = mat.start;
                continue;
            }

            // Handle the match immediately.
            if let HandleMatch::Token(token) = self.handle_match(mat) {
                return Some(token);
            }
            // Invalid URI: keep scanning.
        }
    }
}