basic grimoire spell support.

This commit is contained in:
fiatjaf
2025-12-19 15:16:22 -03:00
parent 9bf728d850
commit 8f38468103
5 changed files with 522 additions and 107 deletions

View File

@@ -9,7 +9,7 @@ import (
"github.com/urfave/cli/v3" "github.com/urfave/cli/v3"
) )
var filter = &cli.Command{ var filterCmd = &cli.Command{
Name: "filter", Name: "filter",
Usage: "applies an event filter to an event to see if it matches.", Usage: "applies an event filter to an event to see if it matches.",
Description: ` Description: `

2
go.mod
View File

@@ -104,3 +104,5 @@ require (
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/qr v0.2.0 // indirect rsc.io/qr v0.2.0 // indirect
) )
replace fiatjaf.com/nostr => ../nostrlib

View File

@@ -28,7 +28,7 @@ var app = &cli.Command{
Commands: []*cli.Command{ Commands: []*cli.Command{
event, event,
req, req,
filter, filterCmd,
fetch, fetch,
count, count,
decode, decode,
@@ -53,6 +53,7 @@ var app = &cli.Command{
git, git,
nip, nip,
syncCmd, syncCmd,
spell,
}, },
Version: version, Version: version,
Flags: []cli.Flag{ Flags: []cli.Flag{

224
req.go
View File

@@ -9,6 +9,7 @@ import (
"slices" "slices"
"strings" "strings"
"sync" "sync"
"time"
"fiatjaf.com/nostr" "fiatjaf.com/nostr"
"fiatjaf.com/nostr/eventstore" "fiatjaf.com/nostr/eventstore"
@@ -77,11 +78,6 @@ example:
Name: "paginate-interval", Name: "paginate-interval",
Usage: "time between queries when using --paginate", Usage: "time between queries when using --paginate",
}, },
&cli.UintFlag{
Name: "paginate-global-limit",
Usage: "global limit at which --paginate should stop",
DefaultText: "uses the value given by --limit/-l or infinite",
},
&cli.BoolFlag{ &cli.BoolFlag{
Name: "bare", Name: "bare",
Usage: "when printing the filter, print just the filter, not enveloped in a [\"REQ\", ...] array", Usage: "when printing the filter, print just the filter, not enveloped in a [\"REQ\", ...] array",
@@ -226,106 +222,7 @@ example:
} }
} }
} else { } else {
var results chan nostr.RelayEvent performReq(ctx, filter, relayUrls, c.Bool("stream"), c.Bool("outbox"), c.Uint("outbox-relays-per-pubkey"), c.Bool("paginate"), c.Duration("paginate-interval"), "nak-req")
var closeds chan nostr.RelayClosed
opts := nostr.SubscriptionOptions{
Label: "nak-req",
}
if c.Bool("paginate") {
paginator := sys.Pool.PaginatorWithInterval(c.Duration("paginate-interval"))
results = paginator(ctx, relayUrls, filter, opts)
} else if c.Bool("outbox") {
defs := make([]nostr.DirectedFilter, 0, len(filter.Authors)*2)
// hardcoded relays, if any
for _, relayUrl := range relayUrls {
defs = append(defs, nostr.DirectedFilter{
Filter: filter,
Relay: relayUrl,
})
}
// relays for each pubkey
errg := errgroup.Group{}
errg.SetLimit(16)
mu := sync.Mutex{}
for _, pubkey := range filter.Authors {
errg.Go(func() error {
n := int(c.Uint("outbox-relays-per-pubkey"))
for _, url := range sys.FetchOutboxRelays(ctx, pubkey, n) {
if slices.Contains(relayUrls, url) {
// already hardcoded, ignore
continue
}
if !nostr.IsValidRelayURL(url) {
continue
}
matchUrl := func(def nostr.DirectedFilter) bool { return def.Relay == url }
idx := slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// new relay, add it
mu.Lock()
// check again after locking to prevent races
idx = slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// then add it
filter := filter.Clone()
filter.Authors = []nostr.PubKey{pubkey}
defs = append(defs, nostr.DirectedFilter{
Filter: filter,
Relay: url,
})
mu.Unlock()
continue // done with this relay url
}
// otherwise we'll just use the idx
mu.Unlock()
}
// existing relay, add this pubkey
defs[idx].Authors = append(defs[idx].Authors, pubkey)
}
return nil
})
}
errg.Wait()
if c.Bool("stream") {
results, closeds = sys.Pool.BatchedSubscribeManyNotifyClosed(ctx, defs, opts)
} else {
results, closeds = sys.Pool.BatchedQueryManyNotifyClosed(ctx, defs, opts)
}
} else {
if c.Bool("stream") {
results, closeds = sys.Pool.SubscribeManyNotifyClosed(ctx, relayUrls, filter, opts)
} else {
results, closeds = sys.Pool.FetchManyNotifyClosed(ctx, relayUrls, filter, opts)
}
}
readevents:
for {
select {
case ie, ok := <-results:
if !ok {
break readevents
}
stdout(ie.Event)
case closed := <-closeds:
if closed.HandledAuth {
logverbose("%s CLOSED: %s\n", closed.Relay.URL, closed.Reason)
} else {
log("%s CLOSED: %s\n", closed.Relay.URL, closed.Reason)
}
case <-ctx.Done():
break readevents
}
}
} }
} else { } else {
// no relays given, will just print the filter // no relays given, will just print the filter
@@ -346,6 +243,123 @@ example:
}, },
} }
// performReq executes a REQ against the given relays and prints each
// resulting event to stdout until the context is canceled or, when not
// streaming, until all relays are done sending events.
//
// exactly one of three modes is used:
//   - paginate: repeatedly queries relayUrls with paginateInterval between pages;
//   - outbox: queries each author's outbox relays (plus relayUrls), with
//     per-relay filters restricted to the authors that belong to each relay;
//   - plain: a single subscription (stream) or query against relayUrls.
func performReq(
	ctx context.Context,
	filter nostr.Filter,
	relayUrls []string,
	stream bool,
	outbox bool,
	outboxRelaysPerPubKey uint64,
	paginate bool,
	paginateInterval time.Duration,
	label string,
) {
	var results chan nostr.RelayEvent
	var closeds chan nostr.RelayClosed

	opts := nostr.SubscriptionOptions{
		Label: label,
	}

	if paginate {
		paginator := sys.Pool.PaginatorWithInterval(paginateInterval)
		results = paginator(ctx, relayUrls, filter, opts)
	} else if outbox {
		defs := make([]nostr.DirectedFilter, 0, len(filter.Authors)*2)

		// hardcoded relays, if any, get the full unrestricted filter
		for _, relayUrl := range relayUrls {
			defs = append(defs, nostr.DirectedFilter{
				Filter: filter,
				Relay:  relayUrl,
			})
		}

		// gather outbox relays for each pubkey concurrently
		errg := errgroup.Group{}
		errg.SetLimit(16)
		mu := sync.Mutex{}

		logverbose("gathering outbox relays for %d authors...\n", len(filter.Authors))
		for _, pubkey := range filter.Authors {
			errg.Go(func() error {
				n := int(outboxRelaysPerPubKey)
				for _, url := range sys.FetchOutboxRelays(ctx, pubkey, n) {
					if slices.Contains(relayUrls, url) {
						// already specified globally, ignore
						continue
					}
					if !nostr.IsValidRelayURL(url) {
						continue
					}

					// defs is shared between these goroutines, so every read
					// and write of it -- including the Authors append below,
					// which the previous version did without locking -- must
					// happen under the mutex.
					mu.Lock()
					matchUrl := func(def nostr.DirectedFilter) bool { return def.Relay == url }
					idx := slices.IndexFunc(defs, matchUrl)
					if idx == -1 {
						// new relay: add it with a filter restricted to this pubkey
						filter := filter.Clone()
						filter.Authors = []nostr.PubKey{pubkey}
						defs = append(defs, nostr.DirectedFilter{
							Filter: filter,
							Relay:  url,
						})
					} else {
						// existing relay: just add this pubkey to its filter
						defs[idx].Authors = append(defs[idx].Authors, pubkey)
					}
					mu.Unlock()
				}
				return nil
			})
		}
		// the goroutines above always return nil; Wait is only for joining
		errg.Wait()

		if stream {
			logverbose("running subscription with %d directed filters...\n", len(defs))
			results, closeds = sys.Pool.BatchedSubscribeManyNotifyClosed(ctx, defs, opts)
		} else {
			logverbose("running query with %d directed filters...\n", len(defs))
			results, closeds = sys.Pool.BatchedQueryManyNotifyClosed(ctx, defs, opts)
		}
	} else {
		if stream {
			logverbose("running subscription to %d relays...\n", len(relayUrls))
			results, closeds = sys.Pool.SubscribeManyNotifyClosed(ctx, relayUrls, filter, opts)
		} else {
			logverbose("running query to %d relays...\n", len(relayUrls))
			results, closeds = sys.Pool.FetchManyNotifyClosed(ctx, relayUrls, filter, opts)
		}
	}

readevents:
	for {
		select {
		case ie, ok := <-results:
			if !ok {
				// results channel closed: all relays are done
				break readevents
			}
			stdout(ie.Event)
		case closed := <-closeds:
			if closed.HandledAuth {
				logverbose("%s CLOSED: %s\n", closed.Relay.URL, closed.Reason)
			} else {
				log("%s CLOSED: %s\n", closed.Relay.URL, closed.Reason)
			}
		case <-ctx.Done():
			break readevents
		}
	}
}
var reqFilterFlags = []cli.Flag{ var reqFilterFlags = []cli.Flag{
&PubKeySliceFlag{ &PubKeySliceFlag{
Name: "author", Name: "author",

398
spell.go Normal file
View File

@@ -0,0 +1,398 @@
package main
import (
"bufio"
"context"
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"fiatjaf.com/nostr"
"fiatjaf.com/nostr/nip19"
"fiatjaf.com/nostr/sdk/hints"
"github.com/fatih/color"
"github.com/markusmobius/go-dateparser"
"github.com/urfave/cli/v3"
)
// spell implements the "nak spell" command: it fetches a "spell" event
// (kind 777) -- a stored REQ described by the event's tags -- and executes
// that REQ, printing matching events to stdout.
//
// called with no arguments it lists recently used spells from the local
// history file (one JSON object per line, most recent first).
var spell = &cli.Command{
	Name:        "spell",
	Usage:       "downloads a spell event and executes its REQ request",
	ArgsUsage:   "[nevent_code]",
	Description: `fetches a spell event (kind 777) and executes REQ command encoded in its tags.`,
	Flags: append(defaultKeyFlags,
		&cli.UintFlag{
			Name:    "outbox-relays-per-pubkey",
			Aliases: []string{"n"},
			Usage:   "number of outbox relays to use for each pubkey",
			Value:   3,
		},
	),
	Action: func(ctx context.Context, c *cli.Command) error {
		// load history from file (best-effort: a missing or unreadable
		// history file just means an empty list)
		var history []SpellHistoryEntry
		historyPath, historyPathErr := getSpellHistoryPath()
		if historyPathErr == nil {
			if file, err := os.Open(historyPath); err == nil {
				scanner := bufio.NewScanner(file)
				for scanner.Scan() {
					var entry SpellHistoryEntry
					if err := json.Unmarshal([]byte(scanner.Text()), &entry); err != nil {
						continue // skip invalid entries
					}
					history = append(history, entry)
				}
				file.Close()
			}
		}

		if c.Args().Len() == 0 {
			// no spell given: print the 10 most recent history entries
			log("recent spells:\n")
			for i, entry := range history {
				if i >= 10 {
					break
				}
				displayName := entry.Name
				if displayName == "" {
					// fall back to a truncated version of the content
					displayName = entry.Content
					if len(displayName) > 28 {
						displayName = displayName[:27] + "…"
					}
				}
				if displayName != "" {
					displayName = displayName + ": "
				}
				desc := entry.Content
				if len(desc) > 50 {
					desc = desc[0:49] + "…"
				}
				lastUsed := entry.LastUsed.Format("2006-01-02 15:04")
				stdout(fmt.Sprintf(" %s %s%s - %s\n",
					color.BlueString(entry.Identifier),
					displayName,
					color.YellowString(lastUsed),
					desc,
				))
			}
			return nil
		}

		// resolve the argument into an event pointer: either an nevent code
		// or a short identifier previously stored in the history
		var pointer nostr.EventPointer
		identifier := c.Args().First()

		prefix, value, err := nip19.Decode(identifier)
		if err == nil {
			if prefix != "nevent" {
				return fmt.Errorf("expected nevent code, got %s", prefix)
			}
			pointer = value.(nostr.EventPointer)
		} else {
			// not an nevent, search our history
			for _, entry := range history {
				if entry.Identifier == identifier {
					pointer = entry.Pointer
					break
				}
			}
		}
		if pointer.ID == nostr.ZeroID {
			return fmt.Errorf("invalid spell reference")
		}

		// fetch the spell event from its hinted relays plus the author's
		// outbox relays (when the author is known)
		relays := pointer.Relays
		if pointer.Author != nostr.ZeroPK {
			for _, url := range relays {
				sys.Hints.Save(pointer.Author, nostr.NormalizeURL(url), hints.LastInHint, nostr.Now())
			}
			relays = append(relays, sys.FetchOutboxRelays(ctx, pointer.Author, 3)...)
		}
		spell := sys.Pool.QuerySingle(ctx, relays, nostr.Filter{IDs: []nostr.ID{pointer.ID}},
			nostr.SubscriptionOptions{Label: "nak-spell-f"})
		if spell == nil {
			return fmt.Errorf("spell event not found")
		}
		if spell.Kind != 777 {
			return fmt.Errorf("event is not a spell (expected kind 777, got %d)", spell.Kind)
		}

		// parse spell tags to build REQ filter
		spellFilter, err := buildSpellReq(ctx, c, spell.Tags)
		if err != nil {
			return fmt.Errorf("failed to parse spell tags: %w", err)
		}

		// determine relays to query
		var spellRelays []string
		var outbox bool
		relaysTag := spell.Tags.Find("relays")
		if relaysTag == nil {
			// if this tag doesn't exist assume $outbox
			relaysTag = nostr.Tag{"relays", "$outbox"}
		}
		for i := 1; i < len(relaysTag); i++ {
			switch relaysTag[i] {
			case "$outbox":
				outbox = true
			default:
				// fix: this used to append to `relays` (the list used for
				// fetching the spell itself), leaving spellRelays empty
				spellRelays = append(spellRelays, relaysTag[i])
			}
		}

		stream := !spell.Tags.Has("close-on-eose")

		// fill in the author if we didn't have it
		pointer.Author = spell.PubKey

		// record this run in the history before execution (best-effort:
		// skipped entirely when the history path couldn't be determined,
		// instead of failing the whole command)
		if historyPathErr == nil {
			idStr := nip19.EncodeNevent(spell.ID, nil, nostr.ZeroPK)
			identifier = "spell" + idStr[len(idStr)-7:]

			nameTag := spell.Tags.Find("name")
			var name string
			if nameTag != nil {
				name = nameTag[1]
			}

			// rewrite the file with the fresh entry first
			file, err := os.Create(historyPath)
			if err != nil {
				return err
			}
			data, _ := json.Marshal(SpellHistoryEntry{
				Identifier: identifier,
				Name:       name,
				Content:    spell.Content,
				LastUsed:   time.Now(),
				Pointer:    pointer,
			})
			file.Write(data)
			file.Write([]byte{'\n'})
			written := 1
			for _, entry := range history {
				// drop the stale record for this same spell and limit
				// history size (keep last 100)
				if entry.Identifier == identifier || written >= 100 {
					continue
				}
				data, _ := json.Marshal(entry)
				file.Write(data)
				file.Write([]byte{'\n'})
				written++
			}
			file.Close()

			logverbose("executing %s: %s relays=%v outbox=%v stream=%v\n",
				identifier, spellFilter, spellRelays, outbox, stream)
		}

		// execute
		performReq(ctx, spellFilter, spellRelays, stream, outbox, c.Uint("outbox-relays-per-pubkey"), false, 0, "nak-spell")
		return nil
	},
}
// buildSpellReq translates the tags of a spell event (kind 777) into a
// nostr.Filter, resolving the special values "$me" (the configured key's
// pubkey) and "$contacts" (the follow list of the configured key).
//
// only "REQ" is accepted as the "cmd" tag value; anything else is an error.
func buildSpellReq(ctx context.Context, c *cli.Command, tags nostr.Tags) (nostr.Filter, error) {
	filter := nostr.Filter{}

	// getMe resolves the user's own pubkey, caching the result so the keyer
	// is gathered at most once even if "$me"/"$contacts" appear repeatedly.
	var cachedMe nostr.PubKey
	var haveMe bool
	getMe := func() (nostr.PubKey, error) {
		if haveMe {
			return cachedMe, nil
		}
		kr, _, err := gatherKeyerFromArguments(ctx, c)
		if err != nil {
			return nostr.ZeroPK, fmt.Errorf("failed to get keyer: %w", err)
		}
		pubkey, err := kr.GetPublicKey(ctx)
		if err != nil {
			return nostr.ZeroPK, fmt.Errorf("failed to get public key from keyer: %w", err)
		}
		cachedMe, haveMe = pubkey, true
		return pubkey, nil
	}

	for _, tag := range tags {
		if len(tag) == 0 {
			continue
		}
		switch tag[0] {
		case "cmd":
			if len(tag) < 2 || tag[1] != "REQ" {
				return nostr.Filter{}, fmt.Errorf("only REQ commands are supported")
			}
		case "k":
			// kind numbers; non-numeric values are silently skipped
			for i := 1; i < len(tag); i++ {
				if kind, err := strconv.Atoi(tag[i]); err == nil {
					filter.Kinds = append(filter.Kinds, nostr.Kind(kind))
				}
			}
		case "authors":
			for i := 1; i < len(tag); i++ {
				switch tag[i] {
				case "$me":
					me, err := getMe()
					if err != nil {
						return nostr.Filter{}, err
					}
					filter.Authors = append(filter.Authors, me)
				case "$contacts":
					me, err := getMe()
					if err != nil {
						return nostr.Filter{}, err
					}
					for _, f := range sys.FetchFollowList(ctx, me).Items {
						filter.Authors = append(filter.Authors, f.Pubkey)
					}
				default:
					pubkey, err := nostr.PubKeyFromHex(tag[i])
					if err != nil {
						return nostr.Filter{}, fmt.Errorf("invalid pubkey '%s' in 'authors': %w", tag[i], err)
					}
					filter.Authors = append(filter.Authors, pubkey)
				}
			}
		case "ids":
			for i := 1; i < len(tag); i++ {
				id, err := nostr.IDFromHex(tag[i])
				if err != nil {
					// fix: this error message used to wrongly say 'authors'
					return nostr.Filter{}, fmt.Errorf("invalid id '%s' in 'ids': %w", tag[i], err)
				}
				filter.IDs = append(filter.IDs, id)
			}
		case "tag":
			// generic tag query: ["tag", "<name>", values...]
			if len(tag) < 3 {
				continue
			}
			tagName := tag[1]
			if filter.Tags == nil {
				filter.Tags = make(nostr.TagMap)
			}
			for i := 2; i < len(tag); i++ {
				switch tag[i] {
				case "$me":
					me, err := getMe()
					if err != nil {
						return nostr.Filter{}, err
					}
					filter.Tags[tagName] = append(filter.Tags[tagName], me.Hex())
				case "$contacts":
					me, err := getMe()
					if err != nil {
						return nostr.Filter{}, err
					}
					for _, f := range sys.FetchFollowList(ctx, me).Items {
						filter.Tags[tagName] = append(filter.Tags[tagName], f.Pubkey.Hex())
					}
				default:
					filter.Tags[tagName] = append(filter.Tags[tagName], tag[i])
				}
			}
		case "limit":
			if len(tag) >= 2 {
				if limit, err := strconv.Atoi(tag[1]); err == nil {
					filter.Limit = limit
				}
			}
		case "since":
			// natural-language or absolute dates, parsed in local time
			if len(tag) >= 2 {
				date, err := dateparser.Parse(&dateparser.Configuration{
					DefaultTimezone: time.Local,
					CurrentTime:     time.Now(),
				}, tag[1])
				if err != nil {
					return nostr.Filter{}, fmt.Errorf("invalid date %s: %w", tag[1], err)
				}
				filter.Since = nostr.Timestamp(date.Time.Unix())
			}
		case "until":
			if len(tag) >= 2 {
				date, err := dateparser.Parse(&dateparser.Configuration{
					DefaultTimezone: time.Local,
					CurrentTime:     time.Now(),
				}, tag[1])
				if err != nil {
					return nostr.Filter{}, fmt.Errorf("invalid date %s: %w", tag[1], err)
				}
				filter.Until = nostr.Timestamp(date.Time.Unix())
			}
		case "search":
			if len(tag) >= 2 {
				filter.Search = tag[1]
			}
		}
	}

	return filter, nil
}
// parseRelativeTime converts a string into a nostr.Timestamp. it accepts
// "now", relative offsets into the past like "7d", "12h" or "30m", and raw
// unix timestamps such as "1700000000".
func parseRelativeTime(timeStr string) (nostr.Timestamp, error) {
	if timeStr == "now" {
		return nostr.Now(), nil
	}

	// relative offsets: a number followed by a unit suffix, interpreted as
	// that much time before now
	units := []struct {
		suffix  string
		seconds int
	}{
		{"d", 24 * 60 * 60},
		{"h", 60 * 60},
		{"m", 60},
	}
	for _, u := range units {
		if !strings.HasSuffix(timeStr, u.suffix) {
			continue
		}
		if amount, err := strconv.Atoi(strings.TrimSuffix(timeStr, u.suffix)); err == nil {
			return nostr.Now() - nostr.Timestamp(amount*u.seconds), nil
		}
		break // suffix matched but the number part was invalid
	}

	// otherwise try to interpret it as a direct unix timestamp
	if ts, err := strconv.ParseInt(timeStr, 10, 64); err == nil {
		return nostr.Timestamp(ts), nil
	}

	return 0, fmt.Errorf("invalid time format: %s", timeStr)
}
// SpellHistoryEntry is one record of the spell history file: a previously
// executed spell, stored as one JSON object per line.
type SpellHistoryEntry struct {
	// Identifier is the short local handle ("spell" plus the last 7
	// characters of the nevent code) accepted in place of a full nevent.
	Identifier string `json:"_id"`
	// Name comes from the spell event's "name" tag, if present.
	Name string `json:"name,omitempty"`
	// Content is the spell event's content, used as a fallback description.
	Content string `json:"content,omitempty"`
	// LastUsed records when the spell was last executed.
	LastUsed time.Time `json:"last_used"`
	// Pointer locates the spell event so it can be fetched again.
	Pointer nostr.EventPointer `json:"pointer"`
}
// getSpellHistoryPath returns the path of the spell history file
// (~/.config/nak/spells/history), creating its directory when necessary.
func getSpellHistoryPath() (string, error) {
	home, err := os.UserHomeDir()
	if err != nil {
		return "", err
	}

	dir := filepath.Join(home, ".config", "nak", "spells")
	// make sure the directory exists before handing out a path inside it
	if err := os.MkdirAll(dir, 0755); err != nil {
		return "", err
	}
	return filepath.Join(dir, "history"), nil
}