Skip to content

Commit 327458e

Browse files
committed
brain/*: remove speaking
Fixes #94.
1 parent 09e645f commit 327458e

11 files changed

+0
-1121
lines changed

brain/brain.go

-5
Original file line numberDiff line numberDiff line change
@@ -31,11 +31,6 @@ type Interface interface {
3131
// will be the same on each iteration and must not retain them.
3232
Think(ctx context.Context, tag string, prefix []string) iter.Seq[func(id, suf *[]byte) error]
3333

34-
// Speak generates a full message and appends it to w.
35-
//
36-
// The prompt is in reverse order and has entropy reduction applied.
37-
Speak(ctx context.Context, tag string, prompt []string, w *Builder) error
38-
3934
// Forget forgets everything learned from a single given message.
4035
// If nothing has been learned from the message, it must prevent anything
4136
// from being learned from a message with that ID.

brain/braintest/braintest_test.go

-47
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ package braintest_test
33
import (
44
"context"
55
"iter"
6-
"math/rand/v2"
76
"slices"
87
"strings"
98
"sync"
@@ -88,52 +87,6 @@ func (m *membrain) Think(ctx context.Context, tag string, prompt []string) iter.
8887
}
8988
}
9089

91-
func (m *membrain) Speak(ctx context.Context, tag string, prompt []string, w *brain.Builder) error {
92-
m.mu.Lock()
93-
defer m.mu.Unlock()
94-
var s string
95-
if len(prompt) == 0 {
96-
u := slices.Clone(m.tups[tag].tups[""])
97-
d := 0
98-
for k, v := range u {
99-
if m.tups[tag].forgort[v[0]] {
100-
u[d], u[k] = u[k], u[d]
101-
d++
102-
}
103-
}
104-
u = u[d:]
105-
if len(u) == 0 {
106-
return nil
107-
}
108-
t := u[rand.IntN(len(u))]
109-
w.Append(t[0], []byte(t[1]))
110-
s = brain.ReduceEntropy(t[1])
111-
} else {
112-
s = brain.ReduceEntropy(prompt[len(prompt)-1])
113-
}
114-
for range 256 {
115-
u := slices.Clone(m.tups[tag].tups[s])
116-
d := 0
117-
for k, v := range u {
118-
if m.tups[tag].forgort[v[0]] {
119-
u[d], u[k] = u[k], u[d]
120-
d++
121-
}
122-
}
123-
u = u[d:]
124-
if len(u) == 0 {
125-
break
126-
}
127-
t := u[rand.IntN(len(u))]
128-
if t[1] == "" {
129-
break
130-
}
131-
w.Append(t[0], []byte(t[1]))
132-
s = brain.ReduceEntropy(t[1])
133-
}
134-
return nil
135-
}
136-
13790
func TestTests(t *testing.T) {
13891
braintest.Test(context.Background(), t, func(ctx context.Context) brain.Interface { return new(membrain) })
13992
}

brain/builder.go

-50
This file was deleted.

brain/builder_test.go

-116
This file was deleted.

brain/kvbrain/speak.go

-120
Original file line numberDiff line numberDiff line change
@@ -5,17 +5,10 @@ import (
55
"context"
66
"fmt"
77
"iter"
8-
"math/rand/v2"
98

109
"github.com/dgraph-io/badger/v4"
11-
12-
"github.com/zephyrtronium/robot/brain"
13-
"github.com/zephyrtronium/robot/deque"
14-
"github.com/zephyrtronium/robot/tpool"
1510
)
1611

17-
var prependerPool tpool.Pool[deque.Deque[string]]
18-
1912
func (br *Brain) Think(ctx context.Context, tag string, prompt []string) iter.Seq[func(id *[]byte, suf *[]byte) error] {
2013
return func(yield func(func(id *[]byte, suf *[]byte) error) bool) {
2114
erf := func(err error) { yield(func(id, suf *[]byte) error { return err }) }
@@ -79,116 +72,3 @@ func (br *Brain) Think(ctx context.Context, tag string, prompt []string) iter.Se
7972
}
8073
}
8174
}
82-
83-
// Speak generates a full message and appends it to w.
84-
// The prompt is in reverse order and has entropy reduction applied.
85-
func (br *Brain) Speak(ctx context.Context, tag string, prompt []string, w *brain.Builder) error {
86-
search := prependerPool.Get().Prepend(prompt...)
87-
defer func() { prependerPool.Put(search.Reset()) }()
88-
89-
tb := hashTag(make([]byte, 0, tagHashLen), tag)
90-
b := make([]byte, 0, 128)
91-
var id string
92-
opts := badger.DefaultIteratorOptions
93-
// We don't actually need to iterate over values, only the single value
94-
// that we decide to use per suffix. So, we can disable value prefetch.
95-
opts.PrefetchValues = false
96-
opts.Prefix = hashTag(nil, tag)
97-
for range 1024 {
98-
var err error
99-
var l int
100-
b = append(b[:0], tb...)
101-
b, id, l, err = br.next(b, search.Slice(), opts)
102-
if err != nil {
103-
return err
104-
}
105-
if len(b) == 0 {
106-
break
107-
}
108-
w.Append(id, b)
109-
search = search.DropEnd(search.Len() - l - 1).Prepend(brain.ReduceEntropy(string(b)))
110-
}
111-
return nil
112-
}
113-
114-
// next finds a single token to continue a prompt.
// The returned values are, in order,
// b with its contents replaced with the new term,
// the ID of the message used for the term,
// the number of terms of the prompt which matched to produce the new term,
// and any error.
// If the returned term is the empty string, generation should end.
// b must arrive holding the tag hash; its first tagHashLen bytes are
// preserved when context is dropped.
func (br *Brain) next(b []byte, prompt []string, opts badger.IteratorOptions) ([]byte, string, int, error) {
	// These definitions are outside the loop to ensure we don't bias toward
	// smaller contexts.
	var (
		key  []byte
		skip brain.Skip
		n    uint64
	)
	b = appendPrefix(b, prompt)
	if len(prompt) == 0 {
		// If we have no prompt, then we want to make sure we select only
		// options that start a message.
		b = append(b, '\xff')
	}
	for {
		var seen uint64
		err := br.knowledge.View(func(txn *badger.Txn) error {
			it := txn.NewIterator(opts)
			defer it.Close()
			it.Seek(b)
			for it.ValidForPrefix(b) {
				if n == 0 {
					item := it.Item()
					// TODO(zeph): for #43, check deleted uuids so we never
					// pick a message that has been deleted
					key = item.KeyCopy(key[:0])
					// NOTE(review): skip.N presumably yields how many further
					// candidates to pass over before re-sampling, giving a
					// randomized selection over the iteration — confirm
					// against brain.Skip's documentation.
					n = skip.N(rand.Uint64(), rand.Uint64())
				}
				it.Next()
				n--
				seen++
			}
			return nil
		})
		if err != nil {
			return nil, "", len(prompt), fmt.Errorf("couldn't read knowledge: %w", err)
		}
		// Try to lose context.
		// We want to do so when we have a long context and almost no options,
		// or at random with even a short context.
		// Note that in the latter case we use a 1/2 chance; it seems high, but
		// n.b. the caller will recover the last token that we discard.
		if len(prompt) > 4 && seen <= 2 || len(prompt) > 2 && rand.Uint32()&1 == 0 {
			// We haven't seen enough options, and we have context we could
			// lose. Do so and try again from the beginning.
			// key and n intentionally survive this retry; see the comment on
			// their declarations above.
			prompt = prompt[:len(prompt)-1]
			b = appendPrefix(b[:tagHashLen], prompt)
			continue
		}
		if key == nil {
			// We never saw any options. Since we always select the first, this
			// means there were no options. Don't look for nothing in the DB.
			return b[:0], "", len(prompt), nil
		}
		// Fetch the value for the chosen key; any error here is carried out
		// through the final return below.
		err = br.knowledge.View(func(txn *badger.Txn) error {
			item, err := txn.Get(key)
			if err != nil {
				return fmt.Errorf("couldn't get item for key %q: %w", key, err)
			}
			b, err = item.ValueCopy(b[:0])
			if err != nil {
				return fmt.Errorf("couldn't get value for key %q: %w", key, err)
			}
			return nil
		})
		// The id is everything after the first byte following the hash for
		// empty prefixes, and everything after the first \xff\xff otherwise.
		id := key[tagHashLen+1:]
		if len(prompt) > 0 {
			_, id, _ = bytes.Cut(key, []byte{0xff, 0xff})
		}
		return b, string(id), len(prompt), err
	}
}

0 commit comments

Comments (0)