Skip to content

Commit 6526a00

Browse files
committed
ci: make field notes queries bounded
1 parent e126957 commit 6526a00

1 file changed

Lines changed: 56 additions & 28 deletions

File tree

scripts/discord-backup-field-notes.sh

Lines changed: 56 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -29,15 +29,23 @@ date_utc_days_ago() {
2929
#######################################
# Run one SQL query and append the result to the agent context file
# as a titled markdown section.
# Globals:   CONFIG (read), TMP_DIR (read; section appended to
#            $TMP_DIR/context.md)
# Arguments: $1 - section title
#            $2 - SQL query text
# Outputs:   appends "## <title>" plus compact JSON rows to context.md;
#            on query failure appends "[]" and a one-line skip note so
#            the context stays parseable instead of aborting the run.
#######################################
run_sql() {
  local title=$1
  local query=$2
  local output errfile

  # Capture stderr separately: with a combined 2>&1 capture, a query that
  # SUCCEEDS but prints a warning on stderr (e.g. go toolchain notices)
  # would feed non-JSON text into jq and break the section.
  errfile=$(mktemp) || return

  {
    printf "\n## %s\n\n" "$title"
    if output=$(DISCRAWL_NO_AUTO_UPDATE=1 go run ./cmd/discrawl --config "$CONFIG" --json sql "$query" 2>"$errfile"); then
      printf '%s\n' "$output" | jq -c .
    else
      # Keep the section machine-readable, then note why it is empty,
      # using the last stderr line as the most relevant error.
      printf '[]\n'
      printf '\n_query skipped: %s_\n' "$(tail -n 1 -- "$errfile")"
    fi
  } >>"$TMP_DIR/context.md"

  rm -f -- "$errfile"
}
3743

3844
#######################################
# Run one SQL query and print its JSON rows to stdout, degrading to an
# empty JSON array on failure so downstream jq pipelines never see
# garbage.
# Globals:   CONFIG (read)
# Arguments: $1 - SQL query text
# Outputs:   query result JSON on stdout, or "[]" if the query fails.
# Returns:   0 always (failures are converted into empty results).
#######################################
fallback_query() {
  local query=$1
  local output

  # Buffer stdout instead of streaming it: if the command emits partial
  # output and then fails, streaming would leave that partial JSON on
  # stdout with "[]" appended after it — invalid input for the caller's
  # jq filter. Buffering guarantees we print either a complete result
  # or exactly "[]".
  if output=$(DISCRAWL_NO_AUTO_UPDATE=1 go run ./cmd/discrawl --config "$CONFIG" --json sql "$query"); then
    printf '%s\n' "$output"
  else
    printf '[]\n'
  fi
}
4250

4351
write_fallback_notes() {
@@ -46,19 +54,19 @@ write_fallback_notes() {
4654
generated_at=$(date -u '+%Y-%m-%d %H:%M UTC')
4755
latest_message=$(fallback_query "select max(created_at) as latest_message from messages;" | jq -r '.[0].latest_message // "unknown"')
4856

49-
fallback_query "
50-
select coalesce(nullif(channel_name, ''), channel_id) as channel, count(*) as matches
51-
from messages
52-
where created_at >= $since_30 and $human_filter and $love_terms
57+
fallback_query "$recent_human_cte
58+
select channel, count(*) as matches
59+
from recent
60+
where $body_love_terms
5361
group by 1
5462
order by matches desc
5563
limit 4;
5664
" | jq -r '.[] | "- " + .channel + ": " + (.matches | tostring) + " positive mentions in the last 30 days."' >"$TMP_DIR/fallback-love.md"
5765

58-
fallback_query "
59-
select coalesce(nullif(channel_name, ''), channel_id) as channel, count(*) as matches
60-
from messages
61-
where created_at >= $since_30 and $human_filter and $complaint_terms
66+
fallback_query "$recent_human_cte
67+
select channel, count(*) as matches
68+
from recent
69+
where $body_complaint_terms
6270
group by 1
6371
order by matches desc
6472
limit 4;
@@ -126,9 +134,25 @@ run_openclaw_agent() {
126134
anchor_expr="(select max(created_at) from messages)"
127135
since_7="strftime('%Y-%m-%dT%H:%M:%fZ', datetime($anchor_expr, '-7 days'))"
128136
since_30="strftime('%Y-%m-%dT%H:%M:%fZ', datetime($anchor_expr, '-30 days'))"
129-
human_filter="lower(coalesce(author_name, '')) not in ('github', 'dependabot')"
137+
human_filter="lower(coalesce(mem.username, mem.display_name, m.author_id, '')) not in ('github', 'dependabot')"
130138
love_terms="(lower(coalesce(normalized_content, content, '')) like '%love%' or lower(coalesce(normalized_content, content, '')) like '%great%' or lower(coalesce(normalized_content, content, '')) like '%awesome%' or lower(coalesce(normalized_content, content, '')) like '%amazing%' or lower(coalesce(normalized_content, content, '')) like '%thanks%' or lower(coalesce(normalized_content, content, '')) like '%thank you%' or lower(coalesce(normalized_content, content, '')) like '%works%' or lower(coalesce(normalized_content, content, '')) like '%useful%' or lower(coalesce(normalized_content, content, '')) like '%helpful%' or lower(coalesce(normalized_content, content, '')) like '%fast%')"
131139
complaint_terms="(lower(coalesce(normalized_content, content, '')) like '%bug%' or lower(coalesce(normalized_content, content, '')) like '%broken%' or lower(coalesce(normalized_content, content, '')) like '%fail%' or lower(coalesce(normalized_content, content, '')) like '%error%' or lower(coalesce(normalized_content, content, '')) like '%crash%' or lower(coalesce(normalized_content, content, '')) like '%regression%' or lower(coalesce(normalized_content, content, '')) like '%slow%' or lower(coalesce(normalized_content, content, '')) like '%confusing%' or lower(coalesce(normalized_content, content, '')) like '%annoying%' or lower(coalesce(normalized_content, content, '')) like '%not working%' or lower(coalesce(normalized_content, content, '')) like '%cannot%' or lower(coalesce(normalized_content, content, '')) like '%can''t%')"
140+
body_love_terms="(lower(body) like '%love%' or lower(body) like '%great%' or lower(body) like '%awesome%' or lower(body) like '%amazing%' or lower(body) like '%thanks%' or lower(body) like '%thank you%' or lower(body) like '%works%' or lower(body) like '%useful%' or lower(body) like '%helpful%' or lower(body) like '%fast%')"
141+
body_complaint_terms="(lower(body) like '%bug%' or lower(body) like '%broken%' or lower(body) like '%fail%' or lower(body) like '%error%' or lower(body) like '%crash%' or lower(body) like '%regression%' or lower(body) like '%slow%' or lower(body) like '%confusing%' or lower(body) like '%annoying%' or lower(body) like '%not working%' or lower(body) like '%cannot%' or lower(body) like '%can''t%')"
142+
recent_human_cte="
143+
with recent as (
144+
select
145+
m.created_at,
146+
coalesce(nullif(c.name, ''), m.channel_id) as channel,
147+
coalesce(nullif(mem.display_name, ''), nullif(mem.username, ''), m.author_id, '') as author,
148+
coalesce(nullif(m.content, ''), m.normalized_content, '') as body
149+
from messages m
150+
left join channels c on c.id = m.channel_id
151+
left join members mem on mem.guild_id = m.guild_id and mem.user_id = m.author_id
152+
where $human_filter
153+
order by m.rowid desc
154+
limit 50000
155+
)"
132156
github_since=$(date_utc_days_ago 30)
133157

134158
cat >"$TMP_DIR/context.md" <<EOF
@@ -165,44 +189,48 @@ from messages where created_at >= $since_30;
165189
"
166190

167191
run_sql "Human Hot Channels This Week" "
168-
select coalesce(nullif(channel_name, ''), channel_id) as channel, count(*) as messages
169-
from messages
170-
where created_at >= $since_7 and $human_filter
192+
$recent_human_cte
193+
select channel, count(*) as messages
194+
from recent
171195
group by 1
172196
order by messages desc
173197
limit 8;
174198
"
175199

176-
run_sql "What People Seem To Love" "
177-
select coalesce(nullif(channel_name, ''), channel_id) as channel, count(*) as matches
178-
from messages
179-
where created_at >= $since_30 and $human_filter and $love_terms
200+
run_sql "What People Seem To Love In Recent Messages" "
201+
$recent_human_cte
202+
select channel, count(*) as matches
203+
from recent
204+
where $body_love_terms
180205
group by 1
181206
order by matches desc
182207
limit 8;
183208
"
184209

185210
run_sql "Love Samples" "
186-
select created_at, coalesce(nullif(channel_name, ''), channel_id) as channel, coalesce(nullif(author_name, ''), author_id) as author, substr(coalesce(content, normalized_content, ''), 1, 260) as sample
187-
from messages
188-
where created_at >= $since_30 and $human_filter and $love_terms
211+
${recent_human_cte}
212+
select created_at, channel, author, substr(body, 1, 260) as sample
213+
from recent
214+
where $body_love_terms
189215
order by created_at desc
190216
limit 10;
191217
"
192218

193-
run_sql "What People Complain About" "
194-
select coalesce(nullif(channel_name, ''), channel_id) as channel, count(*) as matches
195-
from messages
196-
where created_at >= $since_30 and $human_filter and $complaint_terms
219+
run_sql "What People Complain About In Recent Messages" "
220+
$recent_human_cte
221+
select channel, count(*) as matches
222+
from recent
223+
where $body_complaint_terms
197224
group by 1
198225
order by matches desc
199226
limit 8;
200227
"
201228

202229
run_sql "Complaint Samples" "
203-
select created_at, coalesce(nullif(channel_name, ''), channel_id) as channel, coalesce(nullif(author_name, ''), author_id) as author, substr(coalesce(content, normalized_content, ''), 1, 320) as sample
204-
from messages
205-
where created_at >= $since_30 and $human_filter and $complaint_terms
230+
${recent_human_cte}
231+
select created_at, channel, author, substr(body, 1, 320) as sample
232+
from recent
233+
where $body_complaint_terms
206234
order by created_at desc
207235
limit 12;
208236
"

0 commit comments

Comments (0)