Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion convex/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,9 @@ export default defineSchema({
english: v.string(),
grammarType: v.optional(v.string()), // noun, verb, preposition, adjective, etc.
forvoAudioUrl: v.optional(v.string()), // Future: audio from Forvo or ElevenLabs
}).index("by_song_line", ["songId", "lineNumber"]),
})
.index("by_song_line", ["songId", "lineNumber"])
.index("by_persian", ["persian"]),

// Track user's learning progress for individual words
// Learning state is keyed by persian text so repeated words (e.g., "برای") sync across all instances
Expand Down
73 changes: 29 additions & 44 deletions convex/wordProgress.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ export const getByUser = query({
handler: async (ctx) => {
const userId = await getAuthUserId(ctx);
if (!userId) return [];

return await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) => q.eq("userId", userId))
Expand All @@ -25,9 +25,7 @@ export const getByUserWord = query({

return await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("wordId"), args.wordId))
.first();
},
Expand All @@ -42,9 +40,7 @@ export const getByUserPersian = query({

return await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), args.persian))
.first();
},
Expand All @@ -64,13 +60,11 @@ export const getByUserPersians = query({
uniquePersians.map(async (persian) => {
const progress = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), persian))
.first();
Comment on lines 61 to 65
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🏁 Script executed:

# Check current indexes in convex/schema.ts for wordProgress
rg -n 'wordProgress.*defineTable|\.index\(' convex/schema.ts -A 40 | head -100

Repository: EtanHey/songscript

Length of output: 4583


🏁 Script executed:

# Check the specific patterns at the mentioned lines in convex/wordProgress.ts
rg -n '\.withIndex\("by_user"' convex/wordProgress.ts -C 3

Repository: EtanHey/songscript

Length of output: 3042


🏁 Script executed:

# Check if composite indexes are already defined
rg -n '\.index\("by_user' convex/schema.ts -A 2

Repository: EtanHey/songscript

Length of output: 1033


🏁 Script executed:

# Verify the coding guideline - check if there are queries/mutations in wordProgress.ts vs songs.ts
echo "=== Exports in convex/wordProgress.ts ===" && \
rg -n '^export (const|function|default)' convex/wordProgress.ts | head -20 && \
echo "" && \
echo "=== Exports in convex/songs.ts ===" && \
rg -n '^export (const|function|default)' convex/songs.ts | head -20

Repository: EtanHey/songscript

Length of output: 1092


Add composite indexes by_user_word and by_user_persian to eliminate post-index filtering scans.

The by_user index is single-column only. All flagged call sites use .withIndex("by_user").filter(...) patterns, which scan all user rows before applying the filter—causing O(user_rows) performance degradation in hot paths. The codebase already defines composite indexes for visitorId-based lookups (by_visitor_word, by_visitor_persian); apply the same pattern for userId.

Required schema changes
# convex/schema.ts (wordProgress indexes)
 .index("by_user", ["userId"])
+.index("by_user_word", ["userId", "wordId"])
+.index("by_user_persian", ["userId", "persian"]),
# convex/wordProgress.ts examples
- .withIndex("by_user", (q) => q.eq("userId", userId))
- .filter((q) => q.eq(q.field("wordId"), wordId))
+ .withIndex("by_user_word", (q) =>
+   q.eq("userId", userId).eq("wordId", wordId)
+ )

- .withIndex("by_user", (q) => q.eq("userId", userId))
- .filter((q) => q.eq(q.field("persian"), persian))
+ .withIndex("by_user_persian", (q) =>
+   q.eq("userId", userId).eq("persian", persian)
+ )

Affected locations: lines 28–30, 43–45, 63–65, 84–86, 104–106, 141–143, 177–179, 227–229, 552–554.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@convex/wordProgress.ts` around lines 61 - 65, the queries against the
"wordProgress" table use .withIndex("by_user").filter(...) which forces a
post-index scan; add composite indexes named by_user_word (userId + wordId) and
by_user_persian (userId + persian) to the Convex schema and replace the
.withIndex("by_user") calls with .withIndex("by_user_word") or
.withIndex("by_user_persian") at the affected call sites (e.g., the query in
wordProgress.ts that calls ctx.db.query("wordProgress").withIndex("by_user", (q)
=> q.eq("userId", userId)).filter((q) => q.eq(q.field("persian"),
persian)).first()) so each query can match the composite index instead of
filtering after the index; update all listed locations (the other
withIndex("by_user").filter usages) to pick the correct composite index for
their filter key.

return { persian, progress };
})
}),
);
return results;
},
Expand All @@ -87,13 +81,11 @@ export const getByUserWords = query({
args.wordIds.map(async (wordId) => {
const progress = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("wordId"), wordId))
.first();
return { wordId, progress };
})
}),
);
return results;
},
Expand All @@ -109,9 +101,7 @@ export const incrementViewCount = mutation({
// Look up by persian text first - this is the canonical key
const existingPersian = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), args.persian))
.first();

Expand Down Expand Up @@ -148,9 +138,7 @@ export const incrementPlayCount = mutation({
// Look up by persian text first - this is the canonical key
const existingPersian = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), args.persian))
.first();

Expand Down Expand Up @@ -186,27 +174,26 @@ export const toggleLearned = mutation({
// Find all progress records for this persian word
const allMatching = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), args.persian))
.collect();

// Determine the new learned state (toggle from current)
const currentLearned = allMatching.length > 0 ? allMatching[0].learned : false;
const currentLearned =
allMatching.length > 0 ? allMatching[0].learned : false;
const newLearned = !currentLearned;

// Update ALL matching records
// Only update lastSeen when marking as learned, not when unmarking
const updateData: { learned: boolean; lastSeen?: number } = { learned: newLearned };
const updateData: { learned: boolean; lastSeen?: number } = {
learned: newLearned,
};
if (newLearned) {
updateData.lastSeen = Date.now();
}

await Promise.all(
allMatching.map((record) =>
ctx.db.patch(record._id, updateData)
)
allMatching.map((record) => ctx.db.patch(record._id, updateData)),
);

// If no records exist yet, create one for this specific word instance
Expand Down Expand Up @@ -237,23 +224,21 @@ export const setLearned = mutation({
// Find all progress records for this persian word
const allMatching = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), args.persian))
.collect();

// Update ALL matching records
// Only update lastSeen when marking as learned, not when unmarking
const updateData: { learned: boolean; lastSeen?: number } = { learned: args.learned };
const updateData: { learned: boolean; lastSeen?: number } = {
learned: args.learned,
};
if (args.learned) {
updateData.lastSeen = Date.now();
}

await Promise.all(
allMatching.map((record) =>
ctx.db.patch(record._id, updateData)
)
allMatching.map((record) => ctx.db.patch(record._id, updateData)),
);

// If no records exist yet, create one for this specific word instance
Expand Down Expand Up @@ -428,7 +413,7 @@ export const getVocabularyByLanguage = query({
masteryLevel,
sourceLanguage: song.sourceLanguage,
};
})
}),
);

// Filter out nulls and dedupe by persian text (keep highest practice count)
Expand Down Expand Up @@ -485,8 +470,10 @@ export const getWordDetailsWithSongs = query({
args: { persian: v.string() },
handler: async (ctx, args) => {
// Find all word instances with this persian text across all songs
const allWords = await ctx.db.query("words").collect();
const matchingWords = allWords.filter((w) => w.persian === args.persian);
const matchingWords = await ctx.db
.query("words")
.withIndex("by_persian", (q) => q.eq("persian", args.persian))
.collect();

if (matchingWords.length === 0) {
return null;
Expand All @@ -512,7 +499,7 @@ export const getWordDetailsWithSongs = query({
const line = await ctx.db
.query("lyrics")
.withIndex("by_song", (q) =>
q.eq("songId", songId).eq("lineNumber", lineNumber)
q.eq("songId", songId).eq("lineNumber", lineNumber),
)
.first();

Expand All @@ -524,7 +511,7 @@ export const getWordDetailsWithSongs = query({
lineNumber,
linePreview: line?.original?.slice(0, 50) || "",
};
})
}),
);

const validSongs = songsWithContext.filter(Boolean) as NonNullable<
Expand Down Expand Up @@ -562,9 +549,7 @@ export const getWordPracticeHistory = query({
// Get the progress record for this word
const progress = await ctx.db
.query("wordProgress")
.withIndex("by_user", (q) =>
q.eq("userId", userId)
)
.withIndex("by_user", (q) => q.eq("userId", userId))
.filter((q) => q.eq(q.field("persian"), args.persian))
.first();

Expand Down