style: comment
zanussbaum committed Dec 20, 2024
1 parent 6a4cef3 commit fad15ff
Showing 1 changed file with 1 addition and 0 deletions.
src/layers/attention.ts (1 addition, 0 deletions)
@@ -85,6 +85,7 @@ export class MultiHeadAttention extends Module {
 const valueHeads = await this.reshapeToHeads(value);
 
 // Compute attention for each head
+// this will be slow, we should create bmm
 const headOutputs: Tensor[] = [];
 for (let i = 0; i < this.num_heads; i++) {
   const [headOutput] = await this.scaledDotProductAttention(
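The added comment points at the per-head loop as the likely bottleneck: each head currently goes through scaledDotProductAttention on its own, so the underlying matmuls are dispatched num_heads separate times. A batched matrix multiply (bmm) would compute all heads in one call. Below is a minimal sketch of what a bmm could look like; it is not the repository's implementation, it assumes plain nested number arrays rather than the Tensor class used above, and the queryHeads / keyHeadsT names in the usage comment are hypothetical.

// Sketch of a batched matmul over plain arrays:
// shapes [batch, m, k] x [batch, k, n] -> [batch, m, n].
// Treating each attention head as one batch entry would replace the
// per-head loop with a single call.
type Batched = number[][][];

function bmm(a: Batched, b: Batched): Batched {
  const out: Batched = [];
  for (let h = 0; h < a.length; h++) { // one independent matmul per batch entry (head)
    const m = a[h].length;
    const k = a[h][0].length;
    const n = b[h][0].length;
    const result: number[][] = [];
    for (let i = 0; i < m; i++) {
      const row = new Array<number>(n).fill(0);
      for (let p = 0; p < k; p++) {
        const aip = a[h][i][p];
        for (let j = 0; j < n; j++) {
          row[j] += aip * b[h][p][j]; // accumulate a[h][i][p] * b[h][p][j]
        }
      }
      result.push(row);
    }
    out.push(result);
  }
  return out;
}

// Hypothetical usage: queryHeads is [numHeads, seqLen, headDim] and keyHeadsT is
// [numHeads, headDim, seqLen]; one call yields the attention scores for every head.
// const scores = bmm(queryHeads, keyHeadsT);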
