Commit e141ce6

Fix FlashAttention debug test, FP32 assert (#7684)
1 parent: 2e66683

2 files changed: +5 −7 lines

ggml-cuda/fattn-vec-f32.cuh

Lines changed: 0 additions & 4 deletions
@@ -278,14 +278,10 @@ void ggml_cuda_flash_attn_ext_vec_f32_case_impl(ggml_backend_cuda_context & ctx,
 
 template <int D, ggml_type type_K, ggml_type type_V>
 void ggml_cuda_flash_attn_ext_vec_f32_case(ggml_backend_cuda_context & ctx, ggml_tensor * dst) {
-    ggml_tensor * KQV = dst;
     ggml_tensor * Q = dst->src[0];
     ggml_tensor * K = dst->src[1];
     ggml_tensor * V = dst->src[2];
 
-    const int32_t precision = KQV->op_params[2];
-    GGML_ASSERT(precision == GGML_PREC_DEFAULT);
-
     GGML_ASSERT(K->type == type_K);
     GGML_ASSERT(V->type == type_V);
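
For context on the "FP32 assert" part of the fix: a FlashAttention op that requests full precision (GGML_PREC_F32 in op_params[2], which is exactly what the deleted line reads) gets routed to this FP32 kernel, so asserting GGML_PREC_DEFAULT here rejected the very inputs the kernel exists to serve. A minimal sketch of that routing, as a hypothetical simplified dispatcher rather than the actual ggml-cuda source:

    // Hypothetical, simplified dispatch sketch (not the actual ggml-cuda code):
    // the requested precision travels in op_params[2] of the result tensor.
    const int32_t precision = dst->op_params[2];

    if (precision == GGML_PREC_DEFAULT) {
        // default precision -> fast FP16-accumulation kernel
        ggml_cuda_flash_attn_ext_vec_f16_case<D, type_K, type_V>(ctx, dst);
    } else {
        // GGML_PREC_F32 -> this FP32 kernel, which the removed assert
        // would have aborted on despite being the intended target
        ggml_cuda_flash_attn_ext_vec_f32_case<D, type_K, type_V>(ctx, dst);
    }
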
tests/test-backend-ops.cpp

Lines changed: 5 additions & 3 deletions
@@ -1584,9 +1584,11 @@ struct test_flash_attn_ext : public test_case {
         : hs(hs), nh(nh), kv(kv), nb(nb), mask(mask), max_bias(max_bias), type_KV(type_KV) {}
 
     ggml_tensor * build_graph(ggml_context * ctx) override {
-        ggml_tensor * q = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, hs, nb, nh, 1);
-        ggml_tensor * k = ggml_new_tensor_4d(ctx, type_KV, hs, kv, nh, 1);
-        ggml_tensor * v = ggml_new_tensor_4d(ctx, type_KV, hs, kv, nh, 1);
+        const int64_t hs_padded = GGML_PAD(hs, ggml_blck_size(type_KV));
+
+        ggml_tensor * q = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, hs_padded, nb, nh, 1);
+        ggml_tensor * k = ggml_new_tensor_4d(ctx, type_KV, hs_padded, kv, nh, 1);
+        ggml_tensor * v = ggml_new_tensor_4d(ctx, type_KV, hs_padded, kv, nh, 1);
         ggml_tensor * m = mask ? ggml_new_tensor_4d(ctx, GGML_TYPE_F16, kv, GGML_PAD(nb, GGML_KQ_MASK_PAD), 1, 1) : nullptr;
         ggml_tensor * out = ggml_flash_attn_ext(ctx, q, k, v, m, 1.0f/sqrtf(hs), max_bias);
         return out;
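
The padding in the test matters because quantized K/V types store values in fixed-size blocks, so a tensor's row length (the head size here) must be a whole number of blocks. A small worked example of the arithmetic, assuming GGML_PAD keeps its usual round-up-to-a-multiple definition from ggml.h and that ggml_blck_size returns, e.g., 32 for GGML_TYPE_Q4_0 and 1 for GGML_TYPE_F16:

    #include <cassert>

    // Mirrors the GGML_PAD macro in ggml.h (round x up to a multiple of n);
    // treat this as an illustrative sketch rather than the canonical source.
    #define GGML_PAD(x, n) (((x) + (n) - 1) / (n) * (n))

    int main() {
        assert(GGML_PAD(40, 32) == 64); // Q4_0: 40 rounds up to a 32-block boundary
        assert(GGML_PAD(40,  1) == 40); // F16: block size 1, unchanged
        assert(GGML_PAD(64, 32) == 64); // already block-aligned, unchanged
        return 0;
    }

With hs_padded, a quantized type_KV can no longer yield a head size that is not representable as whole blocks, which the old hs-sized tensors allowed.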
