flash_attn_kernel.h
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/phi/core/dense_tensor.h"
#include "paddle/phi/core/device_context.h"

namespace phi {

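// Forward FlashAttention kernel for variable-length ("unpadded") inputs:
// q/k/v hold the sequences of a batch packed back to back, with per-sequence
// boundaries given by the cumulative-length tensors cu_seqlens_q/cu_seqlens_k
// and bounded by max_seqlen_q/max_seqlen_k. `scale` is the softmax scaling
// factor, `dropout` the attention dropout probability, `causal` enables the
// causal mask, `return_softmax` additionally writes the softmax matrix, and
// `is_test` selects inference mode. Besides `out`, the kernel stores the
// per-row log-sum-exp (`softmax_lse`) and the RNG state (`seed_offset`)
// needed to replay dropout in the backward pass.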
template <typename T, typename Context>
void FlashAttnUnpaddedKernel(const Context& ctx,
                             const DenseTensor& q,
                             const DenseTensor& k,
                             const DenseTensor& v,
                             const DenseTensor& cu_seqlens_q,
                             const DenseTensor& cu_seqlens_k,
                             int64_t max_seqlen_q,
                             int64_t max_seqlen_k,
                             float scale,
                             float dropout,
                             bool causal,
                             bool return_softmax,
                             bool is_test,
                             DenseTensor* out,
                             DenseTensor* softmax,
                             DenseTensor* softmax_lse,
                             DenseTensor* seed_offset);

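// Forward FlashAttention kernel for dense, fixed-length batched q/k/v
// tensors; the parameters and outputs mirror FlashAttnUnpaddedKernel, with
// the sequence-length bookkeeping handled internally.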
template <typename T, typename Context>
void FlashAttnKernel(const Context& ctx,
                     const DenseTensor& q,
                     const DenseTensor& k,
                     const DenseTensor& v,
                     float dropout,
                     bool causal,
                     bool return_softmax,
                     bool is_test,
                     DenseTensor* out,
                     DenseTensor* softmax,
                     DenseTensor* softmax_lse,
                     DenseTensor* seed_offset);

}  // namespace phi