/* Copyright 2025 The xLLM Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    https://github.com/jd-opensource/xllm/blob/main/LICENSE

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "flashinfer_workspace.h"

#include "global_flags.h"

namespace xllm {

void FlashinferWorkspace::initialize(const torch::Device& device) {
  // Device-side float workspace buffer used by FlashInfer kernels.
  float_workspace_buffer_ =
      torch::empty({FLAGS_workspace_buffer_size},
                   torch::dtype(torch::kUInt8).device(device));
  // Device-side int workspace buffer.
  int_workspace_buffer_ =
      torch::empty({FLAGS_workspace_buffer_size},
                   torch::dtype(torch::kUInt8).device(device));
  // Pinned (page-locked) host mirror of the int workspace buffer, which
  // allows asynchronous host-to-device copies.
  page_locked_int_workspace_buffer_ = torch::empty(
      {FLAGS_workspace_buffer_size},
      torch::dtype(torch::kUInt8).device(torch::kCPU).pinned_memory(true));
}

torch::Tensor FlashinferWorkspace::get_float_workspace_buffer() {
  return float_workspace_buffer_;
}

torch::Tensor FlashinferWorkspace::get_int_workspace_buffer() {
  return int_workspace_buffer_;
}

torch::Tensor FlashinferWorkspace::get_page_locked_int_workspace_buffer() {
  return page_locked_int_workspace_buffer_;
}

}  // namespace xllm