
Commit ef6506e

Move dataset.go to common

Signed-off-by: Qifan Deng <[email protected]>
1 parent c09b608 · commit ef6506e

File tree

12 files changed: +35 -35 lines changed

.gitignore

Lines changed: 1 addition & 1 deletion
@@ -7,5 +7,5 @@ vendor
 .DS_Store
 *.test
 manifests/dev-config.yaml
-pkg/llm-d-inference-sim/.llm-d
+pkg/common/.llm-d
 pkg/llm-d-inference-sim/tests-tmp/

pkg/common/config.go

Lines changed: 2 additions & 2 deletions
@@ -176,10 +176,10 @@ type Configuration struct {
     DPSize int `yaml:"data-parallel-size" json:"data-parallel-size"`

     // Dataset configuration for response generation from a dataset. sqlite db file is expected.
-    Dataset Dataset
+    Dataset DatasetConf
 }

-type Dataset struct {
+type DatasetConf struct {
     // Path is the local path to the sqlite db file, default is empty
     // when path is empty Url will be checked
     Path string `yaml:"path" json:"path"`
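
The rename avoids a collision with the runtime Dataset type that now also lives in pkg/common. A minimal sketch of how the renamed config type might be populated; the module import path and the example path value are assumptions, and only the Path field is visible in this diff:

package main

import (
    "fmt"

    "github.com/llm-d/llm-d-inference-sim/pkg/common" // import path assumed
)

func main() {
    // The Dataset field keeps its name, but its type is now DatasetConf.
    cfg := common.Configuration{
        Dataset: common.DatasetConf{
            Path: "/data/responses.sqlite3", // hypothetical local sqlite db file
        },
    }
    fmt.Println(cfg.Dataset.Path)
}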

pkg/llm-d-inference-sim/dataset.go renamed to pkg/common/dataset.go

Lines changed: 15 additions & 15 deletions
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */

-package llmdinferencesim
+package common

 import (
     "context"
@@ -35,7 +35,7 @@ import (

 type Dataset struct {
     db     *sql.DB
-    logger logr.Logger
+    Logger logr.Logger
 }

 // use constants for expected column names and types
@@ -60,7 +60,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
     // Goroutine to listen for signal
     go func() {
         <-sigs
-        d.logger.Info("Interrupt signal received, cancelling download...")
+        d.Logger.Info("Interrupt signal received, cancelling download...")
         cancel()
     }()

@@ -71,7 +71,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
     defer func() {
         cerr := out.Close()
         if cerr != nil {
-            d.logger.Error(cerr, "failed to close file after download")
+            d.Logger.Error(cerr, "failed to close file after download")
         }
     }()

@@ -82,7 +82,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
     defer func() {
         cerr := resp.Body.Close()
         if cerr != nil {
-            d.logger.Error(cerr, "failed to close response body after download")
+            d.Logger.Error(cerr, "failed to close response body after download")
         }
     }()

@@ -94,7 +94,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
     pr := &progressReader{
         Reader:        resp.Body,
         total:         resp.ContentLength,
-        logger:        d.logger,
+        logger:        d.Logger,
         ctx:           ctx,
         startTime:     time.Now(),
         hasShownSpeed: false,
@@ -105,7 +105,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
         // Remove incomplete file
         cerr := os.Remove(savePath)
         if cerr != nil {
-            d.logger.Error(cerr, "failed to remove incomplete file after download")
+            d.Logger.Error(cerr, "failed to remove incomplete file after download")
         }
         // If context was cancelled, return a specific error
         if errors.Is(err, context.Canceled) {
@@ -117,7 +117,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
     if written == 0 {
         cerr := os.Remove(savePath)
         if cerr != nil {
-            d.logger.Error(cerr, "failed to remove empty file after download")
+            d.Logger.Error(cerr, "failed to remove empty file after download")
         }
         return errors.New("downloaded file is empty")
     }
@@ -126,7 +126,7 @@ func (d *Dataset) downloadDataset(url string, savePath string) error {
     if err := out.Sync(); err != nil {
         cerr := os.Remove(savePath)
         if cerr != nil {
-            d.logger.Error(cerr, "failed to remove incomplete file after download")
+            d.Logger.Error(cerr, "failed to remove incomplete file after download")
         }
         return fmt.Errorf("failed to sync file: %w", err)
     }
@@ -187,7 +187,7 @@ func (d *Dataset) verifyDB() error {
     }
     defer func() {
         if cerr := rows.Close(); cerr != nil {
-            d.logger.Error(cerr, "failed to close rows after querying table info")
+            d.Logger.Error(cerr, "failed to close rows after querying table info")
         }
     }()

@@ -243,7 +243,7 @@ func (d *Dataset) connectToDB(path string) error {
     if d.db != nil {
         err := d.db.Close()
         if err != nil {
-            d.logger.Error(err, "failed to close existing database connection")
+            d.Logger.Error(err, "failed to close existing database connection")
         }
         d.db = nil
     }
@@ -265,10 +265,10 @@ func (d *Dataset) connectToDB(path string) error {

     count, err := d.getRecordsCount()
     if err != nil {
-        d.logger.Error(err, "failed to get records count")
+        d.Logger.Error(err, "failed to get records count")
         return fmt.Errorf("failed to query database: %w", err)
     }
-    d.logger.Info("Database connected successfully", "path", path, "records count", count)
+    d.Logger.Info("Database connected successfully", "path", path, "records count", count)

     return nil
 }
@@ -294,13 +294,13 @@ func (d *Dataset) Init(path string, url string, savePath string) error {
         if err != nil {
             return fmt.Errorf("failed to create parent directory: %w", err)
         }
-        d.logger.Info("Downloading dataset from URL", "url", url, "to", savePath)
+        d.Logger.Info("Downloading dataset from URL", "url", url, "to", savePath)
         err = d.downloadDataset(url, savePath)
         if err != nil {
             return fmt.Errorf("failed to download dataset: %w", err)
         }
     }
-    d.logger.Info("Using dataset from", "path", savePath)
+    d.Logger.Info("Using dataset from", "path", savePath)

     return d.connectToDB(savePath)
 }
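
Moving the type to pkg/common and exporting the Logger field lets code outside the package construct and configure the dataset directly. A minimal sketch of such a caller; the module import path, URL, and save path are placeholders, and only the Init(path, url, savePath) signature is taken from this diff:

package main

import (
    "log"

    "github.com/go-logr/logr"

    "github.com/llm-d/llm-d-inference-sim/pkg/common" // import path assumed
)

func main() {
    // Logger is now exported, so it can be set from outside the package.
    ds := &common.Dataset{Logger: logr.Discard()}

    // Per the Init hunk above, an empty path means the dataset is downloaded
    // from url into savePath before connecting to the sqlite db.
    if err := ds.Init("", "https://example.com/responses.sqlite3", ".llm-d/responses.sqlite3"); err != nil {
        log.Fatalf("dataset init failed: %v", err)
    }
}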

pkg/llm-d-inference-sim/dataset_test.go renamed to pkg/common/dataset_test.go

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */

-package llmdinferencesim
+package common

 import (
     "encoding/json"
@@ -42,7 +42,7 @@ var _ = Describe("Dataset", func() {

     BeforeEach(func() {
         dataset = &Dataset{
-            logger: logr.Discard(),
+            Logger: logr.Discard(),
         }
         file_folder = ".llm-d"
         savePath = file_folder + "/test.sqlite3"

pkg/common/utils.go

Lines changed: 3 additions & 3 deletions
@@ -146,7 +146,7 @@ func GetRandomText(numOfTokens int) string {
 // - finish reason is stop
 // if ignore_eos is true - the response will be generated with exactly maxCompletionTokens tokens
 // - request was validated so that when ignore_eos is true, maxCompletionTokens must be defined
-func GetRandomTokens(maxCompletionTokens *int64, ignore_eos bool) ([]string, string) {
+func GetRandomTokens(maxCompletionTokens *int64, ignore_eos bool, dataset *Dataset) ([]string, string) {
     numOfTokens := 0
     finishReason := StopFinishReason

@@ -260,9 +260,9 @@ func calcBucketBoundaries(maxTokens int, bucketIndex int) (start int, end int) {
     return start, end
 }

-// GetResponseTokens returns needed tokens, from a given text
+// EchoResponseTokens returns needed tokens, from a given text
 // considering max completion tokens if it is not nil, and a finish reason (stop or length)
-func GetResponseTokens(maxCompletionTokens *int64, text string) ([]string, string) {
+func EchoResponseTokens(maxCompletionTokens *int64, text string) ([]string, string) {
     tokens := Tokenize(text)
     // no max completion tokens, return entire text
     if maxCompletionTokens == nil {
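
Callers of these helpers need two mechanical updates: pass the dataset to GetRandomTokens and use the new EchoResponseTokens name. A minimal sketch under those assumptions; whether a nil dataset is an acceptable argument is not shown in this diff and is an assumption here:

package main

import (
    "fmt"

    "github.com/llm-d/llm-d-inference-sim/pkg/common" // import path assumed
)

func main() {
    maxTokens := int64(8)

    // GetRandomTokens now also receives a *Dataset; passing nil is an
    // assumption about how callers behave when no dataset is configured.
    tokens, finishReason := common.GetRandomTokens(&maxTokens, false, nil)
    fmt.Println(len(tokens), finishReason)

    // GetResponseTokens was renamed to EchoResponseTokens; per the diff the
    // signature and truncation behaviour are unchanged.
    echoed, reason := common.EchoResponseTokens(&maxTokens, "hello world from the simulator")
    fmt.Println(echoed, reason)
}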
