From 5bf1af19cf08badd7fbbcb95b377ed98185b37fc Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 02:42:54 +0800 Subject: [PATCH 01/14] feat: implement points and events --- docs/scope.md | 1 + ent/client.go | 372 ++++++- ent/ent.go | 4 + ent/events.go | 175 ++++ ent/events/events.go | 99 ++ ent/events/where.go | 244 +++++ ent/events_create.go | 265 +++++ ent/events_delete.go | 88 ++ ent/events_query.go | 619 ++++++++++++ ent/events_update.go | 421 ++++++++ ent/gql_collection.go | 223 +++++ ent/gql_edge.go | 40 + ent/gql_mutation_input.go | 176 +++- ent/gql_node.go | 62 ++ ent/gql_pagination.go | 500 ++++++++++ ent/gql_where_input.go | 650 +++++++++++++ ent/hook/hook.go | 24 + ent/intercept/intercept.go | 60 ++ ent/internal/globalid.go | 2 +- ent/internal/schema.go | 2 +- ent/migrate/schema.go | 68 ++ ent/mutation.go | 1877 +++++++++++++++++++++++++++++++----- ent/points.go | 191 ++++ ent/points/points.go | 135 +++ ent/points/where.go | 364 +++++++ ent/points_create.go | 329 +++++++ ent/points_delete.go | 88 ++ ent/points_query.go | 627 ++++++++++++ ent/points_update.go | 531 ++++++++++ ent/predicate/predicate.go | 6 + ent/runtime/runtime.go | 35 + ent/schema/events.go | 55 ++ ent/schema/points.go | 48 + ent/schema/user.go | 2 + ent/tx.go | 6 + ent/user.go | 87 +- ent/user/user.go | 60 ++ ent/user/where.go | 46 + ent/user_create.go | 64 ++ ent/user_query.go | 212 +++- ent/user_update.go | 326 +++++++ graph/ent.graphqls | 341 +++++++ graph/ent.resolvers.go | 14 + 43 files changed, 9280 insertions(+), 259 deletions(-) create mode 100644 ent/events.go create mode 100644 ent/events/events.go create mode 100644 ent/events/where.go create mode 100644 ent/events_create.go create mode 100644 ent/events_delete.go create mode 100644 ent/events_query.go create mode 100644 ent/events_update.go create mode 100644 ent/points.go create mode 100644 ent/points/points.go create mode 100644 ent/points/where.go create mode 100644 ent/points_create.go create mode 100644 ent/points_delete.go create mode 100644 ent/points_query.go create mode 100644 ent/points_update.go create mode 100644 ent/schema/events.go create mode 100644 ent/schema/points.go diff --git a/docs/scope.md b/docs/scope.md index 40fc873..39419e0 100644 --- a/docs/scope.md +++ b/docs/scope.md @@ -4,6 +4,7 @@ - `me`:針對自身的操作 - `user`:使用者操作 + - 包含登入記錄、點數查詢 - `group`:群組操作 - `scopeset`:範圍集合操作 - `database`:題庫對應資料庫的操作 diff --git a/ent/client.go b/ent/client.go index c64efd2..1870083 100644 --- a/ent/client.go +++ b/ent/client.go @@ -16,7 +16,9 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" "github.com/database-playground/backend-v2/ent/user" @@ -29,8 +31,12 @@ type Client struct { Schema *migrate.Schema // Database is the client for interacting with the Database builders. Database *DatabaseClient + // Events is the client for interacting with the Events builders. + Events *EventsClient // Group is the client for interacting with the Group builders. Group *GroupClient + // Points is the client for interacting with the Points builders. + Points *PointsClient // Question is the client for interacting with the Question builders. 
Question *QuestionClient // ScopeSet is the client for interacting with the ScopeSet builders. @@ -49,7 +55,9 @@ func NewClient(opts ...Option) *Client { func (c *Client) init() { c.Schema = migrate.NewSchema(c.driver) c.Database = NewDatabaseClient(c.config) + c.Events = NewEventsClient(c.config) c.Group = NewGroupClient(c.config) + c.Points = NewPointsClient(c.config) c.Question = NewQuestionClient(c.config) c.ScopeSet = NewScopeSetClient(c.config) c.User = NewUserClient(c.config) @@ -146,7 +154,9 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) { ctx: ctx, config: cfg, Database: NewDatabaseClient(cfg), + Events: NewEventsClient(cfg), Group: NewGroupClient(cfg), + Points: NewPointsClient(cfg), Question: NewQuestionClient(cfg), ScopeSet: NewScopeSetClient(cfg), User: NewUserClient(cfg), @@ -170,7 +180,9 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error) ctx: ctx, config: cfg, Database: NewDatabaseClient(cfg), + Events: NewEventsClient(cfg), Group: NewGroupClient(cfg), + Points: NewPointsClient(cfg), Question: NewQuestionClient(cfg), ScopeSet: NewScopeSetClient(cfg), User: NewUserClient(cfg), @@ -202,21 +214,21 @@ func (c *Client) Close() error { // Use adds the mutation hooks to all the entity clients. // In order to add hooks to a specific client, call: `client.Node.Use(...)`. func (c *Client) Use(hooks ...Hook) { - c.Database.Use(hooks...) - c.Group.Use(hooks...) - c.Question.Use(hooks...) - c.ScopeSet.Use(hooks...) - c.User.Use(hooks...) + for _, n := range []interface{ Use(...Hook) }{ + c.Database, c.Events, c.Group, c.Points, c.Question, c.ScopeSet, c.User, + } { + n.Use(hooks...) + } } // Intercept adds the query interceptors to all the entity clients. // In order to add interceptors to a specific client, call: `client.Node.Intercept(...)`. func (c *Client) Intercept(interceptors ...Interceptor) { - c.Database.Intercept(interceptors...) - c.Group.Intercept(interceptors...) - c.Question.Intercept(interceptors...) - c.ScopeSet.Intercept(interceptors...) - c.User.Intercept(interceptors...) + for _, n := range []interface{ Intercept(...Interceptor) }{ + c.Database, c.Events, c.Group, c.Points, c.Question, c.ScopeSet, c.User, + } { + n.Intercept(interceptors...) + } } // Mutate implements the ent.Mutator interface. @@ -224,8 +236,12 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) { switch m := m.(type) { case *DatabaseMutation: return c.Database.mutate(ctx, m) + case *EventsMutation: + return c.Events.mutate(ctx, m) case *GroupMutation: return c.Group.mutate(ctx, m) + case *PointsMutation: + return c.Points.mutate(ctx, m) case *QuestionMutation: return c.Question.mutate(ctx, m) case *ScopeSetMutation: @@ -386,6 +402,155 @@ func (c *DatabaseClient) mutate(ctx context.Context, m *DatabaseMutation) (Value } } +// EventsClient is a client for the Events schema. +type EventsClient struct { + config +} + +// NewEventsClient returns a client for the Events from the given config. +func NewEventsClient(c config) *EventsClient { + return &EventsClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `events.Hooks(f(g(h())))`. +func (c *EventsClient) Use(hooks ...Hook) { + c.hooks.Events = append(c.hooks.Events, hooks...) +} + +// Intercept adds a list of query interceptors to the interceptors stack. +// A call to `Intercept(f, g, h)` equals to `events.Intercept(f(g(h())))`. 
+func (c *EventsClient) Intercept(interceptors ...Interceptor) { + c.inters.Events = append(c.inters.Events, interceptors...) +} + +// Create returns a builder for creating a Events entity. +func (c *EventsClient) Create() *EventsCreate { + mutation := newEventsMutation(c.config, OpCreate) + return &EventsCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of Events entities. +func (c *EventsClient) CreateBulk(builders ...*EventsCreate) *EventsCreateBulk { + return &EventsCreateBulk{config: c.config, builders: builders} +} + +// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates +// a builder and applies setFunc on it. +func (c *EventsClient) MapCreateBulk(slice any, setFunc func(*EventsCreate, int)) *EventsCreateBulk { + rv := reflect.ValueOf(slice) + if rv.Kind() != reflect.Slice { + return &EventsCreateBulk{err: fmt.Errorf("calling to EventsClient.MapCreateBulk with wrong type %T, need slice", slice)} + } + builders := make([]*EventsCreate, rv.Len()) + for i := 0; i < rv.Len(); i++ { + builders[i] = c.Create() + setFunc(builders[i], i) + } + return &EventsCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for Events. +func (c *EventsClient) Update() *EventsUpdate { + mutation := newEventsMutation(c.config, OpUpdate) + return &EventsUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *EventsClient) UpdateOne(_m *Events) *EventsUpdateOne { + mutation := newEventsMutation(c.config, OpUpdateOne, withEvents(_m)) + return &EventsUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *EventsClient) UpdateOneID(id int) *EventsUpdateOne { + mutation := newEventsMutation(c.config, OpUpdateOne, withEventsID(id)) + return &EventsUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for Events. +func (c *EventsClient) Delete() *EventsDelete { + mutation := newEventsMutation(c.config, OpDelete) + return &EventsDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *EventsClient) DeleteOne(_m *Events) *EventsDeleteOne { + return c.DeleteOneID(_m.ID) +} + +// DeleteOneID returns a builder for deleting the given entity by its id. +func (c *EventsClient) DeleteOneID(id int) *EventsDeleteOne { + builder := c.Delete().Where(events.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &EventsDeleteOne{builder} +} + +// Query returns a query builder for Events. +func (c *EventsClient) Query() *EventsQuery { + return &EventsQuery{ + config: c.config, + ctx: &QueryContext{Type: TypeEvents}, + inters: c.Interceptors(), + } +} + +// Get returns a Events entity by its id. +func (c *EventsClient) Get(ctx context.Context, id int) (*Events, error) { + return c.Query().Where(events.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *EventsClient) GetX(ctx context.Context, id int) *Events { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryUser queries the user edge of a Events. 
+func (c *EventsClient) QueryUser(_m *Events) *UserQuery { + query := (&UserClient{config: c.config}).Query() + query.path = func(context.Context) (fromV *sql.Selector, _ error) { + id := _m.ID + step := sqlgraph.NewStep( + sqlgraph.From(events.Table, events.FieldID, id), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, events.UserTable, events.UserColumn), + ) + fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *EventsClient) Hooks() []Hook { + return c.hooks.Events +} + +// Interceptors returns the client interceptors. +func (c *EventsClient) Interceptors() []Interceptor { + return c.inters.Events +} + +func (c *EventsClient) mutate(ctx context.Context, m *EventsMutation) (Value, error) { + switch m.Op() { + case OpCreate: + return (&EventsCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpUpdate: + return (&EventsUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpUpdateOne: + return (&EventsUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpDelete, OpDeleteOne: + return (&EventsDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx) + default: + return nil, fmt.Errorf("ent: unknown Events mutation op: %q", m.Op()) + } +} + // GroupClient is a client for the Group schema. type GroupClient struct { config @@ -537,6 +702,157 @@ func (c *GroupClient) mutate(ctx context.Context, m *GroupMutation) (Value, erro } } +// PointsClient is a client for the Points schema. +type PointsClient struct { + config +} + +// NewPointsClient returns a client for the Points from the given config. +func NewPointsClient(c config) *PointsClient { + return &PointsClient{config: c} +} + +// Use adds a list of mutation hooks to the hooks stack. +// A call to `Use(f, g, h)` equals to `points.Hooks(f(g(h())))`. +func (c *PointsClient) Use(hooks ...Hook) { + c.hooks.Points = append(c.hooks.Points, hooks...) +} + +// Intercept adds a list of query interceptors to the interceptors stack. +// A call to `Intercept(f, g, h)` equals to `points.Intercept(f(g(h())))`. +func (c *PointsClient) Intercept(interceptors ...Interceptor) { + c.inters.Points = append(c.inters.Points, interceptors...) +} + +// Create returns a builder for creating a Points entity. +func (c *PointsClient) Create() *PointsCreate { + mutation := newPointsMutation(c.config, OpCreate) + return &PointsCreate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// CreateBulk returns a builder for creating a bulk of Points entities. +func (c *PointsClient) CreateBulk(builders ...*PointsCreate) *PointsCreateBulk { + return &PointsCreateBulk{config: c.config, builders: builders} +} + +// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates +// a builder and applies setFunc on it. +func (c *PointsClient) MapCreateBulk(slice any, setFunc func(*PointsCreate, int)) *PointsCreateBulk { + rv := reflect.ValueOf(slice) + if rv.Kind() != reflect.Slice { + return &PointsCreateBulk{err: fmt.Errorf("calling to PointsClient.MapCreateBulk with wrong type %T, need slice", slice)} + } + builders := make([]*PointsCreate, rv.Len()) + for i := 0; i < rv.Len(); i++ { + builders[i] = c.Create() + setFunc(builders[i], i) + } + return &PointsCreateBulk{config: c.config, builders: builders} +} + +// Update returns an update builder for Points. 
+func (c *PointsClient) Update() *PointsUpdate { + mutation := newPointsMutation(c.config, OpUpdate) + return &PointsUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOne returns an update builder for the given entity. +func (c *PointsClient) UpdateOne(_m *Points) *PointsUpdateOne { + mutation := newPointsMutation(c.config, OpUpdateOne, withPoints(_m)) + return &PointsUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// UpdateOneID returns an update builder for the given id. +func (c *PointsClient) UpdateOneID(id int) *PointsUpdateOne { + mutation := newPointsMutation(c.config, OpUpdateOne, withPointsID(id)) + return &PointsUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// Delete returns a delete builder for Points. +func (c *PointsClient) Delete() *PointsDelete { + mutation := newPointsMutation(c.config, OpDelete) + return &PointsDelete{config: c.config, hooks: c.Hooks(), mutation: mutation} +} + +// DeleteOne returns a builder for deleting the given entity. +func (c *PointsClient) DeleteOne(_m *Points) *PointsDeleteOne { + return c.DeleteOneID(_m.ID) +} + +// DeleteOneID returns a builder for deleting the given entity by its id. +func (c *PointsClient) DeleteOneID(id int) *PointsDeleteOne { + builder := c.Delete().Where(points.ID(id)) + builder.mutation.id = &id + builder.mutation.op = OpDeleteOne + return &PointsDeleteOne{builder} +} + +// Query returns a query builder for Points. +func (c *PointsClient) Query() *PointsQuery { + return &PointsQuery{ + config: c.config, + ctx: &QueryContext{Type: TypePoints}, + inters: c.Interceptors(), + } +} + +// Get returns a Points entity by its id. +func (c *PointsClient) Get(ctx context.Context, id int) (*Points, error) { + return c.Query().Where(points.ID(id)).Only(ctx) +} + +// GetX is like Get, but panics if an error occurs. +func (c *PointsClient) GetX(ctx context.Context, id int) *Points { + obj, err := c.Get(ctx, id) + if err != nil { + panic(err) + } + return obj +} + +// QueryUser queries the user edge of a Points. +func (c *PointsClient) QueryUser(_m *Points) *UserQuery { + query := (&UserClient{config: c.config}).Query() + query.path = func(context.Context) (fromV *sql.Selector, _ error) { + id := _m.ID + step := sqlgraph.NewStep( + sqlgraph.From(points.Table, points.FieldID, id), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, points.UserTable, points.UserColumn), + ) + fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// Hooks returns the client hooks. +func (c *PointsClient) Hooks() []Hook { + hooks := c.hooks.Points + return append(hooks[:len(hooks):len(hooks)], points.Hooks[:]...) +} + +// Interceptors returns the client interceptors. +func (c *PointsClient) Interceptors() []Interceptor { + inters := c.inters.Points + return append(inters[:len(inters):len(inters)], points.Interceptors[:]...) 
+} + +func (c *PointsClient) mutate(ctx context.Context, m *PointsMutation) (Value, error) { + switch m.Op() { + case OpCreate: + return (&PointsCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpUpdate: + return (&PointsUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpUpdateOne: + return (&PointsUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx) + case OpDelete, OpDeleteOne: + return (&PointsDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx) + default: + return nil, fmt.Errorf("ent: unknown Points mutation op: %q", m.Op()) + } +} + // QuestionClient is a client for the Question schema. type QuestionClient struct { config @@ -959,6 +1275,38 @@ func (c *UserClient) QueryGroup(_m *User) *GroupQuery { return query } +// QueryPoints queries the points edge of a User. +func (c *UserClient) QueryPoints(_m *User) *PointsQuery { + query := (&PointsClient{config: c.config}).Query() + query.path = func(context.Context) (fromV *sql.Selector, _ error) { + id := _m.ID + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, id), + sqlgraph.To(points.Table, points.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.PointsTable, user.PointsColumn), + ) + fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step) + return fromV, nil + } + return query +} + +// QueryEvents queries the events edge of a User. +func (c *UserClient) QueryEvents(_m *User) *EventsQuery { + query := (&EventsClient{config: c.config}).Query() + query.path = func(context.Context) (fromV *sql.Selector, _ error) { + id := _m.ID + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, id), + sqlgraph.To(events.Table, events.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.EventsTable, user.EventsColumn), + ) + fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step) + return fromV, nil + } + return query +} + // Hooks returns the client hooks. func (c *UserClient) Hooks() []Hook { hooks := c.hooks.User @@ -989,9 +1337,9 @@ func (c *UserClient) mutate(ctx context.Context, m *UserMutation) (Value, error) // hooks and interceptors per client, for fast access. 
type ( hooks struct { - Database, Group, Question, ScopeSet, User []ent.Hook + Database, Events, Group, Points, Question, ScopeSet, User []ent.Hook } inters struct { - Database, Group, Question, ScopeSet, User []ent.Interceptor + Database, Events, Group, Points, Question, ScopeSet, User []ent.Interceptor } ) diff --git a/ent/ent.go b/ent/ent.go index ed53c2e..372ef72 100644 --- a/ent/ent.go +++ b/ent/ent.go @@ -13,7 +13,9 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" "github.com/database-playground/backend-v2/ent/user" @@ -78,7 +80,9 @@ func checkColumn(t, c string) error { initCheck.Do(func() { columnCheck = sql.NewColumnCheck(map[string]func(string) bool{ database.Table: database.ValidColumn, + events.Table: events.ValidColumn, group.Table: group.ValidColumn, + points.Table: points.ValidColumn, question.Table: question.ValidColumn, scopeset.Table: scopeset.ValidColumn, user.Table: user.ValidColumn, diff --git a/ent/events.go b/ent/events.go new file mode 100644 index 0000000..1a53e92 --- /dev/null +++ b/ent/events.go @@ -0,0 +1,175 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "encoding/json" + "fmt" + "strings" + "time" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "github.com/database-playground/backend-v2/ent/events" + "github.com/database-playground/backend-v2/ent/user" +) + +// Events is the model entity for the Events schema. +type Events struct { + config `json:"-"` + // ID of the ent. + ID int `json:"id,omitempty"` + // UserID holds the value of the "user_id" field. + UserID int `json:"user_id,omitempty"` + // Type holds the value of the "type" field. + Type string `json:"type,omitempty"` + // TriggeredAt holds the value of the "triggered_at" field. + TriggeredAt time.Time `json:"triggered_at,omitempty"` + // Payload holds the value of the "payload" field. + Payload map[string]interface{} `json:"payload,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the EventsQuery when eager-loading is set. + Edges EventsEdges `json:"edges"` + selectValues sql.SelectValues +} + +// EventsEdges holds the relations/edges for other nodes in the graph. +type EventsEdges struct { + // User holds the value of the user edge. + User *User `json:"user,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [1]bool + // totalCount holds the count of the edges above. + totalCount [1]map[string]int +} + +// UserOrErr returns the User value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e EventsEdges) UserOrErr() (*User, error) { + if e.User != nil { + return e.User, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: user.Label} + } + return nil, &NotLoadedError{edge: "user"} +} + +// scanValues returns the types for scanning values from sql.Rows. 
+func (*Events) scanValues(columns []string) ([]any, error) { + values := make([]any, len(columns)) + for i := range columns { + switch columns[i] { + case events.FieldPayload: + values[i] = new([]byte) + case events.FieldID, events.FieldUserID: + values[i] = new(sql.NullInt64) + case events.FieldType: + values[i] = new(sql.NullString) + case events.FieldTriggeredAt: + values[i] = new(sql.NullTime) + default: + values[i] = new(sql.UnknownType) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the Events fields. +func (_m *Events) assignValues(columns []string, values []any) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case events.FieldID: + value, ok := values[i].(*sql.NullInt64) + if !ok { + return fmt.Errorf("unexpected type %T for field id", value) + } + _m.ID = int(value.Int64) + case events.FieldUserID: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for field user_id", values[i]) + } else if value.Valid { + _m.UserID = int(value.Int64) + } + case events.FieldType: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field type", values[i]) + } else if value.Valid { + _m.Type = value.String + } + case events.FieldTriggeredAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field triggered_at", values[i]) + } else if value.Valid { + _m.TriggeredAt = value.Time + } + case events.FieldPayload: + if value, ok := values[i].(*[]byte); !ok { + return fmt.Errorf("unexpected type %T for field payload", values[i]) + } else if value != nil && len(*value) > 0 { + if err := json.Unmarshal(*value, &_m.Payload); err != nil { + return fmt.Errorf("unmarshal field payload: %w", err) + } + } + default: + _m.selectValues.Set(columns[i], values[i]) + } + } + return nil +} + +// Value returns the ent.Value that was dynamically selected and assigned to the Events. +// This includes values selected through modifiers, order, etc. +func (_m *Events) Value(name string) (ent.Value, error) { + return _m.selectValues.Get(name) +} + +// QueryUser queries the "user" edge of the Events entity. +func (_m *Events) QueryUser() *UserQuery { + return NewEventsClient(_m.config).QueryUser(_m) +} + +// Update returns a builder for updating this Events. +// Note that you need to call Events.Unwrap() before calling this method if this Events +// was returned from a transaction, and the transaction was committed or rolled back. +func (_m *Events) Update() *EventsUpdateOne { + return NewEventsClient(_m.config).UpdateOne(_m) +} + +// Unwrap unwraps the Events entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (_m *Events) Unwrap() *Events { + _tx, ok := _m.config.driver.(*txDriver) + if !ok { + panic("ent: Events is not a transactional entity") + } + _m.config.driver = _tx.drv + return _m +} + +// String implements the fmt.Stringer. 
+func (_m *Events) String() string { + var builder strings.Builder + builder.WriteString("Events(") + builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID)) + builder.WriteString("user_id=") + builder.WriteString(fmt.Sprintf("%v", _m.UserID)) + builder.WriteString(", ") + builder.WriteString("type=") + builder.WriteString(_m.Type) + builder.WriteString(", ") + builder.WriteString("triggered_at=") + builder.WriteString(_m.TriggeredAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("payload=") + builder.WriteString(fmt.Sprintf("%v", _m.Payload)) + builder.WriteByte(')') + return builder.String() +} + +// EventsSlice is a parsable slice of Events. +type EventsSlice []*Events diff --git a/ent/events/events.go b/ent/events/events.go new file mode 100644 index 0000000..c76be14 --- /dev/null +++ b/ent/events/events.go @@ -0,0 +1,99 @@ +// Code generated by ent, DO NOT EDIT. + +package events + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" +) + +const ( + // Label holds the string label denoting the events type in the database. + Label = "events" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldUserID holds the string denoting the user_id field in the database. + FieldUserID = "user_id" + // FieldType holds the string denoting the type field in the database. + FieldType = "type" + // FieldTriggeredAt holds the string denoting the triggered_at field in the database. + FieldTriggeredAt = "triggered_at" + // FieldPayload holds the string denoting the payload field in the database. + FieldPayload = "payload" + // EdgeUser holds the string denoting the user edge name in mutations. + EdgeUser = "user" + // Table holds the table name of the events in the database. + Table = "events" + // UserTable is the table that holds the user relation/edge. + UserTable = "events" + // UserInverseTable is the table name for the User entity. + // It exists in this package in order to avoid circular dependency with the "user" package. + UserInverseTable = "users" + // UserColumn is the table column denoting the user relation/edge. + UserColumn = "user_id" +) + +// Columns holds all SQL columns for events fields. +var Columns = []string{ + FieldID, + FieldUserID, + FieldType, + FieldTriggeredAt, + FieldPayload, +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + return false +} + +var ( + // TypeValidator is a validator for the "type" field. It is called by the builders before save. + TypeValidator func(string) error + // DefaultTriggeredAt holds the default value on creation for the "triggered_at" field. + DefaultTriggeredAt func() time.Time +) + +// OrderOption defines the ordering options for the Events queries. +type OrderOption func(*sql.Selector) + +// ByID orders the results by the id field. +func ByID(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldID, opts...).ToFunc() +} + +// ByUserID orders the results by the user_id field. +func ByUserID(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldUserID, opts...).ToFunc() +} + +// ByType orders the results by the type field. +func ByType(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldType, opts...).ToFunc() +} + +// ByTriggeredAt orders the results by the triggered_at field. 
+func ByTriggeredAt(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldTriggeredAt, opts...).ToFunc() +} + +// ByUserField orders the results by user field. +func ByUserField(field string, opts ...sql.OrderTermOption) OrderOption { + return func(s *sql.Selector) { + sqlgraph.OrderByNeighborTerms(s, newUserStep(), sql.OrderByField(field, opts...)) + } +} +func newUserStep() *sqlgraph.Step { + return sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UserInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) +} diff --git a/ent/events/where.go b/ent/events/where.go new file mode 100644 index 0000000..a542615 --- /dev/null +++ b/ent/events/where.go @@ -0,0 +1,244 @@ +// Code generated by ent, DO NOT EDIT. + +package events + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/database-playground/backend-v2/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id int) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldID, id)) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id int) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldID, id)) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id int) predicate.Events { + return predicate.Events(sql.FieldNEQ(FieldID, id)) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...int) predicate.Events { + return predicate.Events(sql.FieldIn(FieldID, ids...)) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...int) predicate.Events { + return predicate.Events(sql.FieldNotIn(FieldID, ids...)) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id int) predicate.Events { + return predicate.Events(sql.FieldGT(FieldID, id)) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id int) predicate.Events { + return predicate.Events(sql.FieldGTE(FieldID, id)) +} + +// IDLT applies the LT predicate on the ID field. +func IDLT(id int) predicate.Events { + return predicate.Events(sql.FieldLT(FieldID, id)) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id int) predicate.Events { + return predicate.Events(sql.FieldLTE(FieldID, id)) +} + +// UserID applies equality check predicate on the "user_id" field. It's identical to UserIDEQ. +func UserID(v int) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldUserID, v)) +} + +// Type applies equality check predicate on the "type" field. It's identical to TypeEQ. +func Type(v string) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldType, v)) +} + +// TriggeredAt applies equality check predicate on the "triggered_at" field. It's identical to TriggeredAtEQ. +func TriggeredAt(v time.Time) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldTriggeredAt, v)) +} + +// UserIDEQ applies the EQ predicate on the "user_id" field. +func UserIDEQ(v int) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldUserID, v)) +} + +// UserIDNEQ applies the NEQ predicate on the "user_id" field. +func UserIDNEQ(v int) predicate.Events { + return predicate.Events(sql.FieldNEQ(FieldUserID, v)) +} + +// UserIDIn applies the In predicate on the "user_id" field. +func UserIDIn(vs ...int) predicate.Events { + return predicate.Events(sql.FieldIn(FieldUserID, vs...)) +} + +// UserIDNotIn applies the NotIn predicate on the "user_id" field. 
+func UserIDNotIn(vs ...int) predicate.Events { + return predicate.Events(sql.FieldNotIn(FieldUserID, vs...)) +} + +// TypeEQ applies the EQ predicate on the "type" field. +func TypeEQ(v string) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldType, v)) +} + +// TypeNEQ applies the NEQ predicate on the "type" field. +func TypeNEQ(v string) predicate.Events { + return predicate.Events(sql.FieldNEQ(FieldType, v)) +} + +// TypeIn applies the In predicate on the "type" field. +func TypeIn(vs ...string) predicate.Events { + return predicate.Events(sql.FieldIn(FieldType, vs...)) +} + +// TypeNotIn applies the NotIn predicate on the "type" field. +func TypeNotIn(vs ...string) predicate.Events { + return predicate.Events(sql.FieldNotIn(FieldType, vs...)) +} + +// TypeGT applies the GT predicate on the "type" field. +func TypeGT(v string) predicate.Events { + return predicate.Events(sql.FieldGT(FieldType, v)) +} + +// TypeGTE applies the GTE predicate on the "type" field. +func TypeGTE(v string) predicate.Events { + return predicate.Events(sql.FieldGTE(FieldType, v)) +} + +// TypeLT applies the LT predicate on the "type" field. +func TypeLT(v string) predicate.Events { + return predicate.Events(sql.FieldLT(FieldType, v)) +} + +// TypeLTE applies the LTE predicate on the "type" field. +func TypeLTE(v string) predicate.Events { + return predicate.Events(sql.FieldLTE(FieldType, v)) +} + +// TypeContains applies the Contains predicate on the "type" field. +func TypeContains(v string) predicate.Events { + return predicate.Events(sql.FieldContains(FieldType, v)) +} + +// TypeHasPrefix applies the HasPrefix predicate on the "type" field. +func TypeHasPrefix(v string) predicate.Events { + return predicate.Events(sql.FieldHasPrefix(FieldType, v)) +} + +// TypeHasSuffix applies the HasSuffix predicate on the "type" field. +func TypeHasSuffix(v string) predicate.Events { + return predicate.Events(sql.FieldHasSuffix(FieldType, v)) +} + +// TypeEqualFold applies the EqualFold predicate on the "type" field. +func TypeEqualFold(v string) predicate.Events { + return predicate.Events(sql.FieldEqualFold(FieldType, v)) +} + +// TypeContainsFold applies the ContainsFold predicate on the "type" field. +func TypeContainsFold(v string) predicate.Events { + return predicate.Events(sql.FieldContainsFold(FieldType, v)) +} + +// TriggeredAtEQ applies the EQ predicate on the "triggered_at" field. +func TriggeredAtEQ(v time.Time) predicate.Events { + return predicate.Events(sql.FieldEQ(FieldTriggeredAt, v)) +} + +// TriggeredAtNEQ applies the NEQ predicate on the "triggered_at" field. +func TriggeredAtNEQ(v time.Time) predicate.Events { + return predicate.Events(sql.FieldNEQ(FieldTriggeredAt, v)) +} + +// TriggeredAtIn applies the In predicate on the "triggered_at" field. +func TriggeredAtIn(vs ...time.Time) predicate.Events { + return predicate.Events(sql.FieldIn(FieldTriggeredAt, vs...)) +} + +// TriggeredAtNotIn applies the NotIn predicate on the "triggered_at" field. +func TriggeredAtNotIn(vs ...time.Time) predicate.Events { + return predicate.Events(sql.FieldNotIn(FieldTriggeredAt, vs...)) +} + +// TriggeredAtGT applies the GT predicate on the "triggered_at" field. +func TriggeredAtGT(v time.Time) predicate.Events { + return predicate.Events(sql.FieldGT(FieldTriggeredAt, v)) +} + +// TriggeredAtGTE applies the GTE predicate on the "triggered_at" field. 
+func TriggeredAtGTE(v time.Time) predicate.Events { + return predicate.Events(sql.FieldGTE(FieldTriggeredAt, v)) +} + +// TriggeredAtLT applies the LT predicate on the "triggered_at" field. +func TriggeredAtLT(v time.Time) predicate.Events { + return predicate.Events(sql.FieldLT(FieldTriggeredAt, v)) +} + +// TriggeredAtLTE applies the LTE predicate on the "triggered_at" field. +func TriggeredAtLTE(v time.Time) predicate.Events { + return predicate.Events(sql.FieldLTE(FieldTriggeredAt, v)) +} + +// PayloadIsNil applies the IsNil predicate on the "payload" field. +func PayloadIsNil() predicate.Events { + return predicate.Events(sql.FieldIsNull(FieldPayload)) +} + +// PayloadNotNil applies the NotNil predicate on the "payload" field. +func PayloadNotNil() predicate.Events { + return predicate.Events(sql.FieldNotNull(FieldPayload)) +} + +// HasUser applies the HasEdge predicate on the "user" edge. +func HasUser() predicate.Events { + return predicate.Events(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasUserWith applies the HasEdge predicate on the "user" edge with a given conditions (other predicates). +func HasUserWith(preds ...predicate.User) predicate.Events { + return predicate.Events(func(s *sql.Selector) { + step := newUserStep() + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.Events) predicate.Events { + return predicate.Events(sql.AndPredicates(predicates...)) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.Events) predicate.Events { + return predicate.Events(sql.OrPredicates(predicates...)) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.Events) predicate.Events { + return predicate.Events(sql.NotPredicates(p)) +} diff --git a/ent/events_create.go b/ent/events_create.go new file mode 100644 index 0000000..aa21b72 --- /dev/null +++ b/ent/events_create.go @@ -0,0 +1,265 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" + "github.com/database-playground/backend-v2/ent/user" +) + +// EventsCreate is the builder for creating a Events entity. +type EventsCreate struct { + config + mutation *EventsMutation + hooks []Hook +} + +// SetUserID sets the "user_id" field. +func (_c *EventsCreate) SetUserID(v int) *EventsCreate { + _c.mutation.SetUserID(v) + return _c +} + +// SetType sets the "type" field. +func (_c *EventsCreate) SetType(v string) *EventsCreate { + _c.mutation.SetType(v) + return _c +} + +// SetTriggeredAt sets the "triggered_at" field. +func (_c *EventsCreate) SetTriggeredAt(v time.Time) *EventsCreate { + _c.mutation.SetTriggeredAt(v) + return _c +} + +// SetNillableTriggeredAt sets the "triggered_at" field if the given value is not nil. +func (_c *EventsCreate) SetNillableTriggeredAt(v *time.Time) *EventsCreate { + if v != nil { + _c.SetTriggeredAt(*v) + } + return _c +} + +// SetPayload sets the "payload" field. +func (_c *EventsCreate) SetPayload(v map[string]interface{}) *EventsCreate { + _c.mutation.SetPayload(v) + return _c +} + +// SetUser sets the "user" edge to the User entity. 
+func (_c *EventsCreate) SetUser(v *User) *EventsCreate { + return _c.SetUserID(v.ID) +} + +// Mutation returns the EventsMutation object of the builder. +func (_c *EventsCreate) Mutation() *EventsMutation { + return _c.mutation +} + +// Save creates the Events in the database. +func (_c *EventsCreate) Save(ctx context.Context) (*Events, error) { + _c.defaults() + return withHooks(ctx, _c.sqlSave, _c.mutation, _c.hooks) +} + +// SaveX calls Save and panics if Save returns an error. +func (_c *EventsCreate) SaveX(ctx context.Context) *Events { + v, err := _c.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (_c *EventsCreate) Exec(ctx context.Context) error { + _, err := _c.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_c *EventsCreate) ExecX(ctx context.Context) { + if err := _c.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (_c *EventsCreate) defaults() { + if _, ok := _c.mutation.TriggeredAt(); !ok { + v := events.DefaultTriggeredAt() + _c.mutation.SetTriggeredAt(v) + } +} + +// check runs all checks and user-defined validators on the builder. +func (_c *EventsCreate) check() error { + if _, ok := _c.mutation.UserID(); !ok { + return &ValidationError{Name: "user_id", err: errors.New(`ent: missing required field "Events.user_id"`)} + } + if _, ok := _c.mutation.GetType(); !ok { + return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "Events.type"`)} + } + if v, ok := _c.mutation.GetType(); ok { + if err := events.TypeValidator(v); err != nil { + return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Events.type": %w`, err)} + } + } + if _, ok := _c.mutation.TriggeredAt(); !ok { + return &ValidationError{Name: "triggered_at", err: errors.New(`ent: missing required field "Events.triggered_at"`)} + } + if len(_c.mutation.UserIDs()) == 0 { + return &ValidationError{Name: "user", err: errors.New(`ent: missing required edge "Events.user"`)} + } + return nil +} + +func (_c *EventsCreate) sqlSave(ctx context.Context) (*Events, error) { + if err := _c.check(); err != nil { + return nil, err + } + _node, _spec := _c.createSpec() + if err := sqlgraph.CreateNode(ctx, _c.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + id := _spec.ID.Value.(int64) + _node.ID = int(id) + _c.mutation.id = &_node.ID + _c.mutation.done = true + return _node, nil +} + +func (_c *EventsCreate) createSpec() (*Events, *sqlgraph.CreateSpec) { + var ( + _node = &Events{config: _c.config} + _spec = sqlgraph.NewCreateSpec(events.Table, sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt)) + ) + if value, ok := _c.mutation.GetType(); ok { + _spec.SetField(events.FieldType, field.TypeString, value) + _node.Type = value + } + if value, ok := _c.mutation.TriggeredAt(); ok { + _spec.SetField(events.FieldTriggeredAt, field.TypeTime, value) + _node.TriggeredAt = value + } + if value, ok := _c.mutation.Payload(); ok { + _spec.SetField(events.FieldPayload, field.TypeJSON, value) + _node.Payload = value + } + if nodes := _c.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: events.UserTable, + Columns: []string{events.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + for _, k := 
range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.UserID = nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// EventsCreateBulk is the builder for creating many Events entities in bulk. +type EventsCreateBulk struct { + config + err error + builders []*EventsCreate +} + +// Save creates the Events entities in the database. +func (_c *EventsCreateBulk) Save(ctx context.Context) ([]*Events, error) { + if _c.err != nil { + return nil, _c.err + } + specs := make([]*sqlgraph.CreateSpec, len(_c.builders)) + nodes := make([]*Events, len(_c.builders)) + mutators := make([]Mutator, len(_c.builders)) + for i := range _c.builders { + func(i int, root context.Context) { + builder := _c.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*EventsMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + var err error + nodes[i], specs[i] = builder.createSpec() + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, _c.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. + if err = sqlgraph.BatchCreate(ctx, _c.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + if specs[i].ID.Value != nil { + id := specs[i].ID.Value.(int64) + nodes[i].ID = int(id) + } + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, _c.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (_c *EventsCreateBulk) SaveX(ctx context.Context) []*Events { + v, err := _c.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (_c *EventsCreateBulk) Exec(ctx context.Context) error { + _, err := _c.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_c *EventsCreateBulk) ExecX(ctx context.Context) { + if err := _c.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/ent/events_delete.go b/ent/events_delete.go new file mode 100644 index 0000000..88de681 --- /dev/null +++ b/ent/events_delete.go @@ -0,0 +1,88 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" + "github.com/database-playground/backend-v2/ent/predicate" +) + +// EventsDelete is the builder for deleting a Events entity. +type EventsDelete struct { + config + hooks []Hook + mutation *EventsMutation +} + +// Where appends a list predicates to the EventsDelete builder. +func (_d *EventsDelete) Where(ps ...predicate.Events) *EventsDelete { + _d.mutation.Where(ps...) + return _d +} + +// Exec executes the deletion query and returns how many vertices were deleted. 
+func (_d *EventsDelete) Exec(ctx context.Context) (int, error) { + return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks) +} + +// ExecX is like Exec, but panics if an error occurs. +func (_d *EventsDelete) ExecX(ctx context.Context) int { + n, err := _d.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (_d *EventsDelete) sqlExec(ctx context.Context) (int, error) { + _spec := sqlgraph.NewDeleteSpec(events.Table, sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt)) + if ps := _d.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + _d.mutation.done = true + return affected, err +} + +// EventsDeleteOne is the builder for deleting a single Events entity. +type EventsDeleteOne struct { + _d *EventsDelete +} + +// Where appends a list predicates to the EventsDelete builder. +func (_d *EventsDeleteOne) Where(ps ...predicate.Events) *EventsDeleteOne { + _d._d.mutation.Where(ps...) + return _d +} + +// Exec executes the deletion query. +func (_d *EventsDeleteOne) Exec(ctx context.Context) error { + n, err := _d._d.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{events.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (_d *EventsDeleteOne) ExecX(ctx context.Context) { + if err := _d.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/ent/events_query.go b/ent/events_query.go new file mode 100644 index 0000000..221b484 --- /dev/null +++ b/ent/events_query.go @@ -0,0 +1,619 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + "math" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" + "github.com/database-playground/backend-v2/ent/predicate" + "github.com/database-playground/backend-v2/ent/user" +) + +// EventsQuery is the builder for querying Events entities. +type EventsQuery struct { + config + ctx *QueryContext + order []events.OrderOption + inters []Interceptor + predicates []predicate.Events + withUser *UserQuery + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*Events) error + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the EventsQuery builder. +func (_q *EventsQuery) Where(ps ...predicate.Events) *EventsQuery { + _q.predicates = append(_q.predicates, ps...) + return _q +} + +// Limit the number of records to be returned by this query. +func (_q *EventsQuery) Limit(limit int) *EventsQuery { + _q.ctx.Limit = &limit + return _q +} + +// Offset to start from. +func (_q *EventsQuery) Offset(offset int) *EventsQuery { + _q.ctx.Offset = &offset + return _q +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (_q *EventsQuery) Unique(unique bool) *EventsQuery { + _q.ctx.Unique = &unique + return _q +} + +// Order specifies how the records should be ordered. +func (_q *EventsQuery) Order(o ...events.OrderOption) *EventsQuery { + _q.order = append(_q.order, o...) 
+ return _q +} + +// QueryUser chains the current query on the "user" edge. +func (_q *EventsQuery) QueryUser() *UserQuery { + query := (&UserClient{config: _q.config}).Query() + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := _q.prepareQuery(ctx); err != nil { + return nil, err + } + selector := _q.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(events.Table, events.FieldID, selector), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, events.UserTable, events.UserColumn), + ) + fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first Events entity from the query. +// Returns a *NotFoundError when no Events was found. +func (_q *EventsQuery) First(ctx context.Context) (*Events, error) { + nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst)) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{events.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (_q *EventsQuery) FirstX(ctx context.Context) *Events { + node, err := _q.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first Events ID from the query. +// Returns a *NotFoundError when no Events ID was found. +func (_q *EventsQuery) FirstID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{events.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (_q *EventsQuery) FirstIDX(ctx context.Context) int { + id, err := _q.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single Events entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one Events entity is found. +// Returns a *NotFoundError when no Events entities are found. +func (_q *EventsQuery) Only(ctx context.Context) (*Events, error) { + nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly)) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{events.Label} + default: + return nil, &NotSingularError{events.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (_q *EventsQuery) OnlyX(ctx context.Context) *Events { + node, err := _q.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only Events ID in the query. +// Returns a *NotSingularError when more than one Events ID is found. +// Returns a *NotFoundError when no entities are found. +func (_q *EventsQuery) OnlyID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{events.Label} + default: + err = &NotSingularError{events.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. 
+func (_q *EventsQuery) OnlyIDX(ctx context.Context) int { + id, err := _q.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of EventsSlice. +func (_q *EventsQuery) All(ctx context.Context) ([]*Events, error) { + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll) + if err := _q.prepareQuery(ctx); err != nil { + return nil, err + } + qr := querierAll[[]*Events, *EventsQuery]() + return withInterceptors[[]*Events](ctx, _q, qr, _q.inters) +} + +// AllX is like All, but panics if an error occurs. +func (_q *EventsQuery) AllX(ctx context.Context) []*Events { + nodes, err := _q.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of Events IDs. +func (_q *EventsQuery) IDs(ctx context.Context) (ids []int, err error) { + if _q.ctx.Unique == nil && _q.path != nil { + _q.Unique(true) + } + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs) + if err = _q.Select(events.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (_q *EventsQuery) IDsX(ctx context.Context) []int { + ids, err := _q.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (_q *EventsQuery) Count(ctx context.Context) (int, error) { + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount) + if err := _q.prepareQuery(ctx); err != nil { + return 0, err + } + return withInterceptors[int](ctx, _q, querierCount[*EventsQuery](), _q.inters) +} + +// CountX is like Count, but panics if an error occurs. +func (_q *EventsQuery) CountX(ctx context.Context) int { + count, err := _q.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (_q *EventsQuery) Exist(ctx context.Context) (bool, error) { + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist) + switch _, err := _q.FirstID(ctx); { + case IsNotFound(err): + return false, nil + case err != nil: + return false, fmt.Errorf("ent: check existence: %w", err) + default: + return true, nil + } +} + +// ExistX is like Exist, but panics if an error occurs. +func (_q *EventsQuery) ExistX(ctx context.Context) bool { + exist, err := _q.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the EventsQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (_q *EventsQuery) Clone() *EventsQuery { + if _q == nil { + return nil + } + return &EventsQuery{ + config: _q.config, + ctx: _q.ctx.Clone(), + order: append([]events.OrderOption{}, _q.order...), + inters: append([]Interceptor{}, _q.inters...), + predicates: append([]predicate.Events{}, _q.predicates...), + withUser: _q.withUser.Clone(), + // clone intermediate query. + sql: _q.sql.Clone(), + path: _q.path, + } +} + +// WithUser tells the query-builder to eager-load the nodes that are connected to +// the "user" edge. The optional arguments are used to configure the query builder of the edge. +func (_q *EventsQuery) WithUser(opts ...func(*UserQuery)) *EventsQuery { + query := (&UserClient{config: _q.config}).Query() + for _, opt := range opts { + opt(query) + } + _q.withUser = query + return _q +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. 
+// +// Example: +// +// var v []struct { +// UserID int `json:"user_id,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.Events.Query(). +// GroupBy(events.FieldUserID). +// Aggregate(ent.Count()). +// Scan(ctx, &v) +func (_q *EventsQuery) GroupBy(field string, fields ...string) *EventsGroupBy { + _q.ctx.Fields = append([]string{field}, fields...) + grbuild := &EventsGroupBy{build: _q} + grbuild.flds = &_q.ctx.Fields + grbuild.label = events.Label + grbuild.scan = grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// UserID int `json:"user_id,omitempty"` +// } +// +// client.Events.Query(). +// Select(events.FieldUserID). +// Scan(ctx, &v) +func (_q *EventsQuery) Select(fields ...string) *EventsSelect { + _q.ctx.Fields = append(_q.ctx.Fields, fields...) + sbuild := &EventsSelect{EventsQuery: _q} + sbuild.label = events.Label + sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan + return sbuild +} + +// Aggregate returns a EventsSelect configured with the given aggregations. +func (_q *EventsQuery) Aggregate(fns ...AggregateFunc) *EventsSelect { + return _q.Select().Aggregate(fns...) +} + +func (_q *EventsQuery) prepareQuery(ctx context.Context) error { + for _, inter := range _q.inters { + if inter == nil { + return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)") + } + if trv, ok := inter.(Traverser); ok { + if err := trv.Traverse(ctx, _q); err != nil { + return err + } + } + } + for _, f := range _q.ctx.Fields { + if !events.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if _q.path != nil { + prev, err := _q.path(ctx) + if err != nil { + return err + } + _q.sql = prev + } + return nil +} + +func (_q *EventsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Events, error) { + var ( + nodes = []*Events{} + _spec = _q.querySpec() + loadedTypes = [1]bool{ + _q.withUser != nil, + } + ) + _spec.ScanValues = func(columns []string) ([]any, error) { + return (*Events).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []any) error { + node := &Events{config: _q.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + if len(_q.modifiers) > 0 { + _spec.Modifiers = _q.modifiers + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := _q.withUser; query != nil { + if err := _q.loadUser(ctx, query, nodes, nil, + func(n *Events, e *User) { n.Edges.User = e }); err != nil { + return nil, err + } + } + for i := range _q.loadTotal { + if err := _q.loadTotal[i](ctx, nodes); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (_q *EventsQuery) loadUser(ctx context.Context, query *UserQuery, nodes []*Events, init func(*Events), assign func(*Events, *User)) error { + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*Events) + for i := range nodes { + fk := nodes[i].UserID + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + if len(ids) == 0 { + return nil + } + query.Where(user.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + 
nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "user_id" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} + +func (_q *EventsQuery) sqlCount(ctx context.Context) (int, error) { + _spec := _q.querySpec() + if len(_q.modifiers) > 0 { + _spec.Modifiers = _q.modifiers + } + _spec.Node.Columns = _q.ctx.Fields + if len(_q.ctx.Fields) > 0 { + _spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique + } + return sqlgraph.CountNodes(ctx, _q.driver, _spec) +} + +func (_q *EventsQuery) querySpec() *sqlgraph.QuerySpec { + _spec := sqlgraph.NewQuerySpec(events.Table, events.Columns, sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt)) + _spec.From = _q.sql + if unique := _q.ctx.Unique; unique != nil { + _spec.Unique = *unique + } else if _q.path != nil { + _spec.Unique = true + } + if fields := _q.ctx.Fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, events.FieldID) + for i := range fields { + if fields[i] != events.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + if _q.withUser != nil { + _spec.Node.AddColumnOnce(events.FieldUserID) + } + } + if ps := _q.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := _q.ctx.Limit; limit != nil { + _spec.Limit = *limit + } + if offset := _q.ctx.Offset; offset != nil { + _spec.Offset = *offset + } + if ps := _q.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (_q *EventsQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(_q.driver.Dialect()) + t1 := builder.Table(events.Table) + columns := _q.ctx.Fields + if len(columns) == 0 { + columns = events.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if _q.sql != nil { + selector = _q.sql + selector.Select(selector.Columns(columns...)...) + } + if _q.ctx.Unique != nil && *_q.ctx.Unique { + selector.Distinct() + } + for _, p := range _q.predicates { + p(selector) + } + for _, p := range _q.order { + p(selector) + } + if offset := _q.ctx.Offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := _q.ctx.Limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// EventsGroupBy is the group-by builder for Events entities. +type EventsGroupBy struct { + selector + build *EventsQuery +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (_g *EventsGroupBy) Aggregate(fns ...AggregateFunc) *EventsGroupBy { + _g.fns = append(_g.fns, fns...) + return _g +} + +// Scan applies the selector query and scans the result into the given value. 
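+//
+// An illustrative sketch in the same spirit as the GroupBy example above,
+// counting events per type (ctx and client are assumed to be in scope):
+//
+//	var v []struct {
+//		Type  string `json:"type,omitempty"`
+//		Count int    `json:"count,omitempty"`
+//	}
+//
+//	client.Events.Query().
+//		GroupBy(events.FieldType).
+//		Aggregate(ent.Count()).
+//		Scan(ctx, &v)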
+func (_g *EventsGroupBy) Scan(ctx context.Context, v any) error { + ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy) + if err := _g.build.prepareQuery(ctx); err != nil { + return err + } + return scanWithInterceptors[*EventsQuery, *EventsGroupBy](ctx, _g.build, _g, _g.build.inters, v) +} + +func (_g *EventsGroupBy) sqlScan(ctx context.Context, root *EventsQuery, v any) error { + selector := root.sqlQuery(ctx).Select() + aggregation := make([]string, 0, len(_g.fns)) + for _, fn := range _g.fns { + aggregation = append(aggregation, fn(selector)) + } + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(*_g.flds)+len(_g.fns)) + for _, f := range *_g.flds { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + selector.GroupBy(selector.Columns(*_g.flds...)...) + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := _g.build.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +// EventsSelect is the builder for selecting fields of Events entities. +type EventsSelect struct { + *EventsQuery + selector +} + +// Aggregate adds the given aggregation functions to the selector query. +func (_s *EventsSelect) Aggregate(fns ...AggregateFunc) *EventsSelect { + _s.fns = append(_s.fns, fns...) + return _s +} + +// Scan applies the selector query and scans the result into the given value. +func (_s *EventsSelect) Scan(ctx context.Context, v any) error { + ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect) + if err := _s.prepareQuery(ctx); err != nil { + return err + } + return scanWithInterceptors[*EventsQuery, *EventsSelect](ctx, _s.EventsQuery, _s, _s.inters, v) +} + +func (_s *EventsSelect) sqlScan(ctx context.Context, root *EventsQuery, v any) error { + selector := root.sqlQuery(ctx) + aggregation := make([]string, 0, len(_s.fns)) + for _, fn := range _s.fns { + aggregation = append(aggregation, fn(selector)) + } + switch n := len(*_s.selector.flds); { + case n == 0 && len(aggregation) > 0: + selector.Select(aggregation...) + case n != 0 && len(aggregation) > 0: + selector.AppendSelect(aggregation...) + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := _s.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/ent/events_update.go b/ent/events_update.go new file mode 100644 index 0000000..7d50b06 --- /dev/null +++ b/ent/events_update.go @@ -0,0 +1,421 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" + "github.com/database-playground/backend-v2/ent/predicate" + "github.com/database-playground/backend-v2/ent/user" +) + +// EventsUpdate is the builder for updating Events entities. +type EventsUpdate struct { + config + hooks []Hook + mutation *EventsMutation +} + +// Where appends a list predicates to the EventsUpdate builder. +func (_u *EventsUpdate) Where(ps ...predicate.Events) *EventsUpdate { + _u.mutation.Where(ps...) + return _u +} + +// SetUserID sets the "user_id" field. 
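+//
+// A minimal sketch, assuming ctx, client, and the two user IDs are in scope:
+// reassigning existing events to another user through the update builder:
+//
+//	n, err := client.Events.Update().
+//		Where(events.UserID(oldID)).
+//		SetUserID(newID).
+//		Save(ctx)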
+func (_u *EventsUpdate) SetUserID(v int) *EventsUpdate { + _u.mutation.SetUserID(v) + return _u +} + +// SetNillableUserID sets the "user_id" field if the given value is not nil. +func (_u *EventsUpdate) SetNillableUserID(v *int) *EventsUpdate { + if v != nil { + _u.SetUserID(*v) + } + return _u +} + +// SetType sets the "type" field. +func (_u *EventsUpdate) SetType(v string) *EventsUpdate { + _u.mutation.SetType(v) + return _u +} + +// SetNillableType sets the "type" field if the given value is not nil. +func (_u *EventsUpdate) SetNillableType(v *string) *EventsUpdate { + if v != nil { + _u.SetType(*v) + } + return _u +} + +// SetTriggeredAt sets the "triggered_at" field. +func (_u *EventsUpdate) SetTriggeredAt(v time.Time) *EventsUpdate { + _u.mutation.SetTriggeredAt(v) + return _u +} + +// SetNillableTriggeredAt sets the "triggered_at" field if the given value is not nil. +func (_u *EventsUpdate) SetNillableTriggeredAt(v *time.Time) *EventsUpdate { + if v != nil { + _u.SetTriggeredAt(*v) + } + return _u +} + +// SetPayload sets the "payload" field. +func (_u *EventsUpdate) SetPayload(v map[string]interface{}) *EventsUpdate { + _u.mutation.SetPayload(v) + return _u +} + +// ClearPayload clears the value of the "payload" field. +func (_u *EventsUpdate) ClearPayload() *EventsUpdate { + _u.mutation.ClearPayload() + return _u +} + +// SetUser sets the "user" edge to the User entity. +func (_u *EventsUpdate) SetUser(v *User) *EventsUpdate { + return _u.SetUserID(v.ID) +} + +// Mutation returns the EventsMutation object of the builder. +func (_u *EventsUpdate) Mutation() *EventsMutation { + return _u.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (_u *EventsUpdate) ClearUser() *EventsUpdate { + _u.mutation.ClearUser() + return _u +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (_u *EventsUpdate) Save(ctx context.Context) (int, error) { + return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks) +} + +// SaveX is like Save, but panics if an error occurs. +func (_u *EventsUpdate) SaveX(ctx context.Context) int { + affected, err := _u.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (_u *EventsUpdate) Exec(ctx context.Context) error { + _, err := _u.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_u *EventsUpdate) ExecX(ctx context.Context) { + if err := _u.Exec(ctx); err != nil { + panic(err) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (_u *EventsUpdate) check() error { + if v, ok := _u.mutation.GetType(); ok { + if err := events.TypeValidator(v); err != nil { + return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Events.type": %w`, err)} + } + } + if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 { + return errors.New(`ent: clearing a required unique edge "Events.user"`) + } + return nil +} + +func (_u *EventsUpdate) sqlSave(ctx context.Context) (_node int, err error) { + if err := _u.check(); err != nil { + return _node, err + } + _spec := sqlgraph.NewUpdateSpec(events.Table, events.Columns, sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt)) + if ps := _u.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := _u.mutation.GetType(); ok { + _spec.SetField(events.FieldType, field.TypeString, value) + } + if value, ok := _u.mutation.TriggeredAt(); ok { + _spec.SetField(events.FieldTriggeredAt, field.TypeTime, value) + } + if value, ok := _u.mutation.Payload(); ok { + _spec.SetField(events.FieldPayload, field.TypeJSON, value) + } + if _u.mutation.PayloadCleared() { + _spec.ClearField(events.FieldPayload, field.TypeJSON) + } + if _u.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: events.UserTable, + Columns: []string{events.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: events.UserTable, + Columns: []string{events.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{events.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + _u.mutation.done = true + return _node, nil +} + +// EventsUpdateOne is the builder for updating a single Events entity. +type EventsUpdateOne struct { + config + fields []string + hooks []Hook + mutation *EventsMutation +} + +// SetUserID sets the "user_id" field. +func (_u *EventsUpdateOne) SetUserID(v int) *EventsUpdateOne { + _u.mutation.SetUserID(v) + return _u +} + +// SetNillableUserID sets the "user_id" field if the given value is not nil. +func (_u *EventsUpdateOne) SetNillableUserID(v *int) *EventsUpdateOne { + if v != nil { + _u.SetUserID(*v) + } + return _u +} + +// SetType sets the "type" field. +func (_u *EventsUpdateOne) SetType(v string) *EventsUpdateOne { + _u.mutation.SetType(v) + return _u +} + +// SetNillableType sets the "type" field if the given value is not nil. +func (_u *EventsUpdateOne) SetNillableType(v *string) *EventsUpdateOne { + if v != nil { + _u.SetType(*v) + } + return _u +} + +// SetTriggeredAt sets the "triggered_at" field. +func (_u *EventsUpdateOne) SetTriggeredAt(v time.Time) *EventsUpdateOne { + _u.mutation.SetTriggeredAt(v) + return _u +} + +// SetNillableTriggeredAt sets the "triggered_at" field if the given value is not nil. 
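+//
+// A minimal sketch, assuming ctx, client, an event id, and an optional
+// *time.Time named ts are in scope; the field is only written when ts is
+// non-nil:
+//
+//	ev, err := client.Events.UpdateOneID(id).
+//		SetNillableTriggeredAt(ts).
+//		Save(ctx)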
+func (_u *EventsUpdateOne) SetNillableTriggeredAt(v *time.Time) *EventsUpdateOne { + if v != nil { + _u.SetTriggeredAt(*v) + } + return _u +} + +// SetPayload sets the "payload" field. +func (_u *EventsUpdateOne) SetPayload(v map[string]interface{}) *EventsUpdateOne { + _u.mutation.SetPayload(v) + return _u +} + +// ClearPayload clears the value of the "payload" field. +func (_u *EventsUpdateOne) ClearPayload() *EventsUpdateOne { + _u.mutation.ClearPayload() + return _u +} + +// SetUser sets the "user" edge to the User entity. +func (_u *EventsUpdateOne) SetUser(v *User) *EventsUpdateOne { + return _u.SetUserID(v.ID) +} + +// Mutation returns the EventsMutation object of the builder. +func (_u *EventsUpdateOne) Mutation() *EventsMutation { + return _u.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (_u *EventsUpdateOne) ClearUser() *EventsUpdateOne { + _u.mutation.ClearUser() + return _u +} + +// Where appends a list predicates to the EventsUpdate builder. +func (_u *EventsUpdateOne) Where(ps ...predicate.Events) *EventsUpdateOne { + _u.mutation.Where(ps...) + return _u +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (_u *EventsUpdateOne) Select(field string, fields ...string) *EventsUpdateOne { + _u.fields = append([]string{field}, fields...) + return _u +} + +// Save executes the query and returns the updated Events entity. +func (_u *EventsUpdateOne) Save(ctx context.Context) (*Events, error) { + return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks) +} + +// SaveX is like Save, but panics if an error occurs. +func (_u *EventsUpdateOne) SaveX(ctx context.Context) *Events { + node, err := _u.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (_u *EventsUpdateOne) Exec(ctx context.Context) error { + _, err := _u.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_u *EventsUpdateOne) ExecX(ctx context.Context) { + if err := _u.Exec(ctx); err != nil { + panic(err) + } +} + +// check runs all checks and user-defined validators on the builder. 
+func (_u *EventsUpdateOne) check() error { + if v, ok := _u.mutation.GetType(); ok { + if err := events.TypeValidator(v); err != nil { + return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Events.type": %w`, err)} + } + } + if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 { + return errors.New(`ent: clearing a required unique edge "Events.user"`) + } + return nil +} + +func (_u *EventsUpdateOne) sqlSave(ctx context.Context) (_node *Events, err error) { + if err := _u.check(); err != nil { + return _node, err + } + _spec := sqlgraph.NewUpdateSpec(events.Table, events.Columns, sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt)) + id, ok := _u.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Events.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := _u.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, events.FieldID) + for _, f := range fields { + if !events.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != events.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := _u.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := _u.mutation.GetType(); ok { + _spec.SetField(events.FieldType, field.TypeString, value) + } + if value, ok := _u.mutation.TriggeredAt(); ok { + _spec.SetField(events.FieldTriggeredAt, field.TypeTime, value) + } + if value, ok := _u.mutation.Payload(); ok { + _spec.SetField(events.FieldPayload, field.TypeJSON, value) + } + if _u.mutation.PayloadCleared() { + _spec.ClearField(events.FieldPayload, field.TypeJSON) + } + if _u.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: events.UserTable, + Columns: []string{events.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: events.UserTable, + Columns: []string{events.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &Events{config: _u.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{events.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + _u.mutation.done = true + return _node, nil +} diff --git a/ent/gql_collection.go b/ent/gql_collection.go index 50aff75..75425ec 100644 --- a/ent/gql_collection.go +++ b/ent/gql_collection.go @@ -8,7 +8,9 @@ import ( "entgo.io/contrib/entgql" "github.com/99designs/gqlgen/graphql" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" 
"github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" "github.com/database-playground/backend-v2/ent/user" @@ -109,6 +111,103 @@ func newDatabasePaginateArgs(rv map[string]any) *databasePaginateArgs { return args } +// CollectFields tells the query-builder to eagerly load connected nodes by resolver context. +func (_q *EventsQuery) CollectFields(ctx context.Context, satisfies ...string) (*EventsQuery, error) { + fc := graphql.GetFieldContext(ctx) + if fc == nil { + return _q, nil + } + if err := _q.collectField(ctx, false, graphql.GetOperationContext(ctx), fc.Field, nil, satisfies...); err != nil { + return nil, err + } + return _q, nil +} + +func (_q *EventsQuery) collectField(ctx context.Context, oneNode bool, opCtx *graphql.OperationContext, collected graphql.CollectedField, path []string, satisfies ...string) error { + path = append([]string(nil), path...) + var ( + unknownSeen bool + fieldSeen = make(map[string]struct{}, len(events.Columns)) + selectedFields = []string{events.FieldID} + ) + for _, field := range graphql.CollectFields(opCtx, collected.Selections, satisfies) { + switch field.Name { + + case "user": + var ( + alias = field.Alias + path = append(path, alias) + query = (&UserClient{config: _q.config}).Query() + ) + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, userImplementors)...); err != nil { + return err + } + _q.withUser = query + if _, ok := fieldSeen[events.FieldUserID]; !ok { + selectedFields = append(selectedFields, events.FieldUserID) + fieldSeen[events.FieldUserID] = struct{}{} + } + case "userID": + if _, ok := fieldSeen[events.FieldUserID]; !ok { + selectedFields = append(selectedFields, events.FieldUserID) + fieldSeen[events.FieldUserID] = struct{}{} + } + case "type": + if _, ok := fieldSeen[events.FieldType]; !ok { + selectedFields = append(selectedFields, events.FieldType) + fieldSeen[events.FieldType] = struct{}{} + } + case "triggeredAt": + if _, ok := fieldSeen[events.FieldTriggeredAt]; !ok { + selectedFields = append(selectedFields, events.FieldTriggeredAt) + fieldSeen[events.FieldTriggeredAt] = struct{}{} + } + case "payload": + if _, ok := fieldSeen[events.FieldPayload]; !ok { + selectedFields = append(selectedFields, events.FieldPayload) + fieldSeen[events.FieldPayload] = struct{}{} + } + case "id": + case "__typename": + default: + unknownSeen = true + } + } + if !unknownSeen { + _q.Select(selectedFields...) + } + return nil +} + +type eventsPaginateArgs struct { + first, last *int + after, before *Cursor + opts []EventsPaginateOption +} + +func newEventsPaginateArgs(rv map[string]any) *eventsPaginateArgs { + args := &eventsPaginateArgs{} + if rv == nil { + return args + } + if v := rv[firstField]; v != nil { + args.first = v.(*int) + } + if v := rv[lastField]; v != nil { + args.last = v.(*int) + } + if v := rv[afterField]; v != nil { + args.after = v.(*Cursor) + } + if v := rv[beforeField]; v != nil { + args.before = v.(*Cursor) + } + if v, ok := rv[whereField].(*EventsWhereInput); ok { + args.opts = append(args.opts, WithEventsFilter(v.Filter)) + } + return args +} + // CollectFields tells the query-builder to eagerly load connected nodes by resolver context. 
func (_q *GroupQuery) CollectFields(ctx context.Context, satisfies ...string) (*GroupQuery, error) { fc := graphql.GetFieldContext(ctx) @@ -209,6 +308,104 @@ func newGroupPaginateArgs(rv map[string]any) *groupPaginateArgs { return args } +// CollectFields tells the query-builder to eagerly load connected nodes by resolver context. +func (_q *PointsQuery) CollectFields(ctx context.Context, satisfies ...string) (*PointsQuery, error) { + fc := graphql.GetFieldContext(ctx) + if fc == nil { + return _q, nil + } + if err := _q.collectField(ctx, false, graphql.GetOperationContext(ctx), fc.Field, nil, satisfies...); err != nil { + return nil, err + } + return _q, nil +} + +func (_q *PointsQuery) collectField(ctx context.Context, oneNode bool, opCtx *graphql.OperationContext, collected graphql.CollectedField, path []string, satisfies ...string) error { + path = append([]string(nil), path...) + var ( + unknownSeen bool + fieldSeen = make(map[string]struct{}, len(points.Columns)) + selectedFields = []string{points.FieldID} + ) + for _, field := range graphql.CollectFields(opCtx, collected.Selections, satisfies) { + switch field.Name { + + case "user": + var ( + alias = field.Alias + path = append(path, alias) + query = (&UserClient{config: _q.config}).Query() + ) + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, userImplementors)...); err != nil { + return err + } + _q.withUser = query + case "createdAt": + if _, ok := fieldSeen[points.FieldCreatedAt]; !ok { + selectedFields = append(selectedFields, points.FieldCreatedAt) + fieldSeen[points.FieldCreatedAt] = struct{}{} + } + case "updatedAt": + if _, ok := fieldSeen[points.FieldUpdatedAt]; !ok { + selectedFields = append(selectedFields, points.FieldUpdatedAt) + fieldSeen[points.FieldUpdatedAt] = struct{}{} + } + case "deletedAt": + if _, ok := fieldSeen[points.FieldDeletedAt]; !ok { + selectedFields = append(selectedFields, points.FieldDeletedAt) + fieldSeen[points.FieldDeletedAt] = struct{}{} + } + case "points": + if _, ok := fieldSeen[points.FieldPoints]; !ok { + selectedFields = append(selectedFields, points.FieldPoints) + fieldSeen[points.FieldPoints] = struct{}{} + } + case "description": + if _, ok := fieldSeen[points.FieldDescription]; !ok { + selectedFields = append(selectedFields, points.FieldDescription) + fieldSeen[points.FieldDescription] = struct{}{} + } + case "id": + case "__typename": + default: + unknownSeen = true + } + } + if !unknownSeen { + _q.Select(selectedFields...) + } + return nil +} + +type pointsPaginateArgs struct { + first, last *int + after, before *Cursor + opts []PointsPaginateOption +} + +func newPointsPaginateArgs(rv map[string]any) *pointsPaginateArgs { + args := &pointsPaginateArgs{} + if rv == nil { + return args + } + if v := rv[firstField]; v != nil { + args.first = v.(*int) + } + if v := rv[lastField]; v != nil { + args.last = v.(*int) + } + if v := rv[afterField]; v != nil { + args.after = v.(*Cursor) + } + if v := rv[beforeField]; v != nil { + args.before = v.(*Cursor) + } + if v, ok := rv[whereField].(*PointsWhereInput); ok { + args.opts = append(args.opts, WithPointsFilter(v.Filter)) + } + return args +} + // CollectFields tells the query-builder to eagerly load connected nodes by resolver context. 
func (_q *QuestionQuery) CollectFields(ctx context.Context, satisfies ...string) (*QuestionQuery, error) { fc := graphql.GetFieldContext(ctx) @@ -451,6 +648,32 @@ func (_q *UserQuery) collectField(ctx context.Context, oneNode bool, opCtx *grap return err } _q.withGroup = query + + case "points": + var ( + alias = field.Alias + path = append(path, alias) + query = (&PointsClient{config: _q.config}).Query() + ) + if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, pointsImplementors)...); err != nil { + return err + } + _q.WithNamedPoints(alias, func(wq *PointsQuery) { + *wq = *query + }) + + case "events": + var ( + alias = field.Alias + path = append(path, alias) + query = (&EventsClient{config: _q.config}).Query() + ) + if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, eventsImplementors)...); err != nil { + return err + } + _q.WithNamedEvents(alias, func(wq *EventsQuery) { + *wq = *query + }) case "createdAt": if _, ok := fieldSeen[user.FieldCreatedAt]; !ok { selectedFields = append(selectedFields, user.FieldCreatedAt) diff --git a/ent/gql_edge.go b/ent/gql_edge.go index 708fdf5..ce3f98d 100644 --- a/ent/gql_edge.go +++ b/ent/gql_edge.go @@ -20,6 +20,14 @@ func (_m *Database) Questions(ctx context.Context) (result []*Question, err erro return result, err } +func (_m *Events) User(ctx context.Context) (*User, error) { + result, err := _m.Edges.UserOrErr() + if IsNotLoaded(err) { + result, err = _m.QueryUser().Only(ctx) + } + return result, err +} + func (_m *Group) ScopeSets(ctx context.Context) (result []*ScopeSet, err error) { if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { result, err = _m.NamedScopeSets(graphql.GetFieldContext(ctx).Field.Alias) @@ -32,6 +40,14 @@ func (_m *Group) ScopeSets(ctx context.Context) (result []*ScopeSet, err error) return result, err } +func (_m *Points) User(ctx context.Context) (*User, error) { + result, err := _m.Edges.UserOrErr() + if IsNotLoaded(err) { + result, err = _m.QueryUser().Only(ctx) + } + return result, err +} + func (_m *Question) Database(ctx context.Context) (*Database, error) { result, err := _m.Edges.DatabaseOrErr() if IsNotLoaded(err) { @@ -59,3 +75,27 @@ func (_m *User) Group(ctx context.Context) (*Group, error) { } return result, err } + +func (_m *User) Points(ctx context.Context) (result []*Points, err error) { + if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { + result, err = _m.NamedPoints(graphql.GetFieldContext(ctx).Field.Alias) + } else { + result, err = _m.Edges.PointsOrErr() + } + if IsNotLoaded(err) { + result, err = _m.QueryPoints().All(ctx) + } + return result, err +} + +func (_m *User) Events(ctx context.Context) (result []*Events, err error) { + if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { + result, err = _m.NamedEvents(graphql.GetFieldContext(ctx).Field.Alias) + } else { + result, err = _m.Edges.EventsOrErr() + } + if IsNotLoaded(err) { + result, err = _m.QueryEvents().All(ctx) + } + return result, err +} diff --git a/ent/gql_mutation_input.go b/ent/gql_mutation_input.go index 257bfe6..4b6b6e9 100644 --- a/ent/gql_mutation_input.go +++ b/ent/gql_mutation_input.go @@ -3,6 +3,8 @@ package ent import ( + "time" + "github.com/database-playground/backend-v2/ent/question" ) @@ -82,6 +84,72 @@ func (c *DatabaseUpdateOne) SetInput(i UpdateDatabaseInput) *DatabaseUpdateOne { return c } +// CreateEventsInput represents a mutation input for creating eventsslice. 
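+//
+// A minimal sketch, assuming ctx, client, and a user ID are in scope (the
+// "login" type string is only an example value):
+//
+//	ev, err := client.Events.Create().
+//		SetInput(CreateEventsInput{Type: "login", UserID: userID}).
+//		Save(ctx)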
+type CreateEventsInput struct { + Type string + TriggeredAt *time.Time + Payload map[string]interface{} + UserID int +} + +// Mutate applies the CreateEventsInput on the EventsMutation builder. +func (i *CreateEventsInput) Mutate(m *EventsMutation) { + m.SetType(i.Type) + if v := i.TriggeredAt; v != nil { + m.SetTriggeredAt(*v) + } + if v := i.Payload; v != nil { + m.SetPayload(v) + } + m.SetUserID(i.UserID) +} + +// SetInput applies the change-set in the CreateEventsInput on the EventsCreate builder. +func (c *EventsCreate) SetInput(i CreateEventsInput) *EventsCreate { + i.Mutate(c.Mutation()) + return c +} + +// UpdateEventsInput represents a mutation input for updating eventsslice. +type UpdateEventsInput struct { + Type *string + TriggeredAt *time.Time + ClearPayload bool + Payload map[string]interface{} + UserID *int +} + +// Mutate applies the UpdateEventsInput on the EventsMutation builder. +func (i *UpdateEventsInput) Mutate(m *EventsMutation) { + if v := i.Type; v != nil { + m.SetType(*v) + } + if v := i.TriggeredAt; v != nil { + m.SetTriggeredAt(*v) + } + if i.ClearPayload { + m.ClearPayload() + } + if v := i.Payload; v != nil { + m.SetPayload(v) + } + if v := i.UserID; v != nil { + m.SetUserID(*v) + } +} + +// SetInput applies the change-set in the UpdateEventsInput on the EventsUpdate builder. +func (c *EventsUpdate) SetInput(i UpdateEventsInput) *EventsUpdate { + i.Mutate(c.Mutation()) + return c +} + +// SetInput applies the change-set in the UpdateEventsInput on the EventsUpdateOne builder. +func (c *EventsUpdateOne) SetInput(i UpdateEventsInput) *EventsUpdateOne { + i.Mutate(c.Mutation()) + return c +} + // CreateGroupInput represents a mutation input for creating groups. type CreateGroupInput struct { Name string @@ -150,6 +218,66 @@ func (c *GroupUpdateOne) SetInput(i UpdateGroupInput) *GroupUpdateOne { return c } +// CreatePointsInput represents a mutation input for creating pointsslice. +type CreatePointsInput struct { + Points *int + Description *string + UserID int +} + +// Mutate applies the CreatePointsInput on the PointsMutation builder. +func (i *CreatePointsInput) Mutate(m *PointsMutation) { + if v := i.Points; v != nil { + m.SetPoints(*v) + } + if v := i.Description; v != nil { + m.SetDescription(*v) + } + m.SetUserID(i.UserID) +} + +// SetInput applies the change-set in the CreatePointsInput on the PointsCreate builder. +func (c *PointsCreate) SetInput(i CreatePointsInput) *PointsCreate { + i.Mutate(c.Mutation()) + return c +} + +// UpdatePointsInput represents a mutation input for updating pointsslice. +type UpdatePointsInput struct { + Points *int + ClearDescription bool + Description *string + UserID *int +} + +// Mutate applies the UpdatePointsInput on the PointsMutation builder. +func (i *UpdatePointsInput) Mutate(m *PointsMutation) { + if v := i.Points; v != nil { + m.SetPoints(*v) + } + if i.ClearDescription { + m.ClearDescription() + } + if v := i.Description; v != nil { + m.SetDescription(*v) + } + if v := i.UserID; v != nil { + m.SetUserID(*v) + } +} + +// SetInput applies the change-set in the UpdatePointsInput on the PointsUpdate builder. +func (c *PointsUpdate) SetInput(i UpdatePointsInput) *PointsUpdate { + i.Mutate(c.Mutation()) + return c +} + +// SetInput applies the change-set in the UpdatePointsInput on the PointsUpdateOne builder. +func (c *PointsUpdateOne) SetInput(i UpdatePointsInput) *PointsUpdateOne { + i.Mutate(c.Mutation()) + return c +} + // CreateQuestionInput represents a mutation input for creating questions. 
type CreateQuestionInput struct { Category string @@ -296,10 +424,12 @@ func (c *ScopeSetUpdateOne) SetInput(i UpdateScopeSetInput) *ScopeSetUpdateOne { // CreateUserInput represents a mutation input for creating users. type CreateUserInput struct { - Name string - Email string - Avatar *string - GroupID int + Name string + Email string + Avatar *string + GroupID int + PointIDs []int + EventIDs []int } // Mutate applies the CreateUserInput on the UserMutation builder. @@ -310,6 +440,12 @@ func (i *CreateUserInput) Mutate(m *UserMutation) { m.SetAvatar(*v) } m.SetGroupID(i.GroupID) + if v := i.PointIDs; len(v) > 0 { + m.AddPointIDs(v...) + } + if v := i.EventIDs; len(v) > 0 { + m.AddEventIDs(v...) + } } // SetInput applies the change-set in the CreateUserInput on the UserCreate builder. @@ -320,10 +456,16 @@ func (c *UserCreate) SetInput(i CreateUserInput) *UserCreate { // UpdateUserInput represents a mutation input for updating users. type UpdateUserInput struct { - Name *string - ClearAvatar bool - Avatar *string - GroupID *int + Name *string + ClearAvatar bool + Avatar *string + GroupID *int + ClearPoints bool + AddPointIDs []int + RemovePointIDs []int + ClearEvents bool + AddEventIDs []int + RemoveEventIDs []int } // Mutate applies the UpdateUserInput on the UserMutation builder. @@ -340,6 +482,24 @@ func (i *UpdateUserInput) Mutate(m *UserMutation) { if v := i.GroupID; v != nil { m.SetGroupID(*v) } + if i.ClearPoints { + m.ClearPoints() + } + if v := i.AddPointIDs; len(v) > 0 { + m.AddPointIDs(v...) + } + if v := i.RemovePointIDs; len(v) > 0 { + m.RemovePointIDs(v...) + } + if i.ClearEvents { + m.ClearEvents() + } + if v := i.AddEventIDs; len(v) > 0 { + m.AddEventIDs(v...) + } + if v := i.RemoveEventIDs; len(v) > 0 { + m.RemoveEventIDs(v...) + } } // SetInput applies the change-set in the UpdateUserInput on the UserUpdate builder. diff --git a/ent/gql_node.go b/ent/gql_node.go index 332cd6b..ea9c3e3 100644 --- a/ent/gql_node.go +++ b/ent/gql_node.go @@ -10,8 +10,10 @@ import ( "entgo.io/contrib/entgql" "github.com/99designs/gqlgen/graphql" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" "github.com/database-playground/backend-v2/ent/internal" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" "github.com/database-playground/backend-v2/ent/user" @@ -28,11 +30,21 @@ var databaseImplementors = []string{"Database", "Node"} // IsNode implements the Node interface check for GQLGen. func (*Database) IsNode() {} +var eventsImplementors = []string{"Events", "Node"} + +// IsNode implements the Node interface check for GQLGen. +func (*Events) IsNode() {} + var groupImplementors = []string{"Group", "Node"} // IsNode implements the Node interface check for GQLGen. func (*Group) IsNode() {} +var pointsImplementors = []string{"Points", "Node"} + +// IsNode implements the Node interface check for GQLGen. +func (*Points) IsNode() {} + var questionImplementors = []string{"Question", "Node"} // IsNode implements the Node interface check for GQLGen. @@ -134,6 +146,15 @@ func (c *Client) noder(ctx context.Context, table string, id int) (Noder, error) } } return query.Only(ctx) + case events.Table: + query := c.Events.Query(). 
+ Where(events.ID(id)) + if fc := graphql.GetFieldContext(ctx); fc != nil { + if err := query.collectField(ctx, true, graphql.GetOperationContext(ctx), fc.Field, nil, eventsImplementors...); err != nil { + return nil, err + } + } + return query.Only(ctx) case group.Table: query := c.Group.Query(). Where(group.ID(id)) @@ -143,6 +164,15 @@ func (c *Client) noder(ctx context.Context, table string, id int) (Noder, error) } } return query.Only(ctx) + case points.Table: + query := c.Points.Query(). + Where(points.ID(id)) + if fc := graphql.GetFieldContext(ctx); fc != nil { + if err := query.collectField(ctx, true, graphql.GetOperationContext(ctx), fc.Field, nil, pointsImplementors...); err != nil { + return nil, err + } + } + return query.Only(ctx) case question.Table: query := c.Question.Query(). Where(question.ID(id)) @@ -259,6 +289,22 @@ func (c *Client) noders(ctx context.Context, table string, ids []int) ([]Noder, *noder = node } } + case events.Table: + query := c.Events.Query(). + Where(events.IDIn(ids...)) + query, err := query.CollectFields(ctx, eventsImplementors...) + if err != nil { + return nil, err + } + nodes, err := query.All(ctx) + if err != nil { + return nil, err + } + for _, node := range nodes { + for _, noder := range idmap[node.ID] { + *noder = node + } + } case group.Table: query := c.Group.Query(). Where(group.IDIn(ids...)) @@ -275,6 +321,22 @@ func (c *Client) noders(ctx context.Context, table string, ids []int) ([]Noder, *noder = node } } + case points.Table: + query := c.Points.Query(). + Where(points.IDIn(ids...)) + query, err := query.CollectFields(ctx, pointsImplementors...) + if err != nil { + return nil, err + } + nodes, err := query.All(ctx) + if err != nil { + return nil, err + } + for _, node := range nodes { + for _, noder := range idmap[node.ID] { + *noder = node + } + } case question.Table: query := c.Question.Query(). Where(question.IDIn(ids...)) diff --git a/ent/gql_pagination.go b/ent/gql_pagination.go index 9997961..52085b1 100644 --- a/ent/gql_pagination.go +++ b/ent/gql_pagination.go @@ -15,7 +15,9 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/99designs/gqlgen/graphql/errcode" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" "github.com/database-playground/backend-v2/ent/user" @@ -351,6 +353,255 @@ func (_m *Database) ToEdge(order *DatabaseOrder) *DatabaseEdge { } } +// EventsEdge is the edge representation of Events. +type EventsEdge struct { + Node *Events `json:"node"` + Cursor Cursor `json:"cursor"` +} + +// EventsConnection is the connection containing edges to Events. 
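+//
+// A connection is normally produced by EventsQuery.Paginate. A minimal
+// sketch, assuming ctx, client, and a user ID are in scope:
+//
+//	first := 20
+//	conn, err := client.Events.Query().
+//		Where(events.UserID(userID)).
+//		Paginate(ctx, nil, &first, nil, nil)
+//
+// The result carries conn.Edges, conn.PageInfo, and conn.TotalCount.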
+type EventsConnection struct { + Edges []*EventsEdge `json:"edges"` + PageInfo PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +func (c *EventsConnection) build(nodes []*Events, pager *eventsPager, after *Cursor, first *int, before *Cursor, last *int) { + c.PageInfo.HasNextPage = before != nil + c.PageInfo.HasPreviousPage = after != nil + if first != nil && *first+1 == len(nodes) { + c.PageInfo.HasNextPage = true + nodes = nodes[:len(nodes)-1] + } else if last != nil && *last+1 == len(nodes) { + c.PageInfo.HasPreviousPage = true + nodes = nodes[:len(nodes)-1] + } + var nodeAt func(int) *Events + if last != nil { + n := len(nodes) - 1 + nodeAt = func(i int) *Events { + return nodes[n-i] + } + } else { + nodeAt = func(i int) *Events { + return nodes[i] + } + } + c.Edges = make([]*EventsEdge, len(nodes)) + for i := range nodes { + node := nodeAt(i) + c.Edges[i] = &EventsEdge{ + Node: node, + Cursor: pager.toCursor(node), + } + } + if l := len(c.Edges); l > 0 { + c.PageInfo.StartCursor = &c.Edges[0].Cursor + c.PageInfo.EndCursor = &c.Edges[l-1].Cursor + } + if c.TotalCount == 0 { + c.TotalCount = len(nodes) + } +} + +// EventsPaginateOption enables pagination customization. +type EventsPaginateOption func(*eventsPager) error + +// WithEventsOrder configures pagination ordering. +func WithEventsOrder(order *EventsOrder) EventsPaginateOption { + if order == nil { + order = DefaultEventsOrder + } + o := *order + return func(pager *eventsPager) error { + if err := o.Direction.Validate(); err != nil { + return err + } + if o.Field == nil { + o.Field = DefaultEventsOrder.Field + } + pager.order = &o + return nil + } +} + +// WithEventsFilter configures pagination filter. +func WithEventsFilter(filter func(*EventsQuery) (*EventsQuery, error)) EventsPaginateOption { + return func(pager *eventsPager) error { + if filter == nil { + return errors.New("EventsQuery filter cannot be nil") + } + pager.filter = filter + return nil + } +} + +type eventsPager struct { + reverse bool + order *EventsOrder + filter func(*EventsQuery) (*EventsQuery, error) +} + +func newEventsPager(opts []EventsPaginateOption, reverse bool) (*eventsPager, error) { + pager := &eventsPager{reverse: reverse} + for _, opt := range opts { + if err := opt(pager); err != nil { + return nil, err + } + } + if pager.order == nil { + pager.order = DefaultEventsOrder + } + return pager, nil +} + +func (p *eventsPager) applyFilter(query *EventsQuery) (*EventsQuery, error) { + if p.filter != nil { + return p.filter(query) + } + return query, nil +} + +func (p *eventsPager) toCursor(_m *Events) Cursor { + return p.order.Field.toCursor(_m) +} + +func (p *eventsPager) applyCursors(query *EventsQuery, after, before *Cursor) (*EventsQuery, error) { + direction := p.order.Direction + if p.reverse { + direction = direction.Reverse() + } + for _, predicate := range entgql.CursorsPredicate(after, before, DefaultEventsOrder.Field.column, p.order.Field.column, direction) { + query = query.Where(predicate) + } + return query, nil +} + +func (p *eventsPager) applyOrder(query *EventsQuery) *EventsQuery { + direction := p.order.Direction + if p.reverse { + direction = direction.Reverse() + } + query = query.Order(p.order.Field.toTerm(direction.OrderTermOption())) + if p.order.Field != DefaultEventsOrder.Field { + query = query.Order(DefaultEventsOrder.Field.toTerm(direction.OrderTermOption())) + } + if len(query.ctx.Fields) > 0 { + query.ctx.AppendFieldOnce(p.order.Field.column) + } + return query +} + +func (p *eventsPager) 
orderExpr(query *EventsQuery) sql.Querier { + direction := p.order.Direction + if p.reverse { + direction = direction.Reverse() + } + if len(query.ctx.Fields) > 0 { + query.ctx.AppendFieldOnce(p.order.Field.column) + } + return sql.ExprFunc(func(b *sql.Builder) { + b.Ident(p.order.Field.column).Pad().WriteString(string(direction)) + if p.order.Field != DefaultEventsOrder.Field { + b.Comma().Ident(DefaultEventsOrder.Field.column).Pad().WriteString(string(direction)) + } + }) +} + +// Paginate executes the query and returns a relay based cursor connection to Events. +func (_m *EventsQuery) Paginate( + ctx context.Context, after *Cursor, first *int, + before *Cursor, last *int, opts ...EventsPaginateOption, +) (*EventsConnection, error) { + if err := validateFirstLast(first, last); err != nil { + return nil, err + } + pager, err := newEventsPager(opts, last != nil) + if err != nil { + return nil, err + } + if _m, err = pager.applyFilter(_m); err != nil { + return nil, err + } + conn := &EventsConnection{Edges: []*EventsEdge{}} + ignoredEdges := !hasCollectedField(ctx, edgesField) + if hasCollectedField(ctx, totalCountField) || hasCollectedField(ctx, pageInfoField) { + hasPagination := after != nil || first != nil || before != nil || last != nil + if hasPagination || ignoredEdges { + c := _m.Clone() + c.ctx.Fields = nil + if conn.TotalCount, err = c.Count(ctx); err != nil { + return nil, err + } + conn.PageInfo.HasNextPage = first != nil && conn.TotalCount > 0 + conn.PageInfo.HasPreviousPage = last != nil && conn.TotalCount > 0 + } + } + if ignoredEdges || (first != nil && *first == 0) || (last != nil && *last == 0) { + return conn, nil + } + if _m, err = pager.applyCursors(_m, after, before); err != nil { + return nil, err + } + limit := paginateLimit(first, last) + if limit != 0 { + _m.Limit(limit) + } + if field := collectedField(ctx, edgesField, nodeField); field != nil { + if err := _m.collectField(ctx, limit == 1, graphql.GetOperationContext(ctx), *field, []string{edgesField, nodeField}); err != nil { + return nil, err + } + } + _m = pager.applyOrder(_m) + nodes, err := _m.All(ctx) + if err != nil { + return nil, err + } + conn.build(nodes, pager, after, first, before, last) + return conn, nil +} + +// EventsOrderField defines the ordering field of Events. +type EventsOrderField struct { + // Value extracts the ordering value from the given Events. + Value func(*Events) (ent.Value, error) + column string // field or computed. + toTerm func(...sql.OrderTermOption) events.OrderOption + toCursor func(*Events) Cursor +} + +// EventsOrder defines the ordering of Events. +type EventsOrder struct { + Direction OrderDirection `json:"direction"` + Field *EventsOrderField `json:"field"` +} + +// DefaultEventsOrder is the default ordering of Events. +var DefaultEventsOrder = &EventsOrder{ + Direction: entgql.OrderDirectionAsc, + Field: &EventsOrderField{ + Value: func(_m *Events) (ent.Value, error) { + return _m.ID, nil + }, + column: events.FieldID, + toTerm: events.ByID, + toCursor: func(_m *Events) Cursor { + return Cursor{ID: _m.ID} + }, + }, +} + +// ToEdge converts Events into EventsEdge. +func (_m *Events) ToEdge(order *EventsOrder) *EventsEdge { + if order == nil { + order = DefaultEventsOrder + } + return &EventsEdge{ + Node: _m, + Cursor: order.Field.toCursor(_m), + } +} + // GroupEdge is the edge representation of Group. 
type GroupEdge struct { Node *Group `json:"node"` @@ -600,6 +851,255 @@ func (_m *Group) ToEdge(order *GroupOrder) *GroupEdge { } } +// PointsEdge is the edge representation of Points. +type PointsEdge struct { + Node *Points `json:"node"` + Cursor Cursor `json:"cursor"` +} + +// PointsConnection is the connection containing edges to Points. +type PointsConnection struct { + Edges []*PointsEdge `json:"edges"` + PageInfo PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +func (c *PointsConnection) build(nodes []*Points, pager *pointsPager, after *Cursor, first *int, before *Cursor, last *int) { + c.PageInfo.HasNextPage = before != nil + c.PageInfo.HasPreviousPage = after != nil + if first != nil && *first+1 == len(nodes) { + c.PageInfo.HasNextPage = true + nodes = nodes[:len(nodes)-1] + } else if last != nil && *last+1 == len(nodes) { + c.PageInfo.HasPreviousPage = true + nodes = nodes[:len(nodes)-1] + } + var nodeAt func(int) *Points + if last != nil { + n := len(nodes) - 1 + nodeAt = func(i int) *Points { + return nodes[n-i] + } + } else { + nodeAt = func(i int) *Points { + return nodes[i] + } + } + c.Edges = make([]*PointsEdge, len(nodes)) + for i := range nodes { + node := nodeAt(i) + c.Edges[i] = &PointsEdge{ + Node: node, + Cursor: pager.toCursor(node), + } + } + if l := len(c.Edges); l > 0 { + c.PageInfo.StartCursor = &c.Edges[0].Cursor + c.PageInfo.EndCursor = &c.Edges[l-1].Cursor + } + if c.TotalCount == 0 { + c.TotalCount = len(nodes) + } +} + +// PointsPaginateOption enables pagination customization. +type PointsPaginateOption func(*pointsPager) error + +// WithPointsOrder configures pagination ordering. +func WithPointsOrder(order *PointsOrder) PointsPaginateOption { + if order == nil { + order = DefaultPointsOrder + } + o := *order + return func(pager *pointsPager) error { + if err := o.Direction.Validate(); err != nil { + return err + } + if o.Field == nil { + o.Field = DefaultPointsOrder.Field + } + pager.order = &o + return nil + } +} + +// WithPointsFilter configures pagination filter. 
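+//
+// A minimal sketch, assuming ctx and client are in scope and that the
+// generated points.PointsGT predicate is available: paginate only positive
+// point grants:
+//
+//	first := 20
+//	positive := WithPointsFilter(func(q *PointsQuery) (*PointsQuery, error) {
+//		return q.Where(points.PointsGT(0)), nil
+//	})
+//	conn, err := client.Points.Query().Paginate(ctx, nil, &first, nil, nil, positive)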
+func WithPointsFilter(filter func(*PointsQuery) (*PointsQuery, error)) PointsPaginateOption { + return func(pager *pointsPager) error { + if filter == nil { + return errors.New("PointsQuery filter cannot be nil") + } + pager.filter = filter + return nil + } +} + +type pointsPager struct { + reverse bool + order *PointsOrder + filter func(*PointsQuery) (*PointsQuery, error) +} + +func newPointsPager(opts []PointsPaginateOption, reverse bool) (*pointsPager, error) { + pager := &pointsPager{reverse: reverse} + for _, opt := range opts { + if err := opt(pager); err != nil { + return nil, err + } + } + if pager.order == nil { + pager.order = DefaultPointsOrder + } + return pager, nil +} + +func (p *pointsPager) applyFilter(query *PointsQuery) (*PointsQuery, error) { + if p.filter != nil { + return p.filter(query) + } + return query, nil +} + +func (p *pointsPager) toCursor(_m *Points) Cursor { + return p.order.Field.toCursor(_m) +} + +func (p *pointsPager) applyCursors(query *PointsQuery, after, before *Cursor) (*PointsQuery, error) { + direction := p.order.Direction + if p.reverse { + direction = direction.Reverse() + } + for _, predicate := range entgql.CursorsPredicate(after, before, DefaultPointsOrder.Field.column, p.order.Field.column, direction) { + query = query.Where(predicate) + } + return query, nil +} + +func (p *pointsPager) applyOrder(query *PointsQuery) *PointsQuery { + direction := p.order.Direction + if p.reverse { + direction = direction.Reverse() + } + query = query.Order(p.order.Field.toTerm(direction.OrderTermOption())) + if p.order.Field != DefaultPointsOrder.Field { + query = query.Order(DefaultPointsOrder.Field.toTerm(direction.OrderTermOption())) + } + if len(query.ctx.Fields) > 0 { + query.ctx.AppendFieldOnce(p.order.Field.column) + } + return query +} + +func (p *pointsPager) orderExpr(query *PointsQuery) sql.Querier { + direction := p.order.Direction + if p.reverse { + direction = direction.Reverse() + } + if len(query.ctx.Fields) > 0 { + query.ctx.AppendFieldOnce(p.order.Field.column) + } + return sql.ExprFunc(func(b *sql.Builder) { + b.Ident(p.order.Field.column).Pad().WriteString(string(direction)) + if p.order.Field != DefaultPointsOrder.Field { + b.Comma().Ident(DefaultPointsOrder.Field.column).Pad().WriteString(string(direction)) + } + }) +} + +// Paginate executes the query and returns a relay based cursor connection to Points. 
+func (_m *PointsQuery) Paginate( + ctx context.Context, after *Cursor, first *int, + before *Cursor, last *int, opts ...PointsPaginateOption, +) (*PointsConnection, error) { + if err := validateFirstLast(first, last); err != nil { + return nil, err + } + pager, err := newPointsPager(opts, last != nil) + if err != nil { + return nil, err + } + if _m, err = pager.applyFilter(_m); err != nil { + return nil, err + } + conn := &PointsConnection{Edges: []*PointsEdge{}} + ignoredEdges := !hasCollectedField(ctx, edgesField) + if hasCollectedField(ctx, totalCountField) || hasCollectedField(ctx, pageInfoField) { + hasPagination := after != nil || first != nil || before != nil || last != nil + if hasPagination || ignoredEdges { + c := _m.Clone() + c.ctx.Fields = nil + if conn.TotalCount, err = c.Count(ctx); err != nil { + return nil, err + } + conn.PageInfo.HasNextPage = first != nil && conn.TotalCount > 0 + conn.PageInfo.HasPreviousPage = last != nil && conn.TotalCount > 0 + } + } + if ignoredEdges || (first != nil && *first == 0) || (last != nil && *last == 0) { + return conn, nil + } + if _m, err = pager.applyCursors(_m, after, before); err != nil { + return nil, err + } + limit := paginateLimit(first, last) + if limit != 0 { + _m.Limit(limit) + } + if field := collectedField(ctx, edgesField, nodeField); field != nil { + if err := _m.collectField(ctx, limit == 1, graphql.GetOperationContext(ctx), *field, []string{edgesField, nodeField}); err != nil { + return nil, err + } + } + _m = pager.applyOrder(_m) + nodes, err := _m.All(ctx) + if err != nil { + return nil, err + } + conn.build(nodes, pager, after, first, before, last) + return conn, nil +} + +// PointsOrderField defines the ordering field of Points. +type PointsOrderField struct { + // Value extracts the ordering value from the given Points. + Value func(*Points) (ent.Value, error) + column string // field or computed. + toTerm func(...sql.OrderTermOption) points.OrderOption + toCursor func(*Points) Cursor +} + +// PointsOrder defines the ordering of Points. +type PointsOrder struct { + Direction OrderDirection `json:"direction"` + Field *PointsOrderField `json:"field"` +} + +// DefaultPointsOrder is the default ordering of Points. +var DefaultPointsOrder = &PointsOrder{ + Direction: entgql.OrderDirectionAsc, + Field: &PointsOrderField{ + Value: func(_m *Points) (ent.Value, error) { + return _m.ID, nil + }, + column: points.FieldID, + toTerm: points.ByID, + toCursor: func(_m *Points) Cursor { + return Cursor{ID: _m.ID} + }, + }, +} + +// ToEdge converts Points into PointsEdge. +func (_m *Points) ToEdge(order *PointsOrder) *PointsEdge { + if order == nil { + order = DefaultPointsOrder + } + return &PointsEdge{ + Node: _m, + Cursor: order.Field.toCursor(_m), + } +} + // QuestionEdge is the edge representation of Question. 
type QuestionEdge struct { Node *Question `json:"node"` diff --git a/ent/gql_where_input.go b/ent/gql_where_input.go index b22f398..88b7da4 100644 --- a/ent/gql_where_input.go +++ b/ent/gql_where_input.go @@ -8,7 +8,9 @@ import ( "time" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/predicate" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" @@ -385,6 +387,258 @@ func (i *DatabaseWhereInput) P() (predicate.Database, error) { } } +// EventsWhereInput represents a where input for filtering Events queries. +type EventsWhereInput struct { + Predicates []predicate.Events `json:"-"` + Not *EventsWhereInput `json:"not,omitempty"` + Or []*EventsWhereInput `json:"or,omitempty"` + And []*EventsWhereInput `json:"and,omitempty"` + + // "id" field predicates. + ID *int `json:"id,omitempty"` + IDNEQ *int `json:"idNEQ,omitempty"` + IDIn []int `json:"idIn,omitempty"` + IDNotIn []int `json:"idNotIn,omitempty"` + IDGT *int `json:"idGT,omitempty"` + IDGTE *int `json:"idGTE,omitempty"` + IDLT *int `json:"idLT,omitempty"` + IDLTE *int `json:"idLTE,omitempty"` + + // "user_id" field predicates. + UserID *int `json:"userID,omitempty"` + UserIDNEQ *int `json:"userIDNEQ,omitempty"` + UserIDIn []int `json:"userIDIn,omitempty"` + UserIDNotIn []int `json:"userIDNotIn,omitempty"` + + // "type" field predicates. + Type *string `json:"type,omitempty"` + TypeNEQ *string `json:"typeNEQ,omitempty"` + TypeIn []string `json:"typeIn,omitempty"` + TypeNotIn []string `json:"typeNotIn,omitempty"` + TypeGT *string `json:"typeGT,omitempty"` + TypeGTE *string `json:"typeGTE,omitempty"` + TypeLT *string `json:"typeLT,omitempty"` + TypeLTE *string `json:"typeLTE,omitempty"` + TypeContains *string `json:"typeContains,omitempty"` + TypeHasPrefix *string `json:"typeHasPrefix,omitempty"` + TypeHasSuffix *string `json:"typeHasSuffix,omitempty"` + TypeEqualFold *string `json:"typeEqualFold,omitempty"` + TypeContainsFold *string `json:"typeContainsFold,omitempty"` + + // "triggered_at" field predicates. + TriggeredAt *time.Time `json:"triggeredAt,omitempty"` + TriggeredAtNEQ *time.Time `json:"triggeredAtNEQ,omitempty"` + TriggeredAtIn []time.Time `json:"triggeredAtIn,omitempty"` + TriggeredAtNotIn []time.Time `json:"triggeredAtNotIn,omitempty"` + TriggeredAtGT *time.Time `json:"triggeredAtGT,omitempty"` + TriggeredAtGTE *time.Time `json:"triggeredAtGTE,omitempty"` + TriggeredAtLT *time.Time `json:"triggeredAtLT,omitempty"` + TriggeredAtLTE *time.Time `json:"triggeredAtLTE,omitempty"` + + // "user" edge predicates. + HasUser *bool `json:"hasUser,omitempty"` + HasUserWith []*UserWhereInput `json:"hasUserWith,omitempty"` +} + +// AddPredicates adds custom predicates to the where input to be used during the filtering phase. +func (i *EventsWhereInput) AddPredicates(predicates ...predicate.Events) { + i.Predicates = append(i.Predicates, predicates...) +} + +// Filter applies the EventsWhereInput filter on the EventsQuery builder. +func (i *EventsWhereInput) Filter(q *EventsQuery) (*EventsQuery, error) { + if i == nil { + return q, nil + } + p, err := i.P() + if err != nil { + if err == ErrEmptyEventsWhereInput { + return q, nil + } + return nil, err + } + return q.Where(p), nil +} + +// ErrEmptyEventsWhereInput is returned in case the EventsWhereInput is empty. 
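+//
+// Filter swallows this error (an empty input is simply a no-op), so it mainly
+// matters when calling P directly. A minimal sketch, assuming the input was
+// built by the caller:
+//
+//	if _, err := input.P(); errors.Is(err, ErrEmptyEventsWhereInput) {
+//		// no predicates were provided
+//	}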
+var ErrEmptyEventsWhereInput = errors.New("ent: empty predicate EventsWhereInput") + +// P returns a predicate for filtering eventsslice. +// An error is returned if the input is empty or invalid. +func (i *EventsWhereInput) P() (predicate.Events, error) { + var predicates []predicate.Events + if i.Not != nil { + p, err := i.Not.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'not'", err) + } + predicates = append(predicates, events.Not(p)) + } + switch n := len(i.Or); { + case n == 1: + p, err := i.Or[0].P() + if err != nil { + return nil, fmt.Errorf("%w: field 'or'", err) + } + predicates = append(predicates, p) + case n > 1: + or := make([]predicate.Events, 0, n) + for _, w := range i.Or { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'or'", err) + } + or = append(or, p) + } + predicates = append(predicates, events.Or(or...)) + } + switch n := len(i.And); { + case n == 1: + p, err := i.And[0].P() + if err != nil { + return nil, fmt.Errorf("%w: field 'and'", err) + } + predicates = append(predicates, p) + case n > 1: + and := make([]predicate.Events, 0, n) + for _, w := range i.And { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'and'", err) + } + and = append(and, p) + } + predicates = append(predicates, events.And(and...)) + } + predicates = append(predicates, i.Predicates...) + if i.ID != nil { + predicates = append(predicates, events.IDEQ(*i.ID)) + } + if i.IDNEQ != nil { + predicates = append(predicates, events.IDNEQ(*i.IDNEQ)) + } + if len(i.IDIn) > 0 { + predicates = append(predicates, events.IDIn(i.IDIn...)) + } + if len(i.IDNotIn) > 0 { + predicates = append(predicates, events.IDNotIn(i.IDNotIn...)) + } + if i.IDGT != nil { + predicates = append(predicates, events.IDGT(*i.IDGT)) + } + if i.IDGTE != nil { + predicates = append(predicates, events.IDGTE(*i.IDGTE)) + } + if i.IDLT != nil { + predicates = append(predicates, events.IDLT(*i.IDLT)) + } + if i.IDLTE != nil { + predicates = append(predicates, events.IDLTE(*i.IDLTE)) + } + if i.UserID != nil { + predicates = append(predicates, events.UserIDEQ(*i.UserID)) + } + if i.UserIDNEQ != nil { + predicates = append(predicates, events.UserIDNEQ(*i.UserIDNEQ)) + } + if len(i.UserIDIn) > 0 { + predicates = append(predicates, events.UserIDIn(i.UserIDIn...)) + } + if len(i.UserIDNotIn) > 0 { + predicates = append(predicates, events.UserIDNotIn(i.UserIDNotIn...)) + } + if i.Type != nil { + predicates = append(predicates, events.TypeEQ(*i.Type)) + } + if i.TypeNEQ != nil { + predicates = append(predicates, events.TypeNEQ(*i.TypeNEQ)) + } + if len(i.TypeIn) > 0 { + predicates = append(predicates, events.TypeIn(i.TypeIn...)) + } + if len(i.TypeNotIn) > 0 { + predicates = append(predicates, events.TypeNotIn(i.TypeNotIn...)) + } + if i.TypeGT != nil { + predicates = append(predicates, events.TypeGT(*i.TypeGT)) + } + if i.TypeGTE != nil { + predicates = append(predicates, events.TypeGTE(*i.TypeGTE)) + } + if i.TypeLT != nil { + predicates = append(predicates, events.TypeLT(*i.TypeLT)) + } + if i.TypeLTE != nil { + predicates = append(predicates, events.TypeLTE(*i.TypeLTE)) + } + if i.TypeContains != nil { + predicates = append(predicates, events.TypeContains(*i.TypeContains)) + } + if i.TypeHasPrefix != nil { + predicates = append(predicates, events.TypeHasPrefix(*i.TypeHasPrefix)) + } + if i.TypeHasSuffix != nil { + predicates = append(predicates, events.TypeHasSuffix(*i.TypeHasSuffix)) + } + if i.TypeEqualFold != nil { + predicates = append(predicates, 
events.TypeEqualFold(*i.TypeEqualFold)) + } + if i.TypeContainsFold != nil { + predicates = append(predicates, events.TypeContainsFold(*i.TypeContainsFold)) + } + if i.TriggeredAt != nil { + predicates = append(predicates, events.TriggeredAtEQ(*i.TriggeredAt)) + } + if i.TriggeredAtNEQ != nil { + predicates = append(predicates, events.TriggeredAtNEQ(*i.TriggeredAtNEQ)) + } + if len(i.TriggeredAtIn) > 0 { + predicates = append(predicates, events.TriggeredAtIn(i.TriggeredAtIn...)) + } + if len(i.TriggeredAtNotIn) > 0 { + predicates = append(predicates, events.TriggeredAtNotIn(i.TriggeredAtNotIn...)) + } + if i.TriggeredAtGT != nil { + predicates = append(predicates, events.TriggeredAtGT(*i.TriggeredAtGT)) + } + if i.TriggeredAtGTE != nil { + predicates = append(predicates, events.TriggeredAtGTE(*i.TriggeredAtGTE)) + } + if i.TriggeredAtLT != nil { + predicates = append(predicates, events.TriggeredAtLT(*i.TriggeredAtLT)) + } + if i.TriggeredAtLTE != nil { + predicates = append(predicates, events.TriggeredAtLTE(*i.TriggeredAtLTE)) + } + + if i.HasUser != nil { + p := events.HasUser() + if !*i.HasUser { + p = events.Not(p) + } + predicates = append(predicates, p) + } + if len(i.HasUserWith) > 0 { + with := make([]predicate.User, 0, len(i.HasUserWith)) + for _, w := range i.HasUserWith { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'HasUserWith'", err) + } + with = append(with, p) + } + predicates = append(predicates, events.HasUserWith(with...)) + } + switch len(predicates) { + case 0: + return nil, ErrEmptyEventsWhereInput + case 1: + return predicates[0], nil + default: + return events.And(predicates...), nil + } +} + // GroupWhereInput represents a where input for filtering Group queries. type GroupWhereInput struct { Predicates []predicate.Group `json:"-"` @@ -757,6 +1011,358 @@ func (i *GroupWhereInput) P() (predicate.Group, error) { } } +// PointsWhereInput represents a where input for filtering Points queries. +type PointsWhereInput struct { + Predicates []predicate.Points `json:"-"` + Not *PointsWhereInput `json:"not,omitempty"` + Or []*PointsWhereInput `json:"or,omitempty"` + And []*PointsWhereInput `json:"and,omitempty"` + + // "id" field predicates. + ID *int `json:"id,omitempty"` + IDNEQ *int `json:"idNEQ,omitempty"` + IDIn []int `json:"idIn,omitempty"` + IDNotIn []int `json:"idNotIn,omitempty"` + IDGT *int `json:"idGT,omitempty"` + IDGTE *int `json:"idGTE,omitempty"` + IDLT *int `json:"idLT,omitempty"` + IDLTE *int `json:"idLTE,omitempty"` + + // "created_at" field predicates. + CreatedAt *time.Time `json:"createdAt,omitempty"` + CreatedAtNEQ *time.Time `json:"createdAtNEQ,omitempty"` + CreatedAtIn []time.Time `json:"createdAtIn,omitempty"` + CreatedAtNotIn []time.Time `json:"createdAtNotIn,omitempty"` + CreatedAtGT *time.Time `json:"createdAtGT,omitempty"` + CreatedAtGTE *time.Time `json:"createdAtGTE,omitempty"` + CreatedAtLT *time.Time `json:"createdAtLT,omitempty"` + CreatedAtLTE *time.Time `json:"createdAtLTE,omitempty"` + + // "updated_at" field predicates. + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + UpdatedAtNEQ *time.Time `json:"updatedAtNEQ,omitempty"` + UpdatedAtIn []time.Time `json:"updatedAtIn,omitempty"` + UpdatedAtNotIn []time.Time `json:"updatedAtNotIn,omitempty"` + UpdatedAtGT *time.Time `json:"updatedAtGT,omitempty"` + UpdatedAtGTE *time.Time `json:"updatedAtGTE,omitempty"` + UpdatedAtLT *time.Time `json:"updatedAtLT,omitempty"` + UpdatedAtLTE *time.Time `json:"updatedAtLTE,omitempty"` + + // "deleted_at" field predicates. 
+ DeletedAt *time.Time `json:"deletedAt,omitempty"` + DeletedAtNEQ *time.Time `json:"deletedAtNEQ,omitempty"` + DeletedAtIn []time.Time `json:"deletedAtIn,omitempty"` + DeletedAtNotIn []time.Time `json:"deletedAtNotIn,omitempty"` + DeletedAtGT *time.Time `json:"deletedAtGT,omitempty"` + DeletedAtGTE *time.Time `json:"deletedAtGTE,omitempty"` + DeletedAtLT *time.Time `json:"deletedAtLT,omitempty"` + DeletedAtLTE *time.Time `json:"deletedAtLTE,omitempty"` + DeletedAtIsNil bool `json:"deletedAtIsNil,omitempty"` + DeletedAtNotNil bool `json:"deletedAtNotNil,omitempty"` + + // "points" field predicates. + Points *int `json:"points,omitempty"` + PointsNEQ *int `json:"pointsNEQ,omitempty"` + PointsIn []int `json:"pointsIn,omitempty"` + PointsNotIn []int `json:"pointsNotIn,omitempty"` + PointsGT *int `json:"pointsGT,omitempty"` + PointsGTE *int `json:"pointsGTE,omitempty"` + PointsLT *int `json:"pointsLT,omitempty"` + PointsLTE *int `json:"pointsLTE,omitempty"` + + // "description" field predicates. + Description *string `json:"description,omitempty"` + DescriptionNEQ *string `json:"descriptionNEQ,omitempty"` + DescriptionIn []string `json:"descriptionIn,omitempty"` + DescriptionNotIn []string `json:"descriptionNotIn,omitempty"` + DescriptionGT *string `json:"descriptionGT,omitempty"` + DescriptionGTE *string `json:"descriptionGTE,omitempty"` + DescriptionLT *string `json:"descriptionLT,omitempty"` + DescriptionLTE *string `json:"descriptionLTE,omitempty"` + DescriptionContains *string `json:"descriptionContains,omitempty"` + DescriptionHasPrefix *string `json:"descriptionHasPrefix,omitempty"` + DescriptionHasSuffix *string `json:"descriptionHasSuffix,omitempty"` + DescriptionIsNil bool `json:"descriptionIsNil,omitempty"` + DescriptionNotNil bool `json:"descriptionNotNil,omitempty"` + DescriptionEqualFold *string `json:"descriptionEqualFold,omitempty"` + DescriptionContainsFold *string `json:"descriptionContainsFold,omitempty"` + + // "user" edge predicates. + HasUser *bool `json:"hasUser,omitempty"` + HasUserWith []*UserWhereInput `json:"hasUserWith,omitempty"` +} + +// AddPredicates adds custom predicates to the where input to be used during the filtering phase. +func (i *PointsWhereInput) AddPredicates(predicates ...predicate.Points) { + i.Predicates = append(i.Predicates, predicates...) +} + +// Filter applies the PointsWhereInput filter on the PointsQuery builder. +func (i *PointsWhereInput) Filter(q *PointsQuery) (*PointsQuery, error) { + if i == nil { + return q, nil + } + p, err := i.P() + if err != nil { + if err == ErrEmptyPointsWhereInput { + return q, nil + } + return nil, err + } + return q.Where(p), nil +} + +// ErrEmptyPointsWhereInput is returned in case the PointsWhereInput is empty. +var ErrEmptyPointsWhereInput = errors.New("ent: empty predicate PointsWhereInput") + +// P returns a predicate for filtering pointsslice. +// An error is returned if the input is empty or invalid. 
+func (i *PointsWhereInput) P() (predicate.Points, error) { + var predicates []predicate.Points + if i.Not != nil { + p, err := i.Not.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'not'", err) + } + predicates = append(predicates, points.Not(p)) + } + switch n := len(i.Or); { + case n == 1: + p, err := i.Or[0].P() + if err != nil { + return nil, fmt.Errorf("%w: field 'or'", err) + } + predicates = append(predicates, p) + case n > 1: + or := make([]predicate.Points, 0, n) + for _, w := range i.Or { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'or'", err) + } + or = append(or, p) + } + predicates = append(predicates, points.Or(or...)) + } + switch n := len(i.And); { + case n == 1: + p, err := i.And[0].P() + if err != nil { + return nil, fmt.Errorf("%w: field 'and'", err) + } + predicates = append(predicates, p) + case n > 1: + and := make([]predicate.Points, 0, n) + for _, w := range i.And { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'and'", err) + } + and = append(and, p) + } + predicates = append(predicates, points.And(and...)) + } + predicates = append(predicates, i.Predicates...) + if i.ID != nil { + predicates = append(predicates, points.IDEQ(*i.ID)) + } + if i.IDNEQ != nil { + predicates = append(predicates, points.IDNEQ(*i.IDNEQ)) + } + if len(i.IDIn) > 0 { + predicates = append(predicates, points.IDIn(i.IDIn...)) + } + if len(i.IDNotIn) > 0 { + predicates = append(predicates, points.IDNotIn(i.IDNotIn...)) + } + if i.IDGT != nil { + predicates = append(predicates, points.IDGT(*i.IDGT)) + } + if i.IDGTE != nil { + predicates = append(predicates, points.IDGTE(*i.IDGTE)) + } + if i.IDLT != nil { + predicates = append(predicates, points.IDLT(*i.IDLT)) + } + if i.IDLTE != nil { + predicates = append(predicates, points.IDLTE(*i.IDLTE)) + } + if i.CreatedAt != nil { + predicates = append(predicates, points.CreatedAtEQ(*i.CreatedAt)) + } + if i.CreatedAtNEQ != nil { + predicates = append(predicates, points.CreatedAtNEQ(*i.CreatedAtNEQ)) + } + if len(i.CreatedAtIn) > 0 { + predicates = append(predicates, points.CreatedAtIn(i.CreatedAtIn...)) + } + if len(i.CreatedAtNotIn) > 0 { + predicates = append(predicates, points.CreatedAtNotIn(i.CreatedAtNotIn...)) + } + if i.CreatedAtGT != nil { + predicates = append(predicates, points.CreatedAtGT(*i.CreatedAtGT)) + } + if i.CreatedAtGTE != nil { + predicates = append(predicates, points.CreatedAtGTE(*i.CreatedAtGTE)) + } + if i.CreatedAtLT != nil { + predicates = append(predicates, points.CreatedAtLT(*i.CreatedAtLT)) + } + if i.CreatedAtLTE != nil { + predicates = append(predicates, points.CreatedAtLTE(*i.CreatedAtLTE)) + } + if i.UpdatedAt != nil { + predicates = append(predicates, points.UpdatedAtEQ(*i.UpdatedAt)) + } + if i.UpdatedAtNEQ != nil { + predicates = append(predicates, points.UpdatedAtNEQ(*i.UpdatedAtNEQ)) + } + if len(i.UpdatedAtIn) > 0 { + predicates = append(predicates, points.UpdatedAtIn(i.UpdatedAtIn...)) + } + if len(i.UpdatedAtNotIn) > 0 { + predicates = append(predicates, points.UpdatedAtNotIn(i.UpdatedAtNotIn...)) + } + if i.UpdatedAtGT != nil { + predicates = append(predicates, points.UpdatedAtGT(*i.UpdatedAtGT)) + } + if i.UpdatedAtGTE != nil { + predicates = append(predicates, points.UpdatedAtGTE(*i.UpdatedAtGTE)) + } + if i.UpdatedAtLT != nil { + predicates = append(predicates, points.UpdatedAtLT(*i.UpdatedAtLT)) + } + if i.UpdatedAtLTE != nil { + predicates = append(predicates, points.UpdatedAtLTE(*i.UpdatedAtLTE)) + } + if i.DeletedAt != nil { + predicates = 
append(predicates, points.DeletedAtEQ(*i.DeletedAt)) + } + if i.DeletedAtNEQ != nil { + predicates = append(predicates, points.DeletedAtNEQ(*i.DeletedAtNEQ)) + } + if len(i.DeletedAtIn) > 0 { + predicates = append(predicates, points.DeletedAtIn(i.DeletedAtIn...)) + } + if len(i.DeletedAtNotIn) > 0 { + predicates = append(predicates, points.DeletedAtNotIn(i.DeletedAtNotIn...)) + } + if i.DeletedAtGT != nil { + predicates = append(predicates, points.DeletedAtGT(*i.DeletedAtGT)) + } + if i.DeletedAtGTE != nil { + predicates = append(predicates, points.DeletedAtGTE(*i.DeletedAtGTE)) + } + if i.DeletedAtLT != nil { + predicates = append(predicates, points.DeletedAtLT(*i.DeletedAtLT)) + } + if i.DeletedAtLTE != nil { + predicates = append(predicates, points.DeletedAtLTE(*i.DeletedAtLTE)) + } + if i.DeletedAtIsNil { + predicates = append(predicates, points.DeletedAtIsNil()) + } + if i.DeletedAtNotNil { + predicates = append(predicates, points.DeletedAtNotNil()) + } + if i.Points != nil { + predicates = append(predicates, points.PointsEQ(*i.Points)) + } + if i.PointsNEQ != nil { + predicates = append(predicates, points.PointsNEQ(*i.PointsNEQ)) + } + if len(i.PointsIn) > 0 { + predicates = append(predicates, points.PointsIn(i.PointsIn...)) + } + if len(i.PointsNotIn) > 0 { + predicates = append(predicates, points.PointsNotIn(i.PointsNotIn...)) + } + if i.PointsGT != nil { + predicates = append(predicates, points.PointsGT(*i.PointsGT)) + } + if i.PointsGTE != nil { + predicates = append(predicates, points.PointsGTE(*i.PointsGTE)) + } + if i.PointsLT != nil { + predicates = append(predicates, points.PointsLT(*i.PointsLT)) + } + if i.PointsLTE != nil { + predicates = append(predicates, points.PointsLTE(*i.PointsLTE)) + } + if i.Description != nil { + predicates = append(predicates, points.DescriptionEQ(*i.Description)) + } + if i.DescriptionNEQ != nil { + predicates = append(predicates, points.DescriptionNEQ(*i.DescriptionNEQ)) + } + if len(i.DescriptionIn) > 0 { + predicates = append(predicates, points.DescriptionIn(i.DescriptionIn...)) + } + if len(i.DescriptionNotIn) > 0 { + predicates = append(predicates, points.DescriptionNotIn(i.DescriptionNotIn...)) + } + if i.DescriptionGT != nil { + predicates = append(predicates, points.DescriptionGT(*i.DescriptionGT)) + } + if i.DescriptionGTE != nil { + predicates = append(predicates, points.DescriptionGTE(*i.DescriptionGTE)) + } + if i.DescriptionLT != nil { + predicates = append(predicates, points.DescriptionLT(*i.DescriptionLT)) + } + if i.DescriptionLTE != nil { + predicates = append(predicates, points.DescriptionLTE(*i.DescriptionLTE)) + } + if i.DescriptionContains != nil { + predicates = append(predicates, points.DescriptionContains(*i.DescriptionContains)) + } + if i.DescriptionHasPrefix != nil { + predicates = append(predicates, points.DescriptionHasPrefix(*i.DescriptionHasPrefix)) + } + if i.DescriptionHasSuffix != nil { + predicates = append(predicates, points.DescriptionHasSuffix(*i.DescriptionHasSuffix)) + } + if i.DescriptionIsNil { + predicates = append(predicates, points.DescriptionIsNil()) + } + if i.DescriptionNotNil { + predicates = append(predicates, points.DescriptionNotNil()) + } + if i.DescriptionEqualFold != nil { + predicates = append(predicates, points.DescriptionEqualFold(*i.DescriptionEqualFold)) + } + if i.DescriptionContainsFold != nil { + predicates = append(predicates, points.DescriptionContainsFold(*i.DescriptionContainsFold)) + } + + if i.HasUser != nil { + p := points.HasUser() + if !*i.HasUser { + p = points.Not(p) + } + 
predicates = append(predicates, p) + } + if len(i.HasUserWith) > 0 { + with := make([]predicate.User, 0, len(i.HasUserWith)) + for _, w := range i.HasUserWith { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'HasUserWith'", err) + } + with = append(with, p) + } + predicates = append(predicates, points.HasUserWith(with...)) + } + switch len(predicates) { + case 0: + return nil, ErrEmptyPointsWhereInput + case 1: + return predicates[0], nil + default: + return points.And(predicates...), nil + } +} + // QuestionWhereInput represents a where input for filtering Question queries. type QuestionWhereInput struct { Predicates []predicate.Question `json:"-"` @@ -1498,6 +2104,14 @@ type UserWhereInput struct { // "group" edge predicates. HasGroup *bool `json:"hasGroup,omitempty"` HasGroupWith []*GroupWhereInput `json:"hasGroupWith,omitempty"` + + // "points" edge predicates. + HasPoints *bool `json:"hasPoints,omitempty"` + HasPointsWith []*PointsWhereInput `json:"hasPointsWith,omitempty"` + + // "events" edge predicates. + HasEvents *bool `json:"hasEvents,omitempty"` + HasEventsWith []*EventsWhereInput `json:"hasEventsWith,omitempty"` } // AddPredicates adds custom predicates to the where input to be used during the filtering phase. @@ -1815,6 +2429,42 @@ func (i *UserWhereInput) P() (predicate.User, error) { } predicates = append(predicates, user.HasGroupWith(with...)) } + if i.HasPoints != nil { + p := user.HasPoints() + if !*i.HasPoints { + p = user.Not(p) + } + predicates = append(predicates, p) + } + if len(i.HasPointsWith) > 0 { + with := make([]predicate.Points, 0, len(i.HasPointsWith)) + for _, w := range i.HasPointsWith { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'HasPointsWith'", err) + } + with = append(with, p) + } + predicates = append(predicates, user.HasPointsWith(with...)) + } + if i.HasEvents != nil { + p := user.HasEvents() + if !*i.HasEvents { + p = user.Not(p) + } + predicates = append(predicates, p) + } + if len(i.HasEventsWith) > 0 { + with := make([]predicate.Events, 0, len(i.HasEventsWith)) + for _, w := range i.HasEventsWith { + p, err := w.P() + if err != nil { + return nil, fmt.Errorf("%w: field 'HasEventsWith'", err) + } + with = append(with, p) + } + predicates = append(predicates, user.HasEventsWith(with...)) + } switch len(predicates) { case 0: return nil, ErrEmptyUserWhereInput diff --git a/ent/hook/hook.go b/ent/hook/hook.go index d009f54..c44f1c0 100644 --- a/ent/hook/hook.go +++ b/ent/hook/hook.go @@ -21,6 +21,18 @@ func (f DatabaseFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, er return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.DatabaseMutation", m) } +// The EventsFunc type is an adapter to allow the use of ordinary +// function as Events mutator. +type EventsFunc func(context.Context, *ent.EventsMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f EventsFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + if mv, ok := m.(*ent.EventsMutation); ok { + return f(ctx, mv) + } + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.EventsMutation", m) +} + // The GroupFunc type is an adapter to allow the use of ordinary // function as Group mutator. type GroupFunc func(context.Context, *ent.GroupMutation) (ent.Value, error) @@ -33,6 +45,18 @@ func (f GroupFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error return nil, fmt.Errorf("unexpected mutation type %T. 
expect *ent.GroupMutation", m) } +// The PointsFunc type is an adapter to allow the use of ordinary +// function as Points mutator. +type PointsFunc func(context.Context, *ent.PointsMutation) (ent.Value, error) + +// Mutate calls f(ctx, m). +func (f PointsFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) { + if mv, ok := m.(*ent.PointsMutation); ok { + return f(ctx, mv) + } + return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.PointsMutation", m) +} + // The QuestionFunc type is an adapter to allow the use of ordinary // function as Question mutator. type QuestionFunc func(context.Context, *ent.QuestionMutation) (ent.Value, error) diff --git a/ent/intercept/intercept.go b/ent/intercept/intercept.go index c1a3b55..d08700c 100644 --- a/ent/intercept/intercept.go +++ b/ent/intercept/intercept.go @@ -9,7 +9,9 @@ import ( "entgo.io/ent/dialect/sql" "github.com/database-playground/backend-v2/ent" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/predicate" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" @@ -99,6 +101,33 @@ func (f TraverseDatabase) Traverse(ctx context.Context, q ent.Query) error { return fmt.Errorf("unexpected query type %T. expect *ent.DatabaseQuery", q) } +// The EventsFunc type is an adapter to allow the use of ordinary function as a Querier. +type EventsFunc func(context.Context, *ent.EventsQuery) (ent.Value, error) + +// Query calls f(ctx, q). +func (f EventsFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) { + if q, ok := q.(*ent.EventsQuery); ok { + return f(ctx, q) + } + return nil, fmt.Errorf("unexpected query type %T. expect *ent.EventsQuery", q) +} + +// The TraverseEvents type is an adapter to allow the use of ordinary function as Traverser. +type TraverseEvents func(context.Context, *ent.EventsQuery) error + +// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline. +func (f TraverseEvents) Intercept(next ent.Querier) ent.Querier { + return next +} + +// Traverse calls f(ctx, q). +func (f TraverseEvents) Traverse(ctx context.Context, q ent.Query) error { + if q, ok := q.(*ent.EventsQuery); ok { + return f(ctx, q) + } + return fmt.Errorf("unexpected query type %T. expect *ent.EventsQuery", q) +} + // The GroupFunc type is an adapter to allow the use of ordinary function as a Querier. type GroupFunc func(context.Context, *ent.GroupQuery) (ent.Value, error) @@ -126,6 +155,33 @@ func (f TraverseGroup) Traverse(ctx context.Context, q ent.Query) error { return fmt.Errorf("unexpected query type %T. expect *ent.GroupQuery", q) } +// The PointsFunc type is an adapter to allow the use of ordinary function as a Querier. +type PointsFunc func(context.Context, *ent.PointsQuery) (ent.Value, error) + +// Query calls f(ctx, q). +func (f PointsFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) { + if q, ok := q.(*ent.PointsQuery); ok { + return f(ctx, q) + } + return nil, fmt.Errorf("unexpected query type %T. expect *ent.PointsQuery", q) +} + +// The TraversePoints type is an adapter to allow the use of ordinary function as Traverser. 
+type TraversePoints func(context.Context, *ent.PointsQuery) error + +// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline. +func (f TraversePoints) Intercept(next ent.Querier) ent.Querier { + return next +} + +// Traverse calls f(ctx, q). +func (f TraversePoints) Traverse(ctx context.Context, q ent.Query) error { + if q, ok := q.(*ent.PointsQuery); ok { + return f(ctx, q) + } + return fmt.Errorf("unexpected query type %T. expect *ent.PointsQuery", q) +} + // The QuestionFunc type is an adapter to allow the use of ordinary function as a Querier. type QuestionFunc func(context.Context, *ent.QuestionQuery) (ent.Value, error) @@ -212,8 +268,12 @@ func NewQuery(q ent.Query) (Query, error) { switch q := q.(type) { case *ent.DatabaseQuery: return &query[*ent.DatabaseQuery, predicate.Database, database.OrderOption]{typ: ent.TypeDatabase, tq: q}, nil + case *ent.EventsQuery: + return &query[*ent.EventsQuery, predicate.Events, events.OrderOption]{typ: ent.TypeEvents, tq: q}, nil case *ent.GroupQuery: return &query[*ent.GroupQuery, predicate.Group, group.OrderOption]{typ: ent.TypeGroup, tq: q}, nil + case *ent.PointsQuery: + return &query[*ent.PointsQuery, predicate.Points, points.OrderOption]{typ: ent.TypePoints, tq: q}, nil case *ent.QuestionQuery: return &query[*ent.QuestionQuery, predicate.Question, question.OrderOption]{typ: ent.TypeQuestion, tq: q}, nil case *ent.ScopeSetQuery: diff --git a/ent/internal/globalid.go b/ent/internal/globalid.go index 7dd7440..999b9ac 100644 --- a/ent/internal/globalid.go +++ b/ent/internal/globalid.go @@ -2,4 +2,4 @@ package internal -const IncrementStarts = "{\"databases\":12884901888,\"groups\":4294967296,\"questions\":17179869184,\"scope_sets\":8589934592,\"users\":0}" +const IncrementStarts = "{\"databases\":12884901888,\"events\":21474836480,\"groups\":4294967296,\"points\":25769803776,\"questions\":17179869184,\"scope_sets\":8589934592,\"users\":0}" diff --git a/ent/internal/schema.go b/ent/internal/schema.go index c21bd5d..691ecea 100644 --- a/ent/internal/schema.go +++ b/ent/internal/schema.go @@ -6,4 +6,4 @@ // Package internal holds a loadable version of the latest schema. 
package internal -const Schema = "{\"Schema\":\"github.com/database-playground/backend-v2/ent/schema\",\"Package\":\"github.com/database-playground/backend-v2/ent\",\"Schemas\":[{\"name\":\"Database\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"questions\",\"type\":\"Question\"}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"schema\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"validators\":1,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"SQL schema\"},{\"name\":\"relation_figure\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"validators\":1,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"relation figure\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"database:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":12884901888}}},{\"name\":\"Group\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"scope_sets\",\"type\":\"ScopeSet\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"group:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":4294967296}}},{\"name\":\"Question\",\"c
onfig\":{\"Table\":\"\"},\"edges\":[{\"name\":\"database\",\"type\":\"Database\",\"ref_name\":\"questions\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"category\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"CATEGORY\"}},\"comment\":\"Question category, e.g. 'query'\"},{\"name\":\"difficulty\",\"type\":{\"Type\":6,\"Ident\":\"question.Difficulty\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"enums\":[{\"N\":\"Unspecified\",\"V\":\"unspecified\"},{\"N\":\"Easy\",\"V\":\"easy\"},{\"N\":\"Medium\",\"V\":\"medium\"},{\"N\":\"Hard\",\"V\":\"hard\"}],\"default\":true,\"default_value\":\"medium\",\"default_kind\":24,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"DIFFICULTY\"}},\"comment\":\"Question difficulty, e.g. 'easy'\"},{\"name\":\"title\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question title\"},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question stem\"},{\"name\":\"reference_answer\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":4,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"answer:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"comment\":\"Reference 
answer\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"question:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":17179869184}}},{\"name\":\"ScopeSet\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"groups\",\"type\":\"Group\",\"ref_name\":\"scope_sets\",\"inverse\":true}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"scopes\",\"type\":{\"Type\":3,\"Ident\":\"[]string\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"[]string\",\"Kind\":23,\"PkgPath\":\"\",\"Methods\":{}}},\"default\":true,\"default_value\":[],\"default_kind\":23,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"scopeset:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":8589934592}}},{\"name\":\"User\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"group\",\"type\":\"Group\",\"unique\":true,\"required\":true}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"email\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"EMAIL\"}}},{\"name\":\"avatar\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\
":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":0}}}],\"Features\":[\"namedges\",\"intercept\",\"schema/snapshot\",\"sql/globalid\"]}" +const Schema = "{\"Schema\":\"github.com/database-playground/backend-v2/ent/schema\",\"Package\":\"github.com/database-playground/backend-v2/ent\",\"Schemas\":[{\"name\":\"Database\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"questions\",\"type\":\"Question\"}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"schema\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"validators\":1,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"SQL schema\"},{\"name\":\"relation_figure\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"validators\":1,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"relation figure\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"database:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":12884901888}}},{\"name\":\"Events\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"field\":\"user_id\",\"ref_name\":\"events\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"user_id\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"type\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"triggered_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"payload\",\"type\":{\"Type\":3,\"Ident\":\"map[string]interface {}\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"map[string]interface 
{}\",\"Kind\":21,\"PkgPath\":\"\",\"Methods\":{}}},\"optional\":true,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0}}],\"indexes\":[{\"fields\":[\"type\"]},{\"fields\":[\"type\",\"user_id\"]}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":21474836480}}},{\"name\":\"Group\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"scope_sets\",\"type\":\"ScopeSet\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"group:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":4294967296}}},{\"name\":\"Points\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"ref_name\":\"points\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"points\",\"type\":{\"Type\":12,\"Id
ent\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_value\":0,\"default_kind\":2,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":25769803776}}},{\"name\":\"Question\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"database\",\"type\":\"Database\",\"ref_name\":\"questions\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"category\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"CATEGORY\"}},\"comment\":\"Question category, e.g. 'query'\"},{\"name\":\"difficulty\",\"type\":{\"Type\":6,\"Ident\":\"question.Difficulty\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"enums\":[{\"N\":\"Unspecified\",\"V\":\"unspecified\"},{\"N\":\"Easy\",\"V\":\"easy\"},{\"N\":\"Medium\",\"V\":\"medium\"},{\"N\":\"Hard\",\"V\":\"hard\"}],\"default\":true,\"default_value\":\"medium\",\"default_kind\":24,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"DIFFICULTY\"}},\"comment\":\"Question difficulty, e.g. 
'easy'\"},{\"name\":\"title\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question title\"},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question stem\"},{\"name\":\"reference_answer\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":4,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"answer:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"comment\":\"Reference answer\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"question:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":17179869184}}},{\"name\":\"ScopeSet\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"groups\",\"type\":\"Group\",\"ref_name\":\"scope_sets\",\"inverse\":true}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"scopes\",\"type\":{\"Type\":3,\"Ident\":\"[]string\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"[]string\",\"Kind\":23,\"PkgPath\":\"\",\"Methods\":{}}},\"default\":true,\"default_value\":[],\"default_kind\":23,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"scopeset:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":8589934592}}},{\"name\":\"User\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"group\",\"type\":\"Group\",\"unique\":true,\"required\":true},{\"name\":\"points\",\"type\":\"Points\"},{\"name\":\"events\",\"type\":\"Events\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"
deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"email\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"EMAIL\"}}},{\"name\":\"avatar\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":0}}}],\"Features\":[\"namedges\",\"intercept\",\"schema/snapshot\",\"sql/globalid\"]}" diff --git a/ent/migrate/schema.go b/ent/migrate/schema.go index b82c375..b499624 100644 --- a/ent/migrate/schema.go +++ b/ent/migrate/schema.go @@ -23,6 +23,40 @@ var ( Columns: DatabasesColumns, PrimaryKey: []*schema.Column{DatabasesColumns[0]}, } + // EventsColumns holds the columns for the "events" table. + EventsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "type", Type: field.TypeString}, + {Name: "triggered_at", Type: field.TypeTime}, + {Name: "payload", Type: field.TypeJSON, Nullable: true}, + {Name: "user_id", Type: field.TypeInt}, + } + // EventsTable holds the schema information for the "events" table. + EventsTable = &schema.Table{ + Name: "events", + Columns: EventsColumns, + PrimaryKey: []*schema.Column{EventsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "events_users_events", + Columns: []*schema.Column{EventsColumns[4]}, + RefColumns: []*schema.Column{UsersColumns[0]}, + OnDelete: schema.NoAction, + }, + }, + Indexes: []*schema.Index{ + { + Name: "events_type", + Unique: false, + Columns: []*schema.Column{EventsColumns[1]}, + }, + { + Name: "events_type_user_id", + Unique: false, + Columns: []*schema.Column{EventsColumns[1], EventsColumns[4]}, + }, + }, + } // GroupsColumns holds the columns for the "groups" table. GroupsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, @@ -38,6 +72,30 @@ var ( Columns: GroupsColumns, PrimaryKey: []*schema.Column{GroupsColumns[0]}, } + // PointsColumns holds the columns for the "points" table. 
+ PointsColumns = []*schema.Column{ + {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "created_at", Type: field.TypeTime}, + {Name: "updated_at", Type: field.TypeTime}, + {Name: "deleted_at", Type: field.TypeTime, Nullable: true}, + {Name: "points", Type: field.TypeInt, Default: 0}, + {Name: "description", Type: field.TypeString, Nullable: true}, + {Name: "user_points", Type: field.TypeInt}, + } + // PointsTable holds the schema information for the "points" table. + PointsTable = &schema.Table{ + Name: "points", + Columns: PointsColumns, + PrimaryKey: []*schema.Column{PointsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "points_users_points", + Columns: []*schema.Column{PointsColumns[6]}, + RefColumns: []*schema.Column{UsersColumns[0]}, + OnDelete: schema.NoAction, + }, + }, + } // QuestionsColumns holds the columns for the "questions" table. QuestionsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, @@ -128,7 +186,9 @@ var ( // Tables holds all the tables in the schema. Tables = []*schema.Table{ DatabasesTable, + EventsTable, GroupsTable, + PointsTable, QuestionsTable, ScopeSetsTable, UsersTable, @@ -140,9 +200,17 @@ func init() { DatabasesTable.Annotation = &entsql.Annotation{ IncrementStart: func(i int) *int { return &i }(12884901888), } + EventsTable.ForeignKeys[0].RefTable = UsersTable + EventsTable.Annotation = &entsql.Annotation{ + IncrementStart: func(i int) *int { return &i }(21474836480), + } GroupsTable.Annotation = &entsql.Annotation{ IncrementStart: func(i int) *int { return &i }(4294967296), } + PointsTable.ForeignKeys[0].RefTable = UsersTable + PointsTable.Annotation = &entsql.Annotation{ + IncrementStart: func(i int) *int { return &i }(25769803776), + } QuestionsTable.ForeignKeys[0].RefTable = DatabasesTable QuestionsTable.Annotation = &entsql.Annotation{ IncrementStart: func(i int) *int { return &i }(17179869184), diff --git a/ent/mutation.go b/ent/mutation.go index a731b6c..b3decf2 100644 --- a/ent/mutation.go +++ b/ent/mutation.go @@ -12,7 +12,9 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/predicate" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/scopeset" @@ -29,7 +31,9 @@ const ( // Node types. TypeDatabase = "Database" + TypeEvents = "Events" TypeGroup = "Group" + TypePoints = "Points" TypeQuestion = "Question" TypeScopeSet = "ScopeSet" TypeUser = "User" @@ -638,37 +642,1280 @@ func (m *DatabaseMutation) ResetEdge(name string) error { return fmt.Errorf("unknown Database edge %s", name) } +// EventsMutation represents an operation that mutates the Events nodes in the graph. +type EventsMutation struct { + config + op Op + typ string + id *int + _type *string + triggered_at *time.Time + payload *map[string]interface{} + clearedFields map[string]struct{} + user *int + cleareduser bool + done bool + oldValue func(context.Context) (*Events, error) + predicates []predicate.Events +} + +var _ ent.Mutation = (*EventsMutation)(nil) + +// eventsOption allows management of the mutation configuration using functional options. +type eventsOption func(*EventsMutation) + +// newEventsMutation creates new mutation for the Events entity. 
+func newEventsMutation(c config, op Op, opts ...eventsOption) *EventsMutation { + m := &EventsMutation{ + config: c, + op: op, + typ: TypeEvents, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withEventsID sets the ID field of the mutation. +func withEventsID(id int) eventsOption { + return func(m *EventsMutation) { + var ( + err error + once sync.Once + value *Events + ) + m.oldValue = func(ctx context.Context) (*Events, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().Events.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withEvents sets the old Events of the mutation. +func withEvents(node *Events) eventsOption { + return func(m *EventsMutation) { + m.oldValue = func(context.Context) (*Events, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m EventsMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m EventsMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *EventsMutation) ID() (id int, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. +func (m *EventsMutation) IDs(ctx context.Context) ([]int, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []int{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().Events.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetUserID sets the "user_id" field. +func (m *EventsMutation) SetUserID(i int) { + m.user = &i +} + +// UserID returns the value of the "user_id" field in the mutation. +func (m *EventsMutation) UserID() (r int, exists bool) { + v := m.user + if v == nil { + return + } + return *v, true +} + +// OldUserID returns the old "user_id" field's value of the Events entity. +// If the Events object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *EventsMutation) OldUserID(ctx context.Context) (v int, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUserID is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUserID requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUserID: %w", err) + } + return oldValue.UserID, nil +} + +// ResetUserID resets all changes to the "user_id" field. +func (m *EventsMutation) ResetUserID() { + m.user = nil +} + +// SetType sets the "type" field. +func (m *EventsMutation) SetType(s string) { + m._type = &s +} + +// GetType returns the value of the "type" field in the mutation. +func (m *EventsMutation) GetType() (r string, exists bool) { + v := m._type + if v == nil { + return + } + return *v, true +} + +// OldType returns the old "type" field's value of the Events entity. +// If the Events object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *EventsMutation) OldType(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldType is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldType requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldType: %w", err) + } + return oldValue.Type, nil +} + +// ResetType resets all changes to the "type" field. +func (m *EventsMutation) ResetType() { + m._type = nil +} + +// SetTriggeredAt sets the "triggered_at" field. +func (m *EventsMutation) SetTriggeredAt(t time.Time) { + m.triggered_at = &t +} + +// TriggeredAt returns the value of the "triggered_at" field in the mutation. +func (m *EventsMutation) TriggeredAt() (r time.Time, exists bool) { + v := m.triggered_at + if v == nil { + return + } + return *v, true +} + +// OldTriggeredAt returns the old "triggered_at" field's value of the Events entity. +// If the Events object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *EventsMutation) OldTriggeredAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldTriggeredAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldTriggeredAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldTriggeredAt: %w", err) + } + return oldValue.TriggeredAt, nil +} + +// ResetTriggeredAt resets all changes to the "triggered_at" field. +func (m *EventsMutation) ResetTriggeredAt() { + m.triggered_at = nil +} + +// SetPayload sets the "payload" field. +func (m *EventsMutation) SetPayload(value map[string]interface{}) { + m.payload = &value +} + +// Payload returns the value of the "payload" field in the mutation. +func (m *EventsMutation) Payload() (r map[string]interface{}, exists bool) { + v := m.payload + if v == nil { + return + } + return *v, true +} + +// OldPayload returns the old "payload" field's value of the Events entity. +// If the Events object wasn't provided to the builder, the object is fetched from the database. 
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *EventsMutation) OldPayload(ctx context.Context) (v map[string]interface{}, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldPayload is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldPayload requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldPayload: %w", err) + } + return oldValue.Payload, nil +} + +// ClearPayload clears the value of the "payload" field. +func (m *EventsMutation) ClearPayload() { + m.payload = nil + m.clearedFields[events.FieldPayload] = struct{}{} +} + +// PayloadCleared returns if the "payload" field was cleared in this mutation. +func (m *EventsMutation) PayloadCleared() bool { + _, ok := m.clearedFields[events.FieldPayload] + return ok +} + +// ResetPayload resets all changes to the "payload" field. +func (m *EventsMutation) ResetPayload() { + m.payload = nil + delete(m.clearedFields, events.FieldPayload) +} + +// ClearUser clears the "user" edge to the User entity. +func (m *EventsMutation) ClearUser() { + m.cleareduser = true + m.clearedFields[events.FieldUserID] = struct{}{} +} + +// UserCleared reports if the "user" edge to the User entity was cleared. +func (m *EventsMutation) UserCleared() bool { + return m.cleareduser +} + +// UserIDs returns the "user" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// UserID instead. It exists only for internal usage by the builders. +func (m *EventsMutation) UserIDs() (ids []int) { + if id := m.user; id != nil { + ids = append(ids, *id) + } + return +} + +// ResetUser resets all changes to the "user" edge. +func (m *EventsMutation) ResetUser() { + m.user = nil + m.cleareduser = false +} + +// Where appends a list predicates to the EventsMutation builder. +func (m *EventsMutation) Where(ps ...predicate.Events) { + m.predicates = append(m.predicates, ps...) +} + +// WhereP appends storage-level predicates to the EventsMutation builder. Using this method, +// users can use type-assertion to append predicates that do not depend on any generated package. +func (m *EventsMutation) WhereP(ps ...func(*sql.Selector)) { + p := make([]predicate.Events, len(ps)) + for i := range ps { + p[i] = ps[i] + } + m.Where(p...) +} + +// Op returns the operation name. +func (m *EventsMutation) Op() Op { + return m.op +} + +// SetOp allows setting the mutation operation. +func (m *EventsMutation) SetOp(op Op) { + m.op = op +} + +// Type returns the node type of this mutation (Events). +func (m *EventsMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). +func (m *EventsMutation) Fields() []string { + fields := make([]string, 0, 4) + if m.user != nil { + fields = append(fields, events.FieldUserID) + } + if m._type != nil { + fields = append(fields, events.FieldType) + } + if m.triggered_at != nil { + fields = append(fields, events.FieldTriggeredAt) + } + if m.payload != nil { + fields = append(fields, events.FieldPayload) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. 
+func (m *EventsMutation) Field(name string) (ent.Value, bool) { + switch name { + case events.FieldUserID: + return m.UserID() + case events.FieldType: + return m.GetType() + case events.FieldTriggeredAt: + return m.TriggeredAt() + case events.FieldPayload: + return m.Payload() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *EventsMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case events.FieldUserID: + return m.OldUserID(ctx) + case events.FieldType: + return m.OldType(ctx) + case events.FieldTriggeredAt: + return m.OldTriggeredAt(ctx) + case events.FieldPayload: + return m.OldPayload(ctx) + } + return nil, fmt.Errorf("unknown Events field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *EventsMutation) SetField(name string, value ent.Value) error { + switch name { + case events.FieldUserID: + v, ok := value.(int) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUserID(v) + return nil + case events.FieldType: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetType(v) + return nil + case events.FieldTriggeredAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetTriggeredAt(v) + return nil + case events.FieldPayload: + v, ok := value.(map[string]interface{}) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetPayload(v) + return nil + } + return fmt.Errorf("unknown Events field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *EventsMutation) AddedFields() []string { + var fields []string + return fields +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *EventsMutation) AddedField(name string) (ent.Value, bool) { + switch name { + } + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *EventsMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown Events numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *EventsMutation) ClearedFields() []string { + var fields []string + if m.FieldCleared(events.FieldPayload) { + fields = append(fields, events.FieldPayload) + } + return fields +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *EventsMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. 
+func (m *EventsMutation) ClearField(name string) error { + switch name { + case events.FieldPayload: + m.ClearPayload() + return nil + } + return fmt.Errorf("unknown Events nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *EventsMutation) ResetField(name string) error { + switch name { + case events.FieldUserID: + m.ResetUserID() + return nil + case events.FieldType: + m.ResetType() + return nil + case events.FieldTriggeredAt: + m.ResetTriggeredAt() + return nil + case events.FieldPayload: + m.ResetPayload() + return nil + } + return fmt.Errorf("unknown Events field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *EventsMutation) AddedEdges() []string { + edges := make([]string, 0, 1) + if m.user != nil { + edges = append(edges, events.EdgeUser) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *EventsMutation) AddedIDs(name string) []ent.Value { + switch name { + case events.EdgeUser: + if id := m.user; id != nil { + return []ent.Value{*id} + } + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *EventsMutation) RemovedEdges() []string { + edges := make([]string, 0, 1) + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *EventsMutation) RemovedIDs(name string) []ent.Value { + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *EventsMutation) ClearedEdges() []string { + edges := make([]string, 0, 1) + if m.cleareduser { + edges = append(edges, events.EdgeUser) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. +func (m *EventsMutation) EdgeCleared(name string) bool { + switch name { + case events.EdgeUser: + return m.cleareduser + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *EventsMutation) ClearEdge(name string) error { + switch name { + case events.EdgeUser: + m.ClearUser() + return nil + } + return fmt.Errorf("unknown Events unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *EventsMutation) ResetEdge(name string) error { + switch name { + case events.EdgeUser: + m.ResetUser() + return nil + } + return fmt.Errorf("unknown Events edge %s", name) +} + // GroupMutation represents an operation that mutates the Group nodes in the graph. 
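A minimal sketch, not part of this patch, of how the generated EventsMutation accessors above (GetType, TriggeredAt, SetTriggeredAt, WhereP) might be used from a mutation hook. The package name, import alias, module path, and the specific validation policy are assumptions for illustration only.

package example

import (
	"context"
	"errors"
	"time"

	"entgo.io/ent"
	"entgo.io/ent/dialect/sql"

	gen "github.com/database-playground/backend-v2/ent" // generated package; path assumed
)

// RequireEventType rejects Events mutations whose "type" field is unset or
// empty, and stamps "triggered_at" when the caller left it unset.
func RequireEventType(next ent.Mutator) ent.Mutator {
	return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
		em, ok := m.(*gen.EventsMutation)
		if !ok {
			return next.Mutate(ctx, m) // not an Events mutation; pass through
		}
		if t, exists := em.GetType(); !exists || t == "" {
			return nil, errors.New("events: type must not be empty")
		}
		if _, exists := em.TriggeredAt(); !exists {
			em.SetTriggeredAt(time.Now())
		}
		return next.Mutate(ctx, m)
	})
}

// onlyLoginEvents shows the WhereP escape hatch described above: it narrows a
// bulk Events mutation with a raw storage-level predicate, without importing
// the generated predicate package.
func onlyLoginEvents(m *gen.EventsMutation) {
	m.WhereP(func(s *sql.Selector) {
		s.Where(sql.EQ(s.C("type"), "login"))
	})
}

Registering the hook would follow the usual generated pattern, e.g. client.Events.Use(RequireEventType).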
type GroupMutation struct { config - op Op - typ string - id *int - created_at *time.Time - updated_at *time.Time - deleted_at *time.Time - name *string - description *string - clearedFields map[string]struct{} - scope_sets map[int]struct{} - removedscope_sets map[int]struct{} - clearedscope_sets bool - done bool - oldValue func(context.Context) (*Group, error) - predicates []predicate.Group + op Op + typ string + id *int + created_at *time.Time + updated_at *time.Time + deleted_at *time.Time + name *string + description *string + clearedFields map[string]struct{} + scope_sets map[int]struct{} + removedscope_sets map[int]struct{} + clearedscope_sets bool + done bool + oldValue func(context.Context) (*Group, error) + predicates []predicate.Group +} + +var _ ent.Mutation = (*GroupMutation)(nil) + +// groupOption allows management of the mutation configuration using functional options. +type groupOption func(*GroupMutation) + +// newGroupMutation creates new mutation for the Group entity. +func newGroupMutation(c config, op Op, opts ...groupOption) *GroupMutation { + m := &GroupMutation{ + config: c, + op: op, + typ: TypeGroup, + clearedFields: make(map[string]struct{}), + } + for _, opt := range opts { + opt(m) + } + return m +} + +// withGroupID sets the ID field of the mutation. +func withGroupID(id int) groupOption { + return func(m *GroupMutation) { + var ( + err error + once sync.Once + value *Group + ) + m.oldValue = func(ctx context.Context) (*Group, error) { + once.Do(func() { + if m.done { + err = errors.New("querying old values post mutation is not allowed") + } else { + value, err = m.Client().Group.Get(ctx, id) + } + }) + return value, err + } + m.id = &id + } +} + +// withGroup sets the old Group of the mutation. +func withGroup(node *Group) groupOption { + return func(m *GroupMutation) { + m.oldValue = func(context.Context) (*Group, error) { + return node, nil + } + m.id = &node.ID + } +} + +// Client returns a new `ent.Client` from the mutation. If the mutation was +// executed in a transaction (ent.Tx), a transactional client is returned. +func (m GroupMutation) Client() *Client { + client := &Client{config: m.config} + client.init() + return client +} + +// Tx returns an `ent.Tx` for mutations that were executed in transactions; +// it returns an error otherwise. +func (m GroupMutation) Tx() (*Tx, error) { + if _, ok := m.driver.(*txDriver); !ok { + return nil, errors.New("ent: mutation is not running in a transaction") + } + tx := &Tx{config: m.config} + tx.init() + return tx, nil +} + +// ID returns the ID value in the mutation. Note that the ID is only available +// if it was provided to the builder or after it was returned from the database. +func (m *GroupMutation) ID() (id int, exists bool) { + if m.id == nil { + return + } + return *m.id, true +} + +// IDs queries the database and returns the entity ids that match the mutation's predicate. +// That means, if the mutation is applied within a transaction with an isolation level such +// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated +// or updated by the mutation. 
+func (m *GroupMutation) IDs(ctx context.Context) ([]int, error) { + switch { + case m.op.Is(OpUpdateOne | OpDeleteOne): + id, exists := m.ID() + if exists { + return []int{id}, nil + } + fallthrough + case m.op.Is(OpUpdate | OpDelete): + return m.Client().Group.Query().Where(m.predicates...).IDs(ctx) + default: + return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) + } +} + +// SetCreatedAt sets the "created_at" field. +func (m *GroupMutation) SetCreatedAt(t time.Time) { + m.created_at = &t +} + +// CreatedAt returns the value of the "created_at" field in the mutation. +func (m *GroupMutation) CreatedAt() (r time.Time, exists bool) { + v := m.created_at + if v == nil { + return + } + return *v, true +} + +// OldCreatedAt returns the old "created_at" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldCreatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) + } + return oldValue.CreatedAt, nil +} + +// ResetCreatedAt resets all changes to the "created_at" field. +func (m *GroupMutation) ResetCreatedAt() { + m.created_at = nil +} + +// SetUpdatedAt sets the "updated_at" field. +func (m *GroupMutation) SetUpdatedAt(t time.Time) { + m.updated_at = &t +} + +// UpdatedAt returns the value of the "updated_at" field in the mutation. +func (m *GroupMutation) UpdatedAt() (r time.Time, exists bool) { + v := m.updated_at + if v == nil { + return + } + return *v, true +} + +// OldUpdatedAt returns the old "updated_at" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldUpdatedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) + } + return oldValue.UpdatedAt, nil +} + +// ResetUpdatedAt resets all changes to the "updated_at" field. +func (m *GroupMutation) ResetUpdatedAt() { + m.updated_at = nil +} + +// SetDeletedAt sets the "deleted_at" field. +func (m *GroupMutation) SetDeletedAt(t time.Time) { + m.deleted_at = &t +} + +// DeletedAt returns the value of the "deleted_at" field in the mutation. +func (m *GroupMutation) DeletedAt() (r time.Time, exists bool) { + v := m.deleted_at + if v == nil { + return + } + return *v, true +} + +// OldDeletedAt returns the old "deleted_at" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *GroupMutation) OldDeletedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldDeletedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldDeletedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldDeletedAt: %w", err) + } + return oldValue.DeletedAt, nil +} + +// ClearDeletedAt clears the value of the "deleted_at" field. +func (m *GroupMutation) ClearDeletedAt() { + m.deleted_at = nil + m.clearedFields[group.FieldDeletedAt] = struct{}{} +} + +// DeletedAtCleared returns if the "deleted_at" field was cleared in this mutation. +func (m *GroupMutation) DeletedAtCleared() bool { + _, ok := m.clearedFields[group.FieldDeletedAt] + return ok +} + +// ResetDeletedAt resets all changes to the "deleted_at" field. +func (m *GroupMutation) ResetDeletedAt() { + m.deleted_at = nil + delete(m.clearedFields, group.FieldDeletedAt) +} + +// SetName sets the "name" field. +func (m *GroupMutation) SetName(s string) { + m.name = &s +} + +// Name returns the value of the "name" field in the mutation. +func (m *GroupMutation) Name() (r string, exists bool) { + v := m.name + if v == nil { + return + } + return *v, true +} + +// OldName returns the old "name" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldName(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldName is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldName requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldName: %w", err) + } + return oldValue.Name, nil +} + +// ResetName resets all changes to the "name" field. +func (m *GroupMutation) ResetName() { + m.name = nil +} + +// SetDescription sets the "description" field. +func (m *GroupMutation) SetDescription(s string) { + m.description = &s +} + +// Description returns the value of the "description" field in the mutation. +func (m *GroupMutation) Description() (r string, exists bool) { + v := m.description + if v == nil { + return + } + return *v, true +} + +// OldDescription returns the old "description" field's value of the Group entity. +// If the Group object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *GroupMutation) OldDescription(ctx context.Context) (v string, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldDescription is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldDescription requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldDescription: %w", err) + } + return oldValue.Description, nil +} + +// ClearDescription clears the value of the "description" field. 
+func (m *GroupMutation) ClearDescription() { + m.description = nil + m.clearedFields[group.FieldDescription] = struct{}{} +} + +// DescriptionCleared returns if the "description" field was cleared in this mutation. +func (m *GroupMutation) DescriptionCleared() bool { + _, ok := m.clearedFields[group.FieldDescription] + return ok +} + +// ResetDescription resets all changes to the "description" field. +func (m *GroupMutation) ResetDescription() { + m.description = nil + delete(m.clearedFields, group.FieldDescription) +} + +// AddScopeSetIDs adds the "scope_sets" edge to the ScopeSet entity by ids. +func (m *GroupMutation) AddScopeSetIDs(ids ...int) { + if m.scope_sets == nil { + m.scope_sets = make(map[int]struct{}) + } + for i := range ids { + m.scope_sets[ids[i]] = struct{}{} + } +} + +// ClearScopeSets clears the "scope_sets" edge to the ScopeSet entity. +func (m *GroupMutation) ClearScopeSets() { + m.clearedscope_sets = true +} + +// ScopeSetsCleared reports if the "scope_sets" edge to the ScopeSet entity was cleared. +func (m *GroupMutation) ScopeSetsCleared() bool { + return m.clearedscope_sets +} + +// RemoveScopeSetIDs removes the "scope_sets" edge to the ScopeSet entity by IDs. +func (m *GroupMutation) RemoveScopeSetIDs(ids ...int) { + if m.removedscope_sets == nil { + m.removedscope_sets = make(map[int]struct{}) + } + for i := range ids { + delete(m.scope_sets, ids[i]) + m.removedscope_sets[ids[i]] = struct{}{} + } +} + +// RemovedScopeSets returns the removed IDs of the "scope_sets" edge to the ScopeSet entity. +func (m *GroupMutation) RemovedScopeSetsIDs() (ids []int) { + for id := range m.removedscope_sets { + ids = append(ids, id) + } + return +} + +// ScopeSetsIDs returns the "scope_sets" edge IDs in the mutation. +func (m *GroupMutation) ScopeSetsIDs() (ids []int) { + for id := range m.scope_sets { + ids = append(ids, id) + } + return +} + +// ResetScopeSets resets all changes to the "scope_sets" edge. +func (m *GroupMutation) ResetScopeSets() { + m.scope_sets = nil + m.clearedscope_sets = false + m.removedscope_sets = nil +} + +// Where appends a list predicates to the GroupMutation builder. +func (m *GroupMutation) Where(ps ...predicate.Group) { + m.predicates = append(m.predicates, ps...) +} + +// WhereP appends storage-level predicates to the GroupMutation builder. Using this method, +// users can use type-assertion to append predicates that do not depend on any generated package. +func (m *GroupMutation) WhereP(ps ...func(*sql.Selector)) { + p := make([]predicate.Group, len(ps)) + for i := range ps { + p[i] = ps[i] + } + m.Where(p...) +} + +// Op returns the operation name. +func (m *GroupMutation) Op() Op { + return m.op +} + +// SetOp allows setting the mutation operation. +func (m *GroupMutation) SetOp(op Op) { + m.op = op +} + +// Type returns the node type of this mutation (Group). +func (m *GroupMutation) Type() string { + return m.typ +} + +// Fields returns all fields that were changed during this mutation. Note that in +// order to get all numeric fields that were incremented/decremented, call +// AddedFields(). 
+func (m *GroupMutation) Fields() []string { + fields := make([]string, 0, 5) + if m.created_at != nil { + fields = append(fields, group.FieldCreatedAt) + } + if m.updated_at != nil { + fields = append(fields, group.FieldUpdatedAt) + } + if m.deleted_at != nil { + fields = append(fields, group.FieldDeletedAt) + } + if m.name != nil { + fields = append(fields, group.FieldName) + } + if m.description != nil { + fields = append(fields, group.FieldDescription) + } + return fields +} + +// Field returns the value of a field with the given name. The second boolean +// return value indicates that this field was not set, or was not defined in the +// schema. +func (m *GroupMutation) Field(name string) (ent.Value, bool) { + switch name { + case group.FieldCreatedAt: + return m.CreatedAt() + case group.FieldUpdatedAt: + return m.UpdatedAt() + case group.FieldDeletedAt: + return m.DeletedAt() + case group.FieldName: + return m.Name() + case group.FieldDescription: + return m.Description() + } + return nil, false +} + +// OldField returns the old value of the field from the database. An error is +// returned if the mutation operation is not UpdateOne, or the query to the +// database failed. +func (m *GroupMutation) OldField(ctx context.Context, name string) (ent.Value, error) { + switch name { + case group.FieldCreatedAt: + return m.OldCreatedAt(ctx) + case group.FieldUpdatedAt: + return m.OldUpdatedAt(ctx) + case group.FieldDeletedAt: + return m.OldDeletedAt(ctx) + case group.FieldName: + return m.OldName(ctx) + case group.FieldDescription: + return m.OldDescription(ctx) + } + return nil, fmt.Errorf("unknown Group field %s", name) +} + +// SetField sets the value of a field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. +func (m *GroupMutation) SetField(name string, value ent.Value) error { + switch name { + case group.FieldCreatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetCreatedAt(v) + return nil + case group.FieldUpdatedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetUpdatedAt(v) + return nil + case group.FieldDeletedAt: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetDeletedAt(v) + return nil + case group.FieldName: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetName(v) + return nil + case group.FieldDescription: + v, ok := value.(string) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetDescription(v) + return nil + } + return fmt.Errorf("unknown Group field %s", name) +} + +// AddedFields returns all numeric fields that were incremented/decremented during +// this mutation. +func (m *GroupMutation) AddedFields() []string { + return nil +} + +// AddedField returns the numeric value that was incremented/decremented on a field +// with the given name. The second boolean return value indicates that this field +// was not set, or was not defined in the schema. +func (m *GroupMutation) AddedField(name string) (ent.Value, bool) { + return nil, false +} + +// AddField adds the value to the field with the given name. It returns an error if +// the field is not defined in the schema, or if the type mismatched the field +// type. 
+func (m *GroupMutation) AddField(name string, value ent.Value) error { + switch name { + } + return fmt.Errorf("unknown Group numeric field %s", name) +} + +// ClearedFields returns all nullable fields that were cleared during this +// mutation. +func (m *GroupMutation) ClearedFields() []string { + var fields []string + if m.FieldCleared(group.FieldDeletedAt) { + fields = append(fields, group.FieldDeletedAt) + } + if m.FieldCleared(group.FieldDescription) { + fields = append(fields, group.FieldDescription) + } + return fields +} + +// FieldCleared returns a boolean indicating if a field with the given name was +// cleared in this mutation. +func (m *GroupMutation) FieldCleared(name string) bool { + _, ok := m.clearedFields[name] + return ok +} + +// ClearField clears the value of the field with the given name. It returns an +// error if the field is not defined in the schema. +func (m *GroupMutation) ClearField(name string) error { + switch name { + case group.FieldDeletedAt: + m.ClearDeletedAt() + return nil + case group.FieldDescription: + m.ClearDescription() + return nil + } + return fmt.Errorf("unknown Group nullable field %s", name) +} + +// ResetField resets all changes in the mutation for the field with the given name. +// It returns an error if the field is not defined in the schema. +func (m *GroupMutation) ResetField(name string) error { + switch name { + case group.FieldCreatedAt: + m.ResetCreatedAt() + return nil + case group.FieldUpdatedAt: + m.ResetUpdatedAt() + return nil + case group.FieldDeletedAt: + m.ResetDeletedAt() + return nil + case group.FieldName: + m.ResetName() + return nil + case group.FieldDescription: + m.ResetDescription() + return nil + } + return fmt.Errorf("unknown Group field %s", name) +} + +// AddedEdges returns all edge names that were set/added in this mutation. +func (m *GroupMutation) AddedEdges() []string { + edges := make([]string, 0, 1) + if m.scope_sets != nil { + edges = append(edges, group.EdgeScopeSets) + } + return edges +} + +// AddedIDs returns all IDs (to other nodes) that were added for the given edge +// name in this mutation. +func (m *GroupMutation) AddedIDs(name string) []ent.Value { + switch name { + case group.EdgeScopeSets: + ids := make([]ent.Value, 0, len(m.scope_sets)) + for id := range m.scope_sets { + ids = append(ids, id) + } + return ids + } + return nil +} + +// RemovedEdges returns all edge names that were removed in this mutation. +func (m *GroupMutation) RemovedEdges() []string { + edges := make([]string, 0, 1) + if m.removedscope_sets != nil { + edges = append(edges, group.EdgeScopeSets) + } + return edges +} + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *GroupMutation) RemovedIDs(name string) []ent.Value { + switch name { + case group.EdgeScopeSets: + ids := make([]ent.Value, 0, len(m.removedscope_sets)) + for id := range m.removedscope_sets { + ids = append(ids, id) + } + return ids + } + return nil +} + +// ClearedEdges returns all edge names that were cleared in this mutation. +func (m *GroupMutation) ClearedEdges() []string { + edges := make([]string, 0, 1) + if m.clearedscope_sets { + edges = append(edges, group.EdgeScopeSets) + } + return edges +} + +// EdgeCleared returns a boolean which indicates if the edge with the given name +// was cleared in this mutation. 
+func (m *GroupMutation) EdgeCleared(name string) bool { + switch name { + case group.EdgeScopeSets: + return m.clearedscope_sets + } + return false +} + +// ClearEdge clears the value of the edge with the given name. It returns an error +// if that edge is not defined in the schema. +func (m *GroupMutation) ClearEdge(name string) error { + switch name { + } + return fmt.Errorf("unknown Group unique edge %s", name) +} + +// ResetEdge resets all changes to the edge with the given name in this mutation. +// It returns an error if the edge is not defined in the schema. +func (m *GroupMutation) ResetEdge(name string) error { + switch name { + case group.EdgeScopeSets: + m.ResetScopeSets() + return nil + } + return fmt.Errorf("unknown Group edge %s", name) +} + +// PointsMutation represents an operation that mutates the Points nodes in the graph. +type PointsMutation struct { + config + op Op + typ string + id *int + created_at *time.Time + updated_at *time.Time + deleted_at *time.Time + points *int + addpoints *int + description *string + clearedFields map[string]struct{} + user *int + cleareduser bool + done bool + oldValue func(context.Context) (*Points, error) + predicates []predicate.Points } -var _ ent.Mutation = (*GroupMutation)(nil) +var _ ent.Mutation = (*PointsMutation)(nil) -// groupOption allows management of the mutation configuration using functional options. -type groupOption func(*GroupMutation) +// pointsOption allows management of the mutation configuration using functional options. +type pointsOption func(*PointsMutation) -// newGroupMutation creates new mutation for the Group entity. -func newGroupMutation(c config, op Op, opts ...groupOption) *GroupMutation { - m := &GroupMutation{ +// newPointsMutation creates new mutation for the Points entity. +func newPointsMutation(c config, op Op, opts ...pointsOption) *PointsMutation { + m := &PointsMutation{ config: c, op: op, - typ: TypeGroup, + typ: TypePoints, clearedFields: make(map[string]struct{}), } for _, opt := range opts { @@ -677,20 +1924,20 @@ func newGroupMutation(c config, op Op, opts ...groupOption) *GroupMutation { return m } -// withGroupID sets the ID field of the mutation. -func withGroupID(id int) groupOption { - return func(m *GroupMutation) { +// withPointsID sets the ID field of the mutation. +func withPointsID(id int) pointsOption { + return func(m *PointsMutation) { var ( err error once sync.Once - value *Group + value *Points ) - m.oldValue = func(ctx context.Context) (*Group, error) { + m.oldValue = func(ctx context.Context) (*Points, error) { once.Do(func() { if m.done { err = errors.New("querying old values post mutation is not allowed") } else { - value, err = m.Client().Group.Get(ctx, id) + value, err = m.Client().Points.Get(ctx, id) } }) return value, err @@ -699,10 +1946,10 @@ func withGroupID(id int) groupOption { } } -// withGroup sets the old Group of the mutation. -func withGroup(node *Group) groupOption { - return func(m *GroupMutation) { - m.oldValue = func(context.Context) (*Group, error) { +// withPoints sets the old Points of the mutation. +func withPoints(node *Points) pointsOption { + return func(m *PointsMutation) { + m.oldValue = func(context.Context) (*Points, error) { return node, nil } m.id = &node.ID @@ -711,7 +1958,7 @@ func withGroup(node *Group) groupOption { // Client returns a new `ent.Client` from the mutation. If the mutation was // executed in a transaction (ent.Tx), a transactional client is returned. 
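The Client and Tx helpers carried over to PointsMutation in the hunks that follow matter mostly inside hooks: when the mutation runs inside an ent.Tx, Client() hands back a client bound to the same transaction, so reads stay consistent with the pending write. A hedged sketch, with illustrative names only and the module path assumed, of using that to validate the referenced user before a Points row is written:

package example

import (
	"context"
	"fmt"

	"entgo.io/ent"

	gen "github.com/database-playground/backend-v2/ent" // generated package; path assumed
)

// EnsureUserExists checks, before a Points mutation proceeds, that the user
// it references actually exists. Client() returns a transactional client when
// the mutation runs inside an ent.Tx, so the lookup observes the same state
// the pending write will be applied to.
func EnsureUserExists(next ent.Mutator) ent.Mutator {
	return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
		pm, ok := m.(*gen.PointsMutation)
		if !ok {
			return next.Mutate(ctx, m)
		}
		if uid, exists := pm.UserID(); exists {
			if _, err := pm.Client().User.Get(ctx, uid); err != nil {
				return nil, fmt.Errorf("points: referenced user %d: %w", uid, err)
			}
		}
		return next.Mutate(ctx, m)
	})
}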
-func (m GroupMutation) Client() *Client { +func (m PointsMutation) Client() *Client { client := &Client{config: m.config} client.init() return client @@ -719,7 +1966,7 @@ func (m GroupMutation) Client() *Client { // Tx returns an `ent.Tx` for mutations that were executed in transactions; // it returns an error otherwise. -func (m GroupMutation) Tx() (*Tx, error) { +func (m PointsMutation) Tx() (*Tx, error) { if _, ok := m.driver.(*txDriver); !ok { return nil, errors.New("ent: mutation is not running in a transaction") } @@ -730,7 +1977,7 @@ func (m GroupMutation) Tx() (*Tx, error) { // ID returns the ID value in the mutation. Note that the ID is only available // if it was provided to the builder or after it was returned from the database. -func (m *GroupMutation) ID() (id int, exists bool) { +func (m *PointsMutation) ID() (id int, exists bool) { if m.id == nil { return } @@ -741,7 +1988,7 @@ func (m *GroupMutation) ID() (id int, exists bool) { // That means, if the mutation is applied within a transaction with an isolation level such // as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated // or updated by the mutation. -func (m *GroupMutation) IDs(ctx context.Context) ([]int, error) { +func (m *PointsMutation) IDs(ctx context.Context) ([]int, error) { switch { case m.op.Is(OpUpdateOne | OpDeleteOne): id, exists := m.ID() @@ -750,19 +1997,19 @@ func (m *GroupMutation) IDs(ctx context.Context) ([]int, error) { } fallthrough case m.op.Is(OpUpdate | OpDelete): - return m.Client().Group.Query().Where(m.predicates...).IDs(ctx) + return m.Client().Points.Query().Where(m.predicates...).IDs(ctx) default: return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op) } } // SetCreatedAt sets the "created_at" field. -func (m *GroupMutation) SetCreatedAt(t time.Time) { +func (m *PointsMutation) SetCreatedAt(t time.Time) { m.created_at = &t } // CreatedAt returns the value of the "created_at" field in the mutation. -func (m *GroupMutation) CreatedAt() (r time.Time, exists bool) { +func (m *PointsMutation) CreatedAt() (r time.Time, exists bool) { v := m.created_at if v == nil { return @@ -770,10 +2017,10 @@ func (m *GroupMutation) CreatedAt() (r time.Time, exists bool) { return *v, true } -// OldCreatedAt returns the old "created_at" field's value of the Group entity. -// If the Group object wasn't provided to the builder, the object is fetched from the database. +// OldCreatedAt returns the old "created_at" field's value of the Points entity. +// If the Points object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *GroupMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { +func (m *PointsMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") } @@ -788,17 +2035,17 @@ func (m *GroupMutation) OldCreatedAt(ctx context.Context) (v time.Time, err erro } // ResetCreatedAt resets all changes to the "created_at" field. -func (m *GroupMutation) ResetCreatedAt() { +func (m *PointsMutation) ResetCreatedAt() { m.created_at = nil } // SetUpdatedAt sets the "updated_at" field. -func (m *GroupMutation) SetUpdatedAt(t time.Time) { +func (m *PointsMutation) SetUpdatedAt(t time.Time) { m.updated_at = &t } // UpdatedAt returns the value of the "updated_at" field in the mutation. 
-func (m *GroupMutation) UpdatedAt() (r time.Time, exists bool) { +func (m *PointsMutation) UpdatedAt() (r time.Time, exists bool) { v := m.updated_at if v == nil { return @@ -806,10 +2053,10 @@ func (m *GroupMutation) UpdatedAt() (r time.Time, exists bool) { return *v, true } -// OldUpdatedAt returns the old "updated_at" field's value of the Group entity. -// If the Group object wasn't provided to the builder, the object is fetched from the database. +// OldUpdatedAt returns the old "updated_at" field's value of the Points entity. +// If the Points object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *GroupMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { +func (m *PointsMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") } @@ -824,17 +2071,17 @@ func (m *GroupMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err erro } // ResetUpdatedAt resets all changes to the "updated_at" field. -func (m *GroupMutation) ResetUpdatedAt() { +func (m *PointsMutation) ResetUpdatedAt() { m.updated_at = nil } // SetDeletedAt sets the "deleted_at" field. -func (m *GroupMutation) SetDeletedAt(t time.Time) { +func (m *PointsMutation) SetDeletedAt(t time.Time) { m.deleted_at = &t } // DeletedAt returns the value of the "deleted_at" field in the mutation. -func (m *GroupMutation) DeletedAt() (r time.Time, exists bool) { +func (m *PointsMutation) DeletedAt() (r time.Time, exists bool) { v := m.deleted_at if v == nil { return @@ -842,10 +2089,10 @@ func (m *GroupMutation) DeletedAt() (r time.Time, exists bool) { return *v, true } -// OldDeletedAt returns the old "deleted_at" field's value of the Group entity. -// If the Group object wasn't provided to the builder, the object is fetched from the database. +// OldDeletedAt returns the old "deleted_at" field's value of the Points entity. +// If the Points object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *GroupMutation) OldDeletedAt(ctx context.Context) (v time.Time, err error) { +func (m *PointsMutation) OldDeletedAt(ctx context.Context) (v time.Time, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldDeletedAt is only allowed on UpdateOne operations") } @@ -860,66 +2107,86 @@ func (m *GroupMutation) OldDeletedAt(ctx context.Context) (v time.Time, err erro } // ClearDeletedAt clears the value of the "deleted_at" field. -func (m *GroupMutation) ClearDeletedAt() { +func (m *PointsMutation) ClearDeletedAt() { m.deleted_at = nil - m.clearedFields[group.FieldDeletedAt] = struct{}{} + m.clearedFields[points.FieldDeletedAt] = struct{}{} } // DeletedAtCleared returns if the "deleted_at" field was cleared in this mutation. -func (m *GroupMutation) DeletedAtCleared() bool { - _, ok := m.clearedFields[group.FieldDeletedAt] +func (m *PointsMutation) DeletedAtCleared() bool { + _, ok := m.clearedFields[points.FieldDeletedAt] return ok } // ResetDeletedAt resets all changes to the "deleted_at" field. -func (m *GroupMutation) ResetDeletedAt() { +func (m *PointsMutation) ResetDeletedAt() { m.deleted_at = nil - delete(m.clearedFields, group.FieldDeletedAt) + delete(m.clearedFields, points.FieldDeletedAt) } -// SetName sets the "name" field. 
-func (m *GroupMutation) SetName(s string) { - m.name = &s +// SetPoints sets the "points" field. +func (m *PointsMutation) SetPoints(i int) { + m.points = &i + m.addpoints = nil } - -// Name returns the value of the "name" field in the mutation. -func (m *GroupMutation) Name() (r string, exists bool) { - v := m.name + +// Points returns the value of the "points" field in the mutation. +func (m *PointsMutation) Points() (r int, exists bool) { + v := m.points if v == nil { return } return *v, true } -// OldName returns the old "name" field's value of the Group entity. -// If the Group object wasn't provided to the builder, the object is fetched from the database. +// OldPoints returns the old "points" field's value of the Points entity. +// If the Points object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *GroupMutation) OldName(ctx context.Context) (v string, err error) { +func (m *PointsMutation) OldPoints(ctx context.Context) (v int, err error) { if !m.op.Is(OpUpdateOne) { - return v, errors.New("OldName is only allowed on UpdateOne operations") + return v, errors.New("OldPoints is only allowed on UpdateOne operations") } if m.id == nil || m.oldValue == nil { - return v, errors.New("OldName requires an ID field in the mutation") + return v, errors.New("OldPoints requires an ID field in the mutation") } oldValue, err := m.oldValue(ctx) if err != nil { - return v, fmt.Errorf("querying old value for OldName: %w", err) + return v, fmt.Errorf("querying old value for OldPoints: %w", err) } - return oldValue.Name, nil + return oldValue.Points, nil } -// ResetName resets all changes to the "name" field. -func (m *GroupMutation) ResetName() { - m.name = nil +// AddPoints adds i to the "points" field. +func (m *PointsMutation) AddPoints(i int) { + if m.addpoints != nil { + *m.addpoints += i + } else { + m.addpoints = &i + } +} + +// AddedPoints returns the value that was added to the "points" field in this mutation. +func (m *PointsMutation) AddedPoints() (r int, exists bool) { + v := m.addpoints + if v == nil { + return + } + return *v, true +} + +// ResetPoints resets all changes to the "points" field. +func (m *PointsMutation) ResetPoints() { + m.points = nil + m.addpoints = nil } // SetDescription sets the "description" field. -func (m *GroupMutation) SetDescription(s string) { +func (m *PointsMutation) SetDescription(s string) { m.description = &s } // Description returns the value of the "description" field in the mutation. -func (m *GroupMutation) Description() (r string, exists bool) { +func (m *PointsMutation) Description() (r string, exists bool) { v := m.description if v == nil { return @@ -927,10 +2194,10 @@ func (m *GroupMutation) Description() (r string, exists bool) { return *v, true } -// OldDescription returns the old "description" field's value of the Group entity. -// If the Group object wasn't provided to the builder, the object is fetched from the database. +// OldDescription returns the old "description" field's value of the Points entity. +// If the Points object wasn't provided to the builder, the object is fetched from the database. // An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
-func (m *GroupMutation) OldDescription(ctx context.Context) (v string, err error) { +func (m *PointsMutation) OldDescription(ctx context.Context) (v string, err error) { if !m.op.Is(OpUpdateOne) { return v, errors.New("OldDescription is only allowed on UpdateOne operations") } @@ -945,86 +2212,71 @@ func (m *GroupMutation) OldDescription(ctx context.Context) (v string, err error } // ClearDescription clears the value of the "description" field. -func (m *GroupMutation) ClearDescription() { +func (m *PointsMutation) ClearDescription() { m.description = nil - m.clearedFields[group.FieldDescription] = struct{}{} + m.clearedFields[points.FieldDescription] = struct{}{} } // DescriptionCleared returns if the "description" field was cleared in this mutation. -func (m *GroupMutation) DescriptionCleared() bool { - _, ok := m.clearedFields[group.FieldDescription] +func (m *PointsMutation) DescriptionCleared() bool { + _, ok := m.clearedFields[points.FieldDescription] return ok } // ResetDescription resets all changes to the "description" field. -func (m *GroupMutation) ResetDescription() { +func (m *PointsMutation) ResetDescription() { m.description = nil - delete(m.clearedFields, group.FieldDescription) -} - -// AddScopeSetIDs adds the "scope_sets" edge to the ScopeSet entity by ids. -func (m *GroupMutation) AddScopeSetIDs(ids ...int) { - if m.scope_sets == nil { - m.scope_sets = make(map[int]struct{}) - } - for i := range ids { - m.scope_sets[ids[i]] = struct{}{} - } + delete(m.clearedFields, points.FieldDescription) } -// ClearScopeSets clears the "scope_sets" edge to the ScopeSet entity. -func (m *GroupMutation) ClearScopeSets() { - m.clearedscope_sets = true +// SetUserID sets the "user" edge to the User entity by id. +func (m *PointsMutation) SetUserID(id int) { + m.user = &id } -// ScopeSetsCleared reports if the "scope_sets" edge to the ScopeSet entity was cleared. -func (m *GroupMutation) ScopeSetsCleared() bool { - return m.clearedscope_sets +// ClearUser clears the "user" edge to the User entity. +func (m *PointsMutation) ClearUser() { + m.cleareduser = true } -// RemoveScopeSetIDs removes the "scope_sets" edge to the ScopeSet entity by IDs. -func (m *GroupMutation) RemoveScopeSetIDs(ids ...int) { - if m.removedscope_sets == nil { - m.removedscope_sets = make(map[int]struct{}) - } - for i := range ids { - delete(m.scope_sets, ids[i]) - m.removedscope_sets[ids[i]] = struct{}{} - } +// UserCleared reports if the "user" edge to the User entity was cleared. +func (m *PointsMutation) UserCleared() bool { + return m.cleareduser } -// RemovedScopeSets returns the removed IDs of the "scope_sets" edge to the ScopeSet entity. -func (m *GroupMutation) RemovedScopeSetsIDs() (ids []int) { - for id := range m.removedscope_sets { - ids = append(ids, id) +// UserID returns the "user" edge ID in the mutation. +func (m *PointsMutation) UserID() (id int, exists bool) { + if m.user != nil { + return *m.user, true } return } -// ScopeSetsIDs returns the "scope_sets" edge IDs in the mutation. -func (m *GroupMutation) ScopeSetsIDs() (ids []int) { - for id := range m.scope_sets { - ids = append(ids, id) +// UserIDs returns the "user" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// UserID instead. It exists only for internal usage by the builders. +func (m *PointsMutation) UserIDs() (ids []int) { + if id := m.user; id != nil { + ids = append(ids, *id) } return } -// ResetScopeSets resets all changes to the "scope_sets" edge. 
-func (m *GroupMutation) ResetScopeSets() { - m.scope_sets = nil - m.clearedscope_sets = false - m.removedscope_sets = nil +// ResetUser resets all changes to the "user" edge. +func (m *PointsMutation) ResetUser() { + m.user = nil + m.cleareduser = false } -// Where appends a list predicates to the GroupMutation builder. -func (m *GroupMutation) Where(ps ...predicate.Group) { +// Where appends a list predicates to the PointsMutation builder. +func (m *PointsMutation) Where(ps ...predicate.Points) { m.predicates = append(m.predicates, ps...) } -// WhereP appends storage-level predicates to the GroupMutation builder. Using this method, +// WhereP appends storage-level predicates to the PointsMutation builder. Using this method, // users can use type-assertion to append predicates that do not depend on any generated package. -func (m *GroupMutation) WhereP(ps ...func(*sql.Selector)) { - p := make([]predicate.Group, len(ps)) +func (m *PointsMutation) WhereP(ps ...func(*sql.Selector)) { + p := make([]predicate.Points, len(ps)) for i := range ps { p[i] = ps[i] } @@ -1032,39 +2284,39 @@ func (m *GroupMutation) WhereP(ps ...func(*sql.Selector)) { } // Op returns the operation name. -func (m *GroupMutation) Op() Op { +func (m *PointsMutation) Op() Op { return m.op } // SetOp allows setting the mutation operation. -func (m *GroupMutation) SetOp(op Op) { +func (m *PointsMutation) SetOp(op Op) { m.op = op } -// Type returns the node type of this mutation (Group). -func (m *GroupMutation) Type() string { +// Type returns the node type of this mutation (Points). +func (m *PointsMutation) Type() string { return m.typ } // Fields returns all fields that were changed during this mutation. Note that in // order to get all numeric fields that were incremented/decremented, call // AddedFields(). -func (m *GroupMutation) Fields() []string { +func (m *PointsMutation) Fields() []string { fields := make([]string, 0, 5) if m.created_at != nil { - fields = append(fields, group.FieldCreatedAt) + fields = append(fields, points.FieldCreatedAt) } if m.updated_at != nil { - fields = append(fields, group.FieldUpdatedAt) + fields = append(fields, points.FieldUpdatedAt) } if m.deleted_at != nil { - fields = append(fields, group.FieldDeletedAt) + fields = append(fields, points.FieldDeletedAt) } - if m.name != nil { - fields = append(fields, group.FieldName) + if m.points != nil { + fields = append(fields, points.FieldPoints) } if m.description != nil { - fields = append(fields, group.FieldDescription) + fields = append(fields, points.FieldDescription) } return fields } @@ -1072,17 +2324,17 @@ func (m *GroupMutation) Fields() []string { // Field returns the value of a field with the given name. The second boolean // return value indicates that this field was not set, or was not defined in the // schema. -func (m *GroupMutation) Field(name string) (ent.Value, bool) { +func (m *PointsMutation) Field(name string) (ent.Value, bool) { switch name { - case group.FieldCreatedAt: + case points.FieldCreatedAt: return m.CreatedAt() - case group.FieldUpdatedAt: + case points.FieldUpdatedAt: return m.UpdatedAt() - case group.FieldDeletedAt: + case points.FieldDeletedAt: return m.DeletedAt() - case group.FieldName: - return m.Name() - case group.FieldDescription: + case points.FieldPoints: + return m.Points() + case points.FieldDescription: return m.Description() } return nil, false @@ -1091,56 +2343,56 @@ func (m *GroupMutation) Field(name string) (ent.Value, bool) { // OldField returns the old value of the field from the database. 
An error is // returned if the mutation operation is not UpdateOne, or the query to the // database failed. -func (m *GroupMutation) OldField(ctx context.Context, name string) (ent.Value, error) { +func (m *PointsMutation) OldField(ctx context.Context, name string) (ent.Value, error) { switch name { - case group.FieldCreatedAt: + case points.FieldCreatedAt: return m.OldCreatedAt(ctx) - case group.FieldUpdatedAt: + case points.FieldUpdatedAt: return m.OldUpdatedAt(ctx) - case group.FieldDeletedAt: + case points.FieldDeletedAt: return m.OldDeletedAt(ctx) - case group.FieldName: - return m.OldName(ctx) - case group.FieldDescription: + case points.FieldPoints: + return m.OldPoints(ctx) + case points.FieldDescription: return m.OldDescription(ctx) } - return nil, fmt.Errorf("unknown Group field %s", name) + return nil, fmt.Errorf("unknown Points field %s", name) } // SetField sets the value of a field with the given name. It returns an error if // the field is not defined in the schema, or if the type mismatched the field // type. -func (m *GroupMutation) SetField(name string, value ent.Value) error { +func (m *PointsMutation) SetField(name string, value ent.Value) error { switch name { - case group.FieldCreatedAt: + case points.FieldCreatedAt: v, ok := value.(time.Time) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetCreatedAt(v) return nil - case group.FieldUpdatedAt: + case points.FieldUpdatedAt: v, ok := value.(time.Time) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetUpdatedAt(v) return nil - case group.FieldDeletedAt: + case points.FieldDeletedAt: v, ok := value.(time.Time) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } m.SetDeletedAt(v) return nil - case group.FieldName: - v, ok := value.(string) + case points.FieldPoints: + v, ok := value.(int) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } - m.SetName(v) + m.SetPoints(v) return nil - case group.FieldDescription: + case points.FieldDescription: v, ok := value.(string) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) @@ -1148,170 +2400,175 @@ func (m *GroupMutation) SetField(name string, value ent.Value) error { m.SetDescription(v) return nil } - return fmt.Errorf("unknown Group field %s", name) + return fmt.Errorf("unknown Points field %s", name) } // AddedFields returns all numeric fields that were incremented/decremented during // this mutation. -func (m *GroupMutation) AddedFields() []string { - return nil +func (m *PointsMutation) AddedFields() []string { + var fields []string + if m.addpoints != nil { + fields = append(fields, points.FieldPoints) + } + return fields } // AddedField returns the numeric value that was incremented/decremented on a field // with the given name. The second boolean return value indicates that this field // was not set, or was not defined in the schema. -func (m *GroupMutation) AddedField(name string) (ent.Value, bool) { +func (m *PointsMutation) AddedField(name string) (ent.Value, bool) { + switch name { + case points.FieldPoints: + return m.AddedPoints() + } return nil, false } // AddField adds the value to the field with the given name. It returns an error if // the field is not defined in the schema, or if the type mismatched the field // type. 
-func (m *GroupMutation) AddField(name string, value ent.Value) error { +func (m *PointsMutation) AddField(name string, value ent.Value) error { switch name { + case points.FieldPoints: + v, ok := value.(int) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.AddPoints(v) + return nil } - return fmt.Errorf("unknown Group numeric field %s", name) + return fmt.Errorf("unknown Points numeric field %s", name) } // ClearedFields returns all nullable fields that were cleared during this // mutation. -func (m *GroupMutation) ClearedFields() []string { +func (m *PointsMutation) ClearedFields() []string { var fields []string - if m.FieldCleared(group.FieldDeletedAt) { - fields = append(fields, group.FieldDeletedAt) + if m.FieldCleared(points.FieldDeletedAt) { + fields = append(fields, points.FieldDeletedAt) } - if m.FieldCleared(group.FieldDescription) { - fields = append(fields, group.FieldDescription) + if m.FieldCleared(points.FieldDescription) { + fields = append(fields, points.FieldDescription) } return fields } // FieldCleared returns a boolean indicating if a field with the given name was // cleared in this mutation. -func (m *GroupMutation) FieldCleared(name string) bool { +func (m *PointsMutation) FieldCleared(name string) bool { _, ok := m.clearedFields[name] return ok } // ClearField clears the value of the field with the given name. It returns an // error if the field is not defined in the schema. -func (m *GroupMutation) ClearField(name string) error { +func (m *PointsMutation) ClearField(name string) error { switch name { - case group.FieldDeletedAt: + case points.FieldDeletedAt: m.ClearDeletedAt() return nil - case group.FieldDescription: + case points.FieldDescription: m.ClearDescription() return nil } - return fmt.Errorf("unknown Group nullable field %s", name) + return fmt.Errorf("unknown Points nullable field %s", name) } // ResetField resets all changes in the mutation for the field with the given name. // It returns an error if the field is not defined in the schema. -func (m *GroupMutation) ResetField(name string) error { +func (m *PointsMutation) ResetField(name string) error { switch name { - case group.FieldCreatedAt: + case points.FieldCreatedAt: m.ResetCreatedAt() return nil - case group.FieldUpdatedAt: + case points.FieldUpdatedAt: m.ResetUpdatedAt() return nil - case group.FieldDeletedAt: + case points.FieldDeletedAt: m.ResetDeletedAt() return nil - case group.FieldName: - m.ResetName() + case points.FieldPoints: + m.ResetPoints() return nil - case group.FieldDescription: + case points.FieldDescription: m.ResetDescription() return nil } - return fmt.Errorf("unknown Group field %s", name) + return fmt.Errorf("unknown Points field %s", name) } // AddedEdges returns all edge names that were set/added in this mutation. -func (m *GroupMutation) AddedEdges() []string { +func (m *PointsMutation) AddedEdges() []string { edges := make([]string, 0, 1) - if m.scope_sets != nil { - edges = append(edges, group.EdgeScopeSets) + if m.user != nil { + edges = append(edges, points.EdgeUser) } return edges } // AddedIDs returns all IDs (to other nodes) that were added for the given edge // name in this mutation. 
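The AddField wiring above pairs with the SetPoints/AddPoints accessors earlier in this file: setting the field discards any pending increment (addpoints is reset to nil), while AddPoints accumulates deltas that are applied as a single numeric increment. A hedged sketch of that distinction through the typed builders, assuming the generated Points update builder exposes the usual SetPoints/AddPoints methods ent produces for an int field:

package example

import (
	"context"

	gen "github.com/database-playground/backend-v2/ent" // generated package; path assumed
)

// adjustPoints contrasts the two ways of changing the "points" column.
func adjustPoints(ctx context.Context, client *gen.Client, id int) error {
	// SetPoints overwrites the stored value; any previously staged increment
	// on the same mutation is dropped.
	if _, err := client.Points.UpdateOneID(id).SetPoints(100).Save(ctx); err != nil {
		return err
	}
	// AddPoints accumulates: chaining AddPoints(5) twice stages a +10 delta
	// that is applied as one numeric increment at the storage layer.
	if _, err := client.Points.UpdateOneID(id).AddPoints(5).AddPoints(5).Save(ctx); err != nil {
		return err
	}
	return nil
}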
-func (m *GroupMutation) AddedIDs(name string) []ent.Value { +func (m *PointsMutation) AddedIDs(name string) []ent.Value { switch name { - case group.EdgeScopeSets: - ids := make([]ent.Value, 0, len(m.scope_sets)) - for id := range m.scope_sets { - ids = append(ids, id) + case points.EdgeUser: + if id := m.user; id != nil { + return []ent.Value{*id} } - return ids } return nil } // RemovedEdges returns all edge names that were removed in this mutation. -func (m *GroupMutation) RemovedEdges() []string { +func (m *PointsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedscope_sets != nil { - edges = append(edges, group.EdgeScopeSets) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. -func (m *GroupMutation) RemovedIDs(name string) []ent.Value { - switch name { - case group.EdgeScopeSets: - ids := make([]ent.Value, 0, len(m.removedscope_sets)) - for id := range m.removedscope_sets { - ids = append(ids, id) - } - return ids - } +func (m *PointsMutation) RemovedIDs(name string) []ent.Value { return nil } // ClearedEdges returns all edge names that were cleared in this mutation. -func (m *GroupMutation) ClearedEdges() []string { +func (m *PointsMutation) ClearedEdges() []string { edges := make([]string, 0, 1) - if m.clearedscope_sets { - edges = append(edges, group.EdgeScopeSets) + if m.cleareduser { + edges = append(edges, points.EdgeUser) } return edges } // EdgeCleared returns a boolean which indicates if the edge with the given name // was cleared in this mutation. -func (m *GroupMutation) EdgeCleared(name string) bool { +func (m *PointsMutation) EdgeCleared(name string) bool { switch name { - case group.EdgeScopeSets: - return m.clearedscope_sets + case points.EdgeUser: + return m.cleareduser } return false } // ClearEdge clears the value of the edge with the given name. It returns an error // if that edge is not defined in the schema. -func (m *GroupMutation) ClearEdge(name string) error { +func (m *PointsMutation) ClearEdge(name string) error { switch name { + case points.EdgeUser: + m.ClearUser() + return nil } - return fmt.Errorf("unknown Group unique edge %s", name) + return fmt.Errorf("unknown Points unique edge %s", name) } // ResetEdge resets all changes to the edge with the given name in this mutation. // It returns an error if the edge is not defined in the schema. -func (m *GroupMutation) ResetEdge(name string) error { +func (m *PointsMutation) ResetEdge(name string) error { switch name { - case group.EdgeScopeSets: - m.ResetScopeSets() + case points.EdgeUser: + m.ResetUser() return nil } - return fmt.Errorf("unknown Group edge %s", name) + return fmt.Errorf("unknown Points edge %s", name) } // QuestionMutation represents an operation that mutates the Question nodes in the graph. @@ -2503,6 +3760,12 @@ type UserMutation struct { clearedFields map[string]struct{} group *int clearedgroup bool + points map[int]struct{} + removedpoints map[int]struct{} + clearedpoints bool + events map[int]struct{} + removedevents map[int]struct{} + clearedevents bool done bool oldValue func(context.Context) (*User, error) predicates []predicate.User @@ -2887,6 +4150,114 @@ func (m *UserMutation) ResetGroup() { m.clearedgroup = false } +// AddPointIDs adds the "points" edge to the Points entity by ids. 
+func (m *UserMutation) AddPointIDs(ids ...int) { + if m.points == nil { + m.points = make(map[int]struct{}) + } + for i := range ids { + m.points[ids[i]] = struct{}{} + } +} + +// ClearPoints clears the "points" edge to the Points entity. +func (m *UserMutation) ClearPoints() { + m.clearedpoints = true +} + +// PointsCleared reports if the "points" edge to the Points entity was cleared. +func (m *UserMutation) PointsCleared() bool { + return m.clearedpoints +} + +// RemovePointIDs removes the "points" edge to the Points entity by IDs. +func (m *UserMutation) RemovePointIDs(ids ...int) { + if m.removedpoints == nil { + m.removedpoints = make(map[int]struct{}) + } + for i := range ids { + delete(m.points, ids[i]) + m.removedpoints[ids[i]] = struct{}{} + } +} + +// RemovedPoints returns the removed IDs of the "points" edge to the Points entity. +func (m *UserMutation) RemovedPointsIDs() (ids []int) { + for id := range m.removedpoints { + ids = append(ids, id) + } + return +} + +// PointsIDs returns the "points" edge IDs in the mutation. +func (m *UserMutation) PointsIDs() (ids []int) { + for id := range m.points { + ids = append(ids, id) + } + return +} + +// ResetPoints resets all changes to the "points" edge. +func (m *UserMutation) ResetPoints() { + m.points = nil + m.clearedpoints = false + m.removedpoints = nil +} + +// AddEventIDs adds the "events" edge to the Events entity by ids. +func (m *UserMutation) AddEventIDs(ids ...int) { + if m.events == nil { + m.events = make(map[int]struct{}) + } + for i := range ids { + m.events[ids[i]] = struct{}{} + } +} + +// ClearEvents clears the "events" edge to the Events entity. +func (m *UserMutation) ClearEvents() { + m.clearedevents = true +} + +// EventsCleared reports if the "events" edge to the Events entity was cleared. +func (m *UserMutation) EventsCleared() bool { + return m.clearedevents +} + +// RemoveEventIDs removes the "events" edge to the Events entity by IDs. +func (m *UserMutation) RemoveEventIDs(ids ...int) { + if m.removedevents == nil { + m.removedevents = make(map[int]struct{}) + } + for i := range ids { + delete(m.events, ids[i]) + m.removedevents[ids[i]] = struct{}{} + } +} + +// RemovedEvents returns the removed IDs of the "events" edge to the Events entity. +func (m *UserMutation) RemovedEventsIDs() (ids []int) { + for id := range m.removedevents { + ids = append(ids, id) + } + return +} + +// EventsIDs returns the "events" edge IDs in the mutation. +func (m *UserMutation) EventsIDs() (ids []int) { + for id := range m.events { + ids = append(ids, id) + } + return +} + +// ResetEvents resets all changes to the "events" edge. +func (m *UserMutation) ResetEvents() { + m.events = nil + m.clearedevents = false + m.removedevents = nil +} + // Where appends a list predicates to the UserMutation builder. func (m *UserMutation) Where(ps ...predicate.User) { m.predicates = append(m.predicates, ps...) @@ -3120,10 +4491,16 @@ func (m *UserMutation) ResetField(name string) error { // AddedEdges returns all edge names that were set/added in this mutation. 
func (m *UserMutation) AddedEdges() []string { - edges := make([]string, 0, 1) + edges := make([]string, 0, 3) if m.group != nil { edges = append(edges, user.EdgeGroup) } + if m.points != nil { + edges = append(edges, user.EdgePoints) + } + if m.events != nil { + edges = append(edges, user.EdgeEvents) + } return edges } @@ -3135,28 +4512,66 @@ func (m *UserMutation) AddedIDs(name string) []ent.Value { if id := m.group; id != nil { return []ent.Value{*id} } + case user.EdgePoints: + ids := make([]ent.Value, 0, len(m.points)) + for id := range m.points { + ids = append(ids, id) + } + return ids + case user.EdgeEvents: + ids := make([]ent.Value, 0, len(m.events)) + for id := range m.events { + ids = append(ids, id) + } + return ids } return nil } // RemovedEdges returns all edge names that were removed in this mutation. func (m *UserMutation) RemovedEdges() []string { - edges := make([]string, 0, 1) + edges := make([]string, 0, 3) + if m.removedpoints != nil { + edges = append(edges, user.EdgePoints) + } + if m.removedevents != nil { + edges = append(edges, user.EdgeEvents) + } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *UserMutation) RemovedIDs(name string) []ent.Value { + switch name { + case user.EdgePoints: + ids := make([]ent.Value, 0, len(m.removedpoints)) + for id := range m.removedpoints { + ids = append(ids, id) + } + return ids + case user.EdgeEvents: + ids := make([]ent.Value, 0, len(m.removedevents)) + for id := range m.removedevents { + ids = append(ids, id) + } + return ids + } return nil } // ClearedEdges returns all edge names that were cleared in this mutation. func (m *UserMutation) ClearedEdges() []string { - edges := make([]string, 0, 1) + edges := make([]string, 0, 3) if m.clearedgroup { edges = append(edges, user.EdgeGroup) } + if m.clearedpoints { + edges = append(edges, user.EdgePoints) + } + if m.clearedevents { + edges = append(edges, user.EdgeEvents) + } return edges } @@ -3166,6 +4581,10 @@ func (m *UserMutation) EdgeCleared(name string) bool { switch name { case user.EdgeGroup: return m.clearedgroup + case user.EdgePoints: + return m.clearedpoints + case user.EdgeEvents: + return m.clearedevents } return false } @@ -3188,6 +4607,12 @@ func (m *UserMutation) ResetEdge(name string) error { case user.EdgeGroup: m.ResetGroup() return nil + case user.EdgePoints: + m.ResetPoints() + return nil + case user.EdgeEvents: + m.ResetEvents() + return nil } return fmt.Errorf("unknown User edge %s", name) } diff --git a/ent/points.go b/ent/points.go new file mode 100644 index 0000000..afefd91 --- /dev/null +++ b/ent/points.go @@ -0,0 +1,191 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "fmt" + "strings" + "time" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/user" +) + +// Points is the model entity for the Points schema. +type Points struct { + config `json:"-"` + // ID of the ent. + ID int `json:"id,omitempty"` + // CreatedAt holds the value of the "created_at" field. + CreatedAt time.Time `json:"created_at,omitempty"` + // UpdatedAt holds the value of the "updated_at" field. + UpdatedAt time.Time `json:"updated_at,omitempty"` + // DeletedAt holds the value of the "deleted_at" field. + DeletedAt time.Time `json:"deleted_at,omitempty"` + // Points holds the value of the "points" field. 
+ Points int `json:"points,omitempty"` + // Description holds the value of the "description" field. + Description string `json:"description,omitempty"` + // Edges holds the relations/edges for other nodes in the graph. + // The values are being populated by the PointsQuery when eager-loading is set. + Edges PointsEdges `json:"edges"` + user_points *int + selectValues sql.SelectValues +} + +// PointsEdges holds the relations/edges for other nodes in the graph. +type PointsEdges struct { + // User holds the value of the user edge. + User *User `json:"user,omitempty"` + // loadedTypes holds the information for reporting if a + // type was loaded (or requested) in eager-loading or not. + loadedTypes [1]bool + // totalCount holds the count of the edges above. + totalCount [1]map[string]int +} + +// UserOrErr returns the User value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e PointsEdges) UserOrErr() (*User, error) { + if e.User != nil { + return e.User, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: user.Label} + } + return nil, &NotLoadedError{edge: "user"} +} + +// scanValues returns the types for scanning values from sql.Rows. +func (*Points) scanValues(columns []string) ([]any, error) { + values := make([]any, len(columns)) + for i := range columns { + switch columns[i] { + case points.FieldID, points.FieldPoints: + values[i] = new(sql.NullInt64) + case points.FieldDescription: + values[i] = new(sql.NullString) + case points.FieldCreatedAt, points.FieldUpdatedAt, points.FieldDeletedAt: + values[i] = new(sql.NullTime) + case points.ForeignKeys[0]: // user_points + values[i] = new(sql.NullInt64) + default: + values[i] = new(sql.UnknownType) + } + } + return values, nil +} + +// assignValues assigns the values that were returned from sql.Rows (after scanning) +// to the Points fields. 
+func (_m *Points) assignValues(columns []string, values []any) error { + if m, n := len(values), len(columns); m < n { + return fmt.Errorf("mismatch number of scan values: %d != %d", m, n) + } + for i := range columns { + switch columns[i] { + case points.FieldID: + value, ok := values[i].(*sql.NullInt64) + if !ok { + return fmt.Errorf("unexpected type %T for field id", value) + } + _m.ID = int(value.Int64) + case points.FieldCreatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field created_at", values[i]) + } else if value.Valid { + _m.CreatedAt = value.Time + } + case points.FieldUpdatedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field updated_at", values[i]) + } else if value.Valid { + _m.UpdatedAt = value.Time + } + case points.FieldDeletedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field deleted_at", values[i]) + } else if value.Valid { + _m.DeletedAt = value.Time + } + case points.FieldPoints: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for field points", values[i]) + } else if value.Valid { + _m.Points = int(value.Int64) + } + case points.FieldDescription: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field description", values[i]) + } else if value.Valid { + _m.Description = value.String + } + case points.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field user_points", value) + } else if value.Valid { + _m.user_points = new(int) + *_m.user_points = int(value.Int64) + } + default: + _m.selectValues.Set(columns[i], values[i]) + } + } + return nil +} + +// Value returns the ent.Value that was dynamically selected and assigned to the Points. +// This includes values selected through modifiers, order, etc. +func (_m *Points) Value(name string) (ent.Value, error) { + return _m.selectValues.Get(name) +} + +// QueryUser queries the "user" edge of the Points entity. +func (_m *Points) QueryUser() *UserQuery { + return NewPointsClient(_m.config).QueryUser(_m) +} + +// Update returns a builder for updating this Points. +// Note that you need to call Points.Unwrap() before calling this method if this Points +// was returned from a transaction, and the transaction was committed or rolled back. +func (_m *Points) Update() *PointsUpdateOne { + return NewPointsClient(_m.config).UpdateOne(_m) +} + +// Unwrap unwraps the Points entity that was returned from a transaction after it was closed, +// so that all future queries will be executed through the driver which created the transaction. +func (_m *Points) Unwrap() *Points { + _tx, ok := _m.config.driver.(*txDriver) + if !ok { + panic("ent: Points is not a transactional entity") + } + _m.config.driver = _tx.drv + return _m +} + +// String implements the fmt.Stringer. 
+func (_m *Points) String() string { + var builder strings.Builder + builder.WriteString("Points(") + builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID)) + builder.WriteString("created_at=") + builder.WriteString(_m.CreatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("updated_at=") + builder.WriteString(_m.UpdatedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("deleted_at=") + builder.WriteString(_m.DeletedAt.Format(time.ANSIC)) + builder.WriteString(", ") + builder.WriteString("points=") + builder.WriteString(fmt.Sprintf("%v", _m.Points)) + builder.WriteString(", ") + builder.WriteString("description=") + builder.WriteString(_m.Description) + builder.WriteByte(')') + return builder.String() +} + +// PointsSlice is a parsable slice of Points. +type PointsSlice []*Points diff --git a/ent/points/points.go b/ent/points/points.go new file mode 100644 index 0000000..8cfeb33 --- /dev/null +++ b/ent/points/points.go @@ -0,0 +1,135 @@ +// Code generated by ent, DO NOT EDIT. + +package points + +import ( + "time" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" +) + +const ( + // Label holds the string label denoting the points type in the database. + Label = "points" + // FieldID holds the string denoting the id field in the database. + FieldID = "id" + // FieldCreatedAt holds the string denoting the created_at field in the database. + FieldCreatedAt = "created_at" + // FieldUpdatedAt holds the string denoting the updated_at field in the database. + FieldUpdatedAt = "updated_at" + // FieldDeletedAt holds the string denoting the deleted_at field in the database. + FieldDeletedAt = "deleted_at" + // FieldPoints holds the string denoting the points field in the database. + FieldPoints = "points" + // FieldDescription holds the string denoting the description field in the database. + FieldDescription = "description" + // EdgeUser holds the string denoting the user edge name in mutations. + EdgeUser = "user" + // Table holds the table name of the points in the database. + Table = "points" + // UserTable is the table that holds the user relation/edge. + UserTable = "points" + // UserInverseTable is the table name for the User entity. + // It exists in this package in order to avoid circular dependency with the "user" package. + UserInverseTable = "users" + // UserColumn is the table column denoting the user relation/edge. + UserColumn = "user_points" +) + +// Columns holds all SQL columns for points fields. +var Columns = []string{ + FieldID, + FieldCreatedAt, + FieldUpdatedAt, + FieldDeletedAt, + FieldPoints, + FieldDescription, +} + +// ForeignKeys holds the SQL foreign-keys that are owned by the "points" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "user_points", +} + +// ValidColumn reports if the column name is valid (part of the table columns). +func ValidColumn(column string) bool { + for i := range Columns { + if column == Columns[i] { + return true + } + } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } + return false +} + +// Note that the variables below are initialized by the runtime +// package on the initialization of the application. Therefore, +// it should be imported in the main as follows: +// +// import _ "github.com/database-playground/backend-v2/ent/runtime" +var ( + Hooks [1]ent.Hook + Interceptors [1]ent.Interceptor + // DefaultCreatedAt holds the default value on creation for the "created_at" field. 
+ DefaultCreatedAt func() time.Time + // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. + DefaultUpdatedAt func() time.Time + // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. + UpdateDefaultUpdatedAt func() time.Time + // DefaultPoints holds the default value on creation for the "points" field. + DefaultPoints int +) + +// OrderOption defines the ordering options for the Points queries. +type OrderOption func(*sql.Selector) + +// ByID orders the results by the id field. +func ByID(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldID, opts...).ToFunc() +} + +// ByCreatedAt orders the results by the created_at field. +func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldCreatedAt, opts...).ToFunc() +} + +// ByUpdatedAt orders the results by the updated_at field. +func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc() +} + +// ByDeletedAt orders the results by the deleted_at field. +func ByDeletedAt(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldDeletedAt, opts...).ToFunc() +} + +// ByPoints orders the results by the points field. +func ByPoints(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldPoints, opts...).ToFunc() +} + +// ByDescription orders the results by the description field. +func ByDescription(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldDescription, opts...).ToFunc() +} + +// ByUserField orders the results by user field. +func ByUserField(field string, opts ...sql.OrderTermOption) OrderOption { + return func(s *sql.Selector) { + sqlgraph.OrderByNeighborTerms(s, newUserStep(), sql.OrderByField(field, opts...)) + } +} +func newUserStep() *sqlgraph.Step { + return sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(UserInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) +} diff --git a/ent/points/where.go b/ent/points/where.go new file mode 100644 index 0000000..a8dbbb5 --- /dev/null +++ b/ent/points/where.go @@ -0,0 +1,364 @@ +// Code generated by ent, DO NOT EDIT. + +package points + +import ( + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "github.com/database-playground/backend-v2/ent/predicate" +) + +// ID filters vertices based on their ID field. +func ID(id int) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldID, id)) +} + +// IDEQ applies the EQ predicate on the ID field. +func IDEQ(id int) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldID, id)) +} + +// IDNEQ applies the NEQ predicate on the ID field. +func IDNEQ(id int) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldID, id)) +} + +// IDIn applies the In predicate on the ID field. +func IDIn(ids ...int) predicate.Points { + return predicate.Points(sql.FieldIn(FieldID, ids...)) +} + +// IDNotIn applies the NotIn predicate on the ID field. +func IDNotIn(ids ...int) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldID, ids...)) +} + +// IDGT applies the GT predicate on the ID field. +func IDGT(id int) predicate.Points { + return predicate.Points(sql.FieldGT(FieldID, id)) +} + +// IDGTE applies the GTE predicate on the ID field. +func IDGTE(id int) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldID, id)) +} + +// IDLT applies the LT predicate on the ID field. 
+func IDLT(id int) predicate.Points { + return predicate.Points(sql.FieldLT(FieldID, id)) +} + +// IDLTE applies the LTE predicate on the ID field. +func IDLTE(id int) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldID, id)) +} + +// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. +func CreatedAt(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldCreatedAt, v)) +} + +// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. +func UpdatedAt(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldUpdatedAt, v)) +} + +// DeletedAt applies equality check predicate on the "deleted_at" field. It's identical to DeletedAtEQ. +func DeletedAt(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldDeletedAt, v)) +} + +// Points applies equality check predicate on the "points" field. It's identical to PointsEQ. +func Points(v int) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldPoints, v)) +} + +// Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ. +func Description(v string) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldDescription, v)) +} + +// CreatedAtEQ applies the EQ predicate on the "created_at" field. +func CreatedAtEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldCreatedAt, v)) +} + +// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. +func CreatedAtNEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldCreatedAt, v)) +} + +// CreatedAtIn applies the In predicate on the "created_at" field. +func CreatedAtIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldIn(FieldCreatedAt, vs...)) +} + +// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. +func CreatedAtNotIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldCreatedAt, vs...)) +} + +// CreatedAtGT applies the GT predicate on the "created_at" field. +func CreatedAtGT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGT(FieldCreatedAt, v)) +} + +// CreatedAtGTE applies the GTE predicate on the "created_at" field. +func CreatedAtGTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldCreatedAt, v)) +} + +// CreatedAtLT applies the LT predicate on the "created_at" field. +func CreatedAtLT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLT(FieldCreatedAt, v)) +} + +// CreatedAtLTE applies the LTE predicate on the "created_at" field. +func CreatedAtLTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldCreatedAt, v)) +} + +// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. +func UpdatedAtEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldUpdatedAt, v)) +} + +// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. +func UpdatedAtNEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldUpdatedAt, v)) +} + +// UpdatedAtIn applies the In predicate on the "updated_at" field. +func UpdatedAtIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldIn(FieldUpdatedAt, vs...)) +} + +// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. 
+func UpdatedAtNotIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldUpdatedAt, vs...)) +} + +// UpdatedAtGT applies the GT predicate on the "updated_at" field. +func UpdatedAtGT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGT(FieldUpdatedAt, v)) +} + +// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. +func UpdatedAtGTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldUpdatedAt, v)) +} + +// UpdatedAtLT applies the LT predicate on the "updated_at" field. +func UpdatedAtLT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLT(FieldUpdatedAt, v)) +} + +// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. +func UpdatedAtLTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldUpdatedAt, v)) +} + +// DeletedAtEQ applies the EQ predicate on the "deleted_at" field. +func DeletedAtEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldDeletedAt, v)) +} + +// DeletedAtNEQ applies the NEQ predicate on the "deleted_at" field. +func DeletedAtNEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldDeletedAt, v)) +} + +// DeletedAtIn applies the In predicate on the "deleted_at" field. +func DeletedAtIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldIn(FieldDeletedAt, vs...)) +} + +// DeletedAtNotIn applies the NotIn predicate on the "deleted_at" field. +func DeletedAtNotIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldDeletedAt, vs...)) +} + +// DeletedAtGT applies the GT predicate on the "deleted_at" field. +func DeletedAtGT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGT(FieldDeletedAt, v)) +} + +// DeletedAtGTE applies the GTE predicate on the "deleted_at" field. +func DeletedAtGTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldDeletedAt, v)) +} + +// DeletedAtLT applies the LT predicate on the "deleted_at" field. +func DeletedAtLT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLT(FieldDeletedAt, v)) +} + +// DeletedAtLTE applies the LTE predicate on the "deleted_at" field. +func DeletedAtLTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldDeletedAt, v)) +} + +// DeletedAtIsNil applies the IsNil predicate on the "deleted_at" field. +func DeletedAtIsNil() predicate.Points { + return predicate.Points(sql.FieldIsNull(FieldDeletedAt)) +} + +// DeletedAtNotNil applies the NotNil predicate on the "deleted_at" field. +func DeletedAtNotNil() predicate.Points { + return predicate.Points(sql.FieldNotNull(FieldDeletedAt)) +} + +// PointsEQ applies the EQ predicate on the "points" field. +func PointsEQ(v int) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldPoints, v)) +} + +// PointsNEQ applies the NEQ predicate on the "points" field. +func PointsNEQ(v int) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldPoints, v)) +} + +// PointsIn applies the In predicate on the "points" field. +func PointsIn(vs ...int) predicate.Points { + return predicate.Points(sql.FieldIn(FieldPoints, vs...)) +} + +// PointsNotIn applies the NotIn predicate on the "points" field. +func PointsNotIn(vs ...int) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldPoints, vs...)) +} + +// PointsGT applies the GT predicate on the "points" field. 
+func PointsGT(v int) predicate.Points { + return predicate.Points(sql.FieldGT(FieldPoints, v)) +} + +// PointsGTE applies the GTE predicate on the "points" field. +func PointsGTE(v int) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldPoints, v)) +} + +// PointsLT applies the LT predicate on the "points" field. +func PointsLT(v int) predicate.Points { + return predicate.Points(sql.FieldLT(FieldPoints, v)) +} + +// PointsLTE applies the LTE predicate on the "points" field. +func PointsLTE(v int) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldPoints, v)) +} + +// DescriptionEQ applies the EQ predicate on the "description" field. +func DescriptionEQ(v string) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldDescription, v)) +} + +// DescriptionNEQ applies the NEQ predicate on the "description" field. +func DescriptionNEQ(v string) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldDescription, v)) +} + +// DescriptionIn applies the In predicate on the "description" field. +func DescriptionIn(vs ...string) predicate.Points { + return predicate.Points(sql.FieldIn(FieldDescription, vs...)) +} + +// DescriptionNotIn applies the NotIn predicate on the "description" field. +func DescriptionNotIn(vs ...string) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldDescription, vs...)) +} + +// DescriptionGT applies the GT predicate on the "description" field. +func DescriptionGT(v string) predicate.Points { + return predicate.Points(sql.FieldGT(FieldDescription, v)) +} + +// DescriptionGTE applies the GTE predicate on the "description" field. +func DescriptionGTE(v string) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldDescription, v)) +} + +// DescriptionLT applies the LT predicate on the "description" field. +func DescriptionLT(v string) predicate.Points { + return predicate.Points(sql.FieldLT(FieldDescription, v)) +} + +// DescriptionLTE applies the LTE predicate on the "description" field. +func DescriptionLTE(v string) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldDescription, v)) +} + +// DescriptionContains applies the Contains predicate on the "description" field. +func DescriptionContains(v string) predicate.Points { + return predicate.Points(sql.FieldContains(FieldDescription, v)) +} + +// DescriptionHasPrefix applies the HasPrefix predicate on the "description" field. +func DescriptionHasPrefix(v string) predicate.Points { + return predicate.Points(sql.FieldHasPrefix(FieldDescription, v)) +} + +// DescriptionHasSuffix applies the HasSuffix predicate on the "description" field. +func DescriptionHasSuffix(v string) predicate.Points { + return predicate.Points(sql.FieldHasSuffix(FieldDescription, v)) +} + +// DescriptionIsNil applies the IsNil predicate on the "description" field. +func DescriptionIsNil() predicate.Points { + return predicate.Points(sql.FieldIsNull(FieldDescription)) +} + +// DescriptionNotNil applies the NotNil predicate on the "description" field. +func DescriptionNotNil() predicate.Points { + return predicate.Points(sql.FieldNotNull(FieldDescription)) +} + +// DescriptionEqualFold applies the EqualFold predicate on the "description" field. +func DescriptionEqualFold(v string) predicate.Points { + return predicate.Points(sql.FieldEqualFold(FieldDescription, v)) +} + +// DescriptionContainsFold applies the ContainsFold predicate on the "description" field. 
+func DescriptionContainsFold(v string) predicate.Points { + return predicate.Points(sql.FieldContainsFold(FieldDescription, v)) +} + +// HasUser applies the HasEdge predicate on the "user" edge. +func HasUser() predicate.Points { + return predicate.Points(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasUserWith applies the HasEdge predicate on the "user" edge with a given conditions (other predicates). +func HasUserWith(preds ...predicate.User) predicate.Points { + return predicate.Points(func(s *sql.Selector) { + step := newUserStep() + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// And groups predicates with the AND operator between them. +func And(predicates ...predicate.Points) predicate.Points { + return predicate.Points(sql.AndPredicates(predicates...)) +} + +// Or groups predicates with the OR operator between them. +func Or(predicates ...predicate.Points) predicate.Points { + return predicate.Points(sql.OrPredicates(predicates...)) +} + +// Not applies the not operator on the given predicate. +func Not(p predicate.Points) predicate.Points { + return predicate.Points(sql.NotPredicates(p)) +} diff --git a/ent/points_create.go b/ent/points_create.go new file mode 100644 index 0000000..7214936 --- /dev/null +++ b/ent/points_create.go @@ -0,0 +1,329 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/user" +) + +// PointsCreate is the builder for creating a Points entity. +type PointsCreate struct { + config + mutation *PointsMutation + hooks []Hook +} + +// SetCreatedAt sets the "created_at" field. +func (_c *PointsCreate) SetCreatedAt(v time.Time) *PointsCreate { + _c.mutation.SetCreatedAt(v) + return _c +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (_c *PointsCreate) SetNillableCreatedAt(v *time.Time) *PointsCreate { + if v != nil { + _c.SetCreatedAt(*v) + } + return _c +} + +// SetUpdatedAt sets the "updated_at" field. +func (_c *PointsCreate) SetUpdatedAt(v time.Time) *PointsCreate { + _c.mutation.SetUpdatedAt(v) + return _c +} + +// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. +func (_c *PointsCreate) SetNillableUpdatedAt(v *time.Time) *PointsCreate { + if v != nil { + _c.SetUpdatedAt(*v) + } + return _c +} + +// SetDeletedAt sets the "deleted_at" field. +func (_c *PointsCreate) SetDeletedAt(v time.Time) *PointsCreate { + _c.mutation.SetDeletedAt(v) + return _c +} + +// SetNillableDeletedAt sets the "deleted_at" field if the given value is not nil. +func (_c *PointsCreate) SetNillableDeletedAt(v *time.Time) *PointsCreate { + if v != nil { + _c.SetDeletedAt(*v) + } + return _c +} + +// SetPoints sets the "points" field. +func (_c *PointsCreate) SetPoints(v int) *PointsCreate { + _c.mutation.SetPoints(v) + return _c +} + +// SetNillablePoints sets the "points" field if the given value is not nil. +func (_c *PointsCreate) SetNillablePoints(v *int) *PointsCreate { + if v != nil { + _c.SetPoints(*v) + } + return _c +} + +// SetDescription sets the "description" field. 
+func (_c *PointsCreate) SetDescription(v string) *PointsCreate { + _c.mutation.SetDescription(v) + return _c +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (_c *PointsCreate) SetNillableDescription(v *string) *PointsCreate { + if v != nil { + _c.SetDescription(*v) + } + return _c +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (_c *PointsCreate) SetUserID(id int) *PointsCreate { + _c.mutation.SetUserID(id) + return _c +} + +// SetUser sets the "user" edge to the User entity. +func (_c *PointsCreate) SetUser(v *User) *PointsCreate { + return _c.SetUserID(v.ID) +} + +// Mutation returns the PointsMutation object of the builder. +func (_c *PointsCreate) Mutation() *PointsMutation { + return _c.mutation +} + +// Save creates the Points in the database. +func (_c *PointsCreate) Save(ctx context.Context) (*Points, error) { + if err := _c.defaults(); err != nil { + return nil, err + } + return withHooks(ctx, _c.sqlSave, _c.mutation, _c.hooks) +} + +// SaveX calls Save and panics if Save returns an error. +func (_c *PointsCreate) SaveX(ctx context.Context) *Points { + v, err := _c.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (_c *PointsCreate) Exec(ctx context.Context) error { + _, err := _c.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_c *PointsCreate) ExecX(ctx context.Context) { + if err := _c.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (_c *PointsCreate) defaults() error { + if _, ok := _c.mutation.CreatedAt(); !ok { + if points.DefaultCreatedAt == nil { + return fmt.Errorf("ent: uninitialized points.DefaultCreatedAt (forgotten import ent/runtime?)") + } + v := points.DefaultCreatedAt() + _c.mutation.SetCreatedAt(v) + } + if _, ok := _c.mutation.UpdatedAt(); !ok { + if points.DefaultUpdatedAt == nil { + return fmt.Errorf("ent: uninitialized points.DefaultUpdatedAt (forgotten import ent/runtime?)") + } + v := points.DefaultUpdatedAt() + _c.mutation.SetUpdatedAt(v) + } + if _, ok := _c.mutation.Points(); !ok { + v := points.DefaultPoints + _c.mutation.SetPoints(v) + } + return nil +} + +// check runs all checks and user-defined validators on the builder. 
+func (_c *PointsCreate) check() error { + if _, ok := _c.mutation.CreatedAt(); !ok { + return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Points.created_at"`)} + } + if _, ok := _c.mutation.UpdatedAt(); !ok { + return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Points.updated_at"`)} + } + if _, ok := _c.mutation.Points(); !ok { + return &ValidationError{Name: "points", err: errors.New(`ent: missing required field "Points.points"`)} + } + if len(_c.mutation.UserIDs()) == 0 { + return &ValidationError{Name: "user", err: errors.New(`ent: missing required edge "Points.user"`)} + } + return nil +} + +func (_c *PointsCreate) sqlSave(ctx context.Context) (*Points, error) { + if err := _c.check(); err != nil { + return nil, err + } + _node, _spec := _c.createSpec() + if err := sqlgraph.CreateNode(ctx, _c.driver, _spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + id := _spec.ID.Value.(int64) + _node.ID = int(id) + _c.mutation.id = &_node.ID + _c.mutation.done = true + return _node, nil +} + +func (_c *PointsCreate) createSpec() (*Points, *sqlgraph.CreateSpec) { + var ( + _node = &Points{config: _c.config} + _spec = sqlgraph.NewCreateSpec(points.Table, sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt)) + ) + if value, ok := _c.mutation.CreatedAt(); ok { + _spec.SetField(points.FieldCreatedAt, field.TypeTime, value) + _node.CreatedAt = value + } + if value, ok := _c.mutation.UpdatedAt(); ok { + _spec.SetField(points.FieldUpdatedAt, field.TypeTime, value) + _node.UpdatedAt = value + } + if value, ok := _c.mutation.DeletedAt(); ok { + _spec.SetField(points.FieldDeletedAt, field.TypeTime, value) + _node.DeletedAt = value + } + if value, ok := _c.mutation.Points(); ok { + _spec.SetField(points.FieldPoints, field.TypeInt, value) + _node.Points = value + } + if value, ok := _c.mutation.Description(); ok { + _spec.SetField(points.FieldDescription, field.TypeString, value) + _node.Description = value + } + if nodes := _c.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: points.UserTable, + Columns: []string{points.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _node.user_points = &nodes[0] + _spec.Edges = append(_spec.Edges, edge) + } + return _node, _spec +} + +// PointsCreateBulk is the builder for creating many Points entities in bulk. +type PointsCreateBulk struct { + config + err error + builders []*PointsCreate +} + +// Save creates the Points entities in the database. 
+func (_c *PointsCreateBulk) Save(ctx context.Context) ([]*Points, error) { + if _c.err != nil { + return nil, _c.err + } + specs := make([]*sqlgraph.CreateSpec, len(_c.builders)) + nodes := make([]*Points, len(_c.builders)) + mutators := make([]Mutator, len(_c.builders)) + for i := range _c.builders { + func(i int, root context.Context) { + builder := _c.builders[i] + builder.defaults() + var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) { + mutation, ok := m.(*PointsMutation) + if !ok { + return nil, fmt.Errorf("unexpected mutation type %T", m) + } + if err := builder.check(); err != nil { + return nil, err + } + builder.mutation = mutation + var err error + nodes[i], specs[i] = builder.createSpec() + if i < len(mutators)-1 { + _, err = mutators[i+1].Mutate(root, _c.builders[i+1].mutation) + } else { + spec := &sqlgraph.BatchCreateSpec{Nodes: specs} + // Invoke the actual operation on the latest mutation in the chain. + if err = sqlgraph.BatchCreate(ctx, _c.driver, spec); err != nil { + if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + } + } + if err != nil { + return nil, err + } + mutation.id = &nodes[i].ID + if specs[i].ID.Value != nil { + id := specs[i].ID.Value.(int64) + nodes[i].ID = int(id) + } + mutation.done = true + return nodes[i], nil + }) + for i := len(builder.hooks) - 1; i >= 0; i-- { + mut = builder.hooks[i](mut) + } + mutators[i] = mut + }(i, ctx) + } + if len(mutators) > 0 { + if _, err := mutators[0].Mutate(ctx, _c.builders[0].mutation); err != nil { + return nil, err + } + } + return nodes, nil +} + +// SaveX is like Save, but panics if an error occurs. +func (_c *PointsCreateBulk) SaveX(ctx context.Context) []*Points { + v, err := _c.Save(ctx) + if err != nil { + panic(err) + } + return v +} + +// Exec executes the query. +func (_c *PointsCreateBulk) Exec(ctx context.Context) error { + _, err := _c.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_c *PointsCreateBulk) ExecX(ctx context.Context) { + if err := _c.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/ent/points_delete.go b/ent/points_delete.go new file mode 100644 index 0000000..6577c15 --- /dev/null +++ b/ent/points_delete.go @@ -0,0 +1,88 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/predicate" +) + +// PointsDelete is the builder for deleting a Points entity. +type PointsDelete struct { + config + hooks []Hook + mutation *PointsMutation +} + +// Where appends a list predicates to the PointsDelete builder. +func (_d *PointsDelete) Where(ps ...predicate.Points) *PointsDelete { + _d.mutation.Where(ps...) + return _d +} + +// Exec executes the deletion query and returns how many vertices were deleted. +func (_d *PointsDelete) Exec(ctx context.Context) (int, error) { + return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks) +} + +// ExecX is like Exec, but panics if an error occurs. 
+func (_d *PointsDelete) ExecX(ctx context.Context) int { + n, err := _d.Exec(ctx) + if err != nil { + panic(err) + } + return n +} + +func (_d *PointsDelete) sqlExec(ctx context.Context) (int, error) { + _spec := sqlgraph.NewDeleteSpec(points.Table, sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt)) + if ps := _d.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec) + if err != nil && sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + _d.mutation.done = true + return affected, err +} + +// PointsDeleteOne is the builder for deleting a single Points entity. +type PointsDeleteOne struct { + _d *PointsDelete +} + +// Where appends a list predicates to the PointsDelete builder. +func (_d *PointsDeleteOne) Where(ps ...predicate.Points) *PointsDeleteOne { + _d._d.mutation.Where(ps...) + return _d +} + +// Exec executes the deletion query. +func (_d *PointsDeleteOne) Exec(ctx context.Context) error { + n, err := _d._d.Exec(ctx) + switch { + case err != nil: + return err + case n == 0: + return &NotFoundError{points.Label} + default: + return nil + } +} + +// ExecX is like Exec, but panics if an error occurs. +func (_d *PointsDeleteOne) ExecX(ctx context.Context) { + if err := _d.Exec(ctx); err != nil { + panic(err) + } +} diff --git a/ent/points_query.go b/ent/points_query.go new file mode 100644 index 0000000..0040ec8 --- /dev/null +++ b/ent/points_query.go @@ -0,0 +1,627 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "fmt" + "math" + + "entgo.io/ent" + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/predicate" + "github.com/database-playground/backend-v2/ent/user" +) + +// PointsQuery is the builder for querying Points entities. +type PointsQuery struct { + config + ctx *QueryContext + order []points.OrderOption + inters []Interceptor + predicates []predicate.Points + withUser *UserQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*Points) error + // intermediate query (i.e. traversal path). + sql *sql.Selector + path func(context.Context) (*sql.Selector, error) +} + +// Where adds a new predicate for the PointsQuery builder. +func (_q *PointsQuery) Where(ps ...predicate.Points) *PointsQuery { + _q.predicates = append(_q.predicates, ps...) + return _q +} + +// Limit the number of records to be returned by this query. +func (_q *PointsQuery) Limit(limit int) *PointsQuery { + _q.ctx.Limit = &limit + return _q +} + +// Offset to start from. +func (_q *PointsQuery) Offset(offset int) *PointsQuery { + _q.ctx.Offset = &offset + return _q +} + +// Unique configures the query builder to filter duplicate records on query. +// By default, unique is set to true, and can be disabled using this method. +func (_q *PointsQuery) Unique(unique bool) *PointsQuery { + _q.ctx.Unique = &unique + return _q +} + +// Order specifies how the records should be ordered. +func (_q *PointsQuery) Order(o ...points.OrderOption) *PointsQuery { + _q.order = append(_q.order, o...) + return _q +} + +// QueryUser chains the current query on the "user" edge. 
+func (_q *PointsQuery) QueryUser() *UserQuery { + query := (&UserClient{config: _q.config}).Query() + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := _q.prepareQuery(ctx); err != nil { + return nil, err + } + selector := _q.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(points.Table, points.FieldID, selector), + sqlgraph.To(user.Table, user.FieldID), + sqlgraph.Edge(sqlgraph.M2O, true, points.UserTable, points.UserColumn), + ) + fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// First returns the first Points entity from the query. +// Returns a *NotFoundError when no Points was found. +func (_q *PointsQuery) First(ctx context.Context) (*Points, error) { + nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst)) + if err != nil { + return nil, err + } + if len(nodes) == 0 { + return nil, &NotFoundError{points.Label} + } + return nodes[0], nil +} + +// FirstX is like First, but panics if an error occurs. +func (_q *PointsQuery) FirstX(ctx context.Context) *Points { + node, err := _q.First(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return node +} + +// FirstID returns the first Points ID from the query. +// Returns a *NotFoundError when no Points ID was found. +func (_q *PointsQuery) FirstID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil { + return + } + if len(ids) == 0 { + err = &NotFoundError{points.Label} + return + } + return ids[0], nil +} + +// FirstIDX is like FirstID, but panics if an error occurs. +func (_q *PointsQuery) FirstIDX(ctx context.Context) int { + id, err := _q.FirstID(ctx) + if err != nil && !IsNotFound(err) { + panic(err) + } + return id +} + +// Only returns a single Points entity found by the query, ensuring it only returns one. +// Returns a *NotSingularError when more than one Points entity is found. +// Returns a *NotFoundError when no Points entities are found. +func (_q *PointsQuery) Only(ctx context.Context) (*Points, error) { + nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly)) + if err != nil { + return nil, err + } + switch len(nodes) { + case 1: + return nodes[0], nil + case 0: + return nil, &NotFoundError{points.Label} + default: + return nil, &NotSingularError{points.Label} + } +} + +// OnlyX is like Only, but panics if an error occurs. +func (_q *PointsQuery) OnlyX(ctx context.Context) *Points { + node, err := _q.Only(ctx) + if err != nil { + panic(err) + } + return node +} + +// OnlyID is like Only, but returns the only Points ID in the query. +// Returns a *NotSingularError when more than one Points ID is found. +// Returns a *NotFoundError when no entities are found. +func (_q *PointsQuery) OnlyID(ctx context.Context) (id int, err error) { + var ids []int + if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil { + return + } + switch len(ids) { + case 1: + id = ids[0] + case 0: + err = &NotFoundError{points.Label} + default: + err = &NotSingularError{points.Label} + } + return +} + +// OnlyIDX is like OnlyID, but panics if an error occurs. +func (_q *PointsQuery) OnlyIDX(ctx context.Context) int { + id, err := _q.OnlyID(ctx) + if err != nil { + panic(err) + } + return id +} + +// All executes the query and returns a list of PointsSlice. 
+func (_q *PointsQuery) All(ctx context.Context) ([]*Points, error) { + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll) + if err := _q.prepareQuery(ctx); err != nil { + return nil, err + } + qr := querierAll[[]*Points, *PointsQuery]() + return withInterceptors[[]*Points](ctx, _q, qr, _q.inters) +} + +// AllX is like All, but panics if an error occurs. +func (_q *PointsQuery) AllX(ctx context.Context) []*Points { + nodes, err := _q.All(ctx) + if err != nil { + panic(err) + } + return nodes +} + +// IDs executes the query and returns a list of Points IDs. +func (_q *PointsQuery) IDs(ctx context.Context) (ids []int, err error) { + if _q.ctx.Unique == nil && _q.path != nil { + _q.Unique(true) + } + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs) + if err = _q.Select(points.FieldID).Scan(ctx, &ids); err != nil { + return nil, err + } + return ids, nil +} + +// IDsX is like IDs, but panics if an error occurs. +func (_q *PointsQuery) IDsX(ctx context.Context) []int { + ids, err := _q.IDs(ctx) + if err != nil { + panic(err) + } + return ids +} + +// Count returns the count of the given query. +func (_q *PointsQuery) Count(ctx context.Context) (int, error) { + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount) + if err := _q.prepareQuery(ctx); err != nil { + return 0, err + } + return withInterceptors[int](ctx, _q, querierCount[*PointsQuery](), _q.inters) +} + +// CountX is like Count, but panics if an error occurs. +func (_q *PointsQuery) CountX(ctx context.Context) int { + count, err := _q.Count(ctx) + if err != nil { + panic(err) + } + return count +} + +// Exist returns true if the query has elements in the graph. +func (_q *PointsQuery) Exist(ctx context.Context) (bool, error) { + ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist) + switch _, err := _q.FirstID(ctx); { + case IsNotFound(err): + return false, nil + case err != nil: + return false, fmt.Errorf("ent: check existence: %w", err) + default: + return true, nil + } +} + +// ExistX is like Exist, but panics if an error occurs. +func (_q *PointsQuery) ExistX(ctx context.Context) bool { + exist, err := _q.Exist(ctx) + if err != nil { + panic(err) + } + return exist +} + +// Clone returns a duplicate of the PointsQuery builder, including all associated steps. It can be +// used to prepare common query builders and use them differently after the clone is made. +func (_q *PointsQuery) Clone() *PointsQuery { + if _q == nil { + return nil + } + return &PointsQuery{ + config: _q.config, + ctx: _q.ctx.Clone(), + order: append([]points.OrderOption{}, _q.order...), + inters: append([]Interceptor{}, _q.inters...), + predicates: append([]predicate.Points{}, _q.predicates...), + withUser: _q.withUser.Clone(), + // clone intermediate query. + sql: _q.sql.Clone(), + path: _q.path, + } +} + +// WithUser tells the query-builder to eager-load the nodes that are connected to +// the "user" edge. The optional arguments are used to configure the query builder of the edge. +func (_q *PointsQuery) WithUser(opts ...func(*UserQuery)) *PointsQuery { + query := (&UserClient{config: _q.config}).Query() + for _, opt := range opts { + opt(query) + } + _q.withUser = query + return _q +} + +// GroupBy is used to group vertices by one or more fields/columns. +// It is often used with aggregate functions, like: count, max, mean, min, sum. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// Count int `json:"count,omitempty"` +// } +// +// client.Points.Query(). +// GroupBy(points.FieldCreatedAt). +// Aggregate(ent.Count()). 
+// Scan(ctx, &v) +func (_q *PointsQuery) GroupBy(field string, fields ...string) *PointsGroupBy { + _q.ctx.Fields = append([]string{field}, fields...) + grbuild := &PointsGroupBy{build: _q} + grbuild.flds = &_q.ctx.Fields + grbuild.label = points.Label + grbuild.scan = grbuild.Scan + return grbuild +} + +// Select allows the selection one or more fields/columns for the given query, +// instead of selecting all fields in the entity. +// +// Example: +// +// var v []struct { +// CreatedAt time.Time `json:"created_at,omitempty"` +// } +// +// client.Points.Query(). +// Select(points.FieldCreatedAt). +// Scan(ctx, &v) +func (_q *PointsQuery) Select(fields ...string) *PointsSelect { + _q.ctx.Fields = append(_q.ctx.Fields, fields...) + sbuild := &PointsSelect{PointsQuery: _q} + sbuild.label = points.Label + sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan + return sbuild +} + +// Aggregate returns a PointsSelect configured with the given aggregations. +func (_q *PointsQuery) Aggregate(fns ...AggregateFunc) *PointsSelect { + return _q.Select().Aggregate(fns...) +} + +func (_q *PointsQuery) prepareQuery(ctx context.Context) error { + for _, inter := range _q.inters { + if inter == nil { + return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)") + } + if trv, ok := inter.(Traverser); ok { + if err := trv.Traverse(ctx, _q); err != nil { + return err + } + } + } + for _, f := range _q.ctx.Fields { + if !points.ValidColumn(f) { + return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + } + if _q.path != nil { + prev, err := _q.path(ctx) + if err != nil { + return err + } + _q.sql = prev + } + return nil +} + +func (_q *PointsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Points, error) { + var ( + nodes = []*Points{} + withFKs = _q.withFKs + _spec = _q.querySpec() + loadedTypes = [1]bool{ + _q.withUser != nil, + } + ) + if _q.withUser != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, points.ForeignKeys...) 
+ } + _spec.ScanValues = func(columns []string) ([]any, error) { + return (*Points).scanValues(nil, columns) + } + _spec.Assign = func(columns []string, values []any) error { + node := &Points{config: _q.config} + nodes = append(nodes, node) + node.Edges.loadedTypes = loadedTypes + return node.assignValues(columns, values) + } + if len(_q.modifiers) > 0 { + _spec.Modifiers = _q.modifiers + } + for i := range hooks { + hooks[i](ctx, _spec) + } + if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil { + return nil, err + } + if len(nodes) == 0 { + return nodes, nil + } + if query := _q.withUser; query != nil { + if err := _q.loadUser(ctx, query, nodes, nil, + func(n *Points, e *User) { n.Edges.User = e }); err != nil { + return nil, err + } + } + for i := range _q.loadTotal { + if err := _q.loadTotal[i](ctx, nodes); err != nil { + return nil, err + } + } + return nodes, nil +} + +func (_q *PointsQuery) loadUser(ctx context.Context, query *UserQuery, nodes []*Points, init func(*Points), assign func(*Points, *User)) error { + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*Points) + for i := range nodes { + if nodes[i].user_points == nil { + continue + } + fk := *nodes[i].user_points + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) + } + if len(ids) == 0 { + return nil + } + query.Where(user.IDIn(ids...)) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + nodes, ok := nodeids[n.ID] + if !ok { + return fmt.Errorf(`unexpected foreign-key "user_points" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) + } + } + return nil +} + +func (_q *PointsQuery) sqlCount(ctx context.Context) (int, error) { + _spec := _q.querySpec() + if len(_q.modifiers) > 0 { + _spec.Modifiers = _q.modifiers + } + _spec.Node.Columns = _q.ctx.Fields + if len(_q.ctx.Fields) > 0 { + _spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique + } + return sqlgraph.CountNodes(ctx, _q.driver, _spec) +} + +func (_q *PointsQuery) querySpec() *sqlgraph.QuerySpec { + _spec := sqlgraph.NewQuerySpec(points.Table, points.Columns, sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt)) + _spec.From = _q.sql + if unique := _q.ctx.Unique; unique != nil { + _spec.Unique = *unique + } else if _q.path != nil { + _spec.Unique = true + } + if fields := _q.ctx.Fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, points.FieldID) + for i := range fields { + if fields[i] != points.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, fields[i]) + } + } + } + if ps := _q.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if limit := _q.ctx.Limit; limit != nil { + _spec.Limit = *limit + } + if offset := _q.ctx.Offset; offset != nil { + _spec.Offset = *offset + } + if ps := _q.order; len(ps) > 0 { + _spec.Order = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + return _spec +} + +func (_q *PointsQuery) sqlQuery(ctx context.Context) *sql.Selector { + builder := sql.Dialect(_q.driver.Dialect()) + t1 := builder.Table(points.Table) + columns := _q.ctx.Fields + if len(columns) == 0 { + columns = points.Columns + } + selector := builder.Select(t1.Columns(columns...)...).From(t1) + if _q.sql != nil { + selector = _q.sql + selector.Select(selector.Columns(columns...)...) 
+ } + if _q.ctx.Unique != nil && *_q.ctx.Unique { + selector.Distinct() + } + for _, p := range _q.predicates { + p(selector) + } + for _, p := range _q.order { + p(selector) + } + if offset := _q.ctx.Offset; offset != nil { + // limit is mandatory for offset clause. We start + // with default value, and override it below if needed. + selector.Offset(*offset).Limit(math.MaxInt32) + } + if limit := _q.ctx.Limit; limit != nil { + selector.Limit(*limit) + } + return selector +} + +// PointsGroupBy is the group-by builder for Points entities. +type PointsGroupBy struct { + selector + build *PointsQuery +} + +// Aggregate adds the given aggregation functions to the group-by query. +func (_g *PointsGroupBy) Aggregate(fns ...AggregateFunc) *PointsGroupBy { + _g.fns = append(_g.fns, fns...) + return _g +} + +// Scan applies the selector query and scans the result into the given value. +func (_g *PointsGroupBy) Scan(ctx context.Context, v any) error { + ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy) + if err := _g.build.prepareQuery(ctx); err != nil { + return err + } + return scanWithInterceptors[*PointsQuery, *PointsGroupBy](ctx, _g.build, _g, _g.build.inters, v) +} + +func (_g *PointsGroupBy) sqlScan(ctx context.Context, root *PointsQuery, v any) error { + selector := root.sqlQuery(ctx).Select() + aggregation := make([]string, 0, len(_g.fns)) + for _, fn := range _g.fns { + aggregation = append(aggregation, fn(selector)) + } + if len(selector.SelectedColumns()) == 0 { + columns := make([]string, 0, len(*_g.flds)+len(_g.fns)) + for _, f := range *_g.flds { + columns = append(columns, selector.C(f)) + } + columns = append(columns, aggregation...) + selector.Select(columns...) + } + selector.GroupBy(selector.Columns(*_g.flds...)...) + if err := selector.Err(); err != nil { + return err + } + rows := &sql.Rows{} + query, args := selector.Query() + if err := _g.build.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} + +// PointsSelect is the builder for selecting fields of Points entities. +type PointsSelect struct { + *PointsQuery + selector +} + +// Aggregate adds the given aggregation functions to the selector query. +func (_s *PointsSelect) Aggregate(fns ...AggregateFunc) *PointsSelect { + _s.fns = append(_s.fns, fns...) + return _s +} + +// Scan applies the selector query and scans the result into the given value. +func (_s *PointsSelect) Scan(ctx context.Context, v any) error { + ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect) + if err := _s.prepareQuery(ctx); err != nil { + return err + } + return scanWithInterceptors[*PointsQuery, *PointsSelect](ctx, _s.PointsQuery, _s, _s.inters, v) +} + +func (_s *PointsSelect) sqlScan(ctx context.Context, root *PointsQuery, v any) error { + selector := root.sqlQuery(ctx) + aggregation := make([]string, 0, len(_s.fns)) + for _, fn := range _s.fns { + aggregation = append(aggregation, fn(selector)) + } + switch n := len(*_s.selector.flds); { + case n == 0 && len(aggregation) > 0: + selector.Select(aggregation...) + case n != 0 && len(aggregation) > 0: + selector.AppendSelect(aggregation...) 
+ } + rows := &sql.Rows{} + query, args := selector.Query() + if err := _s.driver.Query(ctx, query, args, rows); err != nil { + return err + } + defer rows.Close() + return sql.ScanSlice(rows, v) +} diff --git a/ent/points_update.go b/ent/points_update.go new file mode 100644 index 0000000..a69855e --- /dev/null +++ b/ent/points_update.go @@ -0,0 +1,531 @@ +// Code generated by ent, DO NOT EDIT. + +package ent + +import ( + "context" + "errors" + "fmt" + "time" + + "entgo.io/ent/dialect/sql" + "entgo.io/ent/dialect/sql/sqlgraph" + "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/predicate" + "github.com/database-playground/backend-v2/ent/user" +) + +// PointsUpdate is the builder for updating Points entities. +type PointsUpdate struct { + config + hooks []Hook + mutation *PointsMutation +} + +// Where appends a list predicates to the PointsUpdate builder. +func (_u *PointsUpdate) Where(ps ...predicate.Points) *PointsUpdate { + _u.mutation.Where(ps...) + return _u +} + +// SetCreatedAt sets the "created_at" field. +func (_u *PointsUpdate) SetCreatedAt(v time.Time) *PointsUpdate { + _u.mutation.SetCreatedAt(v) + return _u +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (_u *PointsUpdate) SetNillableCreatedAt(v *time.Time) *PointsUpdate { + if v != nil { + _u.SetCreatedAt(*v) + } + return _u +} + +// SetUpdatedAt sets the "updated_at" field. +func (_u *PointsUpdate) SetUpdatedAt(v time.Time) *PointsUpdate { + _u.mutation.SetUpdatedAt(v) + return _u +} + +// SetDeletedAt sets the "deleted_at" field. +func (_u *PointsUpdate) SetDeletedAt(v time.Time) *PointsUpdate { + _u.mutation.SetDeletedAt(v) + return _u +} + +// SetNillableDeletedAt sets the "deleted_at" field if the given value is not nil. +func (_u *PointsUpdate) SetNillableDeletedAt(v *time.Time) *PointsUpdate { + if v != nil { + _u.SetDeletedAt(*v) + } + return _u +} + +// ClearDeletedAt clears the value of the "deleted_at" field. +func (_u *PointsUpdate) ClearDeletedAt() *PointsUpdate { + _u.mutation.ClearDeletedAt() + return _u +} + +// SetPoints sets the "points" field. +func (_u *PointsUpdate) SetPoints(v int) *PointsUpdate { + _u.mutation.ResetPoints() + _u.mutation.SetPoints(v) + return _u +} + +// SetNillablePoints sets the "points" field if the given value is not nil. +func (_u *PointsUpdate) SetNillablePoints(v *int) *PointsUpdate { + if v != nil { + _u.SetPoints(*v) + } + return _u +} + +// AddPoints adds value to the "points" field. +func (_u *PointsUpdate) AddPoints(v int) *PointsUpdate { + _u.mutation.AddPoints(v) + return _u +} + +// SetDescription sets the "description" field. +func (_u *PointsUpdate) SetDescription(v string) *PointsUpdate { + _u.mutation.SetDescription(v) + return _u +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (_u *PointsUpdate) SetNillableDescription(v *string) *PointsUpdate { + if v != nil { + _u.SetDescription(*v) + } + return _u +} + +// ClearDescription clears the value of the "description" field. +func (_u *PointsUpdate) ClearDescription() *PointsUpdate { + _u.mutation.ClearDescription() + return _u +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (_u *PointsUpdate) SetUserID(id int) *PointsUpdate { + _u.mutation.SetUserID(id) + return _u +} + +// SetUser sets the "user" edge to the User entity. 
+func (_u *PointsUpdate) SetUser(v *User) *PointsUpdate { + return _u.SetUserID(v.ID) +} + +// Mutation returns the PointsMutation object of the builder. +func (_u *PointsUpdate) Mutation() *PointsMutation { + return _u.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (_u *PointsUpdate) ClearUser() *PointsUpdate { + _u.mutation.ClearUser() + return _u +} + +// Save executes the query and returns the number of nodes affected by the update operation. +func (_u *PointsUpdate) Save(ctx context.Context) (int, error) { + if err := _u.defaults(); err != nil { + return 0, err + } + return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks) +} + +// SaveX is like Save, but panics if an error occurs. +func (_u *PointsUpdate) SaveX(ctx context.Context) int { + affected, err := _u.Save(ctx) + if err != nil { + panic(err) + } + return affected +} + +// Exec executes the query. +func (_u *PointsUpdate) Exec(ctx context.Context) error { + _, err := _u.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_u *PointsUpdate) ExecX(ctx context.Context) { + if err := _u.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (_u *PointsUpdate) defaults() error { + if _, ok := _u.mutation.UpdatedAt(); !ok { + if points.UpdateDefaultUpdatedAt == nil { + return fmt.Errorf("ent: uninitialized points.UpdateDefaultUpdatedAt (forgotten import ent/runtime?)") + } + v := points.UpdateDefaultUpdatedAt() + _u.mutation.SetUpdatedAt(v) + } + return nil +} + +// check runs all checks and user-defined validators on the builder. +func (_u *PointsUpdate) check() error { + if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 { + return errors.New(`ent: clearing a required unique edge "Points.user"`) + } + return nil +} + +func (_u *PointsUpdate) sqlSave(ctx context.Context) (_node int, err error) { + if err := _u.check(); err != nil { + return _node, err + } + _spec := sqlgraph.NewUpdateSpec(points.Table, points.Columns, sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt)) + if ps := _u.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := _u.mutation.CreatedAt(); ok { + _spec.SetField(points.FieldCreatedAt, field.TypeTime, value) + } + if value, ok := _u.mutation.UpdatedAt(); ok { + _spec.SetField(points.FieldUpdatedAt, field.TypeTime, value) + } + if value, ok := _u.mutation.DeletedAt(); ok { + _spec.SetField(points.FieldDeletedAt, field.TypeTime, value) + } + if _u.mutation.DeletedAtCleared() { + _spec.ClearField(points.FieldDeletedAt, field.TypeTime) + } + if value, ok := _u.mutation.Points(); ok { + _spec.SetField(points.FieldPoints, field.TypeInt, value) + } + if value, ok := _u.mutation.AddedPoints(); ok { + _spec.AddField(points.FieldPoints, field.TypeInt, value) + } + if value, ok := _u.mutation.Description(); ok { + _spec.SetField(points.FieldDescription, field.TypeString, value) + } + if _u.mutation.DescriptionCleared() { + _spec.ClearField(points.FieldDescription, field.TypeString) + } + if _u.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: points.UserTable, + Columns: []string{points.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.UserIDs(); len(nodes) > 0 { + edge 
:= &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: points.UserTable, + Columns: []string{points.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{points.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return 0, err + } + _u.mutation.done = true + return _node, nil +} + +// PointsUpdateOne is the builder for updating a single Points entity. +type PointsUpdateOne struct { + config + fields []string + hooks []Hook + mutation *PointsMutation +} + +// SetCreatedAt sets the "created_at" field. +func (_u *PointsUpdateOne) SetCreatedAt(v time.Time) *PointsUpdateOne { + _u.mutation.SetCreatedAt(v) + return _u +} + +// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. +func (_u *PointsUpdateOne) SetNillableCreatedAt(v *time.Time) *PointsUpdateOne { + if v != nil { + _u.SetCreatedAt(*v) + } + return _u +} + +// SetUpdatedAt sets the "updated_at" field. +func (_u *PointsUpdateOne) SetUpdatedAt(v time.Time) *PointsUpdateOne { + _u.mutation.SetUpdatedAt(v) + return _u +} + +// SetDeletedAt sets the "deleted_at" field. +func (_u *PointsUpdateOne) SetDeletedAt(v time.Time) *PointsUpdateOne { + _u.mutation.SetDeletedAt(v) + return _u +} + +// SetNillableDeletedAt sets the "deleted_at" field if the given value is not nil. +func (_u *PointsUpdateOne) SetNillableDeletedAt(v *time.Time) *PointsUpdateOne { + if v != nil { + _u.SetDeletedAt(*v) + } + return _u +} + +// ClearDeletedAt clears the value of the "deleted_at" field. +func (_u *PointsUpdateOne) ClearDeletedAt() *PointsUpdateOne { + _u.mutation.ClearDeletedAt() + return _u +} + +// SetPoints sets the "points" field. +func (_u *PointsUpdateOne) SetPoints(v int) *PointsUpdateOne { + _u.mutation.ResetPoints() + _u.mutation.SetPoints(v) + return _u +} + +// SetNillablePoints sets the "points" field if the given value is not nil. +func (_u *PointsUpdateOne) SetNillablePoints(v *int) *PointsUpdateOne { + if v != nil { + _u.SetPoints(*v) + } + return _u +} + +// AddPoints adds value to the "points" field. +func (_u *PointsUpdateOne) AddPoints(v int) *PointsUpdateOne { + _u.mutation.AddPoints(v) + return _u +} + +// SetDescription sets the "description" field. +func (_u *PointsUpdateOne) SetDescription(v string) *PointsUpdateOne { + _u.mutation.SetDescription(v) + return _u +} + +// SetNillableDescription sets the "description" field if the given value is not nil. +func (_u *PointsUpdateOne) SetNillableDescription(v *string) *PointsUpdateOne { + if v != nil { + _u.SetDescription(*v) + } + return _u +} + +// ClearDescription clears the value of the "description" field. +func (_u *PointsUpdateOne) ClearDescription() *PointsUpdateOne { + _u.mutation.ClearDescription() + return _u +} + +// SetUserID sets the "user" edge to the User entity by ID. +func (_u *PointsUpdateOne) SetUserID(id int) *PointsUpdateOne { + _u.mutation.SetUserID(id) + return _u +} + +// SetUser sets the "user" edge to the User entity. +func (_u *PointsUpdateOne) SetUser(v *User) *PointsUpdateOne { + return _u.SetUserID(v.ID) +} + +// Mutation returns the PointsMutation object of the builder. 
+func (_u *PointsUpdateOne) Mutation() *PointsMutation { + return _u.mutation +} + +// ClearUser clears the "user" edge to the User entity. +func (_u *PointsUpdateOne) ClearUser() *PointsUpdateOne { + _u.mutation.ClearUser() + return _u +} + +// Where appends a list predicates to the PointsUpdate builder. +func (_u *PointsUpdateOne) Where(ps ...predicate.Points) *PointsUpdateOne { + _u.mutation.Where(ps...) + return _u +} + +// Select allows selecting one or more fields (columns) of the returned entity. +// The default is selecting all fields defined in the entity schema. +func (_u *PointsUpdateOne) Select(field string, fields ...string) *PointsUpdateOne { + _u.fields = append([]string{field}, fields...) + return _u +} + +// Save executes the query and returns the updated Points entity. +func (_u *PointsUpdateOne) Save(ctx context.Context) (*Points, error) { + if err := _u.defaults(); err != nil { + return nil, err + } + return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks) +} + +// SaveX is like Save, but panics if an error occurs. +func (_u *PointsUpdateOne) SaveX(ctx context.Context) *Points { + node, err := _u.Save(ctx) + if err != nil { + panic(err) + } + return node +} + +// Exec executes the query on the entity. +func (_u *PointsUpdateOne) Exec(ctx context.Context) error { + _, err := _u.Save(ctx) + return err +} + +// ExecX is like Exec, but panics if an error occurs. +func (_u *PointsUpdateOne) ExecX(ctx context.Context) { + if err := _u.Exec(ctx); err != nil { + panic(err) + } +} + +// defaults sets the default values of the builder before save. +func (_u *PointsUpdateOne) defaults() error { + if _, ok := _u.mutation.UpdatedAt(); !ok { + if points.UpdateDefaultUpdatedAt == nil { + return fmt.Errorf("ent: uninitialized points.UpdateDefaultUpdatedAt (forgotten import ent/runtime?)") + } + v := points.UpdateDefaultUpdatedAt() + _u.mutation.SetUpdatedAt(v) + } + return nil +} + +// check runs all checks and user-defined validators on the builder. 
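A minimal usage sketch for the Points update builders above (illustrative only, not part of the generated code; it assumes an initialized *ent.Client named client, a context.Context ctx, and an existing Points row ID pointsID):

	// Add 10 points to an existing Points record and annotate the change.
	// Save applies the update and returns the refreshed *Points; a missing
	// ID surfaces as *ent.NotFoundError.
	rec, err := client.Points.
		UpdateOneID(pointsID).
		AddPoints(10).
		SetDescription("weekly challenge bonus").
		Save(ctx)
	if err != nil {
		return err
	}
	_ = rec
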
+func (_u *PointsUpdateOne) check() error { + if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 { + return errors.New(`ent: clearing a required unique edge "Points.user"`) + } + return nil +} + +func (_u *PointsUpdateOne) sqlSave(ctx context.Context) (_node *Points, err error) { + if err := _u.check(); err != nil { + return _node, err + } + _spec := sqlgraph.NewUpdateSpec(points.Table, points.Columns, sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt)) + id, ok := _u.mutation.ID() + if !ok { + return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Points.id" for update`)} + } + _spec.Node.ID.Value = id + if fields := _u.fields; len(fields) > 0 { + _spec.Node.Columns = make([]string, 0, len(fields)) + _spec.Node.Columns = append(_spec.Node.Columns, points.FieldID) + for _, f := range fields { + if !points.ValidColumn(f) { + return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)} + } + if f != points.FieldID { + _spec.Node.Columns = append(_spec.Node.Columns, f) + } + } + } + if ps := _u.mutation.predicates; len(ps) > 0 { + _spec.Predicate = func(selector *sql.Selector) { + for i := range ps { + ps[i](selector) + } + } + } + if value, ok := _u.mutation.CreatedAt(); ok { + _spec.SetField(points.FieldCreatedAt, field.TypeTime, value) + } + if value, ok := _u.mutation.UpdatedAt(); ok { + _spec.SetField(points.FieldUpdatedAt, field.TypeTime, value) + } + if value, ok := _u.mutation.DeletedAt(); ok { + _spec.SetField(points.FieldDeletedAt, field.TypeTime, value) + } + if _u.mutation.DeletedAtCleared() { + _spec.ClearField(points.FieldDeletedAt, field.TypeTime) + } + if value, ok := _u.mutation.Points(); ok { + _spec.SetField(points.FieldPoints, field.TypeInt, value) + } + if value, ok := _u.mutation.AddedPoints(); ok { + _spec.AddField(points.FieldPoints, field.TypeInt, value) + } + if value, ok := _u.mutation.Description(); ok { + _spec.SetField(points.FieldDescription, field.TypeString, value) + } + if _u.mutation.DescriptionCleared() { + _spec.ClearField(points.FieldDescription, field.TypeString) + } + if _u.mutation.UserCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: points.UserTable, + Columns: []string{points.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.UserIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.M2O, + Inverse: true, + Table: points.UserTable, + Columns: []string{points.UserColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + _node = &Points{config: _u.config} + _spec.Assign = _node.assignValues + _spec.ScanValues = _node.scanValues + if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil { + if _, ok := err.(*sqlgraph.NotFoundError); ok { + err = &NotFoundError{points.Label} + } else if sqlgraph.IsConstraintError(err) { + err = &ConstraintError{msg: err.Error(), wrap: err} + } + return nil, err + } + _u.mutation.done = true + return _node, nil +} diff --git a/ent/predicate/predicate.go b/ent/predicate/predicate.go index 08c2ad1..5d87ae2 100644 --- a/ent/predicate/predicate.go +++ b/ent/predicate/predicate.go @@ -9,9 +9,15 @@ import ( // Database is the predicate 
function for database builders. type Database func(*sql.Selector) +// Events is the predicate function for events builders. +type Events func(*sql.Selector) + // Group is the predicate function for group builders. type Group func(*sql.Selector) +// Points is the predicate function for points builders. +type Points func(*sql.Selector) + // Question is the predicate function for question builders. type Question func(*sql.Selector) diff --git a/ent/runtime/runtime.go b/ent/runtime/runtime.go index 825eb5d..77fcb53 100644 --- a/ent/runtime/runtime.go +++ b/ent/runtime/runtime.go @@ -6,7 +6,9 @@ import ( "time" "github.com/database-playground/backend-v2/ent/database" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/question" "github.com/database-playground/backend-v2/ent/schema" "github.com/database-playground/backend-v2/ent/scopeset" @@ -31,6 +33,16 @@ func init() { databaseDescRelationFigure := databaseFields[3].Descriptor() // database.RelationFigureValidator is a validator for the "relation_figure" field. It is called by the builders before save. database.RelationFigureValidator = databaseDescRelationFigure.Validators[0].(func(string) error) + eventsFields := schema.Events{}.Fields() + _ = eventsFields + // eventsDescType is the schema descriptor for type field. + eventsDescType := eventsFields[1].Descriptor() + // events.TypeValidator is a validator for the "type" field. It is called by the builders before save. + events.TypeValidator = eventsDescType.Validators[0].(func(string) error) + // eventsDescTriggeredAt is the schema descriptor for triggered_at field. + eventsDescTriggeredAt := eventsFields[2].Descriptor() + // events.DefaultTriggeredAt holds the default value on creation for the triggered_at field. + events.DefaultTriggeredAt = eventsDescTriggeredAt.Default.(func() time.Time) groupMixin := schema.Group{}.Mixin() groupMixinHooks0 := groupMixin[0].Hooks() group.Hooks[0] = groupMixinHooks0[0] @@ -54,6 +66,29 @@ func init() { groupDescName := groupFields[0].Descriptor() // group.NameValidator is a validator for the "name" field. It is called by the builders before save. group.NameValidator = groupDescName.Validators[0].(func(string) error) + pointsMixin := schema.Points{}.Mixin() + pointsMixinHooks0 := pointsMixin[0].Hooks() + points.Hooks[0] = pointsMixinHooks0[0] + pointsMixinInters0 := pointsMixin[0].Interceptors() + points.Interceptors[0] = pointsMixinInters0[0] + pointsMixinFields0 := pointsMixin[0].Fields() + _ = pointsMixinFields0 + pointsFields := schema.Points{}.Fields() + _ = pointsFields + // pointsDescCreatedAt is the schema descriptor for created_at field. + pointsDescCreatedAt := pointsMixinFields0[0].Descriptor() + // points.DefaultCreatedAt holds the default value on creation for the created_at field. + points.DefaultCreatedAt = pointsDescCreatedAt.Default.(func() time.Time) + // pointsDescUpdatedAt is the schema descriptor for updated_at field. + pointsDescUpdatedAt := pointsMixinFields0[1].Descriptor() + // points.DefaultUpdatedAt holds the default value on creation for the updated_at field. + points.DefaultUpdatedAt = pointsDescUpdatedAt.Default.(func() time.Time) + // points.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. 
+ points.UpdateDefaultUpdatedAt = pointsDescUpdatedAt.UpdateDefault.(func() time.Time) + // pointsDescPoints is the schema descriptor for points field. + pointsDescPoints := pointsFields[0].Descriptor() + // points.DefaultPoints holds the default value on creation for the points field. + points.DefaultPoints = pointsDescPoints.Default.(int) questionFields := schema.Question{}.Fields() _ = questionFields // questionDescCategory is the schema descriptor for category field. diff --git a/ent/schema/events.go b/ent/schema/events.go new file mode 100644 index 0000000..77eb50d --- /dev/null +++ b/ent/schema/events.go @@ -0,0 +1,55 @@ +package schema + +import ( + "time" + + "entgo.io/contrib/entgql" + "entgo.io/ent" + "entgo.io/ent/schema" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" + "entgo.io/ent/schema/index" +) + +// Events records the events (what users do) of a user. +type Events struct { + ent.Schema +} + +func (Events) Fields() []ent.Field { + return []ent.Field{ + field.Int("user_id"), + field.String("type"). + NotEmpty(), + field.Time("triggered_at"). + Default(time.Now), + field.JSON("payload", map[string]any{}). + Optional(), + } +} + +func (Events) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("user", User.Type).Ref("events").Field("user_id").Unique().Required(), + } +} + +func (Events) Indexes() []ent.Index { + return []ent.Index{ + index.Fields("type"), + index.Fields("type", "user_id"), + } +} + +func (Events) Annotations() []schema.Annotation { + return []schema.Annotation{ + entgql.QueryField().Directives( + ScopeDirective("user:read"), + ), + entgql.Mutations( + entgql.MutationCreate(), + entgql.MutationUpdate(), + ), + entgql.RelayConnection(), + } +} diff --git a/ent/schema/points.go b/ent/schema/points.go new file mode 100644 index 0000000..8bbddb2 --- /dev/null +++ b/ent/schema/points.go @@ -0,0 +1,48 @@ +package schema + +import ( + "entgo.io/contrib/entgql" + "entgo.io/ent" + "entgo.io/ent/schema" + "entgo.io/ent/schema/edge" + "entgo.io/ent/schema/field" +) + +// Points is the schema for the points (users' scores) resource. +type Points struct { + ent.Schema +} + +func (Points) Fields() []ent.Field { + return []ent.Field{ + field.Int("points"). + Default(0), + field.String("description"). + Optional(), + } +} + +func (Points) Edges() []ent.Edge { + return []ent.Edge{ + edge.From("user", User.Type).Ref("points").Unique().Required(), + } +} + +func (Points) Mixin() []ent.Mixin { + return []ent.Mixin{ + TimestampMixin{}, + } +} + +func (Points) Annotations() []schema.Annotation { + return []schema.Annotation{ + entgql.QueryField().Directives( + ScopeDirective("user:read"), + ), + entgql.Mutations( + entgql.MutationCreate(), + entgql.MutationUpdate(), + ), + entgql.RelayConnection(), + } +} diff --git a/ent/schema/user.go b/ent/schema/user.go index bdf1ec4..48a532b 100644 --- a/ent/schema/user.go +++ b/ent/schema/user.go @@ -30,6 +30,8 @@ func (User) Fields() []ent.Field { func (User) Edges() []ent.Edge { return []ent.Edge{ edge.To("group", Group.Type).Unique().Required(), + edge.To("points", Points.Type), + edge.To("events", Events.Type), } } diff --git a/ent/tx.go b/ent/tx.go index 4bf0373..7bc6c07 100644 --- a/ent/tx.go +++ b/ent/tx.go @@ -14,8 +14,12 @@ type Tx struct { config // Database is the client for interacting with the Database builders. Database *DatabaseClient + // Events is the client for interacting with the Events builders. + Events *EventsClient // Group is the client for interacting with the Group builders. 
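The two schemas above plug into the usual generated builders. A minimal sketch of recording an event and granting points (the function name, event type, and payload keys are illustrative assumptions; it assumes an initialized *ent.Client and an already-loaded *ent.User):

	// Assumed imports: "context", "github.com/database-playground/backend-v2/ent".
	func recordLogin(ctx context.Context, client *ent.Client, u *ent.User) error {
		// Record an activity event; triggered_at falls back to the schema default (time.Now).
		if _, err := client.Events.Create().
			SetUser(u).
			SetType("login").                                 // illustrative event type
			SetPayload(map[string]any{"ip": "203.0.113.7"}). // illustrative payload
			Save(ctx); err != nil {
			return err
		}
		// Grant points with a human-readable reason; created_at/updated_at
		// come from the TimestampMixin defaults.
		_, err := client.Points.Create().
			SetUser(u).
			SetPoints(5).
			SetDescription("first login of the day").
			Save(ctx)
		return err
	}
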
Group *GroupClient + // Points is the client for interacting with the Points builders. + Points *PointsClient // Question is the client for interacting with the Question builders. Question *QuestionClient // ScopeSet is the client for interacting with the ScopeSet builders. @@ -154,7 +158,9 @@ func (tx *Tx) Client() *Client { func (tx *Tx) init() { tx.Database = NewDatabaseClient(tx.config) + tx.Events = NewEventsClient(tx.config) tx.Group = NewGroupClient(tx.config) + tx.Points = NewPointsClient(tx.config) tx.Question = NewQuestionClient(tx.config) tx.ScopeSet = NewScopeSetClient(tx.config) tx.User = NewUserClient(tx.config) diff --git a/ent/user.go b/ent/user.go index e19ede2..b295c5e 100644 --- a/ent/user.go +++ b/ent/user.go @@ -41,11 +41,18 @@ type User struct { type UserEdges struct { // Group holds the value of the group edge. Group *Group `json:"group,omitempty"` + // Points holds the value of the points edge. + Points []*Points `json:"points,omitempty"` + // Events holds the value of the events edge. + Events []*Events `json:"events,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. - loadedTypes [1]bool + loadedTypes [3]bool // totalCount holds the count of the edges above. - totalCount [1]map[string]int + totalCount [3]map[string]int + + namedPoints map[string][]*Points + namedEvents map[string][]*Events } // GroupOrErr returns the Group value or an error if the edge @@ -59,6 +66,24 @@ func (e UserEdges) GroupOrErr() (*Group, error) { return nil, &NotLoadedError{edge: "group"} } +// PointsOrErr returns the Points value or an error if the edge +// was not loaded in eager-loading. +func (e UserEdges) PointsOrErr() ([]*Points, error) { + if e.loadedTypes[1] { + return e.Points, nil + } + return nil, &NotLoadedError{edge: "points"} +} + +// EventsOrErr returns the Events value or an error if the edge +// was not loaded in eager-loading. +func (e UserEdges) EventsOrErr() ([]*Events, error) { + if e.loadedTypes[2] { + return e.Events, nil + } + return nil, &NotLoadedError{edge: "events"} +} + // scanValues returns the types for scanning values from sql.Rows. func (*User) scanValues(columns []string) ([]any, error) { values := make([]any, len(columns)) @@ -154,6 +179,16 @@ func (_m *User) QueryGroup() *GroupQuery { return NewUserClient(_m.config).QueryGroup(_m) } +// QueryPoints queries the "points" edge of the User entity. +func (_m *User) QueryPoints() *PointsQuery { + return NewUserClient(_m.config).QueryPoints(_m) +} + +// QueryEvents queries the "events" edge of the User entity. +func (_m *User) QueryEvents() *EventsQuery { + return NewUserClient(_m.config).QueryEvents(_m) +} + // Update returns a builder for updating this User. // Note that you need to call User.Unwrap() before calling this method if this User // was returned from a transaction, and the transaction was committed or rolled back. @@ -198,5 +233,53 @@ func (_m *User) String() string { return builder.String() } +// NamedPoints returns the Points named value or an error if the edge was not +// loaded in eager-loading with this name. 
+func (_m *User) NamedPoints(name string) ([]*Points, error) { + if _m.Edges.namedPoints == nil { + return nil, &NotLoadedError{edge: name} + } + nodes, ok := _m.Edges.namedPoints[name] + if !ok { + return nil, &NotLoadedError{edge: name} + } + return nodes, nil +} + +func (_m *User) appendNamedPoints(name string, edges ...*Points) { + if _m.Edges.namedPoints == nil { + _m.Edges.namedPoints = make(map[string][]*Points) + } + if len(edges) == 0 { + _m.Edges.namedPoints[name] = []*Points{} + } else { + _m.Edges.namedPoints[name] = append(_m.Edges.namedPoints[name], edges...) + } +} + +// NamedEvents returns the Events named value or an error if the edge was not +// loaded in eager-loading with this name. +func (_m *User) NamedEvents(name string) ([]*Events, error) { + if _m.Edges.namedEvents == nil { + return nil, &NotLoadedError{edge: name} + } + nodes, ok := _m.Edges.namedEvents[name] + if !ok { + return nil, &NotLoadedError{edge: name} + } + return nodes, nil +} + +func (_m *User) appendNamedEvents(name string, edges ...*Events) { + if _m.Edges.namedEvents == nil { + _m.Edges.namedEvents = make(map[string][]*Events) + } + if len(edges) == 0 { + _m.Edges.namedEvents[name] = []*Events{} + } else { + _m.Edges.namedEvents[name] = append(_m.Edges.namedEvents[name], edges...) + } +} + // Users is a parsable slice of User. type Users []*User diff --git a/ent/user/user.go b/ent/user/user.go index 3da64b6..19988c1 100644 --- a/ent/user/user.go +++ b/ent/user/user.go @@ -29,6 +29,10 @@ const ( FieldAvatar = "avatar" // EdgeGroup holds the string denoting the group edge name in mutations. EdgeGroup = "group" + // EdgePoints holds the string denoting the points edge name in mutations. + EdgePoints = "points" + // EdgeEvents holds the string denoting the events edge name in mutations. + EdgeEvents = "events" // Table holds the table name of the user in the database. Table = "users" // GroupTable is the table that holds the group relation/edge. @@ -38,6 +42,20 @@ const ( GroupInverseTable = "groups" // GroupColumn is the table column denoting the group relation/edge. GroupColumn = "user_group" + // PointsTable is the table that holds the points relation/edge. + PointsTable = "points" + // PointsInverseTable is the table name for the Points entity. + // It exists in this package in order to avoid circular dependency with the "points" package. + PointsInverseTable = "points" + // PointsColumn is the table column denoting the points relation/edge. + PointsColumn = "user_points" + // EventsTable is the table that holds the events relation/edge. + EventsTable = "events" + // EventsInverseTable is the table name for the Events entity. + // It exists in this package in order to avoid circular dependency with the "events" package. + EventsInverseTable = "events" + // EventsColumn is the table column denoting the events relation/edge. + EventsColumn = "user_id" ) // Columns holds all SQL columns for user fields. @@ -136,6 +154,34 @@ func ByGroupField(field string, opts ...sql.OrderTermOption) OrderOption { sqlgraph.OrderByNeighborTerms(s, newGroupStep(), sql.OrderByField(field, opts...)) } } + +// ByPointsCount orders the results by points count. +func ByPointsCount(opts ...sql.OrderTermOption) OrderOption { + return func(s *sql.Selector) { + sqlgraph.OrderByNeighborsCount(s, newPointsStep(), opts...) + } +} + +// ByPoints orders the results by points terms. 
+func ByPoints(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { + return func(s *sql.Selector) { + sqlgraph.OrderByNeighborTerms(s, newPointsStep(), append([]sql.OrderTerm{term}, terms...)...) + } +} + +// ByEventsCount orders the results by events count. +func ByEventsCount(opts ...sql.OrderTermOption) OrderOption { + return func(s *sql.Selector) { + sqlgraph.OrderByNeighborsCount(s, newEventsStep(), opts...) + } +} + +// ByEvents orders the results by events terms. +func ByEvents(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { + return func(s *sql.Selector) { + sqlgraph.OrderByNeighborTerms(s, newEventsStep(), append([]sql.OrderTerm{term}, terms...)...) + } +} func newGroupStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), @@ -143,3 +189,17 @@ func newGroupStep() *sqlgraph.Step { sqlgraph.Edge(sqlgraph.M2O, false, GroupTable, GroupColumn), ) } +func newPointsStep() *sqlgraph.Step { + return sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(PointsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, PointsTable, PointsColumn), + ) +} +func newEventsStep() *sqlgraph.Step { + return sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.To(EventsInverseTable, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, EventsTable, EventsColumn), + ) +} diff --git a/ent/user/where.go b/ent/user/where.go index d50ab0c..99b8d8d 100644 --- a/ent/user/where.go +++ b/ent/user/where.go @@ -443,6 +443,52 @@ func HasGroupWith(preds ...predicate.Group) predicate.User { }) } +// HasPoints applies the HasEdge predicate on the "points" edge. +func HasPoints() predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, PointsTable, PointsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasPointsWith applies the HasEdge predicate on the "points" edge with a given conditions (other predicates). +func HasPointsWith(preds ...predicate.Points) predicate.User { + return predicate.User(func(s *sql.Selector) { + step := newPointsStep() + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + +// HasEvents applies the HasEdge predicate on the "events" edge. +func HasEvents() predicate.User { + return predicate.User(func(s *sql.Selector) { + step := sqlgraph.NewStep( + sqlgraph.From(Table, FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, EventsTable, EventsColumn), + ) + sqlgraph.HasNeighbors(s, step) + }) +} + +// HasEventsWith applies the HasEdge predicate on the "events" edge with a given conditions (other predicates). +func HasEventsWith(preds ...predicate.Events) predicate.User { + return predicate.User(func(s *sql.Selector) { + step := newEventsStep() + sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) { + for _, p := range preds { + p(s) + } + }) + }) +} + // And groups predicates with the AND operator between them. 
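The HasPoints/HasEvents predicates and the By*Count ordering helpers added above compose like the existing user predicates. A sketch of filtering and ranking users (illustrative; the event type is an assumption, and it reuses the client/ctx from the previous sketch):

	// Users with at least one "login" event, ranked by how many Points rows they have.
	// Assumed extra imports: "entgo.io/ent/dialect/sql",
	// "github.com/database-playground/backend-v2/ent/user",
	// "github.com/database-playground/backend-v2/ent/events".
	ranked, err := client.User.Query().
		Where(user.HasEventsWith(events.TypeEQ("login"))).
		Order(user.ByPointsCount(sql.OrderDesc())).
		All(ctx)
	if err != nil {
		return err
	}
	_ = ranked
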
func And(predicates ...predicate.User) predicate.User { return predicate.User(sql.AndPredicates(predicates...)) diff --git a/ent/user_create.go b/ent/user_create.go index e4e8a3c..e1ecdb5 100644 --- a/ent/user_create.go +++ b/ent/user_create.go @@ -10,7 +10,9 @@ import ( "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/user" ) @@ -100,6 +102,36 @@ func (_c *UserCreate) SetGroup(v *Group) *UserCreate { return _c.SetGroupID(v.ID) } +// AddPointIDs adds the "points" edge to the Points entity by IDs. +func (_c *UserCreate) AddPointIDs(ids ...int) *UserCreate { + _c.mutation.AddPointIDs(ids...) + return _c +} + +// AddPoints adds the "points" edges to the Points entity. +func (_c *UserCreate) AddPoints(v ...*Points) *UserCreate { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _c.AddPointIDs(ids...) +} + +// AddEventIDs adds the "events" edge to the Events entity by IDs. +func (_c *UserCreate) AddEventIDs(ids ...int) *UserCreate { + _c.mutation.AddEventIDs(ids...) + return _c +} + +// AddEvents adds the "events" edges to the Events entity. +func (_c *UserCreate) AddEvents(v ...*Events) *UserCreate { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _c.AddEventIDs(ids...) +} + // Mutation returns the UserMutation object of the builder. func (_c *UserCreate) Mutation() *UserMutation { return _c.mutation @@ -248,6 +280,38 @@ func (_c *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) { _node.user_group = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } + if nodes := _c.mutation.PointsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } + if nodes := _c.mutation.EventsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges = append(_spec.Edges, edge) + } return _node, _spec } diff --git a/ent/user_query.go b/ent/user_query.go index 76e00d2..b799e45 100644 --- a/ent/user_query.go +++ b/ent/user_query.go @@ -4,6 +4,7 @@ package ent import ( "context" + "database/sql/driver" "fmt" "math" @@ -11,7 +12,9 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/predicate" "github.com/database-playground/backend-v2/ent/user" ) @@ -19,14 +22,18 @@ import ( // UserQuery is the builder for querying User entities. 
type UserQuery struct { config - ctx *QueryContext - order []user.OrderOption - inters []Interceptor - predicates []predicate.User - withGroup *GroupQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*User) error + ctx *QueryContext + order []user.OrderOption + inters []Interceptor + predicates []predicate.User + withGroup *GroupQuery + withPoints *PointsQuery + withEvents *EventsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*User) error + withNamedPoints map[string]*PointsQuery + withNamedEvents map[string]*EventsQuery // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -85,6 +92,50 @@ func (_q *UserQuery) QueryGroup() *GroupQuery { return query } +// QueryPoints chains the current query on the "points" edge. +func (_q *UserQuery) QueryPoints() *PointsQuery { + query := (&PointsClient{config: _q.config}).Query() + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := _q.prepareQuery(ctx); err != nil { + return nil, err + } + selector := _q.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, selector), + sqlgraph.To(points.Table, points.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.PointsTable, user.PointsColumn), + ) + fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step) + return fromU, nil + } + return query +} + +// QueryEvents chains the current query on the "events" edge. +func (_q *UserQuery) QueryEvents() *EventsQuery { + query := (&EventsClient{config: _q.config}).Query() + query.path = func(ctx context.Context) (fromU *sql.Selector, err error) { + if err := _q.prepareQuery(ctx); err != nil { + return nil, err + } + selector := _q.sqlQuery(ctx) + if err := selector.Err(); err != nil { + return nil, err + } + step := sqlgraph.NewStep( + sqlgraph.From(user.Table, user.FieldID, selector), + sqlgraph.To(events.Table, events.FieldID), + sqlgraph.Edge(sqlgraph.O2M, false, user.EventsTable, user.EventsColumn), + ) + fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step) + return fromU, nil + } + return query +} + // First returns the first User entity from the query. // Returns a *NotFoundError when no User was found. func (_q *UserQuery) First(ctx context.Context) (*User, error) { @@ -278,6 +329,8 @@ func (_q *UserQuery) Clone() *UserQuery { inters: append([]Interceptor{}, _q.inters...), predicates: append([]predicate.User{}, _q.predicates...), withGroup: _q.withGroup.Clone(), + withPoints: _q.withPoints.Clone(), + withEvents: _q.withEvents.Clone(), // clone intermediate query. sql: _q.sql.Clone(), path: _q.path, @@ -295,6 +348,28 @@ func (_q *UserQuery) WithGroup(opts ...func(*GroupQuery)) *UserQuery { return _q } +// WithPoints tells the query-builder to eager-load the nodes that are connected to +// the "points" edge. The optional arguments are used to configure the query builder of the edge. +func (_q *UserQuery) WithPoints(opts ...func(*PointsQuery)) *UserQuery { + query := (&PointsClient{config: _q.config}).Query() + for _, opt := range opts { + opt(query) + } + _q.withPoints = query + return _q +} + +// WithEvents tells the query-builder to eager-load the nodes that are connected to +// the "events" edge. The optional arguments are used to configure the query builder of the edge. 
+func (_q *UserQuery) WithEvents(opts ...func(*EventsQuery)) *UserQuery { + query := (&EventsClient{config: _q.config}).Query() + for _, opt := range opts { + opt(query) + } + _q.withEvents = query + return _q +} + // GroupBy is used to group vertices by one or more fields/columns. // It is often used with aggregate functions, like: count, max, mean, min, sum. // @@ -374,8 +449,10 @@ func (_q *UserQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*User, e nodes = []*User{} withFKs = _q.withFKs _spec = _q.querySpec() - loadedTypes = [1]bool{ + loadedTypes = [3]bool{ _q.withGroup != nil, + _q.withPoints != nil, + _q.withEvents != nil, } ) if _q.withGroup != nil { @@ -411,6 +488,34 @@ func (_q *UserQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*User, e return nil, err } } + if query := _q.withPoints; query != nil { + if err := _q.loadPoints(ctx, query, nodes, + func(n *User) { n.Edges.Points = []*Points{} }, + func(n *User, e *Points) { n.Edges.Points = append(n.Edges.Points, e) }); err != nil { + return nil, err + } + } + if query := _q.withEvents; query != nil { + if err := _q.loadEvents(ctx, query, nodes, + func(n *User) { n.Edges.Events = []*Events{} }, + func(n *User, e *Events) { n.Edges.Events = append(n.Edges.Events, e) }); err != nil { + return nil, err + } + } + for name, query := range _q.withNamedPoints { + if err := _q.loadPoints(ctx, query, nodes, + func(n *User) { n.appendNamedPoints(name) }, + func(n *User, e *Points) { n.appendNamedPoints(name, e) }); err != nil { + return nil, err + } + } + for name, query := range _q.withNamedEvents { + if err := _q.loadEvents(ctx, query, nodes, + func(n *User) { n.appendNamedEvents(name) }, + func(n *User, e *Events) { n.appendNamedEvents(name, e) }); err != nil { + return nil, err + } + } for i := range _q.loadTotal { if err := _q.loadTotal[i](ctx, nodes); err != nil { return nil, err @@ -451,6 +556,67 @@ func (_q *UserQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []* } return nil } +func (_q *UserQuery) loadPoints(ctx context.Context, query *PointsQuery, nodes []*User, init func(*User), assign func(*User, *Points)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*User) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + query.withFKs = true + query.Where(predicate.Points(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(user.PointsColumn), fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.user_points + if fk == nil { + return fmt.Errorf(`foreign-key "user_points" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "user_points" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} +func (_q *UserQuery) loadEvents(ctx context.Context, query *EventsQuery, nodes []*User, init func(*User), assign func(*User, *Events)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*User) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + if init != nil { + init(nodes[i]) + } + } + if len(query.ctx.Fields) > 0 { + query.ctx.AppendFieldOnce(events.FieldUserID) + } + query.Where(predicate.Events(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(user.EventsColumn), fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := 
range neighbors { + fk := n.UserID + node, ok := nodeids[fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "user_id" returned %v for node %v`, fk, n.ID) + } + assign(node, n) + } + return nil +} func (_q *UserQuery) sqlCount(ctx context.Context) (int, error) { _spec := _q.querySpec() @@ -536,6 +702,34 @@ func (_q *UserQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } +// WithNamedPoints tells the query-builder to eager-load the nodes that are connected to the "points" +// edge with the given name. The optional arguments are used to configure the query builder of the edge. +func (_q *UserQuery) WithNamedPoints(name string, opts ...func(*PointsQuery)) *UserQuery { + query := (&PointsClient{config: _q.config}).Query() + for _, opt := range opts { + opt(query) + } + if _q.withNamedPoints == nil { + _q.withNamedPoints = make(map[string]*PointsQuery) + } + _q.withNamedPoints[name] = query + return _q +} + +// WithNamedEvents tells the query-builder to eager-load the nodes that are connected to the "events" +// edge with the given name. The optional arguments are used to configure the query builder of the edge. +func (_q *UserQuery) WithNamedEvents(name string, opts ...func(*EventsQuery)) *UserQuery { + query := (&EventsClient{config: _q.config}).Query() + for _, opt := range opts { + opt(query) + } + if _q.withNamedEvents == nil { + _q.withNamedEvents = make(map[string]*EventsQuery) + } + _q.withNamedEvents[name] = query + return _q +} + // UserGroupBy is the group-by builder for User entities. type UserGroupBy struct { selector diff --git a/ent/user_update.go b/ent/user_update.go index bb59621..3271274 100644 --- a/ent/user_update.go +++ b/ent/user_update.go @@ -11,7 +11,9 @@ import ( "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/ent/points" "github.com/database-playground/backend-v2/ent/predicate" "github.com/database-playground/backend-v2/ent/user" ) @@ -114,6 +116,36 @@ func (_u *UserUpdate) SetGroup(v *Group) *UserUpdate { return _u.SetGroupID(v.ID) } +// AddPointIDs adds the "points" edge to the Points entity by IDs. +func (_u *UserUpdate) AddPointIDs(ids ...int) *UserUpdate { + _u.mutation.AddPointIDs(ids...) + return _u +} + +// AddPoints adds the "points" edges to the Points entity. +func (_u *UserUpdate) AddPoints(v ...*Points) *UserUpdate { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.AddPointIDs(ids...) +} + +// AddEventIDs adds the "events" edge to the Events entity by IDs. +func (_u *UserUpdate) AddEventIDs(ids ...int) *UserUpdate { + _u.mutation.AddEventIDs(ids...) + return _u +} + +// AddEvents adds the "events" edges to the Events entity. +func (_u *UserUpdate) AddEvents(v ...*Events) *UserUpdate { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.AddEventIDs(ids...) +} + // Mutation returns the UserMutation object of the builder. func (_u *UserUpdate) Mutation() *UserMutation { return _u.mutation @@ -125,6 +157,48 @@ func (_u *UserUpdate) ClearGroup() *UserUpdate { return _u } +// ClearPoints clears all "points" edges to the Points entity. +func (_u *UserUpdate) ClearPoints() *UserUpdate { + _u.mutation.ClearPoints() + return _u +} + +// RemovePointIDs removes the "points" edge to Points entities by IDs. 
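For the eager-loading support added to UserQuery above, a minimal sketch (same client/ctx assumptions as before):

	// Load users together with their points and events edges; each requested
	// O2M edge costs one additional query.
	users, err := client.User.Query().
		WithPoints().
		WithEvents().
		All(ctx)
	if err != nil {
		return err
	}
	for _, u := range users {
		// These only succeed because the edges were requested above;
		// otherwise both return *ent.NotLoadedError.
		pts, _ := u.Edges.PointsOrErr()
		evs, _ := u.Edges.EventsOrErr()
		_, _ = pts, evs
	}
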
+func (_u *UserUpdate) RemovePointIDs(ids ...int) *UserUpdate { + _u.mutation.RemovePointIDs(ids...) + return _u +} + +// RemovePoints removes "points" edges to Points entities. +func (_u *UserUpdate) RemovePoints(v ...*Points) *UserUpdate { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.RemovePointIDs(ids...) +} + +// ClearEvents clears all "events" edges to the Events entity. +func (_u *UserUpdate) ClearEvents() *UserUpdate { + _u.mutation.ClearEvents() + return _u +} + +// RemoveEventIDs removes the "events" edge to Events entities by IDs. +func (_u *UserUpdate) RemoveEventIDs(ids ...int) *UserUpdate { + _u.mutation.RemoveEventIDs(ids...) + return _u +} + +// RemoveEvents removes "events" edges to Events entities. +func (_u *UserUpdate) RemoveEvents(v ...*Events) *UserUpdate { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.RemoveEventIDs(ids...) +} + // Save executes the query and returns the number of nodes affected by the update operation. func (_u *UserUpdate) Save(ctx context.Context) (int, error) { if err := _u.defaults(); err != nil { @@ -242,6 +316,96 @@ func (_u *UserUpdate) sqlSave(ctx context.Context) (_node int, err error) { } _spec.Edges.Add = append(_spec.Edges.Add, edge) } + if _u.mutation.PointsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.RemovedPointsIDs(); len(nodes) > 0 && !_u.mutation.PointsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.PointsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if _u.mutation.EventsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.RemovedEventsIDs(); len(nodes) > 0 && !_u.mutation.EventsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.EventsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: 
user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil { if _, ok := err.(*sqlgraph.NotFoundError); ok { err = &NotFoundError{user.Label} @@ -347,6 +511,36 @@ func (_u *UserUpdateOne) SetGroup(v *Group) *UserUpdateOne { return _u.SetGroupID(v.ID) } +// AddPointIDs adds the "points" edge to the Points entity by IDs. +func (_u *UserUpdateOne) AddPointIDs(ids ...int) *UserUpdateOne { + _u.mutation.AddPointIDs(ids...) + return _u +} + +// AddPoints adds the "points" edges to the Points entity. +func (_u *UserUpdateOne) AddPoints(v ...*Points) *UserUpdateOne { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.AddPointIDs(ids...) +} + +// AddEventIDs adds the "events" edge to the Events entity by IDs. +func (_u *UserUpdateOne) AddEventIDs(ids ...int) *UserUpdateOne { + _u.mutation.AddEventIDs(ids...) + return _u +} + +// AddEvents adds the "events" edges to the Events entity. +func (_u *UserUpdateOne) AddEvents(v ...*Events) *UserUpdateOne { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.AddEventIDs(ids...) +} + // Mutation returns the UserMutation object of the builder. func (_u *UserUpdateOne) Mutation() *UserMutation { return _u.mutation @@ -358,6 +552,48 @@ func (_u *UserUpdateOne) ClearGroup() *UserUpdateOne { return _u } +// ClearPoints clears all "points" edges to the Points entity. +func (_u *UserUpdateOne) ClearPoints() *UserUpdateOne { + _u.mutation.ClearPoints() + return _u +} + +// RemovePointIDs removes the "points" edge to Points entities by IDs. +func (_u *UserUpdateOne) RemovePointIDs(ids ...int) *UserUpdateOne { + _u.mutation.RemovePointIDs(ids...) + return _u +} + +// RemovePoints removes "points" edges to Points entities. +func (_u *UserUpdateOne) RemovePoints(v ...*Points) *UserUpdateOne { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.RemovePointIDs(ids...) +} + +// ClearEvents clears all "events" edges to the Events entity. +func (_u *UserUpdateOne) ClearEvents() *UserUpdateOne { + _u.mutation.ClearEvents() + return _u +} + +// RemoveEventIDs removes the "events" edge to Events entities by IDs. +func (_u *UserUpdateOne) RemoveEventIDs(ids ...int) *UserUpdateOne { + _u.mutation.RemoveEventIDs(ids...) + return _u +} + +// RemoveEvents removes "events" edges to Events entities. +func (_u *UserUpdateOne) RemoveEvents(v ...*Events) *UserUpdateOne { + ids := make([]int, len(v)) + for i := range v { + ids[i] = v[i].ID + } + return _u.RemoveEventIDs(ids...) +} + // Where appends a list predicates to the UserUpdate builder. func (_u *UserUpdateOne) Where(ps ...predicate.User) *UserUpdateOne { _u.mutation.Where(ps...) 
@@ -505,6 +741,96 @@ func (_u *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) { } _spec.Edges.Add = append(_spec.Edges.Add, edge) } + if _u.mutation.PointsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.RemovedPointsIDs(); len(nodes) > 0 && !_u.mutation.PointsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.PointsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.PointsTable, + Columns: []string{user.PointsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } + if _u.mutation.EventsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.RemovedEventsIDs(); len(nodes) > 0 && !_u.mutation.EventsCleared() { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Clear = append(_spec.Edges.Clear, edge) + } + if nodes := _u.mutation.EventsIDs(); len(nodes) > 0 { + edge := &sqlgraph.EdgeSpec{ + Rel: sqlgraph.O2M, + Inverse: false, + Table: user.EventsTable, + Columns: []string{user.EventsColumn}, + Bidi: false, + Target: &sqlgraph.EdgeTarget{ + IDSpec: sqlgraph.NewFieldSpec(events.FieldID, field.TypeInt), + }, + } + for _, k := range nodes { + edge.Target.Nodes = append(edge.Target.Nodes, k) + } + _spec.Edges.Add = append(_spec.Edges.Add, edge) + } _node = &User{config: _u.config} _spec.Assign = _node.assignValues _spec.ScanValues = _node.scanValues diff --git a/graph/ent.graphqls b/graph/ent.graphqls index a71d825..f110320 100644 --- a/graph/ent.graphqls +++ b/graph/ent.graphqls @@ -18,6 +18,16 @@ input CreateDatabaseInput { questionIDs: [ID!] } """ +CreateEventsInput is used for create Events object. +Input was generated by ent. +""" +input CreateEventsInput { + type: String! + triggeredAt: Time + payload: Map + userID: ID! +} +""" CreateGroupInput is used for create Group object. Input was generated by ent. """ @@ -27,6 +37,15 @@ input CreateGroupInput { scopeSetIDs: [ID!] } """ +CreatePointsInput is used for create Points object. +Input was generated by ent. +""" +input CreatePointsInput { + points: Int + description: String + userID: ID! 
+} +""" CreateQuestionInput is used for create Question object. Input was generated by ent. """ @@ -72,6 +91,8 @@ input CreateUserInput { email: String! avatar: String groupID: ID! + pointIDs: [ID!] + eventIDs: [ID!] } """ Define a Relay Cursor type: @@ -183,6 +204,103 @@ input DatabaseWhereInput { hasQuestions: Boolean hasQuestionsWith: [QuestionWhereInput!] } +type Events implements Node { + id: ID! + userID: ID! + type: String! + triggeredAt: Time! + payload: Map + user: User! +} +""" +A connection to a list of items. +""" +type EventsConnection { + """ + A list of edges. + """ + edges: [EventsEdge] + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} +""" +An edge in a connection. +""" +type EventsEdge { + """ + The item at the end of the edge. + """ + node: Events + """ + A cursor for use in pagination. + """ + cursor: Cursor! +} +""" +EventsWhereInput is used for filtering Events objects. +Input was generated by ent. +""" +input EventsWhereInput { + not: EventsWhereInput + and: [EventsWhereInput!] + or: [EventsWhereInput!] + """ + id field predicates + """ + id: ID + idNEQ: ID + idIn: [ID!] + idNotIn: [ID!] + idGT: ID + idGTE: ID + idLT: ID + idLTE: ID + """ + user_id field predicates + """ + userID: ID + userIDNEQ: ID + userIDIn: [ID!] + userIDNotIn: [ID!] + """ + type field predicates + """ + type: String + typeNEQ: String + typeIn: [String!] + typeNotIn: [String!] + typeGT: String + typeGTE: String + typeLT: String + typeLTE: String + typeContains: String + typeHasPrefix: String + typeHasSuffix: String + typeEqualFold: String + typeContainsFold: String + """ + triggered_at field predicates + """ + triggeredAt: Time + triggeredAtNEQ: Time + triggeredAtIn: [Time!] + triggeredAtNotIn: [Time!] + triggeredAtGT: Time + triggeredAtGTE: Time + triggeredAtLT: Time + triggeredAtLTE: Time + """ + user edge predicates + """ + hasUser: Boolean + hasUserWith: [UserWhereInput!] +} type Group implements Node { id: ID! createdAt: Time! @@ -287,6 +405,10 @@ input GroupWhereInput { hasScopeSetsWith: [ScopeSetWhereInput!] } """ +The builtin Map type +""" +scalar Map +""" An object with an ID. Follows the [Relay Global Object Identification Specification](https://relay.dev/graphql/objectidentification.htm) """ @@ -331,6 +453,134 @@ type PageInfo { """ endCursor: Cursor } +type Points implements Node { + id: ID! + createdAt: Time! + updatedAt: Time! + deletedAt: Time + points: Int! + description: String + user: User! +} +""" +A connection to a list of items. +""" +type PointsConnection { + """ + A list of edges. + """ + edges: [PointsEdge] + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} +""" +An edge in a connection. +""" +type PointsEdge { + """ + The item at the end of the edge. + """ + node: Points + """ + A cursor for use in pagination. + """ + cursor: Cursor! +} +""" +PointsWhereInput is used for filtering Points objects. +Input was generated by ent. +""" +input PointsWhereInput { + not: PointsWhereInput + and: [PointsWhereInput!] + or: [PointsWhereInput!] + """ + id field predicates + """ + id: ID + idNEQ: ID + idIn: [ID!] + idNotIn: [ID!] + idGT: ID + idGTE: ID + idLT: ID + idLTE: ID + """ + created_at field predicates + """ + createdAt: Time + createdAtNEQ: Time + createdAtIn: [Time!] + createdAtNotIn: [Time!] 
+ createdAtGT: Time + createdAtGTE: Time + createdAtLT: Time + createdAtLTE: Time + """ + updated_at field predicates + """ + updatedAt: Time + updatedAtNEQ: Time + updatedAtIn: [Time!] + updatedAtNotIn: [Time!] + updatedAtGT: Time + updatedAtGTE: Time + updatedAtLT: Time + updatedAtLTE: Time + """ + deleted_at field predicates + """ + deletedAt: Time + deletedAtNEQ: Time + deletedAtIn: [Time!] + deletedAtNotIn: [Time!] + deletedAtGT: Time + deletedAtGTE: Time + deletedAtLT: Time + deletedAtLTE: Time + deletedAtIsNil: Boolean + deletedAtNotNil: Boolean + """ + points field predicates + """ + points: Int + pointsNEQ: Int + pointsIn: [Int!] + pointsNotIn: [Int!] + pointsGT: Int + pointsGTE: Int + pointsLT: Int + pointsLTE: Int + """ + description field predicates + """ + description: String + descriptionNEQ: String + descriptionIn: [String!] + descriptionNotIn: [String!] + descriptionGT: String + descriptionGTE: String + descriptionLT: String + descriptionLTE: String + descriptionContains: String + descriptionHasPrefix: String + descriptionHasSuffix: String + descriptionIsNil: Boolean + descriptionNotNil: Boolean + descriptionEqualFold: String + descriptionContainsFold: String + """ + user edge predicates + """ + hasUser: Boolean + hasUserWith: [UserWhereInput!] +} type Query { """ Fetches an object given its ID. @@ -351,7 +601,59 @@ type Query { ids: [ID!]! ): [Node]! databases: [Database!]! @scope(scope: "database:read") + eventsSlice( + """ + Returns the elements in the list that come after the specified cursor. + """ + after: Cursor + + """ + Returns the first _n_ elements from the list. + """ + first: Int + + """ + Returns the elements in the list that come before the specified cursor. + """ + before: Cursor + + """ + Returns the last _n_ elements from the list. + """ + last: Int + + """ + Filtering options for EventsSlice returned from the connection. + """ + where: EventsWhereInput + ): EventsConnection! @scope(scope: "user:read") groups: [Group!]! @scope(scope: "group:read") + pointsSlice( + """ + Returns the elements in the list that come after the specified cursor. + """ + after: Cursor + + """ + Returns the first _n_ elements from the list. + """ + first: Int + + """ + Returns the elements in the list that come before the specified cursor. + """ + before: Cursor + + """ + Returns the last _n_ elements from the list. + """ + last: Int + + """ + Filtering options for PointsSlice returned from the connection. + """ + where: PointsWhereInput + ): PointsConnection! @scope(scope: "user:read") questions( """ Returns the elements in the list that come after the specified cursor. @@ -685,6 +987,17 @@ input UpdateDatabaseInput { clearQuestions: Boolean } """ +UpdateEventsInput is used for update Events object. +Input was generated by ent. +""" +input UpdateEventsInput { + type: String + triggeredAt: Time + payload: Map + clearPayload: Boolean + userID: ID +} +""" UpdateGroupInput is used for update Group object. Input was generated by ent. """ @@ -697,6 +1010,16 @@ input UpdateGroupInput { clearScopeSets: Boolean } """ +UpdatePointsInput is used for update Points object. +Input was generated by ent. +""" +input UpdatePointsInput { + points: Int + description: String + clearDescription: Boolean + userID: ID +} +""" UpdateQuestionInput is used for update Question object. Input was generated by ent. """ @@ -741,6 +1064,12 @@ input UpdateUserInput { avatar: String clearAvatar: Boolean groupID: ID + addPointIDs: [ID!] + removePointIDs: [ID!] + clearPoints: Boolean + addEventIDs: [ID!] 
+ removeEventIDs: [ID!] + clearEvents: Boolean } type User implements Node { id: ID! @@ -751,6 +1080,8 @@ type User implements Node { email: String! avatar: String group: Group! + points: [Points!] + events: [Events!] } """ A connection to a list of items. @@ -910,4 +1241,14 @@ input UserWhereInput { """ hasGroup: Boolean hasGroupWith: [GroupWhereInput!] + """ + points edge predicates + """ + hasPoints: Boolean + hasPointsWith: [PointsWhereInput!] + """ + events edge predicates + """ + hasEvents: Boolean + hasEventsWith: [EventsWhereInput!] } diff --git a/graph/ent.resolvers.go b/graph/ent.resolvers.go index 7d66d9a..468af16 100644 --- a/graph/ent.resolvers.go +++ b/graph/ent.resolvers.go @@ -33,6 +33,13 @@ func (r *queryResolver) Databases(ctx context.Context) ([]*ent.Database, error) return entClient.Database.Query().All(ctx) } +// EventsSlice is the resolver for the eventsSlice field. +func (r *queryResolver) EventsSlice(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.EventsWhereInput) (*ent.EventsConnection, error) { + entClient := r.EntClient(ctx) + + return entClient.Events.Query().Paginate(ctx, after, first, before, last, ent.WithEventsFilter(where.Filter)) +} + // Groups is the resolver for the groups field. func (r *queryResolver) Groups(ctx context.Context) ([]*ent.Group, error) { entClient := r.EntClient(ctx) @@ -40,6 +47,13 @@ func (r *queryResolver) Groups(ctx context.Context) ([]*ent.Group, error) { return entClient.Group.Query().All(ctx) } +// PointsSlice is the resolver for the pointsSlice field. +func (r *queryResolver) PointsSlice(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.PointsWhereInput) (*ent.PointsConnection, error) { + entClient := r.EntClient(ctx) + + return entClient.Points.Query().Paginate(ctx, after, first, before, last, ent.WithPointsFilter(where.Filter)) +} + // Questions is the resolver for the questions field. 
func (r *queryResolver) Questions(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.QuestionOrder, where *ent.QuestionWhereInput) (*ent.QuestionConnection, error) { entClient := r.EntClient(ctx) From 75ceb9672ab6b2abb332373df920cf6c4b62dbe0 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 03:09:50 +0800 Subject: [PATCH 02/14] feat: implement events --- internal/events/README.md | 3 + internal/events/constants.go | 7 + internal/events/events.go | 55 +++++ internal/events/points.go | 118 ++++++++++ internal/events/points_test.go | 416 +++++++++++++++++++++++++++++++++ 5 files changed, 599 insertions(+) create mode 100644 internal/events/README.md create mode 100644 internal/events/constants.go create mode 100644 internal/events/events.go create mode 100644 internal/events/points.go create mode 100644 internal/events/points_test.go diff --git a/internal/events/README.md b/internal/events/README.md new file mode 100644 index 0000000..676197e --- /dev/null +++ b/internal/events/README.md @@ -0,0 +1,3 @@ +# Events + +負責觸發事件和加減點數的 service。 diff --git a/internal/events/constants.go b/internal/events/constants.go new file mode 100644 index 0000000..98a9ff9 --- /dev/null +++ b/internal/events/constants.go @@ -0,0 +1,7 @@ +package events + +type EventType string + +const ( + EventTypeLogin EventType = "login" +) diff --git a/internal/events/events.go b/internal/events/events.go new file mode 100644 index 0000000..6b81c3d --- /dev/null +++ b/internal/events/events.go @@ -0,0 +1,55 @@ +package events + +import ( + "context" + "log/slog" + "time" + + "github.com/database-playground/backend-v2/ent" +) + +// EventService is the service for triggering events. +type EventService struct { + entClient *ent.Client + pointsGranter *PointsGranter +} + +// NewEventService creates a new EventService. +func NewEventService(entClient *ent.Client) *EventService { + return &EventService{ + entClient: entClient, + pointsGranter: NewPointsGranter(entClient), + } +} + +// Event is the event to be triggered. +type Event struct { + Type EventType + Payload map[string]any + UserID int +} + +// TriggerEvent triggers an event. +func (s *EventService) TriggerEvent(ctx context.Context, event Event) error { + err := s.entClient.Events.Create(). + SetType(string(event.Type)). + SetPayload(event.Payload). + SetUserID(event.UserID). + SetTriggeredAt(time.Now()). + Exec(ctx) + if err != nil { + return err + } + + if event.Type == EventTypeLogin { + ok, err := s.pointsGranter.GrantDailyLoginPoints(ctx, event.UserID) + if err != nil { + return err + } + if ok { + slog.Info("granted daily login points", "user_id", event.UserID) + } + } + + return nil +} diff --git a/internal/events/points.go b/internal/events/points.go new file mode 100644 index 0000000..d800ae3 --- /dev/null +++ b/internal/events/points.go @@ -0,0 +1,118 @@ +package events + +import ( + "context" + "time" + + "github.com/database-playground/backend-v2/ent" + "github.com/database-playground/backend-v2/ent/events" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/user" +) + +const ( + PointDescriptionDailyLogin = "daily login" + PointDescriptionWeeklyLogin = "weekly login" +) + +const ( + PointValueDailyLogin = 20 + PointValueWeeklyLogin = 50 +) + +// PointsGranter determines if the criteria is met to grant points to a user. +type PointsGranter struct { + entClient *ent.Client +} + +// NewPointsGranter creates a new PointsGranter. 
+func NewPointsGranter(entClient *ent.Client) *PointsGranter {
+	return &PointsGranter{
+		entClient: entClient,
+	}
+}
+
+// GrantDailyLoginPoints grants the "daily login" points to a user.
+func (d *PointsGranter) GrantDailyLoginPoints(ctx context.Context, userID int) (bool, error) {
+	// Check if we have granted the "daily login" points for this user today.
+	hasPointsRecord, err := d.entClient.Points.Query().
+		Where(points.HasUserWith(user.ID(userID))).
+		Where(points.DescriptionEQ(PointDescriptionDailyLogin)).
+		Where(points.CreatedAtGTE(time.Now().AddDate(0, 0, -1))).Exist(ctx)
+	if err != nil {
+		return false, err
+	}
+	if hasPointsRecord {
+		return false, nil
+	}
+
+	// Check if the user has logged in today.
+	hasTodayLoginRecord, err := d.entClient.Events.Query().
+		Where(events.Type(string(EventTypeLogin))).
+		Where(events.UserID(userID)).
+		Where(events.TriggeredAtGTE(time.Now().AddDate(0, 0, -1))).
+		Exist(ctx)
+	if err != nil {
+		return false, err
+	}
+	if !hasTodayLoginRecord {
+		return false, nil
+	}
+
+	// Grant the "daily login" points to the user.
+	err = d.entClient.Points.Create().
+		SetUserID(userID).
+		SetDescription(PointDescriptionDailyLogin).
+		SetPoints(PointValueDailyLogin).
+		Exec(ctx)
+	if err != nil {
+		return false, err
+	}
+
+	return true, nil
+}
+
+// GrantWeeklyLoginPoints grants the "weekly login" points to a user.
+func (d *PointsGranter) GrantWeeklyLoginPoints(ctx context.Context, userID int) (bool, error) {
+	// Check if we have granted the "weekly login" points for this user this week.
+	hasPointsRecord, err := d.entClient.Points.Query().
+		Where(points.HasUserWith(user.ID(userID))).
+		Where(points.DescriptionEQ(PointDescriptionWeeklyLogin)).
+		Where(points.CreatedAtGTE(time.Now().AddDate(0, 0, -7))).Exist(ctx)
+	if err != nil {
+		return false, err
+	}
+	if hasPointsRecord {
+		return false, nil
+	}
+
+	// Check if the user has logged in every day this week.
+	weekLoginRecords, err := d.entClient.Events.Query().
+		Where(events.Type(string(EventTypeLogin))).
+		Where(events.UserID(userID)).
+		All(ctx)
+	if err != nil {
+		return false, err
+	}
+
+	// aggregated by day
+	weekLoginRecordsByDay := make(map[time.Time]int)
+	for _, record := range weekLoginRecords {
+		weekLoginRecordsByDay[record.TriggeredAt.Truncate(24*time.Hour)]++
+	}
+
+	if len(weekLoginRecordsByDay) != 7 {
+		return false, nil
+	}
+
+	// Grant the "weekly login" points to the user.
+	err = d.entClient.Points.Create().
+		SetUserID(userID).
+		SetDescription(PointDescriptionWeeklyLogin).
+		SetPoints(PointValueWeeklyLogin).
+ Exec(ctx) + if err != nil { + return false, err + } + return true, nil +} diff --git a/internal/events/points_test.go b/internal/events/points_test.go new file mode 100644 index 0000000..b971d04 --- /dev/null +++ b/internal/events/points_test.go @@ -0,0 +1,416 @@ +package events_test + +import ( + "context" + "testing" + "time" + + "github.com/database-playground/backend-v2/ent" + "github.com/database-playground/backend-v2/ent/points" + "github.com/database-playground/backend-v2/ent/user" + "github.com/database-playground/backend-v2/internal/events" + "github.com/database-playground/backend-v2/internal/setup" + "github.com/database-playground/backend-v2/internal/testhelper" + "github.com/stretchr/testify/require" + + _ "github.com/mattn/go-sqlite3" +) + +// setupTestData creates a user and returns the client and user ID for testing +func setupTestData(t *testing.T, client *ent.Client) int { + t.Helper() + + ctx := context.Background() + + // Setup the database with required groups and scope sets + setupResult, err := setup.Setup(ctx, client) + require.NoError(t, err) + + // Create a user for testing with the new user group + user, err := client.User.Create(). + SetName("Test User"). + SetEmail("test@example.com"). + SetGroup(setupResult.NewUserGroup). + Save(ctx) + require.NoError(t, err) + + return user.ID +} + +// createLoginEvent creates a login event for the user at the specified time +func createLoginEvent(t *testing.T, client *ent.Client, userID int, triggeredAt time.Time) { + t.Helper() + + ctx := context.Background() + + _, err := client.Events.Create(). + SetUserID(userID). + SetType(string(events.EventTypeLogin)). + SetTriggeredAt(triggeredAt). + Save(ctx) + require.NoError(t, err) +} + +// createPointsRecord creates a points record for the user with specified created_at time +func createPointsRecord(t *testing.T, client *ent.Client, userID int, description string, pointsValue int, createdAt time.Time) { + t.Helper() + + ctx := context.Background() + + // Create the points record with specified created_at time + _, err := client.Points.Create(). + SetUserID(userID). + SetDescription(description). + SetPoints(pointsValue). + SetCreatedAt(createdAt). + Save(ctx) + require.NoError(t, err) +} + +func TestGrantDailyLoginPoints_Success(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + + // Create a login event from today + createLoginEvent(t, client, userID, now) + + // Grant daily login points + granted, err := granter.GrantDailyLoginPoints(ctx, userID) + require.NoError(t, err) + require.True(t, granted) + + // Verify points were created + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionDailyLogin)). 
+ All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 1) + require.Equal(t, events.PointValueDailyLogin, pointsRecords[0].Points) +} + +func TestGrantDailyLoginPoints_AlreadyGrantedToday(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + + // Create a login event from today + createLoginEvent(t, client, userID, now) + + // Create an existing points record from today + createPointsRecord(t, client, userID, events.PointDescriptionDailyLogin, events.PointValueDailyLogin, now) + + // Attempt to grant daily login points again + granted, err := granter.GrantDailyLoginPoints(ctx, userID) + require.NoError(t, err) + require.False(t, granted) + + // Verify only one points record exists + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionDailyLogin)). + All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 1) +} + +func TestGrantDailyLoginPoints_NoLoginToday(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + yesterday := time.Now().AddDate(0, 0, -2) // 2 days ago to be sure it's outside the window + + // Create a login event from yesterday (outside the 24 hour window) + createLoginEvent(t, client, userID, yesterday) + + // Attempt to grant daily login points + granted, err := granter.GrantDailyLoginPoints(ctx, userID) + require.NoError(t, err) + require.False(t, granted) + + // Verify no points record was created + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionDailyLogin)). + All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 0) +} + +func TestGrantDailyLoginPoints_OldPointsRecordExists(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + twoDaysAgo := now.AddDate(0, 0, -2) + + // Create a login event from today + createLoginEvent(t, client, userID, now) + + // Create an old points record from 2 days ago + createPointsRecord(t, client, userID, events.PointDescriptionDailyLogin, events.PointValueDailyLogin, twoDaysAgo) + + // Grant daily login points should succeed since old record is outside 24 hour window + granted, err := granter.GrantDailyLoginPoints(ctx, userID) + require.NoError(t, err) + require.True(t, granted) + + // Verify two points records exist now + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionDailyLogin)). 
+ All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 2) +} + +func TestGrantWeeklyLoginPoints_Success(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + + // Create login events for 7 consecutive days + for i := 0; i < 7; i++ { + loginTime := now.AddDate(0, 0, -i) + createLoginEvent(t, client, userID, loginTime) + } + + // Grant weekly login points + granted, err := granter.GrantWeeklyLoginPoints(ctx, userID) + require.NoError(t, err) + require.True(t, granted) + + // Verify points were created + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionWeeklyLogin)). + All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 1) + require.Equal(t, events.PointValueWeeklyLogin, pointsRecords[0].Points) +} + +func TestGrantWeeklyLoginPoints_AlreadyGrantedThisWeek(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + + // Create login events for 7 consecutive days + for i := 0; i < 7; i++ { + loginTime := now.AddDate(0, 0, -i) + createLoginEvent(t, client, userID, loginTime) + } + + // Create an existing weekly points record from this week + createPointsRecord(t, client, userID, events.PointDescriptionWeeklyLogin, events.PointValueWeeklyLogin, now) + + // Attempt to grant weekly login points again + granted, err := granter.GrantWeeklyLoginPoints(ctx, userID) + require.NoError(t, err) + require.False(t, granted) + + // Verify only one points record exists + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionWeeklyLogin)). + All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 1) +} + +func TestGrantWeeklyLoginPoints_InsufficientLoginDays(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + + // Create login events for only 5 days (insufficient for weekly points) + for i := 0; i < 5; i++ { + loginTime := now.AddDate(0, 0, -i) + createLoginEvent(t, client, userID, loginTime) + } + + // Attempt to grant weekly login points + granted, err := granter.GrantWeeklyLoginPoints(ctx, userID) + require.NoError(t, err) + require.False(t, granted) + + // Verify no points record was created + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionWeeklyLogin)). + All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 0) +} + +func TestGrantWeeklyLoginPoints_NoLoginEvents(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + + // Don't create any login events + + // Attempt to grant weekly login points + granted, err := granter.GrantWeeklyLoginPoints(ctx, userID) + require.NoError(t, err) + require.False(t, granted) + + // Verify no points record was created + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionWeeklyLogin)). 
+ All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 0) +} + +func TestGrantWeeklyLoginPoints_MultipleLoginsPerDay(t *testing.T) { + testCases := []struct { + name string + days int + shouldGrant bool + description string + }{ + { + name: "SufficientDays", + days: 7, + shouldGrant: true, + description: "Should grant points with 7 days of multiple logins per day", + }, + { + name: "InsufficientDays", + days: 6, + shouldGrant: false, + description: "Should not grant points with only 6 days of multiple logins per day", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + + // Create multiple login events for specified number of consecutive days + for i := 0; i < tc.days; i++ { + dayStart := now.AddDate(0, 0, -i) + // Create 3 login events for each day at different times + for j := 0; j < 3; j++ { + loginTime := dayStart.Add(time.Duration(j) * time.Hour) + createLoginEvent(t, client, userID, loginTime) + } + } + + // Grant weekly login points + granted, err := granter.GrantWeeklyLoginPoints(ctx, userID) + require.NoError(t, err) + require.Equal(t, tc.shouldGrant, granted) + + // Verify points were created or not created based on expectation + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionWeeklyLogin)). + All(ctx) + require.NoError(t, err) + + if tc.shouldGrant { + require.Len(t, pointsRecords, 1) + require.Equal(t, events.PointValueWeeklyLogin, pointsRecords[0].Points) + } else { + require.Len(t, pointsRecords, 0) + } + }) + } +} + +func TestGrantWeeklyLoginPoints_OldWeeklyPointsRecordExists(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + userID := setupTestData(t, client) + + ctx := context.Background() + now := time.Now() + tenDaysAgo := now.AddDate(0, 0, -10) + + // Create login events for 7 consecutive days + for i := 0; i < 7; i++ { + loginTime := now.AddDate(0, 0, -i) + createLoginEvent(t, client, userID, loginTime) + } + + // Create an old weekly points record from 10 days ago + createPointsRecord(t, client, userID, events.PointDescriptionWeeklyLogin, events.PointValueWeeklyLogin, tenDaysAgo) + + // Grant weekly login points should succeed since old record is outside 7 day window + granted, err := granter.GrantWeeklyLoginPoints(ctx, userID) + require.NoError(t, err) + require.True(t, granted) + + // Verify two points records exist now + pointsRecords, err := client.Points.Query(). + Where(points.HasUserWith(user.IDEQ(userID))). + Where(points.DescriptionEQ(events.PointDescriptionWeeklyLogin)). 
+ All(ctx) + require.NoError(t, err) + require.Len(t, pointsRecords, 2) +} + +func TestNewPointsGranter(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + + granter := events.NewPointsGranter(client) + require.NotNil(t, granter) +} + +func TestGrantDailyLoginPoints_NonExistentUser(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + + ctx := context.Background() + nonExistentUserID := 99999 + + // Attempt to grant points for non-existent user should not fail during the query phase + // but should return false since there's no login event + granted, err := granter.GrantDailyLoginPoints(ctx, nonExistentUserID) + require.NoError(t, err) + require.False(t, granted) +} + +func TestGrantWeeklyLoginPoints_NonExistentUser(t *testing.T) { + client := testhelper.NewEntSqliteClient(t) + granter := events.NewPointsGranter(client) + + ctx := context.Background() + nonExistentUserID := 99999 + + // Attempt to grant points for non-existent user should not fail during the query phase + // but should return false since there are no login events + granted, err := granter.GrantWeeklyLoginPoints(ctx, nonExistentUserID) + require.NoError(t, err) + require.False(t, granted) +} From a279a5b16caa2db56f7e527b0aea5b5ebc34d0c9 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:02:50 +0800 Subject: [PATCH 03/14] feat: abstract HandleEvent --- internal/events/events.go | 28 ++++++++++++++++------------ internal/events/points.go | 14 ++++++++++++++ 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/internal/events/events.go b/internal/events/events.go index 6b81c3d..95a5673 100644 --- a/internal/events/events.go +++ b/internal/events/events.go @@ -2,7 +2,6 @@ package events import ( "context" - "log/slog" "time" "github.com/database-playground/backend-v2/ent" @@ -10,15 +9,16 @@ import ( // EventService is the service for triggering events. type EventService struct { - entClient *ent.Client - pointsGranter *PointsGranter + entClient *ent.Client + + handlers []EventHandler } // NewEventService creates a new EventService. func NewEventService(entClient *ent.Client) *EventService { return &EventService{ - entClient: entClient, - pointsGranter: NewPointsGranter(entClient), + entClient: entClient, + handlers: []EventHandler{NewPointsGranter(entClient)}, } } @@ -29,26 +29,30 @@ type Event struct { UserID int } +// EventHandler is the handler for the event. +// +// You can think it as the callback of the event. +type EventHandler interface { + HandleEvent(ctx context.Context, event *ent.Events) error +} + // TriggerEvent triggers an event. func (s *EventService) TriggerEvent(ctx context.Context, event Event) error { - err := s.entClient.Events.Create(). + eventEntity, err := s.entClient.Events.Create(). SetType(string(event.Type)). SetPayload(event.Payload). SetUserID(event.UserID). SetTriggeredAt(time.Now()). 
- Exec(ctx) + Save(ctx) if err != nil { return err } - if event.Type == EventTypeLogin { - ok, err := s.pointsGranter.GrantDailyLoginPoints(ctx, event.UserID) + for _, handler := range s.handlers { + err := handler.HandleEvent(ctx, eventEntity) if err != nil { return err } - if ok { - slog.Info("granted daily login points", "user_id", event.UserID) - } } return nil diff --git a/internal/events/points.go b/internal/events/points.go index d800ae3..3371ff4 100644 --- a/internal/events/points.go +++ b/internal/events/points.go @@ -2,6 +2,7 @@ package events import ( "context" + "log/slog" "time" "github.com/database-playground/backend-v2/ent" @@ -32,6 +33,19 @@ func NewPointsGranter(entClient *ent.Client) *PointsGranter { } } +// HandleEvent handles the event creation. +func (d *PointsGranter) HandleEvent(ctx context.Context, event *ent.Events) error { + switch event.Type { + case string(EventTypeLogin): + ok, err := d.GrantDailyLoginPoints(ctx, event.UserID) + if ok { + slog.Info("granted daily login points", "user_id", event.UserID) + } + return err + } + return nil +} + // GrantDailyLoginPoints grants the "daily login" points to a user. func (d *PointsGranter) GrantDailyLoginPoints(ctx context.Context, userID int) (bool, error) { // Check if we have granted the "daily login" points for this user today. From 39ed80fb59b2a15310edc19db0548995aeb89dd9 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:21:41 +0800 Subject: [PATCH 04/14] feat(events): make TriggerEvent async --- internal/events/events.go | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/internal/events/events.go b/internal/events/events.go index 95a5673..1448007 100644 --- a/internal/events/events.go +++ b/internal/events/events.go @@ -2,6 +2,7 @@ package events import ( "context" + "log/slog" "time" "github.com/database-playground/backend-v2/ent" @@ -37,7 +38,17 @@ type EventHandler interface { } // TriggerEvent triggers an event. -func (s *EventService) TriggerEvent(ctx context.Context, event Event) error { +func (s *EventService) TriggerEvent(ctx context.Context, event Event) { + go func() { + err := s.triggerEvent(ctx, event) + if err != nil { + slog.Error("failed to trigger event", "error", err) + } + }() +} + +// triggerEvent triggers an event synchronously. +func (s *EventService) triggerEvent(ctx context.Context, event Event) error { eventEntity, err := s.entClient.Events.Create(). SetType(string(event.Type)). SetPayload(event.Payload). 
From 995ffb7f7a098b3c98fa5eb191d55f430d6cd371 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:21:52 +0800 Subject: [PATCH 05/14] chore: ignore all PEM files --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 8c48cda..53e64a6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ graph/*.generated.go -.env \ No newline at end of file +.env +*.pem \ No newline at end of file From 6a28f9b4fad5d5e73239190504baacc239014608 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:22:01 +0800 Subject: [PATCH 06/14] feat(events): add Impersonate event --- internal/events/constants.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/internal/events/constants.go b/internal/events/constants.go index 98a9ff9..2791393 100644 --- a/internal/events/constants.go +++ b/internal/events/constants.go @@ -3,5 +3,6 @@ package events type EventType string const ( - EventTypeLogin EventType = "login" + EventTypeLogin EventType = "login" + EventTypeImpersonated EventType = "impersonated" ) From a49f14685160744f71f6f9a2cf1cf4d85c156340 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:22:10 +0800 Subject: [PATCH 07/14] feat(useraccount): implement login event --- internal/useraccount/delete_test.go | 7 +- internal/useraccount/register_flow_test.go | 37 ++-- internal/useraccount/token.go | 16 ++ internal/useraccount/token_test.go | 209 ++++++++++++++++++++- internal/useraccount/useraccount.go | 13 +- internal/useraccount/useraccount_test.go | 7 +- 6 files changed, 261 insertions(+), 28 deletions(-) diff --git a/internal/useraccount/delete_test.go b/internal/useraccount/delete_test.go index a151cc0..869a4fc 100644 --- a/internal/useraccount/delete_test.go +++ b/internal/useraccount/delete_test.go @@ -6,6 +6,7 @@ import ( "github.com/database-playground/backend-v2/ent" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/useraccount" "github.com/stretchr/testify/require" ) @@ -35,7 +36,8 @@ func TestDeleteUser(t *testing.T) { t.Run(tt.name, func(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) var userID int if tt.setupUser { @@ -76,7 +78,8 @@ func TestDeleteUser(t *testing.T) { func TestDeleteUser_Integration(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) // Get the unverified group unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context.Background()) diff --git a/internal/useraccount/register_flow_test.go b/internal/useraccount/register_flow_test.go index abdd4f9..3bf717d 100644 --- a/internal/useraccount/register_flow_test.go +++ b/internal/useraccount/register_flow_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/database-playground/backend-v2/ent/group" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/testhelper" "github.com/database-playground/backend-v2/internal/useraccount" "github.com/stretchr/testify/assert" @@ -14,7 +15,8 @@ import ( func 
TestGetOrRegister_NewUser(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() req := useraccount.UserRegisterRequest{ @@ -45,7 +47,8 @@ func TestGetOrRegister_NewUser(t *testing.T) { func TestGetOrRegister_ExistingUser(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create an existing user @@ -78,7 +81,8 @@ func TestGetOrRegister_MissingUnverifiedGroup(t *testing.T) { // Create a fresh database without setup client := testhelper.NewEntSqliteClient(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() req := useraccount.UserRegisterRequest{ @@ -94,7 +98,8 @@ func TestGetOrRegister_MissingUnverifiedGroup(t *testing.T) { func TestVerify_Success(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create an unverified user @@ -130,7 +135,8 @@ func TestVerify_Success(t *testing.T) { func TestVerify_UserNotFound(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() err := ctx.Verify(context, 99999) // Non-existent user ID @@ -141,7 +147,8 @@ func TestVerify_UserNotFound(t *testing.T) { func TestVerify_UserAlreadyVerified(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create a user in new-user group (already verified) @@ -165,7 +172,8 @@ func TestVerify_MissingNewUserGroup(t *testing.T) { // Create a fresh database without setup client := testhelper.NewEntSqliteClient(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create only unverified group @@ -191,7 +199,8 @@ func TestVerify_MissingNewUserGroup(t *testing.T) { func TestRegistrationFlow_Complete(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Step 1: Register new user (should be unverified) @@ -240,7 +249,8 @@ func TestRegistrationFlow_Complete(t *testing.T) { func TestRegistrationFlow_ExistingUser(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - 
ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create an existing verified user @@ -281,7 +291,8 @@ func TestRegistrationFlow_ErrorCases(t *testing.T) { // Create a fresh database without setup client := testhelper.NewEntSqliteClient(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() req := useraccount.UserRegisterRequest{ @@ -297,7 +308,8 @@ func TestRegistrationFlow_ErrorCases(t *testing.T) { t.Run("verify already verified user", func(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create verified user @@ -320,7 +332,8 @@ func TestRegistrationFlow_ErrorCases(t *testing.T) { t.Run("verify non-existent user", func(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() err := ctx.Verify(context, 99999) diff --git a/internal/useraccount/token.go b/internal/useraccount/token.go index 5d19ccd..5abf03b 100644 --- a/internal/useraccount/token.go +++ b/internal/useraccount/token.go @@ -6,6 +6,7 @@ import ( "github.com/database-playground/backend-v2/ent" "github.com/database-playground/backend-v2/internal/auth" + "github.com/database-playground/backend-v2/internal/events" ) const ( @@ -57,7 +58,22 @@ func (c *Context) GrantToken(ctx context.Context, user *ent.User, machine string MetaInitiateFromFlow: options.flow, } if options.impersonatorID != 0 { + c.eventService.TriggerEvent(ctx, events.Event{ + Type: events.EventTypeImpersonated, + UserID: user.ID, + Payload: map[string]any{ + "impersonator_id": options.impersonatorID, + }, + }) meta[MetaImpersonation] = strconv.Itoa(options.impersonatorID) + } else { + c.eventService.TriggerEvent(ctx, events.Event{ + Type: events.EventTypeLogin, + UserID: user.ID, + Payload: map[string]any{ + "machine": machine, + }, + }) } token, err := c.auth.Create(ctx, auth.TokenInfo{ diff --git a/internal/useraccount/token_test.go b/internal/useraccount/token_test.go index 277a2ac..7328471 100644 --- a/internal/useraccount/token_test.go +++ b/internal/useraccount/token_test.go @@ -4,9 +4,12 @@ import ( "context" "strconv" "testing" + "time" + "github.com/database-playground/backend-v2/ent/events" "github.com/database-playground/backend-v2/ent/group" "github.com/database-playground/backend-v2/internal/auth" + events_pkg "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/useraccount" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -15,7 +18,8 @@ import ( func TestGrantToken_Success(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create a user in 
unverified group @@ -51,7 +55,8 @@ func TestGrantToken_Success(t *testing.T) { func TestGrantToken_Impersonation(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) @@ -85,7 +90,8 @@ func TestGrantToken_Impersonation(t *testing.T) { func TestGrantToken_DefaultFlow(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) @@ -113,7 +119,8 @@ func TestGrantToken_DefaultFlow(t *testing.T) { func TestGrantToken_NewUserScopes(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create a user in new-user group @@ -146,7 +153,8 @@ func TestGrantToken_NewUserScopes(t *testing.T) { func TestGrantToken_UserWithoutScopeSet(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create a group without scope set @@ -179,7 +187,8 @@ func TestGrantToken_UserWithoutScopeSet(t *testing.T) { func TestRevokeToken_Success(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) @@ -210,7 +219,8 @@ func TestRevokeToken_Success(t *testing.T) { func TestRevokeAllTokens_Success(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) @@ -248,3 +258,188 @@ func TestRevokeAllTokens_Success(t *testing.T) { require.Error(t, err) assert.Equal(t, auth.ErrNotFound, err) } + +func TestGrantToken_LoginEventTriggered(t *testing.T) { + client := setupTestDatabase(t) + authStorage := newMockAuthStorage() + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) + context := context.Background() + + // Create a user in unverified group + unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) + require.NoError(t, err) + + user, err := client.User.Create(). + SetName("Test User"). 
+ SetEmail("test-event-login@example.com"). + SetGroup(unverifiedGroup). + Save(context) + require.NoError(t, err) + + // Grant token (should trigger login event) + token, err := ctx.GrantToken( + context, user, "test-machine-login", + useraccount.WithFlow("login"), + ) + require.NoError(t, err) + require.NotEmpty(t, token) + + // Wait a bit for the async event processing to complete + // Since TriggerEvent runs in a goroutine, we need to give it time + time.Sleep(100 * time.Millisecond) + + // Verify login event was created in database + loginEvents, err := client.Events.Query(). + Where(events.UserIDEQ(user.ID)). + Where(events.TypeEQ(string(events_pkg.EventTypeLogin))). + All(context) + require.NoError(t, err) + require.Len(t, loginEvents, 1) + + // Verify event payload contains correct machine info + loginEvent := loginEvents[0] + assert.Equal(t, user.ID, loginEvent.UserID) + assert.Equal(t, string(events_pkg.EventTypeLogin), loginEvent.Type) + assert.NotNil(t, loginEvent.Payload) + assert.Equal(t, "test-machine-login", loginEvent.Payload["machine"]) + assert.NotZero(t, loginEvent.TriggeredAt) +} + +func TestGrantToken_ImpersonationEventTriggered(t *testing.T) { + client := setupTestDatabase(t) + authStorage := newMockAuthStorage() + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) + context := context.Background() + + // Create a user in unverified group + unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) + require.NoError(t, err) + + user, err := client.User.Create(). + SetName("Test User"). + SetEmail("test-event-impersonation@example.com"). + SetGroup(unverifiedGroup). + Save(context) + require.NoError(t, err) + + // Create an impersonator user + impersonator, err := client.User.Create(). + SetName("Impersonator User"). + SetEmail("impersonator@example.com"). + SetGroup(unverifiedGroup). + Save(context) + require.NoError(t, err) + + // Grant token with impersonation (should trigger impersonation event) + token, err := ctx.GrantToken( + context, user, "test-machine-impersonation", + useraccount.WithFlow("admin"), + useraccount.WithImpersonation(impersonator.ID), + ) + require.NoError(t, err) + require.NotEmpty(t, token) + + // Wait a bit for the async event processing to complete + time.Sleep(100 * time.Millisecond) + + // Verify impersonation event was created in database + impersonationEvents, err := client.Events.Query(). + Where(events.UserIDEQ(user.ID)). + Where(events.TypeEQ(string(events_pkg.EventTypeImpersonated))). + All(context) + require.NoError(t, err) + require.Len(t, impersonationEvents, 1) + + // Verify event payload contains correct impersonator info + impersonationEvent := impersonationEvents[0] + assert.Equal(t, user.ID, impersonationEvent.UserID) + assert.Equal(t, string(events_pkg.EventTypeImpersonated), impersonationEvent.Type) + assert.NotNil(t, impersonationEvent.Payload) + + // JSON unmarshaling converts numbers to float64, so we need to convert + impersonatorIDFloat, ok := impersonationEvent.Payload["impersonator_id"].(float64) + require.True(t, ok, "impersonator_id should be a number") + assert.Equal(t, float64(impersonator.ID), impersonatorIDFloat) + assert.NotZero(t, impersonationEvent.TriggeredAt) + + // Verify no login event was created (impersonation takes precedence) + loginEvents, err := client.Events.Query(). + Where(events.UserIDEQ(user.ID)). + Where(events.TypeEQ(string(events_pkg.EventTypeLogin))). 
+ All(context) + require.NoError(t, err) + require.Len(t, loginEvents, 0) +} + +func TestGrantToken_MultipleTokensCreateMultipleEvents(t *testing.T) { + client := setupTestDatabase(t) + authStorage := newMockAuthStorage() + eventService := events_pkg.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) + context := context.Background() + + // Create a user in unverified group + unverifiedGroup, err := client.Group.Query().Where(group.NameEQ(useraccount.UnverifiedGroupSlug)).Only(context) + require.NoError(t, err) + + user, err := client.User.Create(). + SetName("Test User"). + SetEmail("test-event-multiple@example.com"). + SetGroup(unverifiedGroup). + Save(context) + require.NoError(t, err) + + // Grant multiple tokens (should create multiple login events) + // Note: We'll grant them sequentially to avoid race conditions + token1, err := ctx.GrantToken( + context, user, "machine-1", + useraccount.WithFlow("login"), + ) + require.NoError(t, err) + require.NotEmpty(t, token1) + + // Wait for the first event to be processed + time.Sleep(50 * time.Millisecond) + + token2, err := ctx.GrantToken( + context, user, "machine-2", + useraccount.WithFlow("login"), + ) + require.NoError(t, err) + require.NotEmpty(t, token2) + + // Wait for the second event to be processed + time.Sleep(50 * time.Millisecond) + + token3, err := ctx.GrantToken( + context, user, "machine-3", + useraccount.WithFlow("login"), + ) + require.NoError(t, err) + require.NotEmpty(t, token3) + + // Wait for the third event to be processed + time.Sleep(100 * time.Millisecond) + + // Verify three login events were created + loginEvents, err := client.Events.Query(). + Where(events.UserIDEQ(user.ID)). + Where(events.TypeEQ(string(events_pkg.EventTypeLogin))). 
+ All(context) + require.NoError(t, err) + require.Len(t, loginEvents, 3) + + // Verify each event has different machine info + machines := make(map[string]bool) + for _, event := range loginEvents { + machine, ok := event.Payload["machine"].(string) + require.True(t, ok, "machine should be a string") + machines[machine] = true + } + assert.Len(t, machines, 3) + assert.True(t, machines["machine-1"]) + assert.True(t, machines["machine-2"]) + assert.True(t, machines["machine-3"]) +} diff --git a/internal/useraccount/useraccount.go b/internal/useraccount/useraccount.go index 251fd7b..7729c7f 100644 --- a/internal/useraccount/useraccount.go +++ b/internal/useraccount/useraccount.go @@ -4,16 +4,19 @@ package useraccount import ( "github.com/database-playground/backend-v2/ent" "github.com/database-playground/backend-v2/internal/auth" + "github.com/database-playground/backend-v2/internal/events" ) type Context struct { - entClient *ent.Client - auth auth.Storage + entClient *ent.Client + auth auth.Storage + eventService *events.EventService } -func NewContext(entClient *ent.Client, auth auth.Storage) *Context { +func NewContext(entClient *ent.Client, auth auth.Storage, eventService *events.EventService) *Context { return &Context{ - entClient: entClient, - auth: auth, + entClient: entClient, + auth: auth, + eventService: eventService, } } diff --git a/internal/useraccount/useraccount_test.go b/internal/useraccount/useraccount_test.go index e255860..1c93357 100644 --- a/internal/useraccount/useraccount_test.go +++ b/internal/useraccount/useraccount_test.go @@ -7,6 +7,7 @@ import ( "github.com/database-playground/backend-v2/ent" "github.com/database-playground/backend-v2/ent/group" "github.com/database-playground/backend-v2/internal/auth" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/setup" "github.com/database-playground/backend-v2/internal/testhelper" "github.com/database-playground/backend-v2/internal/useraccount" @@ -71,15 +72,17 @@ func setupTestDatabase(t *testing.T) *ent.Client { func TestNewContext(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() + eventService := events.NewEventService(client) - ctx := useraccount.NewContext(client, authStorage) + ctx := useraccount.NewContext(client, authStorage, eventService) require.NotNil(t, ctx) } func TestGetUser(t *testing.T) { client := setupTestDatabase(t) authStorage := newMockAuthStorage() - ctx := useraccount.NewContext(client, authStorage) + eventService := events.NewEventService(client) + ctx := useraccount.NewContext(client, authStorage, eventService) context := context.Background() // Create a group for the user From 03c8b57185e2784ac2d10168f67dc5f09def420c Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:27:29 +0800 Subject: [PATCH 08/14] fix: dependencies --- cmd/backend/dependencies.go | 20 ++++++++++++++++---- cmd/backend/server.go | 2 ++ graph/resolver.go | 14 ++++++++------ httpapi/auth/introspect.go | 3 +-- httpapi/auth/introspect_test.go | 6 +++++- httpapi/auth/revoke_test.go | 6 +++++- httpapi/auth/root.go | 20 ++++++++++++-------- 7 files changed, 49 insertions(+), 22 deletions(-) diff --git a/cmd/backend/dependencies.go b/cmd/backend/dependencies.go index c5f8059..78ba026 100644 --- a/cmd/backend/dependencies.go +++ b/cmd/backend/dependencies.go @@ -20,8 +20,10 @@ import ( authservice "github.com/database-playground/backend-v2/httpapi/auth" "github.com/database-playground/backend-v2/internal/auth" 
"github.com/database-playground/backend-v2/internal/config" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/httputils" "github.com/database-playground/backend-v2/internal/sqlrunner" + "github.com/database-playground/backend-v2/internal/useraccount" "github.com/gin-contrib/cors" "github.com/gin-gonic/gin" "github.com/redis/rueidis" @@ -41,8 +43,8 @@ func SqlRunner(cfg config.Config) *sqlrunner.SqlRunner { } // GqlgenHandler creates a gqlgen handler. -func GqlgenHandler(entClient *ent.Client, storage auth.Storage, sqlrunner *sqlrunner.SqlRunner) *handler.Server { - srv := handler.New(graph.NewSchema(entClient, storage, sqlrunner)) +func GqlgenHandler(entClient *ent.Client, storage auth.Storage, sqlrunner *sqlrunner.SqlRunner, eventService *events.EventService) *handler.Server { + srv := handler.New(graph.NewSchema(entClient, storage, sqlrunner, eventService)) srv.AddTransport(transport.Options{}) srv.AddTransport(transport.GET{}) @@ -61,9 +63,19 @@ func GqlgenHandler(entClient *ent.Client, storage auth.Storage, sqlrunner *sqlru return srv } +// UserAccountContext creates a useraccount.Context. +func UserAccountContext(entClient *ent.Client, storage auth.Storage, eventService *events.EventService) *useraccount.Context { + return useraccount.NewContext(entClient, storage, eventService) +} + +// EventService creates an events.EventService. +func EventService(entClient *ent.Client) *events.EventService { + return events.NewEventService(entClient) +} + // AuthService creates an auth service. -func AuthService(entClient *ent.Client, storage auth.Storage, config config.Config) httpapi.Service { - return authservice.NewAuthService(entClient, storage, config) +func AuthService(entClient *ent.Client, storage auth.Storage, config config.Config, useraccount *useraccount.Context) httpapi.Service { + return authservice.NewAuthService(entClient, storage, config, useraccount) } // GinEngine creates a gin engine. diff --git a/cmd/backend/server.go b/cmd/backend/server.go index da10f44..1afd037 100644 --- a/cmd/backend/server.go +++ b/cmd/backend/server.go @@ -16,6 +16,8 @@ func main() { fx.Provide( AuthStorage, SqlRunner, + UserAccountContext, + EventService, AnnotateService(AuthService), GqlgenHandler, fx.Annotate( diff --git a/graph/resolver.go b/graph/resolver.go index fdaa992..9154f53 100644 --- a/graph/resolver.go +++ b/graph/resolver.go @@ -9,6 +9,7 @@ import ( "github.com/database-playground/backend-v2/graph/defs" "github.com/database-playground/backend-v2/graph/directive" "github.com/database-playground/backend-v2/internal/auth" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/sqlrunner" "github.com/database-playground/backend-v2/internal/useraccount" "github.com/vektah/gqlparser/v2/gqlerror" @@ -20,15 +21,16 @@ import ( // Resolver is the resolver root. type Resolver struct { - ent *ent.Client - auth auth.Storage - sqlrunner *sqlrunner.SqlRunner + ent *ent.Client + auth auth.Storage + sqlrunner *sqlrunner.SqlRunner + eventService *events.EventService } // NewSchema creates a graphql executable schema. 
-func NewSchema(ent *ent.Client, auth auth.Storage, sqlrunner *sqlrunner.SqlRunner) graphql.ExecutableSchema { +func NewSchema(ent *ent.Client, auth auth.Storage, sqlrunner *sqlrunner.SqlRunner, eventService *events.EventService) graphql.ExecutableSchema { return NewExecutableSchema(Config{ - Resolvers: &Resolver{ent, auth, sqlrunner}, + Resolvers: &Resolver{ent, auth, sqlrunner, eventService}, Directives: DirectiveRoot{ Scope: directive.ScopeDirective, }, @@ -36,7 +38,7 @@ func NewSchema(ent *ent.Client, auth auth.Storage, sqlrunner *sqlrunner.SqlRunne } func (r *Resolver) UserAccount(ctx context.Context) *useraccount.Context { - return useraccount.NewContext(r.EntClient(ctx), r.auth) + return useraccount.NewContext(r.EntClient(ctx), r.auth, r.eventService) } func (r *Resolver) EntClient(ctx context.Context) *ent.Client { diff --git a/httpapi/auth/introspect.go b/httpapi/auth/introspect.go index b7a15fd..54fdf40 100644 --- a/httpapi/auth/introspect.go +++ b/httpapi/auth/introspect.go @@ -76,8 +76,7 @@ func (s *AuthService) IntrospectToken(c *gin.Context) { } // Get user information - useraccountCtx := useraccount.NewContext(s.entClient, s.storage) - entUser, err := useraccountCtx.GetUser(c.Request.Context(), tokenInfo.UserID) + entUser, err := s.useraccount.GetUser(c.Request.Context(), tokenInfo.UserID) if err != nil { if errors.Is(err, useraccount.ErrUserNotFound) { // User not found - token is technically invalid diff --git a/httpapi/auth/introspect_test.go b/httpapi/auth/introspect_test.go index 312b17d..66663a3 100644 --- a/httpapi/auth/introspect_test.go +++ b/httpapi/auth/introspect_test.go @@ -15,8 +15,10 @@ import ( "github.com/database-playground/backend-v2/ent/group" "github.com/database-playground/backend-v2/internal/auth" "github.com/database-playground/backend-v2/internal/config" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/setup" "github.com/database-playground/backend-v2/internal/testhelper" + "github.com/database-playground/backend-v2/internal/useraccount" "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -52,8 +54,10 @@ func setupTestAuthServiceWithDatabase(t *testing.T) (*AuthService, *mockAuthStor storage := newMockAuthStorageForIntrospect() cfg := config.Config{} + eventService := events.NewEventService(entClient) + useraccount := useraccount.NewContext(entClient, storage, eventService) - authService := NewAuthService(entClient, storage, cfg) + authService := NewAuthService(entClient, storage, cfg, useraccount) return authService, storage, entClient } diff --git a/httpapi/auth/revoke_test.go b/httpapi/auth/revoke_test.go index 380217a..f65032c 100644 --- a/httpapi/auth/revoke_test.go +++ b/httpapi/auth/revoke_test.go @@ -12,7 +12,9 @@ import ( "github.com/database-playground/backend-v2/internal/auth" "github.com/database-playground/backend-v2/internal/config" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/testhelper" + "github.com/database-playground/backend-v2/internal/useraccount" "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -74,8 +76,10 @@ func setupTestAuthService(t *testing.T) (*AuthService, *mockAuthStorage) { entClient := testhelper.NewEntSqliteClient(t) storage := newMockAuthStorage() cfg := config.Config{} + eventService := events.NewEventService(entClient) + useraccount := useraccount.NewContext(entClient, 
storage, eventService) - authService := NewAuthService(entClient, storage, cfg) + authService := NewAuthService(entClient, storage, cfg, useraccount) return authService, storage } diff --git a/httpapi/auth/root.go b/httpapi/auth/root.go index 76ca6a2..3200035 100644 --- a/httpapi/auth/root.go +++ b/httpapi/auth/root.go @@ -13,22 +13,26 @@ import ( ) type AuthService struct { - entClient *ent.Client - storage auth.Storage - config config.Config + entClient *ent.Client + storage auth.Storage + config config.Config + useraccount *useraccount.Context } -func NewAuthService(entClient *ent.Client, storage auth.Storage, config config.Config) *AuthService { - return &AuthService{entClient: entClient, storage: storage, config: config} +func NewAuthService(entClient *ent.Client, storage auth.Storage, config config.Config, useraccount *useraccount.Context) *AuthService { + return &AuthService{ + entClient: entClient, + storage: storage, + config: config, + useraccount: useraccount, + } } func (s *AuthService) Register(router gin.IRouter) { - useraccount := useraccount.NewContext(s.entClient, s.storage) - auth := router.Group("/auth/v2") oauthConfig := BuildOAuthConfig(s.config.GAuth) oauthConfig.RedirectURL = fmt.Sprintf("%s%s/callback/google", s.config.Server.URI, auth.BasePath()) - gauthHandler := NewGauthHandler(oauthConfig, useraccount, s.config.GAuth.RedirectURIs, s.config.GAuth.Secret) + gauthHandler := NewGauthHandler(oauthConfig, s.useraccount, s.config.GAuth.RedirectURIs, s.config.GAuth.Secret) auth.GET("/authorize/google", gauthHandler.Authorize) auth.GET("/callback/google", gauthHandler.Callback) From 5149d23981b04abeead72826ede3195a484b72cb Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:32:52 +0800 Subject: [PATCH 09/14] fix: use WaitGroup pattern to prevent early stop --- cmd/backend/dependencies.go | 9 +++++++-- internal/events/events.go | 5 +++-- internal/workers/workers.go | 28 ++++++++++++++++++++++++++++ 3 files changed, 38 insertions(+), 4 deletions(-) create mode 100644 internal/workers/workers.go diff --git a/cmd/backend/dependencies.go b/cmd/backend/dependencies.go index 78ba026..b7803d6 100644 --- a/cmd/backend/dependencies.go +++ b/cmd/backend/dependencies.go @@ -24,6 +24,7 @@ import ( "github.com/database-playground/backend-v2/internal/httputils" "github.com/database-playground/backend-v2/internal/sqlrunner" "github.com/database-playground/backend-v2/internal/useraccount" + "github.com/database-playground/backend-v2/internal/workers" "github.com/gin-contrib/cors" "github.com/gin-gonic/gin" "github.com/redis/rueidis" @@ -144,12 +145,12 @@ func GinLifecycle(lifecycle fx.Lifecycle, engine *gin.Engine, cfg config.Config) } }() - go func() { + workers.Global.Go(func() { <-httpCtx.Done() if err := srv.Shutdown(context.Background()); err != nil { slog.Error("error shutting down gin engine", "error", err) } - }() + }) return nil }, @@ -161,6 +162,10 @@ func GinLifecycle(lifecycle fx.Lifecycle, engine *gin.Engine, cfg config.Config) cancel() } + // Wait for all workers to finish + slog.Info("waiting for workers to finish") + workers.Global.Wait() + return nil }, }) diff --git a/internal/events/events.go b/internal/events/events.go index 1448007..bd5551b 100644 --- a/internal/events/events.go +++ b/internal/events/events.go @@ -6,6 +6,7 @@ import ( "time" "github.com/database-playground/backend-v2/ent" + "github.com/database-playground/backend-v2/internal/workers" ) // EventService is the service for triggering events. 
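(Illustrative sketch, not part of the patch.) The hunk below routes TriggerEvent's goroutine through workers.Global.Go, so the Wait call added to GinLifecycle's OnStop can drain in-flight event handlers before the process exits. A minimal, self-contained version of that pattern; the worker shape and the Global name mirror the internal/workers package introduced in this patch, everything else is assumed for illustration:

    // Not part of the patch; a sketch of the WaitGroup-backed worker pattern.
    package main

    import (
        "fmt"
        "sync"
        "time"
    )

    // worker mirrors the shape of internal/workers.Worker.
    type worker struct{ wg sync.WaitGroup }

    // Go runs fn in a goroutine that the owner can later wait on.
    func (w *worker) Go(fn func()) {
        w.wg.Add(1)
        go func() {
            defer w.wg.Done()
            fn()
        }()
    }

    // Wait blocks until every scheduled fn has returned.
    func (w *worker) Wait() { w.wg.Wait() }

    func main() {
        var global worker

        // A handler scheduled the way EventService.TriggerEvent does below.
        global.Go(func() {
            time.Sleep(50 * time.Millisecond)
            fmt.Println("event persisted")
        })

        // Shutdown path: drain before exiting so in-flight events are not dropped.
        global.Wait()
    }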
@@ -39,12 +40,12 @@ type EventHandler interface { // TriggerEvent triggers an event. func (s *EventService) TriggerEvent(ctx context.Context, event Event) { - go func() { + workers.Global.Go(func() { err := s.triggerEvent(ctx, event) if err != nil { slog.Error("failed to trigger event", "error", err) } - }() + }) } // triggerEvent triggers an event synchronously. diff --git a/internal/workers/workers.go b/internal/workers/workers.go new file mode 100644 index 0000000..45c42c1 --- /dev/null +++ b/internal/workers/workers.go @@ -0,0 +1,28 @@ +package workers + +import "sync" + +var Global = NewWorker() + +type Worker struct { + wg *sync.WaitGroup +} + +func NewWorker() *Worker { + return &Worker{ + wg: &sync.WaitGroup{}, + } +} + +func (w *Worker) Go(fn func()) { + w.wg.Add(1) + + go func() { + defer w.wg.Done() + fn() + }() +} + +func (w *Worker) Wait() { + w.wg.Wait() +} From 2bda1135627495954220ff1c47fa6428ee804d38 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:33:26 +0800 Subject: [PATCH 10/14] fix: use background context to trigger event --- internal/events/events.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/events/events.go b/internal/events/events.go index bd5551b..0f6e6ed 100644 --- a/internal/events/events.go +++ b/internal/events/events.go @@ -41,7 +41,7 @@ type EventHandler interface { // TriggerEvent triggers an event. func (s *EventService) TriggerEvent(ctx context.Context, event Event) { workers.Global.Go(func() { - err := s.triggerEvent(ctx, event) + err := s.triggerEvent(context.Background(), event) if err != nil { slog.Error("failed to trigger event", "error", err) } From ce02cc8519cd1a2092afa3b5957b540072484ec7 Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Wed, 17 Sep 2025 23:37:33 +0800 Subject: [PATCH 11/14] refactor: use only "granted_at" for points --- ent/client.go | 6 +- ent/gql_collection.go | 20 +-- ent/gql_mutation_input.go | 8 ++ ent/gql_where_input.go | 144 +++++--------------- ent/internal/schema.go | 2 +- ent/migrate/schema.go | 6 +- ent/mutation.go | 233 ++++++++------------------------- ent/points.go | 46 ++----- ent/points/points.go | 48 ++----- ent/points/where.go | 190 +++++++-------------------- ent/points_create.go | 102 ++++----------- ent/points_query.go | 8 +- ent/points_update.go | 168 +++++------------------- ent/runtime/runtime.go | 21 +-- ent/schema/points.go | 10 +- graph/ent.graphqls | 52 ++------ internal/events/points.go | 4 +- internal/events/points_test.go | 28 ++-- 18 files changed, 273 insertions(+), 823 deletions(-) diff --git a/ent/client.go b/ent/client.go index 1870083..1288644 100644 --- a/ent/client.go +++ b/ent/client.go @@ -828,14 +828,12 @@ func (c *PointsClient) QueryUser(_m *Points) *UserQuery { // Hooks returns the client hooks. func (c *PointsClient) Hooks() []Hook { - hooks := c.hooks.Points - return append(hooks[:len(hooks):len(hooks)], points.Hooks[:]...) + return c.hooks.Points } // Interceptors returns the client interceptors. func (c *PointsClient) Interceptors() []Interceptor { - inters := c.inters.Points - return append(inters[:len(inters):len(inters)], points.Interceptors[:]...) 
+ return c.inters.Points } func (c *PointsClient) mutate(ctx context.Context, m *PointsMutation) (Value, error) { diff --git a/ent/gql_collection.go b/ent/gql_collection.go index 75425ec..8f66fc3 100644 --- a/ent/gql_collection.go +++ b/ent/gql_collection.go @@ -340,26 +340,16 @@ func (_q *PointsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr return err } _q.withUser = query - case "createdAt": - if _, ok := fieldSeen[points.FieldCreatedAt]; !ok { - selectedFields = append(selectedFields, points.FieldCreatedAt) - fieldSeen[points.FieldCreatedAt] = struct{}{} - } - case "updatedAt": - if _, ok := fieldSeen[points.FieldUpdatedAt]; !ok { - selectedFields = append(selectedFields, points.FieldUpdatedAt) - fieldSeen[points.FieldUpdatedAt] = struct{}{} - } - case "deletedAt": - if _, ok := fieldSeen[points.FieldDeletedAt]; !ok { - selectedFields = append(selectedFields, points.FieldDeletedAt) - fieldSeen[points.FieldDeletedAt] = struct{}{} - } case "points": if _, ok := fieldSeen[points.FieldPoints]; !ok { selectedFields = append(selectedFields, points.FieldPoints) fieldSeen[points.FieldPoints] = struct{}{} } + case "grantedAt": + if _, ok := fieldSeen[points.FieldGrantedAt]; !ok { + selectedFields = append(selectedFields, points.FieldGrantedAt) + fieldSeen[points.FieldGrantedAt] = struct{}{} + } case "description": if _, ok := fieldSeen[points.FieldDescription]; !ok { selectedFields = append(selectedFields, points.FieldDescription) diff --git a/ent/gql_mutation_input.go b/ent/gql_mutation_input.go index 4b6b6e9..a1c0541 100644 --- a/ent/gql_mutation_input.go +++ b/ent/gql_mutation_input.go @@ -221,6 +221,7 @@ func (c *GroupUpdateOne) SetInput(i UpdateGroupInput) *GroupUpdateOne { // CreatePointsInput represents a mutation input for creating pointsslice. type CreatePointsInput struct { Points *int + GrantedAt *time.Time Description *string UserID int } @@ -230,6 +231,9 @@ func (i *CreatePointsInput) Mutate(m *PointsMutation) { if v := i.Points; v != nil { m.SetPoints(*v) } + if v := i.GrantedAt; v != nil { + m.SetGrantedAt(*v) + } if v := i.Description; v != nil { m.SetDescription(*v) } @@ -245,6 +249,7 @@ func (c *PointsCreate) SetInput(i CreatePointsInput) *PointsCreate { // UpdatePointsInput represents a mutation input for updating pointsslice. type UpdatePointsInput struct { Points *int + GrantedAt *time.Time ClearDescription bool Description *string UserID *int @@ -255,6 +260,9 @@ func (i *UpdatePointsInput) Mutate(m *PointsMutation) { if v := i.Points; v != nil { m.SetPoints(*v) } + if v := i.GrantedAt; v != nil { + m.SetGrantedAt(*v) + } if i.ClearDescription { m.ClearDescription() } diff --git a/ent/gql_where_input.go b/ent/gql_where_input.go index 88b7da4..75ee605 100644 --- a/ent/gql_where_input.go +++ b/ent/gql_where_input.go @@ -1028,38 +1028,6 @@ type PointsWhereInput struct { IDLT *int `json:"idLT,omitempty"` IDLTE *int `json:"idLTE,omitempty"` - // "created_at" field predicates. - CreatedAt *time.Time `json:"createdAt,omitempty"` - CreatedAtNEQ *time.Time `json:"createdAtNEQ,omitempty"` - CreatedAtIn []time.Time `json:"createdAtIn,omitempty"` - CreatedAtNotIn []time.Time `json:"createdAtNotIn,omitempty"` - CreatedAtGT *time.Time `json:"createdAtGT,omitempty"` - CreatedAtGTE *time.Time `json:"createdAtGTE,omitempty"` - CreatedAtLT *time.Time `json:"createdAtLT,omitempty"` - CreatedAtLTE *time.Time `json:"createdAtLTE,omitempty"` - - // "updated_at" field predicates. 
- UpdatedAt *time.Time `json:"updatedAt,omitempty"` - UpdatedAtNEQ *time.Time `json:"updatedAtNEQ,omitempty"` - UpdatedAtIn []time.Time `json:"updatedAtIn,omitempty"` - UpdatedAtNotIn []time.Time `json:"updatedAtNotIn,omitempty"` - UpdatedAtGT *time.Time `json:"updatedAtGT,omitempty"` - UpdatedAtGTE *time.Time `json:"updatedAtGTE,omitempty"` - UpdatedAtLT *time.Time `json:"updatedAtLT,omitempty"` - UpdatedAtLTE *time.Time `json:"updatedAtLTE,omitempty"` - - // "deleted_at" field predicates. - DeletedAt *time.Time `json:"deletedAt,omitempty"` - DeletedAtNEQ *time.Time `json:"deletedAtNEQ,omitempty"` - DeletedAtIn []time.Time `json:"deletedAtIn,omitempty"` - DeletedAtNotIn []time.Time `json:"deletedAtNotIn,omitempty"` - DeletedAtGT *time.Time `json:"deletedAtGT,omitempty"` - DeletedAtGTE *time.Time `json:"deletedAtGTE,omitempty"` - DeletedAtLT *time.Time `json:"deletedAtLT,omitempty"` - DeletedAtLTE *time.Time `json:"deletedAtLTE,omitempty"` - DeletedAtIsNil bool `json:"deletedAtIsNil,omitempty"` - DeletedAtNotNil bool `json:"deletedAtNotNil,omitempty"` - // "points" field predicates. Points *int `json:"points,omitempty"` PointsNEQ *int `json:"pointsNEQ,omitempty"` @@ -1070,6 +1038,16 @@ type PointsWhereInput struct { PointsLT *int `json:"pointsLT,omitempty"` PointsLTE *int `json:"pointsLTE,omitempty"` + // "granted_at" field predicates. + GrantedAt *time.Time `json:"grantedAt,omitempty"` + GrantedAtNEQ *time.Time `json:"grantedAtNEQ,omitempty"` + GrantedAtIn []time.Time `json:"grantedAtIn,omitempty"` + GrantedAtNotIn []time.Time `json:"grantedAtNotIn,omitempty"` + GrantedAtGT *time.Time `json:"grantedAtGT,omitempty"` + GrantedAtGTE *time.Time `json:"grantedAtGTE,omitempty"` + GrantedAtLT *time.Time `json:"grantedAtLT,omitempty"` + GrantedAtLTE *time.Time `json:"grantedAtLTE,omitempty"` + // "description" field predicates. 
Description *string `json:"description,omitempty"` DescriptionNEQ *string `json:"descriptionNEQ,omitempty"` @@ -1187,84 +1165,6 @@ func (i *PointsWhereInput) P() (predicate.Points, error) { if i.IDLTE != nil { predicates = append(predicates, points.IDLTE(*i.IDLTE)) } - if i.CreatedAt != nil { - predicates = append(predicates, points.CreatedAtEQ(*i.CreatedAt)) - } - if i.CreatedAtNEQ != nil { - predicates = append(predicates, points.CreatedAtNEQ(*i.CreatedAtNEQ)) - } - if len(i.CreatedAtIn) > 0 { - predicates = append(predicates, points.CreatedAtIn(i.CreatedAtIn...)) - } - if len(i.CreatedAtNotIn) > 0 { - predicates = append(predicates, points.CreatedAtNotIn(i.CreatedAtNotIn...)) - } - if i.CreatedAtGT != nil { - predicates = append(predicates, points.CreatedAtGT(*i.CreatedAtGT)) - } - if i.CreatedAtGTE != nil { - predicates = append(predicates, points.CreatedAtGTE(*i.CreatedAtGTE)) - } - if i.CreatedAtLT != nil { - predicates = append(predicates, points.CreatedAtLT(*i.CreatedAtLT)) - } - if i.CreatedAtLTE != nil { - predicates = append(predicates, points.CreatedAtLTE(*i.CreatedAtLTE)) - } - if i.UpdatedAt != nil { - predicates = append(predicates, points.UpdatedAtEQ(*i.UpdatedAt)) - } - if i.UpdatedAtNEQ != nil { - predicates = append(predicates, points.UpdatedAtNEQ(*i.UpdatedAtNEQ)) - } - if len(i.UpdatedAtIn) > 0 { - predicates = append(predicates, points.UpdatedAtIn(i.UpdatedAtIn...)) - } - if len(i.UpdatedAtNotIn) > 0 { - predicates = append(predicates, points.UpdatedAtNotIn(i.UpdatedAtNotIn...)) - } - if i.UpdatedAtGT != nil { - predicates = append(predicates, points.UpdatedAtGT(*i.UpdatedAtGT)) - } - if i.UpdatedAtGTE != nil { - predicates = append(predicates, points.UpdatedAtGTE(*i.UpdatedAtGTE)) - } - if i.UpdatedAtLT != nil { - predicates = append(predicates, points.UpdatedAtLT(*i.UpdatedAtLT)) - } - if i.UpdatedAtLTE != nil { - predicates = append(predicates, points.UpdatedAtLTE(*i.UpdatedAtLTE)) - } - if i.DeletedAt != nil { - predicates = append(predicates, points.DeletedAtEQ(*i.DeletedAt)) - } - if i.DeletedAtNEQ != nil { - predicates = append(predicates, points.DeletedAtNEQ(*i.DeletedAtNEQ)) - } - if len(i.DeletedAtIn) > 0 { - predicates = append(predicates, points.DeletedAtIn(i.DeletedAtIn...)) - } - if len(i.DeletedAtNotIn) > 0 { - predicates = append(predicates, points.DeletedAtNotIn(i.DeletedAtNotIn...)) - } - if i.DeletedAtGT != nil { - predicates = append(predicates, points.DeletedAtGT(*i.DeletedAtGT)) - } - if i.DeletedAtGTE != nil { - predicates = append(predicates, points.DeletedAtGTE(*i.DeletedAtGTE)) - } - if i.DeletedAtLT != nil { - predicates = append(predicates, points.DeletedAtLT(*i.DeletedAtLT)) - } - if i.DeletedAtLTE != nil { - predicates = append(predicates, points.DeletedAtLTE(*i.DeletedAtLTE)) - } - if i.DeletedAtIsNil { - predicates = append(predicates, points.DeletedAtIsNil()) - } - if i.DeletedAtNotNil { - predicates = append(predicates, points.DeletedAtNotNil()) - } if i.Points != nil { predicates = append(predicates, points.PointsEQ(*i.Points)) } @@ -1289,6 +1189,30 @@ func (i *PointsWhereInput) P() (predicate.Points, error) { if i.PointsLTE != nil { predicates = append(predicates, points.PointsLTE(*i.PointsLTE)) } + if i.GrantedAt != nil { + predicates = append(predicates, points.GrantedAtEQ(*i.GrantedAt)) + } + if i.GrantedAtNEQ != nil { + predicates = append(predicates, points.GrantedAtNEQ(*i.GrantedAtNEQ)) + } + if len(i.GrantedAtIn) > 0 { + predicates = append(predicates, points.GrantedAtIn(i.GrantedAtIn...)) + } + if len(i.GrantedAtNotIn) > 0 { 
+ predicates = append(predicates, points.GrantedAtNotIn(i.GrantedAtNotIn...)) + } + if i.GrantedAtGT != nil { + predicates = append(predicates, points.GrantedAtGT(*i.GrantedAtGT)) + } + if i.GrantedAtGTE != nil { + predicates = append(predicates, points.GrantedAtGTE(*i.GrantedAtGTE)) + } + if i.GrantedAtLT != nil { + predicates = append(predicates, points.GrantedAtLT(*i.GrantedAtLT)) + } + if i.GrantedAtLTE != nil { + predicates = append(predicates, points.GrantedAtLTE(*i.GrantedAtLTE)) + } if i.Description != nil { predicates = append(predicates, points.DescriptionEQ(*i.Description)) } diff --git a/ent/internal/schema.go b/ent/internal/schema.go index 691ecea..16bcaeb 100644 --- a/ent/internal/schema.go +++ b/ent/internal/schema.go @@ -6,4 +6,4 @@ // Package internal holds a loadable version of the latest schema. package internal -const Schema = "{\"Schema\":\"github.com/database-playground/backend-v2/ent/schema\",\"Package\":\"github.com/database-playground/backend-v2/ent\",\"Schemas\":[{\"name\":\"Database\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"questions\",\"type\":\"Question\"}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"schema\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"validators\":1,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"SQL schema\"},{\"name\":\"relation_figure\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"validators\":1,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"relation figure\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"database:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":12884901888}}},{\"name\":\"Events\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"field\":\"user_id\",\"ref_name\":\"events\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"user_id\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"type\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"triggered_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"payload\",\"type\":{\"Type\":3,\"Ident\":\"map[string]interface {}\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"map[string]interface 
{}\",\"Kind\":21,\"PkgPath\":\"\",\"Methods\":{}}},\"optional\":true,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0}}],\"indexes\":[{\"fields\":[\"type\"]},{\"fields\":[\"type\",\"user_id\"]}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":21474836480}}},{\"name\":\"Group\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"scope_sets\",\"type\":\"ScopeSet\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"group:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":4294967296}}},{\"name\":\"Points\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"ref_name\":\"points\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"points\",\"type\":{\"Type\":12,\"Id
ent\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_value\":0,\"default_kind\":2,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":25769803776}}},{\"name\":\"Question\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"database\",\"type\":\"Database\",\"ref_name\":\"questions\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"category\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"CATEGORY\"}},\"comment\":\"Question category, e.g. 'query'\"},{\"name\":\"difficulty\",\"type\":{\"Type\":6,\"Ident\":\"question.Difficulty\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"enums\":[{\"N\":\"Unspecified\",\"V\":\"unspecified\"},{\"N\":\"Easy\",\"V\":\"easy\"},{\"N\":\"Medium\",\"V\":\"medium\"},{\"N\":\"Hard\",\"V\":\"hard\"}],\"default\":true,\"default_value\":\"medium\",\"default_kind\":24,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"DIFFICULTY\"}},\"comment\":\"Question difficulty, e.g. 
'easy'\"},{\"name\":\"title\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question title\"},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question stem\"},{\"name\":\"reference_answer\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":4,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"answer:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"comment\":\"Reference answer\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"question:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":17179869184}}},{\"name\":\"ScopeSet\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"groups\",\"type\":\"Group\",\"ref_name\":\"scope_sets\",\"inverse\":true}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"scopes\",\"type\":{\"Type\":3,\"Ident\":\"[]string\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"[]string\",\"Kind\":23,\"PkgPath\":\"\",\"Methods\":{}}},\"default\":true,\"default_value\":[],\"default_kind\":23,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"scopeset:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":8589934592}}},{\"name\":\"User\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"group\",\"type\":\"Group\",\"unique\":true,\"required\":true},{\"name\":\"points\",\"type\":\"Points\"},{\"name\":\"events\",\"type\":\"Events\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"
deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"email\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"EMAIL\"}}},{\"name\":\"avatar\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":0}}}],\"Features\":[\"namedges\",\"intercept\",\"schema/snapshot\",\"sql/globalid\"]}" +const Schema = "{\"Schema\":\"github.com/database-playground/backend-v2/ent/schema\",\"Package\":\"github.com/database-playground/backend-v2/ent\",\"Schemas\":[{\"name\":\"Database\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"questions\",\"type\":\"Question\"}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"schema\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"validators\":1,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"SQL schema\"},{\"name\":\"relation_figure\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"validators\":1,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"relation 
figure\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"database:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":12884901888}}},{\"name\":\"Events\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"field\":\"user_id\",\"ref_name\":\"events\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"user_id\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"type\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"triggered_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"payload\",\"type\":{\"Type\":3,\"Ident\":\"map[string]interface {}\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"map[string]interface {}\",\"Kind\":21,\"PkgPath\":\"\",\"Methods\":{}}},\"optional\":true,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0}}],\"indexes\":[{\"fields\":[\"type\"]},{\"fields\":[\"type\",\"user_id\"]}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":21474836480}}},{\"name\":\"Group\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"scope_sets\",\"type\":\"ScopeSet\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"En
tGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"group:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":4294967296}}},{\"name\":\"Points\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"ref_name\":\"points\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"points\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_value\":0,\"default_kind\":2,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"granted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":25769803776}}},{\"name\":\"Question\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"database\",\"type\":\"Database\",\"ref_name\":\"questions\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"category\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"CATEGORY\"}},\"comment\":\"Question category, e.g. 'query'\"},{\"name\":\"difficulty\",\"type\":{\"Type\":6,\"Ident\":\"question.Difficulty\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"enums\":[{\"N\":\"Unspecified\",\"V\":\"unspecified\"},{\"N\":\"Easy\",\"V\":\"easy\"},{\"N\":\"Medium\",\"V\":\"medium\"},{\"N\":\"Hard\",\"V\":\"hard\"}],\"default\":true,\"default_value\":\"medium\",\"default_kind\":24,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"DIFFICULTY\"}},\"comment\":\"Question difficulty, e.g. 
'easy'\"},{\"name\":\"title\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question title\"},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question stem\"},{\"name\":\"reference_answer\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":4,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"answer:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"comment\":\"Reference answer\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"question:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":17179869184}}},{\"name\":\"ScopeSet\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"groups\",\"type\":\"Group\",\"ref_name\":\"scope_sets\",\"inverse\":true}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"scopes\",\"type\":{\"Type\":3,\"Ident\":\"[]string\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"[]string\",\"Kind\":23,\"PkgPath\":\"\",\"Methods\":{}}},\"default\":true,\"default_value\":[],\"default_kind\":23,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"scopeset:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":8589934592}}},{\"name\":\"User\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"group\",\"type\":\"Group\",\"unique\":true,\"required\":true},{\"name\":\"points\",\"type\":\"Points\"},{\"name\":\"events\",\"type\":\"Events\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"
deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"email\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"EMAIL\"}}},{\"name\":\"avatar\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":0}}}],\"Features\":[\"namedges\",\"intercept\",\"schema/snapshot\",\"sql/globalid\"]}" diff --git a/ent/migrate/schema.go b/ent/migrate/schema.go index b499624..4d0ee68 100644 --- a/ent/migrate/schema.go +++ b/ent/migrate/schema.go @@ -75,10 +75,8 @@ var ( // PointsColumns holds the columns for the "points" table. PointsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, - {Name: "created_at", Type: field.TypeTime}, - {Name: "updated_at", Type: field.TypeTime}, - {Name: "deleted_at", Type: field.TypeTime, Nullable: true}, {Name: "points", Type: field.TypeInt, Default: 0}, + {Name: "granted_at", Type: field.TypeTime}, {Name: "description", Type: field.TypeString, Nullable: true}, {Name: "user_points", Type: field.TypeInt}, } @@ -90,7 +88,7 @@ var ( ForeignKeys: []*schema.ForeignKey{ { Symbol: "points_users_points", - Columns: []*schema.Column{PointsColumns[6]}, + Columns: []*schema.Column{PointsColumns[4]}, RefColumns: []*schema.Column{UsersColumns[0]}, OnDelete: schema.NoAction, }, diff --git a/ent/mutation.go b/ent/mutation.go index b3decf2..5653a8a 100644 --- a/ent/mutation.go +++ b/ent/mutation.go @@ -1891,11 +1891,9 @@ type PointsMutation struct { op Op typ string id *int - created_at *time.Time - updated_at *time.Time - deleted_at *time.Time points *int addpoints *int + granted_at *time.Time description *string clearedFields map[string]struct{} user *int @@ -2003,127 +2001,6 @@ func (m *PointsMutation) IDs(ctx context.Context) ([]int, error) { } } -// SetCreatedAt sets the "created_at" field. -func (m *PointsMutation) SetCreatedAt(t time.Time) { - m.created_at = &t -} - -// CreatedAt returns the value of the "created_at" field in the mutation. -func (m *PointsMutation) CreatedAt() (r time.Time, exists bool) { - v := m.created_at - if v == nil { - return - } - return *v, true -} - -// OldCreatedAt returns the old "created_at" field's value of the Points entity. -// If the Points object wasn't provided to the builder, the object is fetched from the database. 
-// An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *PointsMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) { - if !m.op.Is(OpUpdateOne) { - return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations") - } - if m.id == nil || m.oldValue == nil { - return v, errors.New("OldCreatedAt requires an ID field in the mutation") - } - oldValue, err := m.oldValue(ctx) - if err != nil { - return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err) - } - return oldValue.CreatedAt, nil -} - -// ResetCreatedAt resets all changes to the "created_at" field. -func (m *PointsMutation) ResetCreatedAt() { - m.created_at = nil -} - -// SetUpdatedAt sets the "updated_at" field. -func (m *PointsMutation) SetUpdatedAt(t time.Time) { - m.updated_at = &t -} - -// UpdatedAt returns the value of the "updated_at" field in the mutation. -func (m *PointsMutation) UpdatedAt() (r time.Time, exists bool) { - v := m.updated_at - if v == nil { - return - } - return *v, true -} - -// OldUpdatedAt returns the old "updated_at" field's value of the Points entity. -// If the Points object wasn't provided to the builder, the object is fetched from the database. -// An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *PointsMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) { - if !m.op.Is(OpUpdateOne) { - return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations") - } - if m.id == nil || m.oldValue == nil { - return v, errors.New("OldUpdatedAt requires an ID field in the mutation") - } - oldValue, err := m.oldValue(ctx) - if err != nil { - return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err) - } - return oldValue.UpdatedAt, nil -} - -// ResetUpdatedAt resets all changes to the "updated_at" field. -func (m *PointsMutation) ResetUpdatedAt() { - m.updated_at = nil -} - -// SetDeletedAt sets the "deleted_at" field. -func (m *PointsMutation) SetDeletedAt(t time.Time) { - m.deleted_at = &t -} - -// DeletedAt returns the value of the "deleted_at" field in the mutation. -func (m *PointsMutation) DeletedAt() (r time.Time, exists bool) { - v := m.deleted_at - if v == nil { - return - } - return *v, true -} - -// OldDeletedAt returns the old "deleted_at" field's value of the Points entity. -// If the Points object wasn't provided to the builder, the object is fetched from the database. -// An error is returned if the mutation operation is not UpdateOne, or the database query fails. -func (m *PointsMutation) OldDeletedAt(ctx context.Context) (v time.Time, err error) { - if !m.op.Is(OpUpdateOne) { - return v, errors.New("OldDeletedAt is only allowed on UpdateOne operations") - } - if m.id == nil || m.oldValue == nil { - return v, errors.New("OldDeletedAt requires an ID field in the mutation") - } - oldValue, err := m.oldValue(ctx) - if err != nil { - return v, fmt.Errorf("querying old value for OldDeletedAt: %w", err) - } - return oldValue.DeletedAt, nil -} - -// ClearDeletedAt clears the value of the "deleted_at" field. -func (m *PointsMutation) ClearDeletedAt() { - m.deleted_at = nil - m.clearedFields[points.FieldDeletedAt] = struct{}{} -} - -// DeletedAtCleared returns if the "deleted_at" field was cleared in this mutation. -func (m *PointsMutation) DeletedAtCleared() bool { - _, ok := m.clearedFields[points.FieldDeletedAt] - return ok -} - -// ResetDeletedAt resets all changes to the "deleted_at" field. 
-func (m *PointsMutation) ResetDeletedAt() { - m.deleted_at = nil - delete(m.clearedFields, points.FieldDeletedAt) -} - // SetPoints sets the "points" field. func (m *PointsMutation) SetPoints(i int) { m.points = &i @@ -2180,6 +2057,42 @@ func (m *PointsMutation) ResetPoints() { m.addpoints = nil } +// SetGrantedAt sets the "granted_at" field. +func (m *PointsMutation) SetGrantedAt(t time.Time) { + m.granted_at = &t +} + +// GrantedAt returns the value of the "granted_at" field in the mutation. +func (m *PointsMutation) GrantedAt() (r time.Time, exists bool) { + v := m.granted_at + if v == nil { + return + } + return *v, true +} + +// OldGrantedAt returns the old "granted_at" field's value of the Points entity. +// If the Points object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *PointsMutation) OldGrantedAt(ctx context.Context) (v time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldGrantedAt is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldGrantedAt requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldGrantedAt: %w", err) + } + return oldValue.GrantedAt, nil +} + +// ResetGrantedAt resets all changes to the "granted_at" field. +func (m *PointsMutation) ResetGrantedAt() { + m.granted_at = nil +} + // SetDescription sets the "description" field. func (m *PointsMutation) SetDescription(s string) { m.description = &s @@ -2302,19 +2215,13 @@ func (m *PointsMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *PointsMutation) Fields() []string { - fields := make([]string, 0, 5) - if m.created_at != nil { - fields = append(fields, points.FieldCreatedAt) - } - if m.updated_at != nil { - fields = append(fields, points.FieldUpdatedAt) - } - if m.deleted_at != nil { - fields = append(fields, points.FieldDeletedAt) - } + fields := make([]string, 0, 3) if m.points != nil { fields = append(fields, points.FieldPoints) } + if m.granted_at != nil { + fields = append(fields, points.FieldGrantedAt) + } if m.description != nil { fields = append(fields, points.FieldDescription) } @@ -2326,14 +2233,10 @@ func (m *PointsMutation) Fields() []string { // schema. func (m *PointsMutation) Field(name string) (ent.Value, bool) { switch name { - case points.FieldCreatedAt: - return m.CreatedAt() - case points.FieldUpdatedAt: - return m.UpdatedAt() - case points.FieldDeletedAt: - return m.DeletedAt() case points.FieldPoints: return m.Points() + case points.FieldGrantedAt: + return m.GrantedAt() case points.FieldDescription: return m.Description() } @@ -2345,14 +2248,10 @@ func (m *PointsMutation) Field(name string) (ent.Value, bool) { // database failed. func (m *PointsMutation) OldField(ctx context.Context, name string) (ent.Value, error) { switch name { - case points.FieldCreatedAt: - return m.OldCreatedAt(ctx) - case points.FieldUpdatedAt: - return m.OldUpdatedAt(ctx) - case points.FieldDeletedAt: - return m.OldDeletedAt(ctx) case points.FieldPoints: return m.OldPoints(ctx) + case points.FieldGrantedAt: + return m.OldGrantedAt(ctx) case points.FieldDescription: return m.OldDescription(ctx) } @@ -2364,33 +2263,19 @@ func (m *PointsMutation) OldField(ctx context.Context, name string) (ent.Value, // type. 
func (m *PointsMutation) SetField(name string, value ent.Value) error { switch name { - case points.FieldCreatedAt: - v, ok := value.(time.Time) - if !ok { - return fmt.Errorf("unexpected type %T for field %s", value, name) - } - m.SetCreatedAt(v) - return nil - case points.FieldUpdatedAt: - v, ok := value.(time.Time) + case points.FieldPoints: + v, ok := value.(int) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } - m.SetUpdatedAt(v) + m.SetPoints(v) return nil - case points.FieldDeletedAt: + case points.FieldGrantedAt: v, ok := value.(time.Time) if !ok { return fmt.Errorf("unexpected type %T for field %s", value, name) } - m.SetDeletedAt(v) - return nil - case points.FieldPoints: - v, ok := value.(int) - if !ok { - return fmt.Errorf("unexpected type %T for field %s", value, name) - } - m.SetPoints(v) + m.SetGrantedAt(v) return nil case points.FieldDescription: v, ok := value.(string) @@ -2444,9 +2329,6 @@ func (m *PointsMutation) AddField(name string, value ent.Value) error { // mutation. func (m *PointsMutation) ClearedFields() []string { var fields []string - if m.FieldCleared(points.FieldDeletedAt) { - fields = append(fields, points.FieldDeletedAt) - } if m.FieldCleared(points.FieldDescription) { fields = append(fields, points.FieldDescription) } @@ -2464,9 +2346,6 @@ func (m *PointsMutation) FieldCleared(name string) bool { // error if the field is not defined in the schema. func (m *PointsMutation) ClearField(name string) error { switch name { - case points.FieldDeletedAt: - m.ClearDeletedAt() - return nil case points.FieldDescription: m.ClearDescription() return nil @@ -2478,18 +2357,12 @@ func (m *PointsMutation) ClearField(name string) error { // It returns an error if the field is not defined in the schema. func (m *PointsMutation) ResetField(name string) error { switch name { - case points.FieldCreatedAt: - m.ResetCreatedAt() - return nil - case points.FieldUpdatedAt: - m.ResetUpdatedAt() - return nil - case points.FieldDeletedAt: - m.ResetDeletedAt() - return nil case points.FieldPoints: m.ResetPoints() return nil + case points.FieldGrantedAt: + m.ResetGrantedAt() + return nil case points.FieldDescription: m.ResetDescription() return nil diff --git a/ent/points.go b/ent/points.go index afefd91..2ad7cdc 100644 --- a/ent/points.go +++ b/ent/points.go @@ -18,14 +18,10 @@ type Points struct { config `json:"-"` // ID of the ent. ID int `json:"id,omitempty"` - // CreatedAt holds the value of the "created_at" field. - CreatedAt time.Time `json:"created_at,omitempty"` - // UpdatedAt holds the value of the "updated_at" field. - UpdatedAt time.Time `json:"updated_at,omitempty"` - // DeletedAt holds the value of the "deleted_at" field. - DeletedAt time.Time `json:"deleted_at,omitempty"` // Points holds the value of the "points" field. Points int `json:"points,omitempty"` + // GrantedAt holds the value of the "granted_at" field. + GrantedAt time.Time `json:"granted_at,omitempty"` // Description holds the value of the "description" field. Description string `json:"description,omitempty"` // Edges holds the relations/edges for other nodes in the graph. 
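(Illustrative sketch, not part of the patch.) With the created_at/updated_at/deleted_at mixin dropped, callers record the grant time through granted_at directly. A minimal usage sketch of the regenerated builders and predicates, assuming an open *ent.Client, an existing user ID, and that the unique user edge exposes SetUserID as in the generated mutation input:

    // Not part of the patch; illustrative use of the Points API after this refactor.
    package example

    import (
        "context"
        "log"
        "time"

        "github.com/database-playground/backend-v2/ent"
        "github.com/database-playground/backend-v2/ent/points"
    )

    func grantAndList(ctx context.Context, client *ent.Client, userID int) error {
        // granted_at replaces the old timestamp trio; set it explicitly
        // or leave it to the schema default.
        _, err := client.Points.Create().
            SetPoints(10).
            SetGrantedAt(time.Now()).
            SetDescription("weekly login bonus").
            SetUserID(userID).
            Save(ctx)
        if err != nil {
            return err
        }

        // Query recent grants with the new granted_at predicate and ordering.
        recent, err := client.Points.Query().
            Where(points.GrantedAtGTE(time.Now().Add(-7 * 24 * time.Hour))).
            Order(points.ByGrantedAt()).
            All(ctx)
        if err != nil {
            return err
        }
        log.Printf("recent grants: %d", len(recent))
        return nil
    }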
@@ -66,7 +62,7 @@ func (*Points) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case points.FieldDescription: values[i] = new(sql.NullString) - case points.FieldCreatedAt, points.FieldUpdatedAt, points.FieldDeletedAt: + case points.FieldGrantedAt: values[i] = new(sql.NullTime) case points.ForeignKeys[0]: // user_points values[i] = new(sql.NullInt64) @@ -91,30 +87,18 @@ func (_m *Points) assignValues(columns []string, values []any) error { return fmt.Errorf("unexpected type %T for field id", value) } _m.ID = int(value.Int64) - case points.FieldCreatedAt: - if value, ok := values[i].(*sql.NullTime); !ok { - return fmt.Errorf("unexpected type %T for field created_at", values[i]) - } else if value.Valid { - _m.CreatedAt = value.Time - } - case points.FieldUpdatedAt: - if value, ok := values[i].(*sql.NullTime); !ok { - return fmt.Errorf("unexpected type %T for field updated_at", values[i]) - } else if value.Valid { - _m.UpdatedAt = value.Time - } - case points.FieldDeletedAt: - if value, ok := values[i].(*sql.NullTime); !ok { - return fmt.Errorf("unexpected type %T for field deleted_at", values[i]) - } else if value.Valid { - _m.DeletedAt = value.Time - } case points.FieldPoints: if value, ok := values[i].(*sql.NullInt64); !ok { return fmt.Errorf("unexpected type %T for field points", values[i]) } else if value.Valid { _m.Points = int(value.Int64) } + case points.FieldGrantedAt: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field granted_at", values[i]) + } else if value.Valid { + _m.GrantedAt = value.Time + } case points.FieldDescription: if value, ok := values[i].(*sql.NullString); !ok { return fmt.Errorf("unexpected type %T for field description", values[i]) @@ -169,18 +153,12 @@ func (_m *Points) String() string { var builder strings.Builder builder.WriteString("Points(") builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID)) - builder.WriteString("created_at=") - builder.WriteString(_m.CreatedAt.Format(time.ANSIC)) - builder.WriteString(", ") - builder.WriteString("updated_at=") - builder.WriteString(_m.UpdatedAt.Format(time.ANSIC)) - builder.WriteString(", ") - builder.WriteString("deleted_at=") - builder.WriteString(_m.DeletedAt.Format(time.ANSIC)) - builder.WriteString(", ") builder.WriteString("points=") builder.WriteString(fmt.Sprintf("%v", _m.Points)) builder.WriteString(", ") + builder.WriteString("granted_at=") + builder.WriteString(_m.GrantedAt.Format(time.ANSIC)) + builder.WriteString(", ") builder.WriteString("description=") builder.WriteString(_m.Description) builder.WriteByte(')') diff --git a/ent/points/points.go b/ent/points/points.go index 8cfeb33..0522601 100644 --- a/ent/points/points.go +++ b/ent/points/points.go @@ -5,7 +5,6 @@ package points import ( "time" - "entgo.io/ent" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" ) @@ -15,14 +14,10 @@ const ( Label = "points" // FieldID holds the string denoting the id field in the database. FieldID = "id" - // FieldCreatedAt holds the string denoting the created_at field in the database. - FieldCreatedAt = "created_at" - // FieldUpdatedAt holds the string denoting the updated_at field in the database. - FieldUpdatedAt = "updated_at" - // FieldDeletedAt holds the string denoting the deleted_at field in the database. - FieldDeletedAt = "deleted_at" // FieldPoints holds the string denoting the points field in the database. FieldPoints = "points" + // FieldGrantedAt holds the string denoting the granted_at field in the database. 
+ FieldGrantedAt = "granted_at" // FieldDescription holds the string denoting the description field in the database. FieldDescription = "description" // EdgeUser holds the string denoting the user edge name in mutations. @@ -41,10 +36,8 @@ const ( // Columns holds all SQL columns for points fields. var Columns = []string{ FieldID, - FieldCreatedAt, - FieldUpdatedAt, - FieldDeletedAt, FieldPoints, + FieldGrantedAt, FieldDescription, } @@ -69,22 +62,11 @@ func ValidColumn(column string) bool { return false } -// Note that the variables below are initialized by the runtime -// package on the initialization of the application. Therefore, -// it should be imported in the main as follows: -// -// import _ "github.com/database-playground/backend-v2/ent/runtime" var ( - Hooks [1]ent.Hook - Interceptors [1]ent.Interceptor - // DefaultCreatedAt holds the default value on creation for the "created_at" field. - DefaultCreatedAt func() time.Time - // DefaultUpdatedAt holds the default value on creation for the "updated_at" field. - DefaultUpdatedAt func() time.Time - // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field. - UpdateDefaultUpdatedAt func() time.Time // DefaultPoints holds the default value on creation for the "points" field. DefaultPoints int + // DefaultGrantedAt holds the default value on creation for the "granted_at" field. + DefaultGrantedAt func() time.Time ) // OrderOption defines the ordering options for the Points queries. @@ -95,26 +77,16 @@ func ByID(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldID, opts...).ToFunc() } -// ByCreatedAt orders the results by the created_at field. -func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption { - return sql.OrderByField(FieldCreatedAt, opts...).ToFunc() -} - -// ByUpdatedAt orders the results by the updated_at field. -func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption { - return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc() -} - -// ByDeletedAt orders the results by the deleted_at field. -func ByDeletedAt(opts ...sql.OrderTermOption) OrderOption { - return sql.OrderByField(FieldDeletedAt, opts...).ToFunc() -} - // ByPoints orders the results by the points field. func ByPoints(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldPoints, opts...).ToFunc() } +// ByGrantedAt orders the results by the granted_at field. +func ByGrantedAt(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldGrantedAt, opts...).ToFunc() +} + // ByDescription orders the results by the description field. func ByDescription(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldDescription, opts...).ToFunc() diff --git a/ent/points/where.go b/ent/points/where.go index a8dbbb5..6c5a6df 100644 --- a/ent/points/where.go +++ b/ent/points/where.go @@ -55,161 +55,21 @@ func IDLTE(id int) predicate.Points { return predicate.Points(sql.FieldLTE(FieldID, id)) } -// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ. -func CreatedAt(v time.Time) predicate.Points { - return predicate.Points(sql.FieldEQ(FieldCreatedAt, v)) -} - -// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ. -func UpdatedAt(v time.Time) predicate.Points { - return predicate.Points(sql.FieldEQ(FieldUpdatedAt, v)) -} - -// DeletedAt applies equality check predicate on the "deleted_at" field. It's identical to DeletedAtEQ. 
-func DeletedAt(v time.Time) predicate.Points { - return predicate.Points(sql.FieldEQ(FieldDeletedAt, v)) -} - // Points applies equality check predicate on the "points" field. It's identical to PointsEQ. func Points(v int) predicate.Points { return predicate.Points(sql.FieldEQ(FieldPoints, v)) } +// GrantedAt applies equality check predicate on the "granted_at" field. It's identical to GrantedAtEQ. +func GrantedAt(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldGrantedAt, v)) +} + // Description applies equality check predicate on the "description" field. It's identical to DescriptionEQ. func Description(v string) predicate.Points { return predicate.Points(sql.FieldEQ(FieldDescription, v)) } -// CreatedAtEQ applies the EQ predicate on the "created_at" field. -func CreatedAtEQ(v time.Time) predicate.Points { - return predicate.Points(sql.FieldEQ(FieldCreatedAt, v)) -} - -// CreatedAtNEQ applies the NEQ predicate on the "created_at" field. -func CreatedAtNEQ(v time.Time) predicate.Points { - return predicate.Points(sql.FieldNEQ(FieldCreatedAt, v)) -} - -// CreatedAtIn applies the In predicate on the "created_at" field. -func CreatedAtIn(vs ...time.Time) predicate.Points { - return predicate.Points(sql.FieldIn(FieldCreatedAt, vs...)) -} - -// CreatedAtNotIn applies the NotIn predicate on the "created_at" field. -func CreatedAtNotIn(vs ...time.Time) predicate.Points { - return predicate.Points(sql.FieldNotIn(FieldCreatedAt, vs...)) -} - -// CreatedAtGT applies the GT predicate on the "created_at" field. -func CreatedAtGT(v time.Time) predicate.Points { - return predicate.Points(sql.FieldGT(FieldCreatedAt, v)) -} - -// CreatedAtGTE applies the GTE predicate on the "created_at" field. -func CreatedAtGTE(v time.Time) predicate.Points { - return predicate.Points(sql.FieldGTE(FieldCreatedAt, v)) -} - -// CreatedAtLT applies the LT predicate on the "created_at" field. -func CreatedAtLT(v time.Time) predicate.Points { - return predicate.Points(sql.FieldLT(FieldCreatedAt, v)) -} - -// CreatedAtLTE applies the LTE predicate on the "created_at" field. -func CreatedAtLTE(v time.Time) predicate.Points { - return predicate.Points(sql.FieldLTE(FieldCreatedAt, v)) -} - -// UpdatedAtEQ applies the EQ predicate on the "updated_at" field. -func UpdatedAtEQ(v time.Time) predicate.Points { - return predicate.Points(sql.FieldEQ(FieldUpdatedAt, v)) -} - -// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field. -func UpdatedAtNEQ(v time.Time) predicate.Points { - return predicate.Points(sql.FieldNEQ(FieldUpdatedAt, v)) -} - -// UpdatedAtIn applies the In predicate on the "updated_at" field. -func UpdatedAtIn(vs ...time.Time) predicate.Points { - return predicate.Points(sql.FieldIn(FieldUpdatedAt, vs...)) -} - -// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field. -func UpdatedAtNotIn(vs ...time.Time) predicate.Points { - return predicate.Points(sql.FieldNotIn(FieldUpdatedAt, vs...)) -} - -// UpdatedAtGT applies the GT predicate on the "updated_at" field. -func UpdatedAtGT(v time.Time) predicate.Points { - return predicate.Points(sql.FieldGT(FieldUpdatedAt, v)) -} - -// UpdatedAtGTE applies the GTE predicate on the "updated_at" field. -func UpdatedAtGTE(v time.Time) predicate.Points { - return predicate.Points(sql.FieldGTE(FieldUpdatedAt, v)) -} - -// UpdatedAtLT applies the LT predicate on the "updated_at" field. 
-func UpdatedAtLT(v time.Time) predicate.Points { - return predicate.Points(sql.FieldLT(FieldUpdatedAt, v)) -} - -// UpdatedAtLTE applies the LTE predicate on the "updated_at" field. -func UpdatedAtLTE(v time.Time) predicate.Points { - return predicate.Points(sql.FieldLTE(FieldUpdatedAt, v)) -} - -// DeletedAtEQ applies the EQ predicate on the "deleted_at" field. -func DeletedAtEQ(v time.Time) predicate.Points { - return predicate.Points(sql.FieldEQ(FieldDeletedAt, v)) -} - -// DeletedAtNEQ applies the NEQ predicate on the "deleted_at" field. -func DeletedAtNEQ(v time.Time) predicate.Points { - return predicate.Points(sql.FieldNEQ(FieldDeletedAt, v)) -} - -// DeletedAtIn applies the In predicate on the "deleted_at" field. -func DeletedAtIn(vs ...time.Time) predicate.Points { - return predicate.Points(sql.FieldIn(FieldDeletedAt, vs...)) -} - -// DeletedAtNotIn applies the NotIn predicate on the "deleted_at" field. -func DeletedAtNotIn(vs ...time.Time) predicate.Points { - return predicate.Points(sql.FieldNotIn(FieldDeletedAt, vs...)) -} - -// DeletedAtGT applies the GT predicate on the "deleted_at" field. -func DeletedAtGT(v time.Time) predicate.Points { - return predicate.Points(sql.FieldGT(FieldDeletedAt, v)) -} - -// DeletedAtGTE applies the GTE predicate on the "deleted_at" field. -func DeletedAtGTE(v time.Time) predicate.Points { - return predicate.Points(sql.FieldGTE(FieldDeletedAt, v)) -} - -// DeletedAtLT applies the LT predicate on the "deleted_at" field. -func DeletedAtLT(v time.Time) predicate.Points { - return predicate.Points(sql.FieldLT(FieldDeletedAt, v)) -} - -// DeletedAtLTE applies the LTE predicate on the "deleted_at" field. -func DeletedAtLTE(v time.Time) predicate.Points { - return predicate.Points(sql.FieldLTE(FieldDeletedAt, v)) -} - -// DeletedAtIsNil applies the IsNil predicate on the "deleted_at" field. -func DeletedAtIsNil() predicate.Points { - return predicate.Points(sql.FieldIsNull(FieldDeletedAt)) -} - -// DeletedAtNotNil applies the NotNil predicate on the "deleted_at" field. -func DeletedAtNotNil() predicate.Points { - return predicate.Points(sql.FieldNotNull(FieldDeletedAt)) -} - // PointsEQ applies the EQ predicate on the "points" field. func PointsEQ(v int) predicate.Points { return predicate.Points(sql.FieldEQ(FieldPoints, v)) @@ -250,6 +110,46 @@ func PointsLTE(v int) predicate.Points { return predicate.Points(sql.FieldLTE(FieldPoints, v)) } +// GrantedAtEQ applies the EQ predicate on the "granted_at" field. +func GrantedAtEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldEQ(FieldGrantedAt, v)) +} + +// GrantedAtNEQ applies the NEQ predicate on the "granted_at" field. +func GrantedAtNEQ(v time.Time) predicate.Points { + return predicate.Points(sql.FieldNEQ(FieldGrantedAt, v)) +} + +// GrantedAtIn applies the In predicate on the "granted_at" field. +func GrantedAtIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldIn(FieldGrantedAt, vs...)) +} + +// GrantedAtNotIn applies the NotIn predicate on the "granted_at" field. +func GrantedAtNotIn(vs ...time.Time) predicate.Points { + return predicate.Points(sql.FieldNotIn(FieldGrantedAt, vs...)) +} + +// GrantedAtGT applies the GT predicate on the "granted_at" field. +func GrantedAtGT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGT(FieldGrantedAt, v)) +} + +// GrantedAtGTE applies the GTE predicate on the "granted_at" field. 
+func GrantedAtGTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldGTE(FieldGrantedAt, v)) +} + +// GrantedAtLT applies the LT predicate on the "granted_at" field. +func GrantedAtLT(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLT(FieldGrantedAt, v)) +} + +// GrantedAtLTE applies the LTE predicate on the "granted_at" field. +func GrantedAtLTE(v time.Time) predicate.Points { + return predicate.Points(sql.FieldLTE(FieldGrantedAt, v)) +} + // DescriptionEQ applies the EQ predicate on the "description" field. func DescriptionEQ(v string) predicate.Points { return predicate.Points(sql.FieldEQ(FieldDescription, v)) diff --git a/ent/points_create.go b/ent/points_create.go index 7214936..5cef83b 100644 --- a/ent/points_create.go +++ b/ent/points_create.go @@ -21,58 +21,30 @@ type PointsCreate struct { hooks []Hook } -// SetCreatedAt sets the "created_at" field. -func (_c *PointsCreate) SetCreatedAt(v time.Time) *PointsCreate { - _c.mutation.SetCreatedAt(v) - return _c -} - -// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. -func (_c *PointsCreate) SetNillableCreatedAt(v *time.Time) *PointsCreate { - if v != nil { - _c.SetCreatedAt(*v) - } - return _c -} - -// SetUpdatedAt sets the "updated_at" field. -func (_c *PointsCreate) SetUpdatedAt(v time.Time) *PointsCreate { - _c.mutation.SetUpdatedAt(v) +// SetPoints sets the "points" field. +func (_c *PointsCreate) SetPoints(v int) *PointsCreate { + _c.mutation.SetPoints(v) return _c } -// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil. -func (_c *PointsCreate) SetNillableUpdatedAt(v *time.Time) *PointsCreate { +// SetNillablePoints sets the "points" field if the given value is not nil. +func (_c *PointsCreate) SetNillablePoints(v *int) *PointsCreate { if v != nil { - _c.SetUpdatedAt(*v) + _c.SetPoints(*v) } return _c } -// SetDeletedAt sets the "deleted_at" field. -func (_c *PointsCreate) SetDeletedAt(v time.Time) *PointsCreate { - _c.mutation.SetDeletedAt(v) +// SetGrantedAt sets the "granted_at" field. +func (_c *PointsCreate) SetGrantedAt(v time.Time) *PointsCreate { + _c.mutation.SetGrantedAt(v) return _c } -// SetNillableDeletedAt sets the "deleted_at" field if the given value is not nil. -func (_c *PointsCreate) SetNillableDeletedAt(v *time.Time) *PointsCreate { +// SetNillableGrantedAt sets the "granted_at" field if the given value is not nil. +func (_c *PointsCreate) SetNillableGrantedAt(v *time.Time) *PointsCreate { if v != nil { - _c.SetDeletedAt(*v) - } - return _c -} - -// SetPoints sets the "points" field. -func (_c *PointsCreate) SetPoints(v int) *PointsCreate { - _c.mutation.SetPoints(v) - return _c -} - -// SetNillablePoints sets the "points" field if the given value is not nil. -func (_c *PointsCreate) SetNillablePoints(v *int) *PointsCreate { - if v != nil { - _c.SetPoints(*v) + _c.SetGrantedAt(*v) } return _c } @@ -109,9 +81,7 @@ func (_c *PointsCreate) Mutation() *PointsMutation { // Save creates the Points in the database. func (_c *PointsCreate) Save(ctx context.Context) (*Points, error) { - if err := _c.defaults(); err != nil { - return nil, err - } + _c.defaults() return withHooks(ctx, _c.sqlSave, _c.mutation, _c.hooks) } @@ -138,39 +108,25 @@ func (_c *PointsCreate) ExecX(ctx context.Context) { } // defaults sets the default values of the builder before save. 
-func (_c *PointsCreate) defaults() error { - if _, ok := _c.mutation.CreatedAt(); !ok { - if points.DefaultCreatedAt == nil { - return fmt.Errorf("ent: uninitialized points.DefaultCreatedAt (forgotten import ent/runtime?)") - } - v := points.DefaultCreatedAt() - _c.mutation.SetCreatedAt(v) - } - if _, ok := _c.mutation.UpdatedAt(); !ok { - if points.DefaultUpdatedAt == nil { - return fmt.Errorf("ent: uninitialized points.DefaultUpdatedAt (forgotten import ent/runtime?)") - } - v := points.DefaultUpdatedAt() - _c.mutation.SetUpdatedAt(v) - } +func (_c *PointsCreate) defaults() { if _, ok := _c.mutation.Points(); !ok { v := points.DefaultPoints _c.mutation.SetPoints(v) } - return nil + if _, ok := _c.mutation.GrantedAt(); !ok { + v := points.DefaultGrantedAt() + _c.mutation.SetGrantedAt(v) + } } // check runs all checks and user-defined validators on the builder. func (_c *PointsCreate) check() error { - if _, ok := _c.mutation.CreatedAt(); !ok { - return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Points.created_at"`)} - } - if _, ok := _c.mutation.UpdatedAt(); !ok { - return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Points.updated_at"`)} - } if _, ok := _c.mutation.Points(); !ok { return &ValidationError{Name: "points", err: errors.New(`ent: missing required field "Points.points"`)} } + if _, ok := _c.mutation.GrantedAt(); !ok { + return &ValidationError{Name: "granted_at", err: errors.New(`ent: missing required field "Points.granted_at"`)} + } if len(_c.mutation.UserIDs()) == 0 { return &ValidationError{Name: "user", err: errors.New(`ent: missing required edge "Points.user"`)} } @@ -200,22 +156,14 @@ func (_c *PointsCreate) createSpec() (*Points, *sqlgraph.CreateSpec) { _node = &Points{config: _c.config} _spec = sqlgraph.NewCreateSpec(points.Table, sqlgraph.NewFieldSpec(points.FieldID, field.TypeInt)) ) - if value, ok := _c.mutation.CreatedAt(); ok { - _spec.SetField(points.FieldCreatedAt, field.TypeTime, value) - _node.CreatedAt = value - } - if value, ok := _c.mutation.UpdatedAt(); ok { - _spec.SetField(points.FieldUpdatedAt, field.TypeTime, value) - _node.UpdatedAt = value - } - if value, ok := _c.mutation.DeletedAt(); ok { - _spec.SetField(points.FieldDeletedAt, field.TypeTime, value) - _node.DeletedAt = value - } if value, ok := _c.mutation.Points(); ok { _spec.SetField(points.FieldPoints, field.TypeInt, value) _node.Points = value } + if value, ok := _c.mutation.GrantedAt(); ok { + _spec.SetField(points.FieldGrantedAt, field.TypeTime, value) + _node.GrantedAt = value + } if value, ok := _c.mutation.Description(); ok { _spec.SetField(points.FieldDescription, field.TypeString, value) _node.Description = value diff --git a/ent/points_query.go b/ent/points_query.go index 0040ec8..f45084f 100644 --- a/ent/points_query.go +++ b/ent/points_query.go @@ -301,12 +301,12 @@ func (_q *PointsQuery) WithUser(opts ...func(*UserQuery)) *PointsQuery { // Example: // // var v []struct { -// CreatedAt time.Time `json:"created_at,omitempty"` +// Points int `json:"points,omitempty"` // Count int `json:"count,omitempty"` // } // // client.Points.Query(). -// GroupBy(points.FieldCreatedAt). +// GroupBy(points.FieldPoints). // Aggregate(ent.Count()). 
// Scan(ctx, &v) func (_q *PointsQuery) GroupBy(field string, fields ...string) *PointsGroupBy { @@ -324,11 +324,11 @@ func (_q *PointsQuery) GroupBy(field string, fields ...string) *PointsGroupBy { // Example: // // var v []struct { -// CreatedAt time.Time `json:"created_at,omitempty"` +// Points int `json:"points,omitempty"` // } // // client.Points.Query(). -// Select(points.FieldCreatedAt). +// Select(points.FieldPoints). // Scan(ctx, &v) func (_q *PointsQuery) Select(fields ...string) *PointsSelect { _q.ctx.Fields = append(_q.ctx.Fields, fields...) diff --git a/ent/points_update.go b/ent/points_update.go index a69855e..4cac029 100644 --- a/ent/points_update.go +++ b/ent/points_update.go @@ -29,46 +29,6 @@ func (_u *PointsUpdate) Where(ps ...predicate.Points) *PointsUpdate { return _u } -// SetCreatedAt sets the "created_at" field. -func (_u *PointsUpdate) SetCreatedAt(v time.Time) *PointsUpdate { - _u.mutation.SetCreatedAt(v) - return _u -} - -// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. -func (_u *PointsUpdate) SetNillableCreatedAt(v *time.Time) *PointsUpdate { - if v != nil { - _u.SetCreatedAt(*v) - } - return _u -} - -// SetUpdatedAt sets the "updated_at" field. -func (_u *PointsUpdate) SetUpdatedAt(v time.Time) *PointsUpdate { - _u.mutation.SetUpdatedAt(v) - return _u -} - -// SetDeletedAt sets the "deleted_at" field. -func (_u *PointsUpdate) SetDeletedAt(v time.Time) *PointsUpdate { - _u.mutation.SetDeletedAt(v) - return _u -} - -// SetNillableDeletedAt sets the "deleted_at" field if the given value is not nil. -func (_u *PointsUpdate) SetNillableDeletedAt(v *time.Time) *PointsUpdate { - if v != nil { - _u.SetDeletedAt(*v) - } - return _u -} - -// ClearDeletedAt clears the value of the "deleted_at" field. -func (_u *PointsUpdate) ClearDeletedAt() *PointsUpdate { - _u.mutation.ClearDeletedAt() - return _u -} - // SetPoints sets the "points" field. func (_u *PointsUpdate) SetPoints(v int) *PointsUpdate { _u.mutation.ResetPoints() @@ -90,6 +50,20 @@ func (_u *PointsUpdate) AddPoints(v int) *PointsUpdate { return _u } +// SetGrantedAt sets the "granted_at" field. +func (_u *PointsUpdate) SetGrantedAt(v time.Time) *PointsUpdate { + _u.mutation.SetGrantedAt(v) + return _u +} + +// SetNillableGrantedAt sets the "granted_at" field if the given value is not nil. +func (_u *PointsUpdate) SetNillableGrantedAt(v *time.Time) *PointsUpdate { + if v != nil { + _u.SetGrantedAt(*v) + } + return _u +} + // SetDescription sets the "description" field. func (_u *PointsUpdate) SetDescription(v string) *PointsUpdate { _u.mutation.SetDescription(v) @@ -134,9 +108,6 @@ func (_u *PointsUpdate) ClearUser() *PointsUpdate { // Save executes the query and returns the number of nodes affected by the update operation. func (_u *PointsUpdate) Save(ctx context.Context) (int, error) { - if err := _u.defaults(); err != nil { - return 0, err - } return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks) } @@ -162,18 +133,6 @@ func (_u *PointsUpdate) ExecX(ctx context.Context) { } } -// defaults sets the default values of the builder before save. -func (_u *PointsUpdate) defaults() error { - if _, ok := _u.mutation.UpdatedAt(); !ok { - if points.UpdateDefaultUpdatedAt == nil { - return fmt.Errorf("ent: uninitialized points.UpdateDefaultUpdatedAt (forgotten import ent/runtime?)") - } - v := points.UpdateDefaultUpdatedAt() - _u.mutation.SetUpdatedAt(v) - } - return nil -} - // check runs all checks and user-defined validators on the builder. 
func (_u *PointsUpdate) check() error { if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 { @@ -194,24 +153,15 @@ func (_u *PointsUpdate) sqlSave(ctx context.Context) (_node int, err error) { } } } - if value, ok := _u.mutation.CreatedAt(); ok { - _spec.SetField(points.FieldCreatedAt, field.TypeTime, value) - } - if value, ok := _u.mutation.UpdatedAt(); ok { - _spec.SetField(points.FieldUpdatedAt, field.TypeTime, value) - } - if value, ok := _u.mutation.DeletedAt(); ok { - _spec.SetField(points.FieldDeletedAt, field.TypeTime, value) - } - if _u.mutation.DeletedAtCleared() { - _spec.ClearField(points.FieldDeletedAt, field.TypeTime) - } if value, ok := _u.mutation.Points(); ok { _spec.SetField(points.FieldPoints, field.TypeInt, value) } if value, ok := _u.mutation.AddedPoints(); ok { _spec.AddField(points.FieldPoints, field.TypeInt, value) } + if value, ok := _u.mutation.GrantedAt(); ok { + _spec.SetField(points.FieldGrantedAt, field.TypeTime, value) + } if value, ok := _u.mutation.Description(); ok { _spec.SetField(points.FieldDescription, field.TypeString, value) } @@ -267,46 +217,6 @@ type PointsUpdateOne struct { mutation *PointsMutation } -// SetCreatedAt sets the "created_at" field. -func (_u *PointsUpdateOne) SetCreatedAt(v time.Time) *PointsUpdateOne { - _u.mutation.SetCreatedAt(v) - return _u -} - -// SetNillableCreatedAt sets the "created_at" field if the given value is not nil. -func (_u *PointsUpdateOne) SetNillableCreatedAt(v *time.Time) *PointsUpdateOne { - if v != nil { - _u.SetCreatedAt(*v) - } - return _u -} - -// SetUpdatedAt sets the "updated_at" field. -func (_u *PointsUpdateOne) SetUpdatedAt(v time.Time) *PointsUpdateOne { - _u.mutation.SetUpdatedAt(v) - return _u -} - -// SetDeletedAt sets the "deleted_at" field. -func (_u *PointsUpdateOne) SetDeletedAt(v time.Time) *PointsUpdateOne { - _u.mutation.SetDeletedAt(v) - return _u -} - -// SetNillableDeletedAt sets the "deleted_at" field if the given value is not nil. -func (_u *PointsUpdateOne) SetNillableDeletedAt(v *time.Time) *PointsUpdateOne { - if v != nil { - _u.SetDeletedAt(*v) - } - return _u -} - -// ClearDeletedAt clears the value of the "deleted_at" field. -func (_u *PointsUpdateOne) ClearDeletedAt() *PointsUpdateOne { - _u.mutation.ClearDeletedAt() - return _u -} - // SetPoints sets the "points" field. func (_u *PointsUpdateOne) SetPoints(v int) *PointsUpdateOne { _u.mutation.ResetPoints() @@ -328,6 +238,20 @@ func (_u *PointsUpdateOne) AddPoints(v int) *PointsUpdateOne { return _u } +// SetGrantedAt sets the "granted_at" field. +func (_u *PointsUpdateOne) SetGrantedAt(v time.Time) *PointsUpdateOne { + _u.mutation.SetGrantedAt(v) + return _u +} + +// SetNillableGrantedAt sets the "granted_at" field if the given value is not nil. +func (_u *PointsUpdateOne) SetNillableGrantedAt(v *time.Time) *PointsUpdateOne { + if v != nil { + _u.SetGrantedAt(*v) + } + return _u +} + // SetDescription sets the "description" field. func (_u *PointsUpdateOne) SetDescription(v string) *PointsUpdateOne { _u.mutation.SetDescription(v) @@ -385,9 +309,6 @@ func (_u *PointsUpdateOne) Select(field string, fields ...string) *PointsUpdateO // Save executes the query and returns the updated Points entity. 
func (_u *PointsUpdateOne) Save(ctx context.Context) (*Points, error) { - if err := _u.defaults(); err != nil { - return nil, err - } return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks) } @@ -413,18 +334,6 @@ func (_u *PointsUpdateOne) ExecX(ctx context.Context) { } } -// defaults sets the default values of the builder before save. -func (_u *PointsUpdateOne) defaults() error { - if _, ok := _u.mutation.UpdatedAt(); !ok { - if points.UpdateDefaultUpdatedAt == nil { - return fmt.Errorf("ent: uninitialized points.UpdateDefaultUpdatedAt (forgotten import ent/runtime?)") - } - v := points.UpdateDefaultUpdatedAt() - _u.mutation.SetUpdatedAt(v) - } - return nil -} - // check runs all checks and user-defined validators on the builder. func (_u *PointsUpdateOne) check() error { if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 { @@ -462,24 +371,15 @@ func (_u *PointsUpdateOne) sqlSave(ctx context.Context) (_node *Points, err erro } } } - if value, ok := _u.mutation.CreatedAt(); ok { - _spec.SetField(points.FieldCreatedAt, field.TypeTime, value) - } - if value, ok := _u.mutation.UpdatedAt(); ok { - _spec.SetField(points.FieldUpdatedAt, field.TypeTime, value) - } - if value, ok := _u.mutation.DeletedAt(); ok { - _spec.SetField(points.FieldDeletedAt, field.TypeTime, value) - } - if _u.mutation.DeletedAtCleared() { - _spec.ClearField(points.FieldDeletedAt, field.TypeTime) - } if value, ok := _u.mutation.Points(); ok { _spec.SetField(points.FieldPoints, field.TypeInt, value) } if value, ok := _u.mutation.AddedPoints(); ok { _spec.AddField(points.FieldPoints, field.TypeInt, value) } + if value, ok := _u.mutation.GrantedAt(); ok { + _spec.SetField(points.FieldGrantedAt, field.TypeTime, value) + } if value, ok := _u.mutation.Description(); ok { _spec.SetField(points.FieldDescription, field.TypeString, value) } diff --git a/ent/runtime/runtime.go b/ent/runtime/runtime.go index 77fcb53..13ba053 100644 --- a/ent/runtime/runtime.go +++ b/ent/runtime/runtime.go @@ -66,29 +66,16 @@ func init() { groupDescName := groupFields[0].Descriptor() // group.NameValidator is a validator for the "name" field. It is called by the builders before save. group.NameValidator = groupDescName.Validators[0].(func(string) error) - pointsMixin := schema.Points{}.Mixin() - pointsMixinHooks0 := pointsMixin[0].Hooks() - points.Hooks[0] = pointsMixinHooks0[0] - pointsMixinInters0 := pointsMixin[0].Interceptors() - points.Interceptors[0] = pointsMixinInters0[0] - pointsMixinFields0 := pointsMixin[0].Fields() - _ = pointsMixinFields0 pointsFields := schema.Points{}.Fields() _ = pointsFields - // pointsDescCreatedAt is the schema descriptor for created_at field. - pointsDescCreatedAt := pointsMixinFields0[0].Descriptor() - // points.DefaultCreatedAt holds the default value on creation for the created_at field. - points.DefaultCreatedAt = pointsDescCreatedAt.Default.(func() time.Time) - // pointsDescUpdatedAt is the schema descriptor for updated_at field. - pointsDescUpdatedAt := pointsMixinFields0[1].Descriptor() - // points.DefaultUpdatedAt holds the default value on creation for the updated_at field. - points.DefaultUpdatedAt = pointsDescUpdatedAt.Default.(func() time.Time) - // points.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field. - points.UpdateDefaultUpdatedAt = pointsDescUpdatedAt.UpdateDefault.(func() time.Time) // pointsDescPoints is the schema descriptor for points field. 
pointsDescPoints := pointsFields[0].Descriptor() // points.DefaultPoints holds the default value on creation for the points field. points.DefaultPoints = pointsDescPoints.Default.(int) + // pointsDescGrantedAt is the schema descriptor for granted_at field. + pointsDescGrantedAt := pointsFields[1].Descriptor() + // points.DefaultGrantedAt holds the default value on creation for the granted_at field. + points.DefaultGrantedAt = pointsDescGrantedAt.Default.(func() time.Time) questionFields := schema.Question{}.Fields() _ = questionFields // questionDescCategory is the schema descriptor for category field. diff --git a/ent/schema/points.go b/ent/schema/points.go index 8bbddb2..e9bed21 100644 --- a/ent/schema/points.go +++ b/ent/schema/points.go @@ -1,6 +1,8 @@ package schema import ( + "time" + "entgo.io/contrib/entgql" "entgo.io/ent" "entgo.io/ent/schema" @@ -17,6 +19,8 @@ func (Points) Fields() []ent.Field { return []ent.Field{ field.Int("points"). Default(0), + field.Time("granted_at"). + Default(time.Now), field.String("description"). Optional(), } @@ -28,12 +32,6 @@ func (Points) Edges() []ent.Edge { } } -func (Points) Mixin() []ent.Mixin { - return []ent.Mixin{ - TimestampMixin{}, - } -} - func (Points) Annotations() []schema.Annotation { return []schema.Annotation{ entgql.QueryField().Directives( diff --git a/graph/ent.graphqls b/graph/ent.graphqls index f110320..4d5f7d8 100644 --- a/graph/ent.graphqls +++ b/graph/ent.graphqls @@ -42,6 +42,7 @@ Input was generated by ent. """ input CreatePointsInput { points: Int + grantedAt: Time description: String userID: ID! } @@ -455,10 +456,8 @@ type PageInfo { } type Points implements Node { id: ID! - createdAt: Time! - updatedAt: Time! - deletedAt: Time points: Int! + grantedAt: Time! description: String user: User! } @@ -512,41 +511,6 @@ input PointsWhereInput { idLT: ID idLTE: ID """ - created_at field predicates - """ - createdAt: Time - createdAtNEQ: Time - createdAtIn: [Time!] - createdAtNotIn: [Time!] - createdAtGT: Time - createdAtGTE: Time - createdAtLT: Time - createdAtLTE: Time - """ - updated_at field predicates - """ - updatedAt: Time - updatedAtNEQ: Time - updatedAtIn: [Time!] - updatedAtNotIn: [Time!] - updatedAtGT: Time - updatedAtGTE: Time - updatedAtLT: Time - updatedAtLTE: Time - """ - deleted_at field predicates - """ - deletedAt: Time - deletedAtNEQ: Time - deletedAtIn: [Time!] - deletedAtNotIn: [Time!] - deletedAtGT: Time - deletedAtGTE: Time - deletedAtLT: Time - deletedAtLTE: Time - deletedAtIsNil: Boolean - deletedAtNotNil: Boolean - """ points field predicates """ points: Int @@ -558,6 +522,17 @@ input PointsWhereInput { pointsLT: Int pointsLTE: Int """ + granted_at field predicates + """ + grantedAt: Time + grantedAtNEQ: Time + grantedAtIn: [Time!] + grantedAtNotIn: [Time!] + grantedAtGT: Time + grantedAtGTE: Time + grantedAtLT: Time + grantedAtLTE: Time + """ description field predicates """ description: String @@ -1015,6 +990,7 @@ Input was generated by ent. """ input UpdatePointsInput { points: Int + grantedAt: Time description: String clearDescription: Boolean userID: ID diff --git a/internal/events/points.go b/internal/events/points.go index 3371ff4..3284c57 100644 --- a/internal/events/points.go +++ b/internal/events/points.go @@ -52,7 +52,7 @@ func (d *PointsGranter) GrantDailyLoginPoints(ctx context.Context, userID int) ( hasPointsRecord, err := d.entClient.Points.Query(). Where(points.HasUserWith(user.ID(userID))). Where(points.DescriptionEQ(PointDescriptionDailyLogin)). 
- Where(points.CreatedAtGTE(time.Now().AddDate(0, 0, -1))).Exist(ctx) + Where(points.GrantedAtGTE(time.Now().AddDate(0, 0, -1))).Exist(ctx) if err != nil { return false, err } @@ -92,7 +92,7 @@ func (d *PointsGranter) GrantWeeklyLoginPoints(ctx context.Context, userID int) hasPointsRecord, err := d.entClient.Points.Query(). Where(points.HasUserWith(user.ID(userID))). Where(points.DescriptionEQ(PointDescriptionWeeklyLogin)). - Where(points.CreatedAtGTE(time.Now().AddDate(0, 0, -7))).Exist(ctx) + Where(points.GrantedAtGTE(time.Now().AddDate(0, 0, -7))).Exist(ctx) if err != nil { return false, err } diff --git a/internal/events/points_test.go b/internal/events/points_test.go index b971d04..d77afac 100644 --- a/internal/events/points_test.go +++ b/internal/events/points_test.go @@ -52,7 +52,7 @@ func createLoginEvent(t *testing.T, client *ent.Client, userID int, triggeredAt } // createPointsRecord creates a points record for the user with specified created_at time -func createPointsRecord(t *testing.T, client *ent.Client, userID int, description string, pointsValue int, createdAt time.Time) { +func createPointsRecord(t *testing.T, client *ent.Client, userID int, description string, pointsValue int, grantedAt time.Time) { t.Helper() ctx := context.Background() @@ -62,7 +62,7 @@ func createPointsRecord(t *testing.T, client *ent.Client, userID int, descriptio SetUserID(userID). SetDescription(description). SetPoints(pointsValue). - SetCreatedAt(createdAt). + SetGrantedAt(grantedAt). Save(ctx) require.NoError(t, err) } @@ -288,22 +288,22 @@ func TestGrantWeeklyLoginPoints_NoLoginEvents(t *testing.T) { func TestGrantWeeklyLoginPoints_MultipleLoginsPerDay(t *testing.T) { testCases := []struct { - name string - days int - shouldGrant bool - description string + name string + days int + shouldGrant bool + description string }{ { - name: "SufficientDays", - days: 7, - shouldGrant: true, - description: "Should grant points with 7 days of multiple logins per day", + name: "SufficientDays", + days: 7, + shouldGrant: true, + description: "Should grant points with 7 days of multiple logins per day", }, { - name: "InsufficientDays", - days: 6, - shouldGrant: false, - description: "Should not grant points with only 6 days of multiple logins per day", + name: "InsufficientDays", + days: 6, + shouldGrant: false, + description: "Should not grant points with only 6 days of multiple logins per day", }, } From 298588f0e64c491352f3c397e10a108a3c09ca0b Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Thu, 18 Sep 2025 00:21:11 +0800 Subject: [PATCH 12/14] fix: tests --- go.mod | 4 +- graph/resolver.go | 7 +- graph/user.resolvers_test.go | 163 ++++++++--------------------------- internal/events/events.go | 11 +-- internal/testhelper/ent.go | 4 + internal/workers/workers.go | 29 +------ 6 files changed, 56 insertions(+), 162 deletions(-) diff --git a/go.mod b/go.mod index 25ecaf5..92c52bb 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,6 @@ module github.com/database-playground/backend-v2 -go 1.24.0 - -toolchain go1.24.2 +go 1.25.0 require ( entgo.io/contrib v0.7.0 diff --git a/graph/resolver.go b/graph/resolver.go index 9154f53..6310d17 100644 --- a/graph/resolver.go +++ b/graph/resolver.go @@ -27,10 +27,15 @@ type Resolver struct { eventService *events.EventService } +// NewResolver creates a new resolver. 
+func NewResolver(ent *ent.Client, auth auth.Storage, sqlrunner *sqlrunner.SqlRunner, eventService *events.EventService) *Resolver { + return &Resolver{ent, auth, sqlrunner, eventService} +} + // NewSchema creates a graphql executable schema. func NewSchema(ent *ent.Client, auth auth.Storage, sqlrunner *sqlrunner.SqlRunner, eventService *events.EventService) graphql.ExecutableSchema { return NewExecutableSchema(Config{ - Resolvers: &Resolver{ent, auth, sqlrunner, eventService}, + Resolvers: NewResolver(ent, auth, sqlrunner, eventService), Directives: DirectiveRoot{ Scope: directive.ScopeDirective, }, diff --git a/graph/user.resolvers_test.go b/graph/user.resolvers_test.go index 8ee10be..24c5b59 100644 --- a/graph/user.resolvers_test.go +++ b/graph/user.resolvers_test.go @@ -14,6 +14,7 @@ import ( "github.com/database-playground/backend-v2/graph/defs" "github.com/database-playground/backend-v2/graph/directive" "github.com/database-playground/backend-v2/internal/auth" + "github.com/database-playground/backend-v2/internal/events" "github.com/database-playground/backend-v2/internal/setup" "github.com/database-playground/backend-v2/internal/testhelper" "github.com/database-playground/backend-v2/internal/useraccount" @@ -51,10 +52,7 @@ func (m *mockAuthStorage) Peek(ctx context.Context, token string) (auth.TokenInf func TestMutationResolver_LogoutAll(t *testing.T) { t.Run("success", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -83,10 +81,7 @@ func TestMutationResolver_LogoutAll(t *testing.T) { t.Run("unauthenticated", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -111,10 +106,7 @@ func TestMutationResolver_LogoutAll(t *testing.T) { t.Run("insufficient scope", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -146,12 +138,9 @@ func TestMutationResolver_LogoutAll(t *testing.T) { // Setup test resolver with mock auth storage storageErr := errors.New("storage error") - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{ - deleteByUserErr: storageErr, - }, - } + resolver := NewResolver(entClient, &mockAuthStorage{ + deleteByUserErr: storageErr, + }, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -197,12 +186,9 @@ func TestMutationResolver_ImpersonateUser(t *testing.T) { // Setup test resolver with mock auth storage expectedToken := "test-token" - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{ - createToken: expectedToken, - }, - } + resolver := NewResolver(entClient, &mockAuthStorage{ + createToken: expectedToken, + }, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -232,10 +218,7 @@ func TestMutationResolver_ImpersonateUser(t *testing.T) { t.Run("unauthenticated", func(t *testing.T) { entClient := 
testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -260,10 +243,7 @@ func TestMutationResolver_ImpersonateUser(t *testing.T) { t.Run("insufficient scope", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -307,12 +287,9 @@ func TestMutationResolver_ImpersonateUser(t *testing.T) { // Setup test resolver with mock auth storage storageErr := errors.New("storage error") - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{ - createErr: storageErr, - }, - } + resolver := NewResolver(entClient, &mockAuthStorage{ + createErr: storageErr, + }, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -341,10 +318,7 @@ func TestMutationResolver_ImpersonateUser(t *testing.T) { t.Run("no such user", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -414,10 +388,7 @@ func TestQueryResolver_Me(t *testing.T) { require.NoError(t, err) // Setup test resolver - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -453,10 +424,7 @@ func TestQueryResolver_Me(t *testing.T) { t.Run("unauthenticated", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -483,10 +451,7 @@ func TestQueryResolver_Me(t *testing.T) { t.Run("insufficient scope", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -518,10 +483,7 @@ func TestQueryResolver_Me(t *testing.T) { t.Run("invalid user id", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -553,10 +515,7 @@ func TestQueryResolver_Me(t *testing.T) { func TestQueryResolver_User(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) cfg := Config{ Resolvers: resolver, Directives: DirectiveRoot{Scope: directive.ScopeDirective}, @@ -640,10 +599,7 @@ func TestQueryResolver_User(t *testing.T) { func TestQueryResolver_Group(t *testing.T) { 
entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) cfg := Config{ Resolvers: resolver, Directives: DirectiveRoot{Scope: directive.ScopeDirective}, @@ -720,10 +676,7 @@ func TestQueryResolver_Group(t *testing.T) { func TestQueryResolver_ScopeSet(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) cfg := Config{ Resolvers: resolver, Directives: DirectiveRoot{Scope: directive.ScopeDirective}, @@ -879,10 +832,7 @@ func TestUserResolver_ImpersonatedBy(t *testing.T) { require.NoError(t, err) // Setup test resolver - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -921,10 +871,7 @@ func TestUserResolver_ImpersonatedBy(t *testing.T) { t.Run("unauthenticated", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -965,10 +912,7 @@ func TestUserResolver_ImpersonatedBy(t *testing.T) { Save(context.Background()) require.NoError(t, err) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1018,10 +962,7 @@ func TestUserResolver_ImpersonatedBy(t *testing.T) { require.NoError(t, err) // Setup test resolver - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1071,10 +1012,7 @@ func TestMutationResolver_UpdateMe(t *testing.T) { Save(context.Background()) require.NoError(t, err) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1117,10 +1055,7 @@ func TestMutationResolver_UpdateMe(t *testing.T) { Save(context.Background()) require.NoError(t, err) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1151,10 +1086,7 @@ func TestMutationResolver_UpdateMe(t *testing.T) { t.Run("unauthenticated", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1181,10 +1113,7 @@ func TestMutationResolver_UpdateMe(t *testing.T) { t.Run("insufficient scope", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + 
resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1217,10 +1146,7 @@ func TestMutationResolver_UpdateMe(t *testing.T) { t.Run("user not found", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1271,10 +1197,7 @@ func TestMutationResolver_VerifyRegistration(t *testing.T) { Save(context.Background()) require.NoError(t, err) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1311,10 +1234,7 @@ func TestMutationResolver_VerifyRegistration(t *testing.T) { t.Run("unauthenticated", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1339,10 +1259,7 @@ func TestMutationResolver_VerifyRegistration(t *testing.T) { t.Run("insufficient scope", func(t *testing.T) { entClient := testhelper.NewEntSqliteClient(t) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1391,10 +1308,7 @@ func TestMutationResolver_VerifyRegistration(t *testing.T) { Save(context.Background()) require.NoError(t, err) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ @@ -1428,10 +1342,7 @@ func TestMutationResolver_VerifyRegistration(t *testing.T) { _, err := setup.Setup(context.Background(), entClient) require.NoError(t, err) - resolver := &Resolver{ - ent: entClient, - auth: &mockAuthStorage{}, - } + resolver := NewResolver(entClient, &mockAuthStorage{}, nil, events.NewEventService(entClient)) // Create test server with scope directive cfg := Config{ diff --git a/internal/events/events.go b/internal/events/events.go index 0f6e6ed..7de2d3c 100644 --- a/internal/events/events.go +++ b/internal/events/events.go @@ -6,7 +6,6 @@ import ( "time" "github.com/database-playground/backend-v2/ent" - "github.com/database-playground/backend-v2/internal/workers" ) // EventService is the service for triggering events. @@ -40,12 +39,10 @@ type EventHandler interface { // TriggerEvent triggers an event. func (s *EventService) TriggerEvent(ctx context.Context, event Event) { - workers.Global.Go(func() { - err := s.triggerEvent(context.Background(), event) - if err != nil { - slog.Error("failed to trigger event", "error", err) - } - }) + err := s.triggerEvent(context.Background(), event) + if err != nil { + slog.Error("failed to trigger event", "error", err) + } } // triggerEvent triggers an event synchronously. 
diff --git a/internal/testhelper/ent.go b/internal/testhelper/ent.go index f753c89..e329861 100644 --- a/internal/testhelper/ent.go +++ b/internal/testhelper/ent.go @@ -5,6 +5,7 @@ import ( "github.com/database-playground/backend-v2/ent" "github.com/database-playground/backend-v2/ent/enttest" + "github.com/database-playground/backend-v2/internal/workers" ) // NewEntSqliteClient creates a new in-memory Ent SQLite client for testing. @@ -14,6 +15,9 @@ func NewEntSqliteClient(t *testing.T) *ent.Client { client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=private&_fk=1") t.Cleanup(func() { + // must wait the workers to finish + workers.Global.Wait() + if err := client.Close(); err != nil { t.Fatalf("Failed to close client: %v", err) } diff --git a/internal/workers/workers.go b/internal/workers/workers.go index 45c42c1..d9df138 100644 --- a/internal/workers/workers.go +++ b/internal/workers/workers.go @@ -1,28 +1,7 @@ package workers -import "sync" +import ( + "sync" +) -var Global = NewWorker() - -type Worker struct { - wg *sync.WaitGroup -} - -func NewWorker() *Worker { - return &Worker{ - wg: &sync.WaitGroup{}, - } -} - -func (w *Worker) Go(fn func()) { - w.wg.Add(1) - - go func() { - defer w.wg.Done() - fn() - }() -} - -func (w *Worker) Wait() { - w.wg.Wait() -} +var Global = &sync.WaitGroup{} From 1b66a94e840ca8431605514ef5a84c4d0e8348cf Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Thu, 18 Sep 2025 00:35:55 +0800 Subject: [PATCH 13/14] refactor: remove event and point mutation --- ent/gql_mutation_input.go | 136 -------------------------------------- ent/internal/schema.go | 2 +- ent/schema/events.go | 4 -- ent/schema/points.go | 4 -- graph/ent.graphqls | 42 ------------ 5 files changed, 1 insertion(+), 187 deletions(-) diff --git a/ent/gql_mutation_input.go b/ent/gql_mutation_input.go index a1c0541..a55dedf 100644 --- a/ent/gql_mutation_input.go +++ b/ent/gql_mutation_input.go @@ -3,8 +3,6 @@ package ent import ( - "time" - "github.com/database-playground/backend-v2/ent/question" ) @@ -84,72 +82,6 @@ func (c *DatabaseUpdateOne) SetInput(i UpdateDatabaseInput) *DatabaseUpdateOne { return c } -// CreateEventsInput represents a mutation input for creating eventsslice. -type CreateEventsInput struct { - Type string - TriggeredAt *time.Time - Payload map[string]interface{} - UserID int -} - -// Mutate applies the CreateEventsInput on the EventsMutation builder. -func (i *CreateEventsInput) Mutate(m *EventsMutation) { - m.SetType(i.Type) - if v := i.TriggeredAt; v != nil { - m.SetTriggeredAt(*v) - } - if v := i.Payload; v != nil { - m.SetPayload(v) - } - m.SetUserID(i.UserID) -} - -// SetInput applies the change-set in the CreateEventsInput on the EventsCreate builder. -func (c *EventsCreate) SetInput(i CreateEventsInput) *EventsCreate { - i.Mutate(c.Mutation()) - return c -} - -// UpdateEventsInput represents a mutation input for updating eventsslice. -type UpdateEventsInput struct { - Type *string - TriggeredAt *time.Time - ClearPayload bool - Payload map[string]interface{} - UserID *int -} - -// Mutate applies the UpdateEventsInput on the EventsMutation builder. -func (i *UpdateEventsInput) Mutate(m *EventsMutation) { - if v := i.Type; v != nil { - m.SetType(*v) - } - if v := i.TriggeredAt; v != nil { - m.SetTriggeredAt(*v) - } - if i.ClearPayload { - m.ClearPayload() - } - if v := i.Payload; v != nil { - m.SetPayload(v) - } - if v := i.UserID; v != nil { - m.SetUserID(*v) - } -} - -// SetInput applies the change-set in the UpdateEventsInput on the EventsUpdate builder. 
-func (c *EventsUpdate) SetInput(i UpdateEventsInput) *EventsUpdate { - i.Mutate(c.Mutation()) - return c -} - -// SetInput applies the change-set in the UpdateEventsInput on the EventsUpdateOne builder. -func (c *EventsUpdateOne) SetInput(i UpdateEventsInput) *EventsUpdateOne { - i.Mutate(c.Mutation()) - return c -} - // CreateGroupInput represents a mutation input for creating groups. type CreateGroupInput struct { Name string @@ -218,74 +150,6 @@ func (c *GroupUpdateOne) SetInput(i UpdateGroupInput) *GroupUpdateOne { return c } -// CreatePointsInput represents a mutation input for creating pointsslice. -type CreatePointsInput struct { - Points *int - GrantedAt *time.Time - Description *string - UserID int -} - -// Mutate applies the CreatePointsInput on the PointsMutation builder. -func (i *CreatePointsInput) Mutate(m *PointsMutation) { - if v := i.Points; v != nil { - m.SetPoints(*v) - } - if v := i.GrantedAt; v != nil { - m.SetGrantedAt(*v) - } - if v := i.Description; v != nil { - m.SetDescription(*v) - } - m.SetUserID(i.UserID) -} - -// SetInput applies the change-set in the CreatePointsInput on the PointsCreate builder. -func (c *PointsCreate) SetInput(i CreatePointsInput) *PointsCreate { - i.Mutate(c.Mutation()) - return c -} - -// UpdatePointsInput represents a mutation input for updating pointsslice. -type UpdatePointsInput struct { - Points *int - GrantedAt *time.Time - ClearDescription bool - Description *string - UserID *int -} - -// Mutate applies the UpdatePointsInput on the PointsMutation builder. -func (i *UpdatePointsInput) Mutate(m *PointsMutation) { - if v := i.Points; v != nil { - m.SetPoints(*v) - } - if v := i.GrantedAt; v != nil { - m.SetGrantedAt(*v) - } - if i.ClearDescription { - m.ClearDescription() - } - if v := i.Description; v != nil { - m.SetDescription(*v) - } - if v := i.UserID; v != nil { - m.SetUserID(*v) - } -} - -// SetInput applies the change-set in the UpdatePointsInput on the PointsUpdate builder. -func (c *PointsUpdate) SetInput(i UpdatePointsInput) *PointsUpdate { - i.Mutate(c.Mutation()) - return c -} - -// SetInput applies the change-set in the UpdatePointsInput on the PointsUpdateOne builder. -func (c *PointsUpdateOne) SetInput(i UpdatePointsInput) *PointsUpdateOne { - i.Mutate(c.Mutation()) - return c -} - // CreateQuestionInput represents a mutation input for creating questions. type CreateQuestionInput struct { Category string diff --git a/ent/internal/schema.go b/ent/internal/schema.go index 16bcaeb..b2067ab 100644 --- a/ent/internal/schema.go +++ b/ent/internal/schema.go @@ -6,4 +6,4 @@ // Package internal holds a loadable version of the latest schema. 
package internal -const Schema = "{\"Schema\":\"github.com/database-playground/backend-v2/ent/schema\",\"Package\":\"github.com/database-playground/backend-v2/ent\",\"Schemas\":[{\"name\":\"Database\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"questions\",\"type\":\"Question\"}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"schema\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"validators\":1,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"SQL schema\"},{\"name\":\"relation_figure\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"validators\":1,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"relation figure\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"database:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":12884901888}}},{\"name\":\"Events\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"field\":\"user_id\",\"ref_name\":\"events\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"user_id\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"type\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"triggered_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"payload\",\"type\":{\"Type\":3,\"Ident\":\"map[string]interface {}\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"map[string]interface 
{}\",\"Kind\":21,\"PkgPath\":\"\",\"Methods\":{}}},\"optional\":true,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0}}],\"indexes\":[{\"fields\":[\"type\"]},{\"fields\":[\"type\",\"user_id\"]}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":21474836480}}},{\"name\":\"Group\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"scope_sets\",\"type\":\"ScopeSet\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"group:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":4294967296}}},{\"name\":\"Points\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"ref_name\":\"points\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"points\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_value\":0,\"default_kind\":2,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"granted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,
\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":25769803776}}},{\"name\":\"Question\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"database\",\"type\":\"Database\",\"ref_name\":\"questions\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"category\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"CATEGORY\"}},\"comment\":\"Question category, e.g. 'query'\"},{\"name\":\"difficulty\",\"type\":{\"Type\":6,\"Ident\":\"question.Difficulty\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"enums\":[{\"N\":\"Unspecified\",\"V\":\"unspecified\"},{\"N\":\"Easy\",\"V\":\"easy\"},{\"N\":\"Medium\",\"V\":\"medium\"},{\"N\":\"Hard\",\"V\":\"hard\"}],\"default\":true,\"default_value\":\"medium\",\"default_kind\":24,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"DIFFICULTY\"}},\"comment\":\"Question difficulty, e.g. 'easy'\"},{\"name\":\"title\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question title\"},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question stem\"},{\"name\":\"reference_answer\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":4,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"answer:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"comment\":\"Reference 
answer\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"question:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":17179869184}}},{\"name\":\"ScopeSet\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"groups\",\"type\":\"Group\",\"ref_name\":\"scope_sets\",\"inverse\":true}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"scopes\",\"type\":{\"Type\":3,\"Ident\":\"[]string\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"[]string\",\"Kind\":23,\"PkgPath\":\"\",\"Methods\":{}}},\"default\":true,\"default_value\":[],\"default_kind\":23,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"scopeset:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":8589934592}}},{\"name\":\"User\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"group\",\"type\":\"Group\",\"unique\":true,\"required\":true},{\"name\":\"points\",\"type\":\"Points\"},{\"name\":\"events\",\"type\":\"Events\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"email\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"EMAIL\"}}},{\"name\":\"avatar\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":t
rue,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":0}}}],\"Features\":[\"namedges\",\"intercept\",\"schema/snapshot\",\"sql/globalid\"]}" +const Schema = "{\"Schema\":\"github.com/database-playground/backend-v2/ent/schema\",\"Package\":\"github.com/database-playground/backend-v2/ent\",\"Schemas\":[{\"name\":\"Database\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"questions\",\"type\":\"Question\"}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"schema\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"validators\":1,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"SQL schema\"},{\"name\":\"relation_figure\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"validators\":1,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"relation figure\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"database:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":12884901888}}},{\"name\":\"Events\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"field\":\"user_id\",\"ref_name\":\"events\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"user_id\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"type\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"triggered_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"payload\",\"type\":{\"Type\":3,\"Ident\":\"map[string]interface {}\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"map[string]interface 
{}\",\"Kind\":21,\"PkgPath\":\"\",\"Methods\":{}}},\"optional\":true,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0}}],\"indexes\":[{\"fields\":[\"type\"]},{\"fields\":[\"type\",\"user_id\"]}],\"annotations\":{\"EntGQL\":{\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":21474836480}}},{\"name\":\"Group\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"scope_sets\",\"type\":\"ScopeSet\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"group:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":4294967296}}},{\"name\":\"Points\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"user\",\"type\":\"User\",\"ref_name\":\"points\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"points\",\"type\":{\"Type\":12,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_value\":0,\"default_kind\":2,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"granted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:rea
d\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":25769803776}}},{\"name\":\"Question\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"database\",\"type\":\"Database\",\"ref_name\":\"questions\",\"unique\":true,\"inverse\":true,\"required\":true}],\"fields\":[{\"name\":\"category\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"CATEGORY\"}},\"comment\":\"Question category, e.g. 'query'\"},{\"name\":\"difficulty\",\"type\":{\"Type\":6,\"Ident\":\"question.Difficulty\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"enums\":[{\"N\":\"Unspecified\",\"V\":\"unspecified\"},{\"N\":\"Easy\",\"V\":\"easy\"},{\"N\":\"Medium\",\"V\":\"medium\"},{\"N\":\"Hard\",\"V\":\"hard\"}],\"default\":true,\"default_value\":\"medium\",\"default_kind\":24,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"DIFFICULTY\"}},\"comment\":\"Question difficulty, e.g. 'easy'\"},{\"name\":\"title\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question title\"},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":3,\"MixedIn\":false,\"MixinIndex\":0},\"comment\":\"Question stem\"},{\"name\":\"reference_answer\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"size\":2147483647,\"position\":{\"Index\":4,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"answer:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"comment\":\"Reference 
answer\"}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"question:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":17179869184}}},{\"name\":\"ScopeSet\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"groups\",\"type\":\"Group\",\"ref_name\":\"scope_sets\",\"inverse\":true}],\"fields\":[{\"name\":\"slug\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"description\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"scopes\",\"type\":{\"Type\":3,\"Ident\":\"[]string\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":true,\"RType\":{\"Name\":\"\",\"Ident\":\"[]string\",\"Kind\":23,\"PkgPath\":\"\",\"Methods\":{}}},\"default\":true,\"default_value\":[],\"default_kind\":23,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"scopeset:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]}},\"EntSQL\":{\"increment_start\":8589934592}}},{\"name\":\"User\",\"config\":{\"Table\":\"\"},\"edges\":[{\"name\":\"group\",\"type\":\"Group\",\"unique\":true,\"required\":true},{\"name\":\"points\",\"type\":\"Points\"},{\"name\":\"events\",\"type\":\"Events\"}],\"fields\":[{\"name\":\"created_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"position\":{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"updated_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"default\":true,\"default_kind\":19,\"update_default\":true,\"position\":{\"Index\":1,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"deleted_at\",\"type\":{\"Type\":2,\"Ident\":\"\",\"PkgPath\":\"time\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":true,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"Skip\":48}}},{\"name\":\"name\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"validators\":1,\"position\":{\"Index\":0,\"MixedIn\":false,\"MixinIndex\":0}},{\"name\":\"email\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"unique\":true,\"immutable\":true,\"validators\":1,\"position\":{\"Index\":1,\"MixedIn\":false,\"MixinIndex\":0},\"annotations\":{\"EntGQL\":{\"OrderField\":\"EMAIL\"}}},{\"name\":\"avatar\",\"type\":{\"Type\":7,\"Ident\":\"\",\"PkgPath\":\"\",\"PkgName\":\"\",\"Nillable\":false,\"RType\":null},\"optional\":true,\"position\":{\"Index\":2,\"MixedIn\":false,\"MixinIndex\":0}}],\"hooks\":[{\"Index\":0,\"MixedIn\":t
rue,\"MixinIndex\":0}],\"interceptors\":[{\"Index\":0,\"MixedIn\":true,\"MixinIndex\":0}],\"annotations\":{\"EntGQL\":{\"MutationInputs\":[{\"IsCreate\":true},{}],\"QueryField\":{\"Directives\":[{\"arguments\":[{\"Comment\":null,\"Name\":\"scope\",\"Value\":{\"Children\":null,\"Comment\":null,\"Definition\":null,\"ExpectedType\":null,\"Kind\":3,\"Raw\":\"user:read\",\"VariableDefinition\":null}}],\"name\":\"scope\"}]},\"RelayConnection\":true},\"EntSQL\":{\"increment_start\":0}}}],\"Features\":[\"namedges\",\"intercept\",\"schema/snapshot\",\"sql/globalid\"]}" diff --git a/ent/schema/events.go b/ent/schema/events.go index 77eb50d..7bea237 100644 --- a/ent/schema/events.go +++ b/ent/schema/events.go @@ -46,10 +46,6 @@ func (Events) Annotations() []schema.Annotation { entgql.QueryField().Directives( ScopeDirective("user:read"), ), - entgql.Mutations( - entgql.MutationCreate(), - entgql.MutationUpdate(), - ), entgql.RelayConnection(), } } diff --git a/ent/schema/points.go b/ent/schema/points.go index e9bed21..50dc734 100644 --- a/ent/schema/points.go +++ b/ent/schema/points.go @@ -37,10 +37,6 @@ func (Points) Annotations() []schema.Annotation { entgql.QueryField().Directives( ScopeDirective("user:read"), ), - entgql.Mutations( - entgql.MutationCreate(), - entgql.MutationUpdate(), - ), entgql.RelayConnection(), } } diff --git a/graph/ent.graphqls b/graph/ent.graphqls index 4d5f7d8..5d6e622 100644 --- a/graph/ent.graphqls +++ b/graph/ent.graphqls @@ -18,16 +18,6 @@ input CreateDatabaseInput { questionIDs: [ID!] } """ -CreateEventsInput is used for create Events object. -Input was generated by ent. -""" -input CreateEventsInput { - type: String! - triggeredAt: Time - payload: Map - userID: ID! -} -""" CreateGroupInput is used for create Group object. Input was generated by ent. """ @@ -37,16 +27,6 @@ input CreateGroupInput { scopeSetIDs: [ID!] } """ -CreatePointsInput is used for create Points object. -Input was generated by ent. -""" -input CreatePointsInput { - points: Int - grantedAt: Time - description: String - userID: ID! -} -""" CreateQuestionInput is used for create Question object. Input was generated by ent. """ @@ -962,17 +942,6 @@ input UpdateDatabaseInput { clearQuestions: Boolean } """ -UpdateEventsInput is used for update Events object. -Input was generated by ent. -""" -input UpdateEventsInput { - type: String - triggeredAt: Time - payload: Map - clearPayload: Boolean - userID: ID -} -""" UpdateGroupInput is used for update Group object. Input was generated by ent. """ @@ -985,17 +954,6 @@ input UpdateGroupInput { clearScopeSets: Boolean } """ -UpdatePointsInput is used for update Points object. -Input was generated by ent. -""" -input UpdatePointsInput { - points: Int - grantedAt: Time - description: String - clearDescription: Boolean - userID: ID -} -""" UpdateQuestionInput is used for update Question object. Input was generated by ent. """ From 957650a66b1306b9271811ce1bcb46e4e209217a Mon Sep 17 00:00:00 2001 From: Yi-Jyun Pan Date: Thu, 18 Sep 2025 00:51:56 +0800 Subject: [PATCH 14/14] fix: comments by Copilot --- internal/events/events.go | 2 +- internal/events/points.go | 3 ++- internal/useraccount/token_test.go | 10 ---------- 3 files changed, 3 insertions(+), 12 deletions(-) diff --git a/internal/events/events.go b/internal/events/events.go index 7de2d3c..02eb75d 100644 --- a/internal/events/events.go +++ b/internal/events/events.go @@ -39,7 +39,7 @@ type EventHandler interface { // TriggerEvent triggers an event. 
 func (s *EventService) TriggerEvent(ctx context.Context, event Event) {
-	err := s.triggerEvent(context.Background(), event)
+	err := s.triggerEvent(ctx, event)
 	if err != nil {
 		slog.Error("failed to trigger event", "error", err)
 	}
diff --git a/internal/events/points.go b/internal/events/points.go
index 3284c57..d51a648 100644
--- a/internal/events/points.go
+++ b/internal/events/points.go
@@ -104,12 +104,13 @@ func (d *PointsGranter) GrantWeeklyLoginPoints(ctx context.Context, userID int)
 	weekLoginRecords, err := d.entClient.Events.Query().
 		Where(events.Type(string(EventTypeLogin))).
 		Where(events.UserID(userID)).
+		Where(events.TriggeredAtGTE(time.Now().AddDate(0, 0, -7))).
 		All(ctx)
 	if err != nil {
 		return false, err
 	}
-	// aggreated by day
+	// Aggregated by day
 	weekLoginRecordsByDay := make(map[time.Time]int)
 	for _, record := range weekLoginRecords {
 		weekLoginRecordsByDay[record.TriggeredAt.Truncate(24*time.Hour)]++
 	}
diff --git a/internal/useraccount/token_test.go b/internal/useraccount/token_test.go
index 7328471..11d2885 100644
--- a/internal/useraccount/token_test.go
+++ b/internal/useraccount/token_test.go
@@ -285,10 +285,6 @@ func TestGrantToken_LoginEventTriggered(t *testing.T) {
 	require.NoError(t, err)
 	require.NotEmpty(t, token)
 
-	// Wait a bit for the async event processing to complete
-	// Since TriggerEvent runs in a goroutine, we need to give it time
-	time.Sleep(100 * time.Millisecond)
-
 	// Verify login event was created in database
 	loginEvents, err := client.Events.Query().
 		Where(events.UserIDEQ(user.ID)).
@@ -341,9 +337,6 @@ func TestGrantToken_ImpersonationEventTriggered(t *testing.T) {
 	require.NoError(t, err)
 	require.NotEmpty(t, token)
 
-	// Wait a bit for the async event processing to complete
-	time.Sleep(100 * time.Millisecond)
-
 	// Verify impersonation event was created in database
 	impersonationEvents, err := client.Events.Query().
 		Where(events.UserIDEQ(user.ID)).
@@ -420,9 +413,6 @@ func TestGrantToken_MultipleTokensCreateMultipleEvents(t *testing.T) {
 	require.NoError(t, err)
 	require.NotEmpty(t, token3)
 
-	// Wait for the third event to be processed
-	time.Sleep(100 * time.Millisecond)
-
 	// Verify three login events were created
 	loginEvents, err := client.Events.Query().
 		Where(events.UserIDEQ(user.ID)).