41 changes: 24 additions & 17 deletions README.md
@@ -19,9 +19,11 @@ Needs postgresql DSN, if no database is selected in the DSN, it will use all dat
If you are having issues with the connector, please check the following:

- Specified database
  - The user needs permission to access the database; otherwise its resources will be missing from the sync.
- Not specified database
  - The user needs permission for each database; otherwise the connector cannot read that database's resources.
- Sync all databases
  - Pass a `--dsn` that points at the `postgres` database (e.g. `postgres://user:password@localhost:5432/postgres`) together with the `--sync-all-databases` flag.
  - The user needs access to `"pg_catalog"."pg_database"` to list all databases (see the sketch below).
  - The user needs permission on each database to read its resources.
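
Before enabling `--sync-all-databases`, it can help to confirm that the role in your DSN can actually read `"pg_catalog"."pg_database"`. Below is a minimal standalone sketch (not part of this connector) using pgx's `pgxpool` package; the v5 import path and the DSN are assumptions for illustration.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/jackc/pgx/v5/pgxpool"
)

func main() {
	ctx := context.Background()

	// Example DSN; point it at the postgres database as suggested above.
	pool, err := pgxpool.New(ctx, "postgres://user:password@localhost:5432/postgres")
	if err != nil {
		log.Fatal(err)
	}
	defer pool.Close()

	// If this query fails, the role lacks access to pg_catalog.pg_database,
	// which --sync-all-databases relies on to enumerate databases.
	rows, err := pool.Query(ctx, `SELECT datname FROM "pg_catalog"."pg_database" WHERE datallowconn`)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			log.Fatal(err)
		}
		fmt.Println(name) // each database the connector would try to sync
	}
}
```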

## brew

@@ -85,23 +87,28 @@ Usage:
Available Commands:
capabilities Get connector capabilities
completion Generate the autocompletion script for the specified shell
config Get the connector config schema
help Help about any command

Flags:
--client-id string The client ID used to authenticate with ConductorOne ($BATON_CLIENT_ID)
--client-secret string The client secret used to authenticate with ConductorOne ($BATON_CLIENT_SECRET)
--dsn string required: The DSN to connect to the database ($BATON_DSN)
-f, --file string The path to the c1z file to sync with ($BATON_FILE) (default "sync.c1z")
-h, --help help for baton-postgresql
--include-columns Include column privileges when syncing. This can result in large amounts of data ($BATON_INCLUDE_COLUMNS)
--include-large-objects Include large objects when syncing. This can result in large amounts of data ($BATON_INCLUDE_LARGE_OBJECTS)
--log-format string The output format for logs: json, console ($BATON_LOG_FORMAT) (default "json")
--log-level string The log level: debug, info, warn, error ($BATON_LOG_LEVEL) (default "info")
-p, --provisioning This must be set in order for provisioning actions to be enabled ($BATON_PROVISIONING)
--schemas strings The schemas to include in the sync ($BATON_SCHEMAS) (default [public])
--skip-full-sync This must be set to skip a full sync ($BATON_SKIP_FULL_SYNC)
--ticketing This must be set to enable ticketing support ($BATON_TICKETING)
-v, --version version for baton-postgresql
--client-id string The client ID used to authenticate with ConductorOne ($BATON_CLIENT_ID)
--client-secret string The client secret used to authenticate with ConductorOne ($BATON_CLIENT_SECRET)
--dsn string required: The DSN to connect to the database ($BATON_DSN)
--external-resource-c1z string The path to the c1z file to sync external baton resources with ($BATON_EXTERNAL_RESOURCE_C1Z)
--external-resource-entitlement-id-filter string The entitlement that external users, groups must have access to sync external baton resources ($BATON_EXTERNAL_RESOURCE_ENTITLEMENT_ID_FILTER)
-f, --file string The path to the c1z file to sync with ($BATON_FILE) (default "sync.c1z")
-h, --help help for baton-postgresql
--include-columns Include column privileges when syncing. This can result in large amounts of data ($BATON_INCLUDE_COLUMNS)
--include-large-objects Include large objects when syncing. This can result in large amounts of data ($BATON_INCLUDE_LARGE_OBJECTS)
--log-format string The output format for logs: json, console ($BATON_LOG_FORMAT) (default "json")
--log-level string The log level: debug, info, warn, error ($BATON_LOG_LEVEL) (default "info")
--otel-collector-endpoint string The endpoint of the OpenTelemetry collector to send observability data to (used for both tracing and logging if specific endpoints are not provided) ($BATON_OTEL_COLLECTOR_ENDPOINT)
-p, --provisioning This must be set in order for provisioning actions to be enabled ($BATON_PROVISIONING)
--schemas strings The schemas to include in the sync ($BATON_SCHEMAS) (default [public])
--skip-full-sync This must be set to skip a full sync ($BATON_SKIP_FULL_SYNC)
--sync-all-databases Sync all databases. This can result in large amounts of data ($BATON_SYNC_ALL_DATABASES)
--ticketing This must be set to enable ticketing support ($BATON_TICKETING)
-v, --version version for baton-postgresql

Use "baton-postgresql [command] --help" for more information about a command.
```
2 changes: 1 addition & 1 deletion cmd/baton-postgresql/main.go
@@ -38,7 +38,7 @@ func main() {
func getConnector(ctx context.Context, pgc *cfg.Postgresql) (types.ConnectorServer, error) {
l := ctxzap.Extract(ctx)

cb, err := connector.New(ctx, pgc.Dsn, pgc.Schemas, pgc.IncludeColumns, pgc.IncludeLargeObjects)
cb, err := connector.New(ctx, pgc.Dsn, pgc.Schemas, pgc.IncludeColumns, pgc.IncludeLargeObjects, pgc.SyncAllDatabases)
if err != nil {
l.Error("error creating connector", zap.Error(err))
return nil, err
1 change: 1 addition & 0 deletions pkg/config/conf.gen.go

Generated file; diff not rendered.

3 changes: 2 additions & 1 deletion pkg/config/config.go
@@ -9,11 +9,12 @@ var (
schemas = field.StringSliceField("schemas", field.WithDefaultValue([]string{"public"}), field.WithDescription("The schemas to include in the sync"))
includeColumns = field.BoolField("include-columns", field.WithDescription("Include column privileges when syncing. This can result in large amounts of data"))
includeLargeObjects = field.BoolField("include-large-objects", field.WithDescription("Include large objects when syncing. This can result in large amounts of data"))
syncAllDatabases = field.BoolField("sync-all-databases", field.WithDescription("Sync all databases. This can result in large amounts of data"), field.WithDefaultValue(false))
)

var relationships = []field.SchemaFieldRelationship{}

//go:generate go run ./gen
var Config = field.NewConfiguration([]field.SchemaField{
dsn, schemas, includeColumns, includeLargeObjects,
dsn, schemas, includeColumns, includeLargeObjects, syncAllDatabases,
}, relationships...)
6 changes: 4 additions & 2 deletions pkg/connector/connector.go
@@ -16,6 +16,7 @@ type Postgresql struct {
schemas []string
includeColumns bool
includeLargeObjects bool
syncAllDatabases bool
}

func (o *Postgresql) ResourceSyncers(ctx context.Context) []connectorbuilder.ResourceSyncer {
@@ -28,7 +29,7 @@ func (o *Postgresql) ResourceSyncers(ctx context.Context) []connectorbuilder.Res
newFunctionSyncer(ctx, o.clientPool),
newProcedureSyncer(ctx, o.clientPool),
newLargeObjectSyncer(ctx, o.clientPool.Default(ctx), o.includeLargeObjects),
newDatabaseSyncer(ctx, o.clientPool),
newDatabaseSyncer(ctx, o.clientPool, o.syncAllDatabases),
newSequenceSyncer(ctx, o.clientPool),
}
}
@@ -60,7 +61,7 @@ func (c *Postgresql) Asset(ctx context.Context, asset *v2.AssetRef) (string, io.
return "", nil, fmt.Errorf("not implemented")
}

func New(ctx context.Context, dsn string, schemas []string, includeColumns bool, includeLargeObjects bool) (*Postgresql, error) {
func New(ctx context.Context, dsn string, schemas []string, includeColumns bool, includeLargeObjects bool, syncAllDatabases bool) (*Postgresql, error) {
clientPool, err := postgres.NewClientDatabasesPool(ctx, dsn, postgres.WithSchemaFilter(schemas))
if err != nil {
return nil, fmt.Errorf("failed to create postgres client pool: %w", err)
@@ -71,5 +72,6 @@ func New(ctx context.Context, dsn string, schemas []string, includeColumns bool,
schemas: schemas,
includeColumns: includeColumns,
includeLargeObjects: includeLargeObjects,
syncAllDatabases: syncAllDatabases,
}, nil
}
20 changes: 11 additions & 9 deletions pkg/connector/database.go
@@ -24,9 +24,10 @@ var databaseResourceType = &v2.ResourceType{
}

type databaseSyncer struct {
resourceType *v2.ResourceType
clientPool *postgres.ClientDatabasesPool
client *postgres.Client
resourceType *v2.ResourceType
clientPool *postgres.ClientDatabasesPool
client *postgres.Client
syncAllDatabases bool
}

func (r *databaseSyncer) ResourceType(ctx context.Context) *v2.ResourceType {
@@ -64,11 +65,11 @@ func (r *databaseSyncer) List(ctx context.Context, parentResourceID *v2.Resource
return nil, "", nil, err
}

defaultDatabase := r.clientPool.DefaultDatabase(ctx)
defaultDbClient := r.clientPool.Default(ctx)

var ret []*v2.Resource
for _, o := range databases {
if defaultDatabase != "" && o.Name != defaultDatabase {
if !r.syncAllDatabases && o.Name != defaultDbClient.DatabaseName() {
continue
}

@@ -329,10 +330,11 @@ func (r *databaseSyncer) Revoke(ctx context.Context, grant *v2.Grant) (annotatio
return nil, err
}

func newDatabaseSyncer(ctx context.Context, c *postgres.ClientDatabasesPool) *databaseSyncer {
func newDatabaseSyncer(ctx context.Context, c *postgres.ClientDatabasesPool, syncAllDatabases bool) *databaseSyncer {
return &databaseSyncer{
resourceType: databaseResourceType,
clientPool: c,
client: c.Default(ctx),
resourceType: databaseResourceType,
clientPool: c,
client: c.Default(ctx),
syncAllDatabases: syncAllDatabases,
}
}
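
Net effect of the database.go changes: with `--sync-all-databases` off, `List` keeps only the database named in the DSN. A tiny illustrative sketch of that filter rule (the helper name is invented for this note, not part of the connector):

```go
package main

import "fmt"

// keepDatabase mirrors the filter added to databaseSyncer.List: with
// sync-all-databases disabled, only the DSN's own database is synced.
func keepDatabase(syncAllDatabases bool, dbName, dsnDatabase string) bool {
	if syncAllDatabases {
		return true
	}
	return dbName == dsnDatabase
}

func main() {
	fmt.Println(keepDatabase(false, "analytics", "postgres")) // false: skipped
	fmt.Println(keepDatabase(true, "analytics", "postgres"))  // true: synced
}
```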
63 changes: 16 additions & 47 deletions pkg/postgres/client.go
@@ -2,7 +2,6 @@ package postgres

import (
"context"
"errors"
"sync"

"github.com/grpc-ecosystem/go-grpc-middleware/logging/zap/ctxzap"
@@ -11,74 +10,40 @@
)

type ClientDatabasesPool struct {
databases map[string]*Client
opts []ClientOpt
mutex *sync.Mutex
logger *Logger
dsn string
// Only used if dsn have any database name
defaultClientWithDatabase *Client
defaultClientDsn *Client
databases map[string]*Client
opts []ClientOpt
mutex *sync.Mutex
logger *Logger
dsn string
defaultClientDsn *Client
}

func NewClientDatabasesPool(ctx context.Context, dsn string, opts ...ClientOpt) (*ClientDatabasesPool, error) {
l := ctxzap.Extract(ctx)

config, err := pgxpool.ParseConfig(dsn)
if err != nil {
return nil, err
}

var defaultClientWithDatabase *Client
if config.ConnConfig.Database != "" {
l.Info("using default database database config", zap.String("database", config.ConnConfig.Database))
defaultClientWithDatabase, err = New(ctx, dsn, opts...)
if err != nil {
return nil, err
}
}

defaultClientDsn, err := New(ctx, dsn, opts...)
if err != nil {
l.Error("failed to create default database client", zap.Error(err))
return nil, err
}

return &ClientDatabasesPool{
dsn: dsn,
databases: make(map[string]*Client),
opts: opts,
mutex: &sync.Mutex{},
logger: &Logger{},
defaultClientWithDatabase: defaultClientWithDatabase,
defaultClientDsn: defaultClientDsn,
dsn: dsn,
databases: make(map[string]*Client),
opts: opts,
mutex: &sync.Mutex{},
logger: &Logger{},
defaultClientDsn: defaultClientDsn,
}, nil
}

func (c *ClientDatabasesPool) DefaultDatabase(ctx context.Context) string {
if c.defaultClientWithDatabase == nil {
return ""
}

return c.defaultClientWithDatabase.cfg.ConnConfig.Database
}

func (p *ClientDatabasesPool) Default(ctx context.Context) *Client {
return p.defaultClientDsn
}

func (p *ClientDatabasesPool) Get(ctx context.Context, database string) (*Client, string, error) {
l := ctxzap.Extract(ctx)

if database == "" {
if p.defaultClientWithDatabase != nil {
dbName := p.defaultClientWithDatabase.db.Config().ConnConfig.Database
return p.defaultClientWithDatabase, dbName, nil
}

return nil, "", errors.New("database name is required")
}

dbModel, err := p.defaultClientDsn.GetDatabaseById(ctx, database)
if err != nil {
return nil, "", err
@@ -179,3 +144,7 @@ func New(ctx context.Context, dsn string, opts ...ClientOpt) (*Client, error) {

return c, nil
}

func (c *Client) DatabaseName() string {
return c.cfg.ConnConfig.Database
}
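
With `defaultClientWithDatabase` gone, the pool exposes a single DSN-derived default client, and callers read its database via the new `DatabaseName()` method. A hedged call-site sketch follows; the module import path and DSN are assumptions, and the functions used (`NewClientDatabasesPool`, `WithSchemaFilter`, `Default`, `DatabaseName`) are the ones shown in this diff.

```go
package main

import (
	"context"
	"fmt"
	"log"

	// Assumed module path for this repo's pkg/postgres package.
	"github.com/conductorone/baton-postgresql/pkg/postgres"
)

func main() {
	ctx := context.Background()
	dsn := "postgres://user:password@localhost:5432/postgres" // example DSN

	pool, err := postgres.NewClientDatabasesPool(ctx, dsn, postgres.WithSchemaFilter([]string{"public"}))
	if err != nil {
		log.Fatal(err)
	}

	// The default client is built straight from the DSN; DatabaseName reports
	// which database that DSN points at (used by databaseSyncer.List above).
	fmt.Println(pool.Default(ctx).DatabaseName())
}
```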