17 changes: 17 additions & 0 deletions db-connector.go
@@ -10070,6 +10070,23 @@ func GetAllFiles(ctx context.Context, orgId, namespace string) ([]File, error) {
}
}

// If this is a child org, also fetch the parent org's files that are distributed to this child org
foundOrg, err := GetOrg(ctx, orgId)
if err == nil && len(foundOrg.ChildOrgs) == 0 && len(foundOrg.CreatorOrg) > 0 && foundOrg.CreatorOrg != orgId {
parentOrg, err := GetOrg(ctx, foundOrg.CreatorOrg)
if err == nil {
parentFiles, err := GetAllFiles(ctx, parentOrg.Id, namespace)
if err == nil {
for _, f := range parentFiles {
if !ArrayContains(f.SuborgDistribution, orgId) {
continue
}
files = append(files, f)
}
}
}
}

if project.CacheDb {
data, err := json.Marshal(files)
if err != nil {
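The block added above only hands a parent file down when the requesting org is a leaf child org and the file's SuborgDistribution explicitly lists it. Below is a minimal, self-contained sketch of that rule; the helper name, the trimmed-down structs, and the re-implemented arrayContains are illustrative assumptions, not part of this PR.

package main

import "fmt"

// Trimmed-down stand-ins for the real Org and File structs (illustrative only).
type org struct {
	Id         string
	CreatorOrg string
	ChildOrgs  []org
}

type file struct {
	Id                 string
	SuborgDistribution []string
}

// arrayContains mirrors the ArrayContains helper used in the diff.
func arrayContains(haystack []string, needle string) bool {
	for _, item := range haystack {
		if item == needle {
			return true
		}
	}
	return false
}

// inheritedFiles returns the parent files a child org should see,
// following the same filter as the new GetAllFiles block.
func inheritedFiles(child org, parentFiles []file) []file {
	// Only leaf child orgs with a different creator org inherit anything.
	if len(child.ChildOrgs) != 0 || len(child.CreatorOrg) == 0 || child.CreatorOrg == child.Id {
		return nil
	}

	out := []file{}
	for _, f := range parentFiles {
		// Skip files that are not distributed to this specific child org.
		if arrayContains(f.SuborgDistribution, child.Id) {
			out = append(out, f)
		}
	}
	return out
}

func main() {
	child := org{Id: "suborg-1", CreatorOrg: "parent-org"}
	parentFiles := []file{
		{Id: "file-a", SuborgDistribution: []string{"suborg-1"}},
		{Id: "file-b", SuborgDistribution: []string{"suborg-2"}},
	}

	for _, f := range inheritedFiles(child, parentFiles) {
		fmt.Println("inherited:", f.Id) // prints only file-a
	}
}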
251 changes: 244 additions & 7 deletions files.go
@@ -571,17 +571,51 @@ func HandleGetFileNamespace(resp http.ResponseWriter, request *http.Request) {
// FIXME: This double control is silly
fileResponse.Files = append(fileResponse.Files, file)
fileResponse.List = append(fileResponse.List, BaseFile{
Name: file.Filename,
ID: file.Id,
Type: file.Type,
UpdatedAt: file.UpdatedAt,
Md5Sum: file.Md5sum,
Status: file.Status,
FileSize: file.FileSize,
Name: file.Filename,
ID: file.Id,
Type: file.Type,
UpdatedAt: file.UpdatedAt,
Md5Sum: file.Md5sum,
Status: file.Status,
FileSize: file.FileSize,
OrgId: file.OrgId,
SuborgDistribution: file.SuborgDistribution,
})
}
}

// If the current org is a sub org and the file is distributed to it, add the file to the list
foundOrg, err := GetOrg(ctx, user.ActiveOrg.Id)
if err == nil && len(foundOrg.ChildOrgs) == 0 && len(foundOrg.CreatorOrg) > 0 {
parentOrg, err := GetOrg(ctx, foundOrg.CreatorOrg)
if err == nil {
parentFiles, err := GetAllFiles(ctx, parentOrg.Id, namespace)
if err == nil {
for _, file := range parentFiles {

if !ArrayContains(file.SuborgDistribution, user.ActiveOrg.Id) {
continue
}

if file.Namespace == namespace {
fileResponse.Files = append(fileResponse.Files, file)
fileResponse.List = append(fileResponse.List, BaseFile{
Name: file.Filename,
ID: file.Id,
Type: file.Type,
UpdatedAt: file.UpdatedAt,
Md5Sum: file.Md5sum,
Status: file.Status,
FileSize: file.FileSize,
OrgId: file.OrgId,
SuborgDistribution: file.SuborgDistribution,
})
}
}
}
}
}

//log.Printf("[DEBUG] Found %d (%d:%d) files in org %s (%s) for namespace '%s'", len(files), len(fileResponse.Files), len(fileResponse.List), user.ActiveOrg.Name, user.ActiveOrg.Id, namespace)

// Standards to load directly from Github if applicable
@@ -1971,3 +2005,206 @@ func HandleDownloadRemoteFiles(resp http.ResponseWriter, request *http.Request)
resp.WriteHeader(200)
resp.Write([]byte(fmt.Sprintf(`{"success": true}`)))
}

func HandleShareNamespace(resp http.ResponseWriter, request *http.Request) {

cors := HandleCors(resp, request)
if cors {
return
}

user, err := HandleApiAuthentication(resp, request)
if err != nil {
log.Printf("[AUDIT] Api authentication failed in share namespace: %s", err)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

if user.Role != "admin" {
log.Printf("User (%s) isn't admin during namespace share", user.Username)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false, "reason": "only admin can share namespace"}`))
return
}

var namespace string
location := strings.Split(request.URL.String(), "/")
if location[1] == "api" {
if len(location) <= 5 {
log.Printf("Path too short: %d", len(location))
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

namespace = location[5]
}

body, err := ioutil.ReadAll(request.Body)
if err != nil {
log.Printf("Error with body read: %s", err)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

type shareNamespace struct {
SelectedFiles []string `json:"selectedFiles"`
}

var share shareNamespace
err = json.Unmarshal(body, &share)
if err != nil {
log.Printf("Failed unmarshaling (appauth): %s", err)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

if len(namespace) == 0 {
log.Printf("[ERROR] Missing namespace in share namespace")
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false, "reason": "Missing namespace"}`))
return
}

if len(share.SelectedFiles) == 0 {
log.Printf("[ERROR] Missing selectedFiles in share namespace")
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false, "reason": "Missing selectedFiles"}`))
return
}

ctx := GetContext(request)
for _, fileId := range share.SelectedFiles {
file, err := GetFile(ctx, fileId)
if err != nil {
log.Printf("[INFO] File %s not found: %s", fileId, err)
resp.WriteHeader(400)
resp.Write([]byte(`{"success": false}`))
return
}

file.Namespace = namespace
err = SetFile(ctx, *file)
if err != nil {
log.Printf("[ERROR] Failed setting file back to active")
resp.WriteHeader(500)
resp.Write([]byte(`{"success": false, "reason": "Failed setting file to active"}`))
return
}
}

log.Printf("[INFO] Successfully shared namespace %s for %d files", namespace, len(share.SelectedFiles))
resp.WriteHeader(200)
resp.Write([]byte(`{"success": true, "reason": "Namespace shared successfully!"}`))
}
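For reference, a hedged example of calling the new share handler from a Go client. The selectedFiles body shape is taken directly from the shareNamespace struct above; the route, HTTP method, and Bearer header are assumptions inferred from the location[5] parsing, not confirmed by this diff.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Body shape matches the shareNamespace struct in HandleShareNamespace.
	payload, err := json.Marshal(map[string][]string{
		"selectedFiles": {"file_id_1", "file_id_2"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Assumed route based on the location[5] parsing in the handler;
	// adjust to the actual API route registered for HandleShareNamespace.
	url := "https://shuffler.io/api/v1/files/namespaces/my-namespace/share"

	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer YOUR_API_KEY") // admin credentials required

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	fmt.Println("status:", resp.StatusCode)
}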

// Distributes a file to selected sub orgs of the parent org
func HandleSetFileConfig(resp http.ResponseWriter, request *http.Request) {

cors := HandleCors(resp, request)
if cors {
return
}

user, err := HandleApiAuthentication(resp, request)
if err != nil {
log.Printf("[AUDIT] Api authentication failed in load files: %s", err)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

if user.Role != "admin" {
log.Printf("User (%s) isn't admin during file edit config", user.Username)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false, "reason": "only admin can edit file config"}`))
return
}

var fileId string
location := strings.Split(request.URL.String(), "/")
if location[1] == "api" {
if len(location) <= 4 {
log.Printf("Path too short: %d", len(location))
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

fileId = location[4]
}

body, err := ioutil.ReadAll(request.Body)
if err != nil {
log.Printf("Error with body read: %s", err)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

type configFile struct {
Id string `json:"id"`
Action string `json:"action"`
SelectedSuborg []string `json:"selected_suborgs"`
}

var config configFile
err = json.Unmarshal(body, &config)
if err != nil {
log.Printf("Failed unmarshaling (appauth): %s", err)
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false}`))
return
}

if config.Id != fileId {
resp.WriteHeader(401)
resp.Write([]byte(`{"success": false, "reason": "Bad ID match"}`))
return
}

ctx := GetContext(request)
file, err := GetFile(ctx, fileId)
if err != nil {
log.Printf("[INFO] File %s not found: %s", fileId, err)
resp.WriteHeader(400)
resp.Write([]byte(`{"success": false}`))
return
}

if config.Action == "suborg_distribute" {

if len(config.SelectedSuborg) == 0 {
file.SuborgDistribution = []string{}
} else {
file.SuborgDistribution = config.SelectedSuborg
}

err = SetFile(ctx, *file)
if err != nil {
log.Printf("[ERROR] Failed setting file back to active")
resp.WriteHeader(500)
resp.Write([]byte(`{"success": false, "reason": "Failed setting file to active"}`))
return
}

}

// Invalidate the cached file list for each child org so the distributed file shows up there
foundOrg, err := GetOrg(ctx, user.ActiveOrg.Id)
if err == nil {
for _, childOrg := range foundOrg.ChildOrgs {
cacheKey := fmt.Sprintf("files_%s_%s", childOrg.Id, file.Namespace)
DeleteCache(ctx, cacheKey)
}
}

log.Printf("[INFO] Successfully updated file: %s for org: %s", file.Id, user.ActiveOrg.Id)

resp.WriteHeader(200)
resp.Write([]byte(`{"success": true, "reason": "File updated successfully!"}`))

}
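Similarly, a hedged client sketch for the suborg_distribute action. The JSON field names (id, action, selected_suborgs) come straight from the configFile struct above; the route is an assumption inferred from the location[4] parsing. Sending an empty selected_suborgs list clears the distribution, matching the handler.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

type fileConfigRequest struct {
	Id              string   `json:"id"`
	Action          string   `json:"action"`
	SelectedSuborgs []string `json:"selected_suborgs"`
}

func main() {
	fileID := "file_id_1"

	// Field names match the configFile struct in HandleSetFileConfig.
	payload, err := json.Marshal(fileConfigRequest{
		Id:              fileID,
		Action:          "suborg_distribute",
		SelectedSuborgs: []string{"suborg_id_1", "suborg_id_2"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Assumed route based on the location[4] parsing in the handler; the ID in the
	// path must match the "id" field in the body or the handler rejects the request.
	url := fmt.Sprintf("https://shuffler.io/api/v1/files/%s/config", fileID)

	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer YOUR_API_KEY") // admin credentials required

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	fmt.Println("status:", resp.StatusCode)
}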
81 changes: 42 additions & 39 deletions structs.go
@@ -1434,38 +1434,39 @@ type NotificationCached struct {
}

type File struct {
Id string `json:"id" datastore:"id"`
ReferenceFileId string `json:"reference_file_id" datastore:"reference_file_id"`
Type string `json:"type" datastore:"type"`
CreatedAt int64 `json:"created_at" datastore:"created_at"`
UpdatedAt int64 `json:"updated_at" datastore:"updated_at"`
MetaAccessAt int64 `json:"meta_access_at" datastore:"meta_access_at"`
DownloadAt int64 `json:"last_downloaded" datastore:"last_downloaded"`
Description string `json:"description" datastore:"description"`
ExpiresAt string `json:"expires_at" datastore:"expires_at"`
Status string `json:"status" datastore:"status"`
Filename string `json:"filename" datastore:"filename"`
URL string `json:"url" datastore:"org"`
OrgId string `json:"org_id" datastore:"org_id"`
WorkflowId string `json:"workflow_id" datastore:"workflow_id"`
Workflows []string `json:"workflows" datastore:"workflows"`
DownloadPath string `json:"download_path" datastore:"download_path"`
Md5sum string `json:"md5_sum" datastore:"md5_sum"`
Sha256sum string `json:"sha256_sum" datastore:"sha256_sum"`
FileSize int64 `json:"filesize" datastore:"filesize"`
Duplicate bool `json:"duplicate" datastore:"duplicate"`
Subflows []string `json:"subflows" datastore:"subflows"`
Tags []string `json:"tags" datastore:"tags"`
StorageArea string `json:"storage_area" datastore:"storage_area"`
Etag int `json:"etag" datastore:"etag"`
ContentType string `json:"content_type" datastore:"content_type"`
UpdatedBy string `json:"updated_by" datastore:"updated_by"`
CreatedBy string `json:"created_by" datastore:"created_by"`
Namespace string `json:"namespace" datastore:"namespace"`
Encrypted bool `json:"encrypted" datastore:"encrypted"`
IsEdited bool `json:"isedited" datastore:"isedited"`
LastEditor string `json:"lasteditor" datastore:"lasteditor"`
OriginalMd5sum string `json:"Originalmd5_sum" datastore:"Originalmd5_sum"`
Id string `json:"id" datastore:"id"`
ReferenceFileId string `json:"reference_file_id" datastore:"reference_file_id"`
Type string `json:"type" datastore:"type"`
CreatedAt int64 `json:"created_at" datastore:"created_at"`
UpdatedAt int64 `json:"updated_at" datastore:"updated_at"`
MetaAccessAt int64 `json:"meta_access_at" datastore:"meta_access_at"`
DownloadAt int64 `json:"last_downloaded" datastore:"last_downloaded"`
Description string `json:"description" datastore:"description"`
ExpiresAt string `json:"expires_at" datastore:"expires_at"`
Status string `json:"status" datastore:"status"`
Filename string `json:"filename" datastore:"filename"`
URL string `json:"url" datastore:"org"`
OrgId string `json:"org_id" datastore:"org_id"`
WorkflowId string `json:"workflow_id" datastore:"workflow_id"`
Workflows []string `json:"workflows" datastore:"workflows"`
DownloadPath string `json:"download_path" datastore:"download_path"`
Md5sum string `json:"md5_sum" datastore:"md5_sum"`
Sha256sum string `json:"sha256_sum" datastore:"sha256_sum"`
FileSize int64 `json:"filesize" datastore:"filesize"`
Duplicate bool `json:"duplicate" datastore:"duplicate"`
Subflows []string `json:"subflows" datastore:"subflows"`
Tags []string `json:"tags" datastore:"tags"`
StorageArea string `json:"storage_area" datastore:"storage_area"`
Etag int `json:"etag" datastore:"etag"`
ContentType string `json:"content_type" datastore:"content_type"`
UpdatedBy string `json:"updated_by" datastore:"updated_by"`
CreatedBy string `json:"created_by" datastore:"created_by"`
Namespace string `json:"namespace" datastore:"namespace"`
Encrypted bool `json:"encrypted" datastore:"encrypted"`
IsEdited bool `json:"isedited" datastore:"isedited"`
LastEditor string `json:"lasteditor" datastore:"lasteditor"`
OriginalMd5sum string `json:"Originalmd5_sum" datastore:"Originalmd5_sum"`
SuborgDistribution []string `json:"suborg_distribution" datastore:"suborg_distribution"`
}

type DisabledRules struct {
@@ -2715,13 +2716,15 @@ type DataToSend struct {
}

type BaseFile struct {
Name string `json:"name"`
ID string `json:"id"`
Type string `json:"type"`
UpdatedAt int64 `json:"updated_at"`
Md5Sum string `json:"md5_sum"`
Status string `json:"status"`
FileSize int64 `json:"filesize"`
Name string `json:"name"`
ID string `json:"id"`
Type string `json:"type"`
UpdatedAt int64 `json:"updated_at"`
Md5Sum string `json:"md5_sum"`
Status string `json:"status"`
FileSize int64 `json:"filesize"`
OrgId string `json:"org_id"`
SuborgDistribution []string `json:"suborg_distribution"`
}

type FileResponse struct {