// Toolbox/goDataverse/goDataverse.go (1808 lines, 61 KiB, Go)

/**
= Creative Commons Lizenzvertrag =
Diese Software ist von der archium GmbH, Gera ist lizenziert unter einer Creative Commons Namensnennung - Nicht kommerziell - Keine Bearbeitungen 4.0 International Lizenz. (http://creativecommons.org/licenses/by-nc-nd/4.0/deed.de)
Individuelle über diese Lizenz hinausgehende Berechtigungen können Sie unter https://archium.org erhalten.
= Creative Commons License =
Software by archium GmbH, Gera is licensed under a Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License. (http://creativecommons.org/licenses/by-nc-nd/4.0/)
Individual permissions beyond the scope of this license may be available at https://archium.org.
**/
package goDataverse
import (
"errors"
"fmt"
// "html"
"log"
"regexp"
// "sort"
"strings"
// "time"
// "reflect"
json "encoding/json"
// sql "database/sql"
def "Toolbox/defaults"
dst "Toolbox/goDataverse/dataset"
dvr "Toolbox/goDataverse/dataverse"
// fls "Toolbox/goDataverse/files"
sch "Toolbox/goDataverse/search"
tol "Toolbox/goDataverse/tools"
usr "Toolbox/goDataverse/user"
gjs "github.com/tidwall/gjson"
)
// #############################################################################
// ### lokale benutzte funktionen
// #############################################################################
// grid_getcolumns_user reads the grid column definitions (as of 20210308 only
// used for the user list). It converts the "bundleStrings" JSON object into a
// JSON array literal and hands it to the SQL function html."GetColumnsByType",
// which returns the finished column-definition string.
// Returns the raw DB result on success, otherwise an empty `"cols":[[...]]`
// fragment. Fix: connection/query/scan errors are now logged instead of being
// silently dropped.
func grid_getcolumns_user(labels gjs.Result, _dparam tol.TDVParams, _fparam tol.TFVParams) string {
	cols := `"cols":[[` + "\n"
	var columns string = ""
	// only proceed when the bundle strings really are a JSON object
	if labels.IsObject() {
		conn, err := tol.CheckConnection(_fparam)
		if err != nil {
			def.LogError("grid_getcolumns_user(connect)", err)
		} else {
			def.LogMessage("grid_getcolumns_user(param)", fmt.Sprintf("what: %v, who: %v", _fparam.FP_what, _fparam.FP_who), def.DEF_logdebg)
			// rewrite {a:1,b:2} into [{a:1},{b:2}] so the SQL side gets an array;
			// NOTE(review): this textual rewrite assumes flat key/value pairs with
			// no braces or commas inside values - confirm against the API payload.
			csql := labels.String()
			csql = strings.ReplaceAll(csql, "{", "[{")
			csql = strings.ReplaceAll(csql, "}", "}]")
			csql = strings.ReplaceAll(csql, ",", "},{")
			csql = "SELECT * FROM html.\"GetColumnsByType\"('" + csql + "'::JSON, "
			csql = csql + fmt.Sprintf("%v", def.DEF_customerid) + ", '"
			csql = csql + _fparam.FP_what + "', 'column." + _fparam.FP_who + "', '"
			csql = csql + _dparam.DP_localeCode + "', true);"
			def.LogMessage("grid_getcolumns_user(sql)", csql, def.DEF_logdebg)
			labelstr, err := conn.Query(csql)
			if err != nil {
				def.LogError("grid_getcolumns_user(query)", err)
			} else {
				defer labelstr.Close()
				for labelstr.Next() {
					if err := labelstr.Scan(&columns); err == nil {
						// the first row already carries the complete column JSON
						return columns
					} else {
						def.LogError("grid_getcolumns_user(scan)", err)
					}
				}
			}
		}
	}
	// fallback: empty column list
	cols = cols + "\n]],\n"
	return cols
}
// grid_getcolumns_map reads grid column definitions (as of 20210412) from the
// given column map via html."GetColumnsByType".
// Returns the raw DB result on success, otherwise an empty `"cols":[[...]]`
// fragment. Fixes: the result set is now closed (resource leak - the sibling
// grid_getcolumns_user already did this) and errors are logged.
func grid_getcolumns_map(_prefix string, _map tol.ColMap, _dparam tol.TDVParams, _fparam tol.TFVParams) string {
	cols := `"cols":[[` + "\n"
	var columns string = ""
	conn, err := tol.CheckConnection(_fparam)
	if err != nil {
		def.LogError("grid_getcolumns_map(connect)", err)
	} else {
		// turn Go's textual map representation ("map[k:v ...]") into a JSON array
		var csql string = fmt.Sprintf("%v\n", _map)
		csql = strings.ReplaceAll(csql, "map[", "[{")
		csql = strings.ReplaceAll(csql, "]", "}]")
		csql = strings.ReplaceAll(csql, "\" \"", "\"},{\"")
		csql = "SELECT * FROM html.\"GetColumnsByType\"('" + csql + "'::JSON, "
		csql = csql + fmt.Sprintf("%v", def.DEF_customerid) + ", "
		csql = csql + "'" + _fparam.FP_what + "', '" + _prefix + _fparam.FP_who + "', "
		csql = csql + "'" + _dparam.DP_localeCode + "', true);"
		labelstr, err := conn.Query(csql)
		if err != nil {
			def.LogError("grid_getcolumns_map(query)", err)
		} else {
			// close the result set (was missing before - resource leak)
			defer labelstr.Close()
			for labelstr.Next() {
				if err := labelstr.Scan(&columns); err == nil {
					return columns
				}
			}
		}
	}
	// fallback: empty column list
	cols = cols + "\n]],\n"
	return cols
}
// grid_getrows_map reads the grid rows (as of 20210412) for the given row map
// via html."GetGridRows". For _forwhat == "dataset" the map values are
// JSON-marshalled per key; for everything else Go's textual map representation
// is rewritten into a JSON array literal.
// Returns the raw DB result on success, otherwise an empty `"rows":[...]`
// fragment. Fixes: the inner `rows` no longer shadows the outer one, the two
// byte-identical query branches are merged, and the no-op
// ReplaceAll(rowsql, "'", "'") plus the empty debug switch were removed.
func grid_getrows_map(_map tol.RowMap, _fparam tol.TFVParams, _forwhat string) string {
	rows := `"rows":[` + "\n"
	conn, err := tol.CheckConnection(_fparam)
	if err == nil {
		var rowsql string
		switch _forwhat {
		case "dataset":
			// assemble `[{key:[<json of values>]},{...}]` by hand
			var iarr int = 0
			var sarr string = "[{"
			for ikey, ielement := range _map {
				jsn, _ := json.Marshal(ielement)
				if iarr > 0 {
					sarr = sarr + ",\n"
				}
				sarr = sarr + fmt.Sprintf(`%v:[%v]`, ikey, strings.ReplaceAll(strings.ReplaceAll(string(jsn), `"\"`, `"`), `\""`, `"`))
				iarr++
			}
			rowsql = sarr + `}]`
			rowsql = strings.ReplaceAll(rowsql, `","`, `"},{"`)
		default:
			// rewrite Go's "map[...]" textual representation into a JSON array
			rowsql = fmt.Sprintf("%v", _map)
			rowsql = strings.ReplaceAll(rowsql, "map[", "[{")
			rowsql = strings.ReplaceAll(rowsql, "] ", "], ")
			rowsql = strings.ReplaceAll(rowsql, "]", "}]")
			rowsql = strings.ReplaceAll(rowsql, "\" \"", "\"},{\"")
		}
		rowsql = "SELECT * FROM html.\"GetGridRows\"('" + rowsql + "'::JSON);"
		rowstr, err := conn.Query(rowsql)
		if err == nil {
			defer rowstr.Close()
			var result string
			for rowstr.Next() {
				if err := rowstr.Scan(&result); err == nil {
					// the first row already carries the complete rows JSON
					return result
				}
			}
		}
	}
	// fallback: empty row list
	rows = rows + "\n]\n"
	return rows
}
// grid_getrows_user renders the `"rows":[...]` JSON fragment for the user grid
// (as of 20210308). Array values are flattened to a comma-separated string;
// arrays are skipped entirely when _withoutarr is set, object values when
// _withoutobj is set. All values pass through tol.DoFilterByStr.
// Fixes: the separating comma is now keyed to actually-emitted objects (before,
// a non-object first element produced invalid JSON `[,{...}`), and the no-op
// `details + ""` was removed.
func grid_getrows_user(lines gjs.Result, _withoutarr, _withoutobj bool) string {
	rows := `"rows":[`
	if lines.IsArray() {
		emitted := 0
		for _, line := range lines.Array() {
			// rows are built only from object entries
			if line.IsObject() {
				if emitted > 0 {
					rows = rows + ","
				}
				rows = rows + "\n\t{\n"
				// iterate the row's columns
				j := 0
				line.ForEach(func(key, value gjs.Result) bool {
					details := value.String()
					if value.IsArray() {
						if _withoutarr {
							return true // skip array-valued columns entirely
						}
						parts := []string{}
						for _, detail := range value.Array() {
							parts = append(parts, detail.String())
						}
						details = strings.Join(parts, ", ")
					}
					if _withoutobj && value.IsObject() {
						return true // skip object-valued columns
					}
					if j > 0 {
						rows = rows + ",\n"
					}
					rows = rows + "\t\t" + `"` + key.String() + `"` + ":" + `"` + tol.DoFilterByStr(details) + `"`
					j++
					return true
				})
				rows = rows + "\n\t}"
				emitted++
			}
		}
	}
	rows = rows + "\n]"
	return rows
}
// file_gettranslated_columns reads translated column templates (as of
// 20210415) for the given column map via html."GetColumnsByType" and returns
// them indexed 0..n-1 together with the number of rows read.
func file_gettranslated_columns(_map tol.ColMap, _prekey, _type string, _dparam tol.TDVParams, _fparam tol.TFVParams) (map[int64]string, int64) {
	translated := make(map[int64]string)
	var total int64 = 0
	var template string = ""
	conn, err := tol.CheckConnection(_fparam)
	if err == nil {
		// optional type suffix for the translation key
		suffix := ""
		if len(_type) > 0 {
			suffix = "." + _type
		}
		// turn Go's textual map representation into a JSON array literal
		query := fmt.Sprintf("%v\n", _map)
		query = strings.ReplaceAll(query, "map[", "[{")
		query = strings.ReplaceAll(query, "] ", "], ")
		query = strings.ReplaceAll(query, "]", "}]")
		query = strings.ReplaceAll(query, "\" \"", "\"},{\"")
		query = "SELECT * FROM html.\"GetColumnsByType\"('" + query + "'::JSON, " +
			fmt.Sprintf("%v", def.DEF_customerid) + ", " +
			"'" + _fparam.FP_what + "', '" + _prekey + "." + _fparam.FP_who + suffix + "', " +
			"'" + _dparam.DP_localeCode + "', true) AS rowtemplate;"
		result, err := conn.Query(query)
		if err == nil {
			defer result.Close()
			for result.Next() {
				if err := result.Scan(&template); err == nil {
					translated[total] = template
					total++
				} else {
					log.Printf("file_gettranslated_columns(%v) > %v\n", _fparam.FP_who, err)
				}
			}
		}
	}
	return translated, total
}
// get_dataverse_ids splits the content listing of a dataverse (as of 20210810,
// used by "dataverse by alias") into two id lists: plain ids for child
// dataverses and persistent ids (protocol:authority/identifier) for datasets.
func get_dataverse_ids(_dataverses gjs.Result) ([]string, []string) {
	dvIDs := []string{}
	dsIDs := []string{}
	if !_dataverses.IsArray() {
		return dvIDs, dsIDs
	}
	for _, entry := range _dataverses.Array() {
		if !entry.IsObject() {
			continue
		}
		id := tol.GetJsonResult(entry, "id")
		switch tol.GetJsonResult(entry, "type").String() {
		case "dataverse":
			dvIDs = append(dvIDs, id.String())
		case "dataset":
			// datasets are addressed via their persistent id
			pid := tol.GetJsonString(entry, "protocol") + ":" +
				tol.GetJsonString(entry, "authority") + "/" +
				tol.GetJsonString(entry, "identifier")
			dsIDs = append(dsIDs, pid)
		default:
			// other types (e.g. files) are ignored here
		}
	}
	return dvIDs, dsIDs
}
// GetMenuFromDatasets renders menu entries for all datasets in the search
// result _datasets that belong directly to the dataverse alias _aliasdv.
// Each dataset gets a simple "n file(s) found." child entry; the %count% and
// %children% placeholders of the templates are resolved here.
// Returns the JSON fragment and the number of entries emitted.
// Fixes: the empty `else {}` branch was removed and counters use `++`.
func GetMenuFromDatasets(_datasets string, _aliasdv string, _level int) (string, int) {
	var idcount int = 0
	var sdjson string = ""
	status := gjs.Get(_datasets, "status")
	if status.String() == "OK" {
		datads := gjs.Get(_datasets, "data")
		if datads.IsObject() {
			total := gjs.Get(datads.String(), "total_count")
			if total.Int() > 0 {
				itemsds := gjs.Get(datads.String(), "items")
				if itemsds.IsArray() {
					for _, dvalue := range itemsds.Array() {
						aliasds := gjs.Get(dvalue.String(), "identifier_of_dataverse")
						// only datasets that live directly in the requested dataverse
						if _aliasdv != aliasds.String() {
							continue
						}
						ilevel := _level + idcount + 1
						if idcount > 0 {
							sdjson = sdjson + ","
						}
						sdjson += get_menuchild_dataset(dvalue, "dataset", ilevel)
						// attach the simple "n file(s) found." child entry
						ifcount := int(gjs.Get(dvalue.String(), "fileCount").Int())
						sfjson := get_menuchild_file_simple("file", ifcount, (ilevel*10)+1)
						sdjson = strings.ReplaceAll(sdjson, "%count%", fmt.Sprintf(`<span style=\"color:#337ab7;font-size:80&#37;;\">(%v) - </span>`, ifcount))
						sdjson = strings.ReplaceAll(sdjson, "%children%", "["+sfjson+"]")
						idcount++
					}
				}
			}
		}
	}
	return sdjson, idcount
}
// get_menuchild_dataverse renders one menu entry of type "dataverse" from the
// dataverse JSON (as of 20210319, used by "dataverse by alias"). The entry is
// built from the def.MenuChildDataverse template; %title%/%name%/%icon% are
// only filled when the "id" and "name" fields can be found in the JSON.
func get_menuchild_dataverse(_dataverse gjs.Result, _type, _alias string, _level int) string {
	entry := ""
	if !_dataverse.IsObject() {
		return entry
	}
	var found bool
	var value gjs.Result
	if _type == "dataverse" {
		taba, tabb := tol.GetTab10(_level)
		entry = strings.ReplaceAll(def.MenuChildDataverse(), "%alias%", _alias)
		entry = strings.ReplaceAll(entry, "%level%", fmt.Sprintf("%v", _level))
		entry = strings.ReplaceAll(entry, "%type%", _type)
		value, found = tol.JsonSearch(_dataverse, "", "id", 0, value)
		def.LogMessage("get_menuchild_dataverse(dataverse)", fmt.Sprintf("%v, %v", found, value), def.DEF_logdebg)
		if found {
			entry = strings.ReplaceAll(entry, "%id%", value.String())
			value, found = tol.JsonSearch(_dataverse, "", "name", 0, value)
			if found {
				entry = strings.ReplaceAll(entry, "%title%", tol.DoFilterByStr(value.String()))
				entry = strings.ReplaceAll(entry, "%name%", tol.DoFilterByStr(value.String()))
				entry = strings.ReplaceAll(entry, "%icon%", "%tabb%&#34;iconCls&#34;:&#34;icon-dataverse&#34;,\n")
				entry = strings.ReplaceAll(entry, "%taba%", taba)
				entry = strings.ReplaceAll(entry, "%tabb%", tabb)
			}
		}
	}
	return entry
}
// get_menuchild_dataset renders one menu entry of type "dataset" from a search
// result item (as of 20210810, used by "dataverse by alias"), based on the
// def.MenuChildDataset template.
// NOTE(review): %id% is first filled from "versionId"; the later replacement
// with "global_id" therefore only takes effect if the template still contains
// an unreplaced %id% - confirm which id the template is meant to carry.
func get_menuchild_dataset(_dataset gjs.Result, _type string, _level int) string {
	entry := ""
	if !_dataset.IsObject() {
		return entry
	}
	if _type == "dataset" {
		taba, tabb := tol.GetTab10(_level)
		entry = strings.ReplaceAll(def.MenuChildDataset(), "%type%", _type)
		entry = strings.ReplaceAll(entry, "%level%", fmt.Sprintf("%v", _level))
		name := gjs.Get(_dataset.String(), "name")
		if name.Type == gjs.String {
			entry = strings.ReplaceAll(entry, "%title%", tol.DoFilterByStr(name.String()))
			version := gjs.Get(_dataset.String(), "versionId")
			if version.Type == gjs.Number {
				entry = strings.ReplaceAll(entry, "%id%", tol.DoFilterByStr(version.String()))
				globalID := gjs.Get(_dataset.String(), "global_id")
				if globalID.Type == gjs.String {
					entry = strings.ReplaceAll(entry, "%id%", tol.DoFilterByStr(globalID.String()))
					entry = strings.ReplaceAll(entry, "%datasetPid%", tol.DoFilterByStr(globalID.String()))
					subjects := gjs.Get(_dataset.String(), "subjects")
					if subjects.IsArray() {
						entry = strings.ReplaceAll(entry, "%subjects%", fmt.Sprintf("%v", subjects.String()))
					}
					entry = strings.ReplaceAll(entry, "%icon%", "%tabb%&#34;iconCls&#34;:&#34;icon-dataset&#34;,\n")
					entry = strings.ReplaceAll(entry, "%taba%", taba)
					entry = strings.ReplaceAll(entry, "%tabb%", tabb)
				}
			}
		}
	}
	return entry
}
// get_file_icon maps a file's MIME content type ("dataFile.contentType") to an
// icon CSS class (as of 20210809). Code-like text and application/json get
// "icon-code"; audio/image/video get their own icons; other application/*
// types get "icon-package"; everything else falls back to "icon-file".
func get_file_icon(_file gjs.Result) string {
	icon := "icon-file"
	content := gjs.Get(_file.String(), "dataFile.contentType")
	if content.Type != gjs.String {
		return icon
	}
	// split "major/minor"; minor may be absent
	parts := strings.Split(content.String(), "/")
	major := parts[0]
	minor := ""
	if len(parts) > 1 {
		minor = parts[1]
	}
	switch major {
	case "text":
		switch minor {
		case "css", "html", "x-go", "x-tex", "javascript":
			icon = "icon-code"
		}
	case "audio":
		icon = "icon-audio"
	case "image":
		icon = "icon-image"
	case "video":
		icon = "icon-video"
	case "application":
		icon = "icon-package"
		if minor == "json" {
			icon = "icon-code"
		}
	}
	return icon
}
// get_menuchild_file renders one menu entry of type "file" from the file JSON
// (as of 20210326, used by "dataverse by alias"), based on the
// def.MenuChildFile template. %id%/%icon% are only filled when the "label" and
// "id" fields can be found in the JSON.
func get_menuchild_file(_file gjs.Result, _type string, _level int) string {
	entry := ""
	var value gjs.Result
	var found bool
	if _type == "file" {
		taba, tabb := tol.GetTab10(_level)
		entry = strings.ReplaceAll(def.MenuChildFile(), "%type%", _type)
		entry = strings.ReplaceAll(entry, "%level%", fmt.Sprintf("%v", _level))
		value, found = tol.JsonSearch(_file, "", "label", 0, value)
		if found {
			entry = strings.ReplaceAll(entry, "%label%", tol.DoFilterByStr(value.String()))
			value, found = tol.JsonSearch(_file, "", "id", 0, value)
			if found {
				entry = strings.ReplaceAll(entry, "%id%", value.String())
				entry = strings.ReplaceAll(entry, "%icon%", "%tabb%&#34;iconCls&#34;:&#34;"+get_file_icon(_file)+"&#34;,\n")
				entry = strings.ReplaceAll(entry, "%taba%", taba)
				entry = strings.ReplaceAll(entry, "%tabb%", tabb)
			}
		}
	}
	return entry
}
// get_menuchild_file_simple renders a single summary menu entry
// ("n file(s) found.") instead of one entry per file (as of 20210326, used by
// "dataverse by alias"). Returns "" when the file count is zero or the type is
// not "file".
func get_menuchild_file_simple(_type string, _ifcount int, _level int) string {
	var entry string = ""
	if _ifcount <= 0 {
		return entry
	}
	// singular/plural label
	label := "file(s)"
	if _ifcount == 1 {
		label = "file"
	}
	label = fmt.Sprintf("%v %v found.", _ifcount, label)
	if _type == "file" {
		taba, tabb := tol.GetTab10(_level)
		entry = strings.ReplaceAll(def.MenuChildFileSimple(), "%type%", _type)
		entry = strings.ReplaceAll(entry, "%level%", fmt.Sprintf("%v", _level))
		entry = strings.ReplaceAll(entry, "%label%", label)
		entry = strings.ReplaceAll(entry, "%icon%", "%tabb%&#34;iconCls&#34;:&#34;icon-file&#34;,\n")
		entry = strings.ReplaceAll(entry, "%taba%", taba)
		entry = strings.ReplaceAll(entry, "%tabb%", tabb)
	}
	return entry
}
// #############################################################################
// ### exportierte funktionen
// #############################################################################
// CheckUserValidity checks the API user against the configured site and
// returns (valid, isSuperuser, userID, err). Errors are logged and passed on.
func CheckUserValidity(_fvp tol.TFVParams, _dvp tol.TDVParams) (bool, bool, string, error) {
	siteURL := tol.GetSiteUrl(_dvp)
	apiKey := tol.GetApiKey(_dvp)
	valid, super, id, err := usr.IsValidUser(siteURL, apiKey)
	if err != nil {
		def.LogError("CheckUserValidity()", err)
	}
	return valid, super, id, err
}
// CheckUserSuperuser reports whether the API user is a superuser on the
// configured site. Errors are logged and passed on.
func CheckUserSuperuser(_dvp tol.TDVParams) (bool, error) {
	siteURL := tol.GetSiteUrl(_dvp)
	apiKey := tol.GetApiKey(_dvp)
	valid, err := usr.IsSuperUser(siteURL, apiKey)
	if err != nil {
		def.LogError("CheckUserSuperuser()", err)
	}
	return valid, err
}
// ToggleSuperUser flips the superuser flag of the user _id on the configured
// site and returns the raw API response as a string. Errors are logged and
// passed on.
func ToggleSuperUser(_dvp tol.TDVParams, _id string) (string, error) {
	siteURL := tol.GetSiteUrl(_dvp)
	apiKey := tol.GetApiKey(_dvp)
	response, err := usr.ToggleSuperUser(siteURL, apiKey, _id)
	if err != nil {
		def.LogError("ToggleSuperUser()", err)
	}
	return fmt.Sprintf("%s", response), err
}
// GetErrorByDataverse builds an HTML-ish error description (site URL,
// persistent id, API key, error text) and wraps it via def.GetErrorByString
// into a JSON object string.
// NOTE(review): this output exposes the API key to the caller/UI - confirm
// that this is intended before showing it to end users.
func GetErrorByDataverse(_dvp tol.TDVParams, _error error, _type, _label string) string {
	var b strings.Builder
	b.WriteString("> Site-URL: '" + tol.GetSiteUrl(_dvp) + "'<br>")
	b.WriteString("> Persistent-ID: '" + _dvp.DP_datasetPid + "'<br>")
	b.WriteString("> Api-key: '" + tol.GetApiKey(_dvp) + "'<br><br>")
	b.WriteString("There went something wrong:<br>")
	b.WriteString("> '%error%'")
	sjson := strings.Replace(b.String(), "%error%", fmt.Sprintf("%v", _error), -1)
	return "{\n" + def.GetErrorByString(sjson, _type, _label) + "\n}"
}
// GetDataverseName resolves the display name of a dataverse given its alias.
// On any failure (API error, bad JSON, status != OK) the alias itself is
// returned as the name; the API error (if any) is logged and passed on.
func GetDataverseName(_dvp tol.TDVParams, _alias string) (string, error) {
	name := _alias
	aliasdv, err := dvr.GetDataverseByIdOrAlias(_dvp, _alias)
	if err != nil {
		def.LogError("GetDataverseByIdOrAlias(error)", err)
		return name, err
	}
	namedv := tol.GetObjectFromStr(aliasdv)
	if namedv.IsObject() && gjs.Get(namedv.String(), "status").String() == "OK" {
		name = gjs.Get(namedv.String(), "data.name").String()
	}
	return name, err
}
// GetDatasetsFromAlias reads all datasets of the subtree _alias via the simple
// search API (as of 20210810) and caches the raw result per session (SUID) in
// the package-level map tol.Ds. Returns the cached or freshly fetched result,
// or an error when the request carries no SUID.
// Fix: the duplicated fetch-and-store logic of the two cache branches is
// folded into one path.
// NOTE(review): tol.Ds is package-level state accessed without locking -
// confirm this is only ever hit from a single goroutine.
func GetDatasetsFromAlias(_dvp tol.TDVParams, _fvp tol.TFVParams, _alias string) (string, error) {
	if tol.Ds == nil {
		tol.Ds = make(tol.TDatasetEntries, 0)
	}
	if len(_fvp.FP_suid) == 0 {
		return "", errors.New("SUID not found.")
	}
	// fetch only when the cache has no (non-empty) entry for this session
	dssets, ok := tol.Ds[_fvp.FP_suid]
	if !ok || len(dssets) == 0 {
		dssets, _ = sch.SearchSimple(_dvp, "&subtree="+_alias, "&type=dataset", ``)
		tol.Ds[_fvp.FP_suid] = dssets
	}
	return dssets, nil
}
// GetMenuFromDataverse recursively builds the menu JSON for the dataverse
// identified by _alias (as of 20210308). For every child dataverse one level
// is descended; the datasets of each child are attached via
// GetMenuFromDatasets. Dataset children (files) are intentionally no longer
// expanded - see the note in the ids_ds branch.
// Fixes: the no-op self-assignment `idcount = idcount` and ~80 lines of dead
// commented-out code (old recursion variant, old file expansion) were removed.
func GetMenuFromDataverse(_dvp tol.TDVParams, _fvp tol.TFVParams,
	_alias, _tab string, _level int, _first bool) (string, error) {
	res, err := dvr.GetContentByAlias(_dvp, _alias)
	if err != nil {
		def.LogError("GetContentByAlias(menu)", err)
		return res, err
	}
	// complete dataset list for the whole subtree (cached per SUID)
	sdatasets, err := GetDatasetsFromAlias(_dvp, _fvp, _alias)
	if err != nil {
		log.Printf("GetMenuFromDataverse(%v) > menu-set: %v, %v\n", _alias, len(sdatasets), err)
	}
	sjson := ""
	smenu := ""
	status := gjs.Get(res, "status")
	icount := 0
	if status.String() == "OK" {
		dataverses := gjs.Get(res, "data")
		ids_dv, ids_ds := get_dataverse_ids(dataverses)
		// child dataverses
		if len(ids_dv) > 0 {
			idv := len(ids_dv)
			for i := 0; i < idv; i++ {
				resdv, err := dvr.GetDataverseByIdOrAlias(_dvp, ids_dv[i])
				if err != nil {
					def.LogError("GetMenuFromDataverse(error)", err)
				}
				status := gjs.Get(resdv, "status")
				if status.String() == "OK" {
					aliasdv := gjs.Get(resdv, "data.alias").String()
					if len(aliasdv) > 0 {
						def.LogMessage("GetMenuFromDataverse(dataverse)", fmt.Sprintf("%v, %v", ids_dv[i], aliasdv), def.DEF_logdebg)
						datadv := gjs.Get(resdv, "data")
						if icount > 0 {
							sjson = sjson + ","
						}
						icount = icount + 1
						sjson = sjson + get_menuchild_dataverse(datadv, "dataverse", aliasdv, _level+icount)
						// datasets that belong directly to this child dataverse
						var sdjson string = ``
						var idcount int = 0
						if err == nil {
							sdjson, idcount = GetMenuFromDatasets(sdatasets, aliasdv, (_level+icount)*10)
						}
						sjson = strings.ReplaceAll(sjson, "%count%", fmt.Sprintf(`<span style=\"color:#c55b28;font-size:80&#37;;\">(%v) - </span>`, idcount))
						sdjson = strings.ReplaceAll(sdjson, "%children%", "")
						// recurse into the child dataverse, then append its datasets
						smenu, err = GetMenuFromDataverse(_dvp, _fvp, aliasdv, "\t"+_tab, (_level+icount+1)*10, false)
						if icount > 0 {
							if len(smenu) > 0 {
								if len(sdjson) > 0 {
									smenu = smenu + "," + sdjson
								}
							} else {
								smenu = sdjson
							}
						}
						if err != nil {
							def.LogError("GetMenuFromDataverse()", err)
							return sjson, err
						}
						sjson = strings.Replace(sjson, "%children%", "["+smenu+"]", -1)
					} else {
						message := errors.New("Alias does not exist for '" + ids_dv[i] + "'.")
						def.LogMessage2Level("GetDataverseByIdOrAlias()", fmt.Sprintf("%v: %v", ids_dv[i], message), def.DEF_logerro)
					}
				} else {
					message := gjs.Get(resdv, "message")
					def.LogMessage2Level("GetDataverseByIdOrAlias()", fmt.Sprintf("%v: %v", ids_dv[i], message), def.DEF_logerro)
				}
			}
			// any leftover %children% placeholders become empty arrays
			sjson = strings.Replace(sjson, "%children%", "[]", -1)
		}
		// datasets directly below this dataverse
		if len(ids_ds) > 0 {
			// ##################################################################
			// => since 20210803 the dataset "children" (files) are intentionally
			//    NOT resolved here anymore; this cuts the payload by ~50%.
			//    The previous implementation lives in version control.
			// ##################################################################
		}
	} else {
		message := gjs.Get(res, "message")
		if message.Type != gjs.String {
			message.Str = "No message found in result (API - problem?)."
		}
		err := errors.New(message.String())
		def.LogError("GetMenuFromDataverse()", err)
		return "", errors.New(message.String())
	}
	// debug output only; the untrimmed JSON is what gets returned
	var sout string = strings.TrimSpace(sjson)
	if len(sout) == 0 {
		sout = "No submenu found."
	}
	def.LogMessage("GetMenuFromDataverse(columns)", fmt.Sprintf("%v", sout), def.DEF_logdebg)
	return sjson, err
}
// AddToTable fills the SQL template _sqladd with the fields listed in
// _sqlparam (comma-separated) for every object in _array and executes the
// resulting statements. Returns true only when every placeholder could be
// resolved and the statements were issued without a query error.
// Fix: the loop previously deferred rows.Close() twice per iteration, keeping
// every result set open until function return; each result set is now closed
// immediately after it is drained.
func AddToTable(_fparam tol.TFVParams, _array gjs.Result, _sqladd, _sqlparam, _sorc string) bool {
	resbool := false
	addmap := make(map[int]string)
	if _array.IsArray() {
		for key, user := range _array.Array() {
			addsql := _sqladd
			// substitute every configured %param% with the matching field value
			user.ForEach(func(fkey, fvalue gjs.Result) bool {
				for _, param := range strings.Split(_sqlparam, ",") {
					if fkey.String() == param {
						addsql = strings.Replace(addsql, "%"+param+"%", fvalue.String(), -1)
					}
				}
				return true
			})
			// all placeholders must be gone, otherwise abort
			resbool = !strings.Contains(addsql, "%")
			if !resbool {
				return resbool
			}
			addmap[key] = addsql
		}
		// all fields substituted successfully -> execute the statements
		if resbool {
			conn, err := tol.CheckConnection(_fparam)
			if err != nil {
				def.LogError("AddToTable(open)", err)
				return false
			}
			def.LogMessage("AddToTable(ping):", fmt.Sprintf("%v: %v", _sorc, "Ping was successfull! Connection established."), def.DEF_logdebg)
			for _, ssql := range addmap {
				rows, err := conn.Query(ssql)
				if err != nil {
					def.LogError("AddToTable(query)", err)
					return false
				}
				for rows.Next() {
					var result bool
					if err := rows.Scan(&result); err != nil {
						def.LogError("AddToTable(scan)", err)
					}
				}
				// close per statement instead of deferring inside the loop
				rows.Close()
			}
		}
	}
	return resbool
}
// GetGridFromUserList builds the datagrid JSON (columns, rows, success, total)
// for the admin user list (as of 20210308) and mirrors the users into the
// local table via AddToTable (best effort).
// Fix: the empty `if AddToTable(...) { }` body was replaced with an explicit,
// commented discard of the result.
func GetGridFromUserList(_dvp tol.TDVParams, _fvp tol.TFVParams, _sort bool) (string, error) {
	sjson := "{\n"
	userlist, err := usr.ListUsers(tol.GetSiteUrl(_dvp), tol.GetApiKey(_dvp), _sort)
	if err == nil {
		status := gjs.Get(userlist, "status")
		if status.String() == "OK" {
			data := gjs.Get(userlist, "data")
			if data.IsObject() {
				usercount := gjs.Get(userlist, "data.userCount")
				switch _fvp.FP_what {
				case "datagrid":
					// columns come straight from the API bundle strings
					labels := gjs.Get(userlist, "data.bundleStrings")
					sjson = sjson + grid_getcolumns_user(labels, _dvp, _fvp)
					// TODO: dataverse - workaround for wrongly reported field names
					sjson = strings.Replace(sjson, "userId", "id", -1)
					sjson = strings.Replace(sjson, "identifier", "userIdentifier", -1)
					// rows
					users := gjs.Get(userlist, "data.users")
					sjson = sjson + grid_getrows_user(users, true, true) + ",\n"
					sjson = sjson + `"success":true,` + "\n"
					sjson = sjson + `"total":` + usercount.String()
					// mirror the user list into the local table (best effort);
					// the boolean result is intentionally ignored here
					_ = AddToTable(_fvp, users, def.DEF_sqlUserAdd, def.DEF_sqlUserParam, "User")
				}
			}
		} else {
			message := gjs.Get(userlist, "message")
			if message.Type != gjs.String {
				message.Str = "No users found in result (API - problem?)."
			}
			def.LogError("GetGridFromUserList(ListUsers)", errors.New(message.Str))
			return "", errors.New(message.String())
		}
		sjson = sjson + "\n}"
	}
	return sjson, err
}
// GetGridFromDataverse builds the datagrid JSON (columns, rows, success,
// total) for all child dataverses of the dataverse given by _fvp.FP_alias
// (as of 20210331).
func GetGridFromDataverse(_dvp tol.TDVParams, _fvp tol.TFVParams, _sort bool) (string, error) {
	sjson := "{\n"
	res, err := dvr.GetContentByAlias(_dvp, _fvp.FP_alias)
	if err != nil {
		def.LogError("GetContentByAlias(grid.dataverse)", err)
		return res, err
	}
	icount := 0
	if gjs.Get(res, "status").String() == "OK" {
		ids_dv, _ := get_dataverse_ids(gjs.Get(res, "data"))
		// prepare row/column maps
		var colList tol.ColMap
		rowList := make(tol.RowMap)
		if len(ids_dv) > 0 {
			colList = make(tol.ColMap)
		}
		for _, id := range ids_dv {
			resdv, err := dvr.GetDataverseByIdOrAlias(_dvp, id)
			if err != nil {
				def.LogError("GetDataverseByIdOrAlias(grid.dataverse)", err)
				continue
			}
			if gjs.Get(resdv, "status").String() != "OK" {
				continue
			}
			icount++
			data := gjs.Get(resdv, "data")
			iddv := gjs.Get(data.String(), "id")
			// collect every field of the dataverse into the row/column maps
			data.ForEach(func(fkey, fvalue gjs.Result) bool {
				rowList, colList = tol.AddValueToMap(rowList,
					colList, iddv, fkey, fvalue, "", "")
				return true
			})
		}
		sjson = sjson + grid_getcolumns_map("column.", colList, _dvp, _fvp)
		sjson = sjson + grid_getrows_map(rowList, _fvp, "dataverse") + ",\n"
		sjson = sjson + `"success":true,` + "\n"
		sjson = sjson + `"total":` + fmt.Sprintf("%v", icount)
	}
	sjson = sjson + "\n}"
	return sjson, nil
}
// GetGridFromDataset builds the datagrid JSON (columns, rows, success, total)
// for all datasets below the dataverse _fvp.FP_alias (as of 20210325).
// _include / _exclude are ";"-terminated field-name lists: when _include is
// non-empty only listed fields are taken (whitelist), otherwise all fields
// except those in _exclude (blacklist); both empty means every field.
// Fixes: `== true` / `== false` comparisons and empty else branches removed.
func GetGridFromDataset(_dvp tol.TDVParams, _fvp tol.TFVParams, _sort bool,
	_exclude, _include string) (string, error) {
	sjson := "{\n"
	res, err := dvr.GetContentByAlias(_dvp, _fvp.FP_alias)
	if err != nil {
		def.LogError("GetDataverseByIdOrAlias(grid.dataset)", err)
		return res, err
	}
	status := gjs.Get(res, "status")
	icount := 0
	if status.String() == "OK" {
		dataverses := gjs.Get(res, "data")
		_, ids_ds := get_dataverse_ids(dataverses)
		// prepare row/column maps
		var dsColList tol.ColMap
		dsRowList := make(tol.RowMap)
		if len(ids_ds) > 0 {
			dsColList = make(tol.ColMap)
		}
		for i := 0; i < len(ids_ds); i++ {
			resset, err := dst.GetDatasetByPersistentId(_dvp, ids_ds[i], "")
			if err != nil {
				def.LogError("GetDatasetByPersistentId(grid.dataset)", err)
			}
			status := gjs.Get(resset, "status")
			if status.String() == "OK" {
				icount++
				datads := gjs.Get(resset, "data")
				if datads.IsObject() {
					// prefer "latestVersion"; fall back to "data" itself
					latestds := gjs.Get(datads.String(), "latestVersion")
					if latestds.Type == gjs.Null {
						latestds = gjs.Get(resset, "data")
					}
					if latestds.IsObject() {
						idds := gjs.Get(latestds.String(), "datasetId")
						latestds.ForEach(func(fkey, fvalue gjs.Result) bool {
							if len(_include) > 0 {
								// whitelist mode
								if strings.Contains(_include, fkey.String()+";") {
									dsRowList, dsColList = tol.AddValueToMap(dsRowList,
										dsColList, idds, fkey, fvalue, _exclude, _include)
								}
							} else if len(_exclude) > 0 {
								// blacklist mode
								if !strings.Contains(_exclude, fkey.String()+";") {
									dsRowList, dsColList = tol.AddValueToMap(dsRowList,
										dsColList, idds, fkey, fvalue, _exclude, _include)
								}
							} else {
								dsRowList, dsColList = tol.AddValueToMap(dsRowList,
									dsColList, idds, fkey, fvalue, _exclude, _include)
							}
							return true
						})
					}
				}
			}
		}
		sjson = sjson + grid_getcolumns_map("column.", dsColList, _dvp, _fvp)
		sjson = sjson + grid_getrows_map(dsRowList, _fvp, "dataset") + ",\n"
		sjson = sjson + `"success":true,` + "\n"
		sjson = sjson + `"total":` + fmt.Sprintf("%v", icount)
	}
	sjson = sjson + "\n}"
	return sjson, nil
}
// GetSearchHeader builds the search-header datagrid JSON (columns, rows,
// success, total) for all child dataverses of the dataverse _fvp.FP_alias
// (as of 20210331). Mirrors GetGridFromDataverse but renders the rows with
// the "datasearch" layout.
func GetSearchHeader(_dvp tol.TDVParams, _fvp tol.TFVParams, _sort bool) (string, error) {
	sjson := "{\n"
	res, err := dvr.GetContentByAlias(_dvp, _fvp.FP_alias)
	if err != nil {
		def.LogError("GetContentByAlias(search.header)", err)
		return res, err
	}
	icount := 0
	if gjs.Get(res, "status").String() == "OK" {
		ids_dv, _ := get_dataverse_ids(gjs.Get(res, "data"))
		// prepare row/column maps
		var colList tol.ColMap
		rowList := make(tol.RowMap)
		if len(ids_dv) > 0 {
			colList = make(tol.ColMap)
		}
		for _, id := range ids_dv {
			resdv, err := dvr.GetDataverseByIdOrAlias(_dvp, id)
			if err != nil {
				// log only; the status check below decides whether to skip
				def.LogError("GetDataverseByIdOrAlias(search.header)", err)
			}
			if gjs.Get(resdv, "status").String() == "OK" {
				icount++
				data := gjs.Get(resdv, "data")
				iddv := gjs.Get(data.String(), "id")
				// collect every field of the dataverse into the row/column maps
				data.ForEach(func(fkey, fvalue gjs.Result) bool {
					rowList, colList = tol.AddValueToMap(rowList,
						colList, iddv, fkey, fvalue, "", "")
					return true
				})
			}
		}
		sjson = sjson + grid_getcolumns_map("column.", colList, _dvp, _fvp)
		sjson = sjson + grid_getrows_map(rowList, _fvp, "datasearch") + ",\n"
		sjson = sjson + `"success":true,` + "\n"
		sjson = sjson + `"total":` + fmt.Sprintf("%v", icount)
	}
	sjson = sjson + "\n}"
	return sjson, nil
}
// search_getcolumns_map flattens the JSON object _json into the column map
// _cols using dotted key paths (_key carries the accumulated prefix). Each
// level is exploded by the SQL helper html."GetJson", which yields one
// key/value row per member; object-valued members recurse, scalar members are
// registered as columns. DB connection/query errors are silently ignored
// (best effort); _id is only used for (commented-out) logging.
func search_getcolumns_map(_fparam tol.TFVParams, _id, _json gjs.Result, _key string, _cols tol.ColMap) tol.ColMap {
	var columns string = ""
	//var rows string = ""
	conn, err := tol.CheckConnection(_fparam)
	if err == nil {
		// escape single quotes so the string-built SQL literal stays intact
		csql := strings.ReplaceAll(_json.String(), "'", "&#39;")
		csql = "SELECT * FROM html.\"GetJson\"('" + csql + "'::JSON);"
		// log.Println("<<< search_getcolumns_map()", csql)
		filestr, err := conn.Query(csql)
		if err == nil {
			defer filestr.Close()
			for filestr.Next() {
				if err := filestr.Scan(&columns); err == nil {
					key := gjs.Get(columns, "key")
					value := gjs.ParseBytes([]byte(gjs.Get(columns, "value").String()))
					if value.IsObject() {
						// nested object: recurse with the extended dotted prefix
						_cols = search_getcolumns_map(_fparam, _id, value, _key+key.String()+".", _cols)
					} else {
						// scalar: register the dotted path as a column
						_cols = tol.AddColumnToMapAsString(_cols, _key+key.String(), key.String())
						// log.Printf("file(%v) - key: %v, value: %v\n", _id, _key+key.String(), value)
					}
				}
			}
		}
	}
	//
	return _cols
}
// store item in map - 20210506

// add_to_json_map appends _obj to _map under the key len(_map), provided the
// gjson result is a JSON object that unmarshals cleanly; anything else leaves
// the map untouched. The (possibly updated) map is returned.
func add_to_json_map(_map tol.JsnMapInt, _obj gjs.Result) tol.JsnMapInt {
	if !_obj.IsObject() {
		return _map
	}
	var decoded map[string]interface{}
	if err := json.Unmarshal([]byte(_obj.String()), &decoded); err == nil {
		_map[len(_map)] = decoded
	}
	//
	return _map
}
// berechne parametersatz template - 20210526

// get_params_datasettemplate copies the request parameters from _fvp.FP_parm
// into a fresh parameter template for the "dataset" search, dropping any
// "search" entry whose key part (before '=') is "fileDescription" — that
// filter is handled by the separate file query (get_params_filetemplate).
func get_params_datasettemplate(_fvp tol.TFVParams) tol.ColMapInt {
	var ftmpl tol.ColMapInt = make(tol.ColMapInt, 0)
	// aufbereiten der abfrageparameter für "dataset" (prepare query params)
	for _, ival := range _fvp.FP_parm {
		for key, val := range ival {
			if key == "search" {
				// BUGFIX: val[:strings.IndexByte(val, '=')] panicked when the
				// value contained no '='; only inspect the key part if present.
				if eq := strings.IndexByte(val, '='); eq >= 0 && val[:eq] == "fileDescription" {
					continue
				}
			}
			ftmpl[len(ftmpl)] = map[string]string{key: val}
		}
	}
	// log.Println("get_params_datasettemplate(res):", ftmpl)
	//
	return ftmpl
}
// berechne parametersatz template - 20210526

// get_params_filetemplate builds the parameter template for the "file" search:
// an optional fileDescription filter taken from the "search" entry of
// _fvp.FP_parm (values joined with '+' inside "[...]" are AND-combined),
// followed by fixed "_q_"/"type"/"per_page" entries and an optional "subtree".
func get_params_filetemplate(_fvp tol.TFVParams) tol.ColMapInt {
	var ftmpl tol.ColMapInt = make(tol.ColMapInt, 0)
	// aufbereiten der abfrageparameter für "file" (prepare query params)
	var sval = "*" // default query: match everything
	for _, ival := range _fvp.FP_parm {
		for key, val := range ival {
			if key == "search" {
				// BUGFIX: val[:strings.IndexByte(val, '=')] panicked when the
				// value contained no '='; skip such malformed entries instead.
				eq := strings.IndexByte(val, '=')
				if eq < 0 {
					continue
				}
				skey := val[:eq]
				if skey == "fileDescription" {
					sval = tol.GetstringBetween(val, "[", "]")
					if len(sval) > 0 {
						var andqry string = ""
						arrval := strings.Split(sval, "+")
						if len(arrval) > 1 {
							// several terms: AND-combine them, quotes stripped
							andqry = ""
							var andcnt int64 = 0
							for _, aval := range arrval {
								if andcnt > 0 {
									andqry = andqry + " AND "
								}
								andqry = andqry + skey + `:"` + strings.Replace(strings.Trim(aval, " "), `"`, ``, -1) + `"`
								andcnt = andcnt + 1
							}
							if andcnt > 0 {
								sval = "(" + strings.TrimSpace(andqry) + ")"
							}
						} else {
							// single term: simple key:value query
							sval = "(" + skey + ":" + sval + ")"
						}
					}
					break
				}
			}
		}
	}
	ftmpl[len(ftmpl)] = map[string]string{"_q_": sval}
	ftmpl[len(ftmpl)] = map[string]string{"type": "file"}
	ftmpl[len(ftmpl)] = map[string]string{"per_page": "1000"}
	// forward an optional "subtree" restriction unchanged
	for _, ival := range _fvp.FP_parm {
		for key, val := range ival {
			if key == "subtree" {
				ftmpl[len(ftmpl)] = map[string]string{key: val}
				break
			}
		}
	}
	//
	return ftmpl
}
// compute parameter set with start position - 20210506

// get_params clones the parameter template _parm and appends a "start" entry
// holding the paging offset _start; the source template is left untouched.
func get_params(_parm tol.ColMapInt, _start int64) tol.ColMapInt {
	result := make(tol.ColMapInt, 0)
	for _, entry := range _parm {
		for name, value := range entry {
			result[len(result)] = map[string]string{name: value}
		}
	}
	result[len(result)] = map[string]string{"start": fmt.Sprintf("%v", _start)}
	//
	return result
}
// suche nach allen "file" (rekursiv) - 20210506

// GetSearchFileList pages through the search API for all "file" results that
// match the fileDescription filter derived from _fvp, appends each hit (minus
// the bulky description/citation fields) to jsonItems and returns the
// augmented map. Paging continues until the server-reported total_count has
// been fetched; transport errors or an "ERROR"/unknown status abort the loop.
func GetSearchFileList(_dvp tol.TDVParams, _fvp tol.TFVParams, jsonItems tol.JsnMapInt) (tol.JsnMapInt, error) {
	// vorbereitung für file-suche (template) / prepare the file-search template
	ftmpl := get_params_filetemplate(_fvp)
	// compile the newline pattern once — it used to be recompiled per page
	reNewline := regexp.MustCompile(`\r?\n`)
	//
	var istart int64 = 0  // paging offset
	var itotal int64 = 0  // total_count reported by the server
	var ireslt int64 = -1 // count_in_response of the current page
	var resf string
	var errf error
	for itotal > ireslt {
		fparm := get_params(ftmpl, istart)
		// suche nach files / fetch the next page of "file" results
		resf, errf = sch.SearchByParams(_dvp, fparm)
		if errf != nil {
			// BUGFIX: a persistent transport error previously left itotal and
			// ireslt unchanged and spun this loop forever; abort instead.
			return jsonItems, errf
		}
		var statusf gjs.Result = gjs.Get(resf, "status")
		if statusf.String() == "ERROR" {
			return jsonItems, errors.New(resf)
		}
		if statusf.String() != "OK" {
			// BUGFIX: an unexpected status also looped forever; stop paging.
			break
		}
		dataf := gjs.Get(resf, "data")
		if !dataf.IsObject() {
			break
		}
		// normalize embedded line breaks so gjson path lookups keep working
		var sdataf string = reNewline.ReplaceAllString(dataf.String(), "&nbsp;")
		itotal = gjs.Get(sdataf, "total_count").Int()
		ireslt = gjs.Get(sdataf, "count_in_response").Int()
		itemsf := gjs.Get(sdataf, "items")
		for _, valf := range itemsf.Array() {
			if !valf.IsObject() {
				continue
			}
			// rebuild each item as a flat JSON object, skipping verbose fields
			var iobj int = 0
			var sobject string = "{"
			valf.ForEach(func(okey, oval gjs.Result) bool {
				switch okey.String() {
				case "description", "dataset_citation", "dataset_name":
					return true // skip bulky fields
				default:
					if iobj > 0 {
						sobject += `,`
					}
					sobject += `"` + okey.String() + `":"` + tol.DoFilterByStr(oval.String()) + `"`
					iobj = iobj + 1
				}
				return true
			})
			if iobj > 0 {
				sobject += `}`
				rvalf := tol.GetObjectFromStr(sobject)
				jsonItems = add_to_json_map(jsonItems, rvalf)
			}
		}
		if ireslt > 0 {
			istart = istart + ireslt // advance to the next page
		} else {
			break
		}
	}
	//
	return jsonItems, errf
}
// search for "dataset" & "file" - 20210506

// GetSearchResult performs a dataset search (sch.SearchByParams), fetches the
// matching files per dataset (GetSearchFileList), and renders everything into
// one JSON string derived from the template below. Placeholders (%count%,
// %datasets%, %files%, %entries%, %cols%, %rows%, %group%, %<key>%) are
// substituted step by step; &#34; is an HTML-escaped double quote. Returns the
// rendered JSON, or an error when the search fails or reports status ERROR.
// NOTE(review): writes to the package-level tol.DataColList/tol.FileColList —
// presumably not safe for concurrent calls; confirm before parallelizing.
func GetSearchResult(_dvp tol.TDVParams, _fvp tol.TFVParams) (string, error) {
	var sjson string = "{&#34;total&#34;:%count%,&#34;datasets&#34;:%datasets%,&#34;files&#34;:%files%,\n&#34;entries&#34;:[%entries%],\n%cols%\n&#34;rows&#34;:[%rows%]\n}"
	var jcount int = 0   // entries consumed from dataJson (also the reported total)
	var dcount int64 = 0 // datasets rendered
	var fcount int64 = 0 // files rendered
	var icount int64 = 0 // row counter (reused for several purposes)
	var ecount int64 = 0 // entries appended to %entries%
	var srows string = ""
	var sentries string = ""
	tol.DataColList = make(tol.ColMap, 0)
	tol.FileColList = make(tol.ColMap, 0)
	//
	// log.Printf("GetSearchResult(fp_parm) - string: %v\n", _fvp.FP_parm)
	//
	// dataset query first; the fileDescription filter is stripped out here
	ftmpl := get_params_datasettemplate(_fvp)
	res, err := sch.SearchByParams(_dvp, ftmpl)
	if err != nil {
		def.LogError("SearchByParams(search.result)", err)
		return res, err
	} else {
		// log.Printf("GetSearchResult(search) - all: %v\n", res)
		status := gjs.Get(res, "status")
		if status.String() == "ERROR" {
			var err error = errors.New(res)
			return "", err
		} else {
			if status.String() == "OK" {
				var dataJson tol.JsnMapInt = make(tol.JsnMapInt, 0)
				var fileJson tol.JsnMapInt = make(tol.JsnMapInt, 0)
				data := gjs.Get(res, "data")
				if data.IsObject() {
					items := gjs.Get(data.String(), "items")
					// fmt.Printf("GetSearchResult(search) - items: %v\n", items.String())
					if items.IsArray() {
						if len(items.Array()) > 0 {
							// fetch all matching files once, up front
							fileJson, err = GetSearchFileList(_dvp, _fvp, fileJson)
							// fmt.Printf("GetSearchResult(search) - fileJson: %v\n\n", fileJson)
							// iterate over the "dataset" hits
							for _, entry := range items.Array() {
								if entry.IsObject() {
									// log.Printf("GetSearchResult(entry) - item: %v\n", entry.String())
									var istype gjs.Result = gjs.Get(entry.String(), "type")
									switch istype.String() {
									case "dataset":
										dataJson = add_to_json_map(dataJson, entry)
										var doi gjs.Result = gjs.Get(entry.String(), "global_id")
										if doi.Type != gjs.Null {
											icount = icount + 1
											//var name gjs.Result = gjs.Get(entry.String(), "name")
											// attach every file whose persistent id matches this dataset's DOI
											for _, valj := range fileJson {
												bytej, errj := json.Marshal(valj)
												if errj == nil {
													var entryf gjs.Result = tol.GetObjectFromStr(string(bytej))
													// log.Printf("GetSearchResult(json) - entry: (%v) %v %v %v\n", i, entryf.IsObject(), entryf.Type.String(), entryf.String())
													if entryf.IsObject() {
														doif := gjs.Get(entryf.String(), "dataset_persistent_id")
														if doif.String() == doi.String() {
															var idf gjs.Result = gjs.Get(entry.String(), "file_id")
															tol.FileColList = search_getcolumns_map(_fvp, idf, entryf, "", tol.FileColList)
															dataJson = add_to_json_map(dataJson, entryf)
															icount = icount + 1
														}
													}
												}
											}
											// log.Printf("GetSearchResult(json)\n")
										}
										// log.Printf("GetSearchResult(dataset) - json: %v\n\n", dataJson)
										tol.DataColList = search_getcolumns_map(_fvp, doi, entry, "", tol.DataColList)
										break
									}
								}
							}
						}
						//log.Printf("GetSearchResult(fileJson) - string: %v\n", len(fileJson))
						//
						var datatempl string = "" // row template for dataset entries
						var filetempl string = "" // row template for file entries
						var colstempl string = ""
						var rowstempl string = ""
						var rowsarray map[int64]string
						var rowsColList tol.ColMap
						//
						// self-assignments silence "declared but not used" for
						// variables that are only set conditionally below
						rowstempl = rowstempl
						rowsColList = rowsColList
						// log.Printf("GetSearchResult(DataColList) - dataset: %v\n", tol.DataColList)
						// index 0 = columns fragment, index 1 = row template
						rowsarray, _ = file_gettranslated_columns(tol.DataColList, "column", "dataset", _dvp, _fvp)
						for i, row := range rowsarray {
							switch i {
							case 0:
								colstempl = row
							case 1:
								datatempl = row
							}
							// log.Printf("datarow(%v) - rows-template: \n%v\n", i, row)
						}
						//
						// log.Printf("GetSearchResult(FileColList) - file: %v\n", tol.FileColList)
						// NOTE(review): the file pass overwrites colstempl from the
						// dataset pass — presumably intentional; confirm.
						rowsarray, _ = file_gettranslated_columns(tol.FileColList, "column", "file", _dvp, _fvp)
						for i, row := range rowsarray {
							switch i {
							case 0:
								colstempl = row
							case 1:
								filetempl = row
								// log.Printf("filerow(%v) - rows-template: \n%v\n", i, row)
							}
							// log.Printf("filerow(%v) - rows-template: \n%v\n", i, row)
						}
						//
						sjson = strings.Replace(sjson, "%cols%", colstempl, -1)
						if icount > 0 {
							icount = 0
							ecount = 0
							// collect and fill in the values of all parts
							// NOTE(review): the loop index is unused; entries are
							// consumed via dataJson[jcount] — confirm key density.
							for i, _ := range dataJson {
								i = i
								var svalue string
								byted, errd := json.Marshal(dataJson[jcount])
								if errd == nil {
									var entry gjs.Result = tol.GetObjectFromStr(string(byted))
									if entry.IsObject() {
										if ecount > 0 {
											sentries = sentries + ",\n"
										}
										sentries = sentries + entry.String()
										ecount = ecount + 1
										// log.Printf("GetSearchResult(object) - entry: %v\n", entry.String())
										var id gjs.Result
										var name gjs.Result
										//var doi gjs.Result
										// pick the row template and column list by entry type
										var istype gjs.Result = gjs.Get(entry.String(), "type")
										switch istype.String() {
										case "dataset":
											name = gjs.Get(entry.String(), "name") // doi > "global_id"
											rowstempl = datatempl
											rowsColList = tol.DataColList
										case "file":
											name = gjs.Get(entry.String(), "name")
											rowstempl = filetempl
											rowsColList = tol.FileColList
										default:
											continue
										}
										if name.Type != gjs.Null {
											// log.Printf("GetSearchResult(entry) - list: %v\n", rowsColList)
											var doi gjs.Result = tol.GetObjectFromStr("")
											var fct gjs.Result = tol.GetObjectFromStr("")
											var slabel string = ""
											var sname string = ""
											sname = tol.DoFilterByStr(name.String())
											if icount > 0 {
												srows = srows + ",\n"
											}
											// build the group label: name + type + doi/id/content-type
											switch istype.String() {
											case "dataset":
												slabel = sname + `&` + istype.String() + `;`
												doi = gjs.Get(entry.String(), "global_id")
												if doi.Type != gjs.Null {
													slabel = slabel + `&` + doi.String() + `;`
												}
												dcount = dcount + 1
											case "file":
												slabel = sname
												id = gjs.Get(entry.String(), "file_id")
												if id.Type != gjs.Null {
													slabel = slabel + `&(id:` + id.String() + `)`
												}
												slabel = slabel + `&` + istype.String() + `;`
												fct = gjs.Get(entry.String(), "file_content_type")
												if fct.Type != gjs.Null {
													slabel = slabel + `&` + fct.String()
												}
												fcount = fcount + 1
											default:
												continue
											}
											//
											label := gjs.Get(entry.String(), "label")
											if label.Type != gjs.Null {
												slabel = slabel + " - " + tol.DoFilterByStr(label.String())
											}
											slabel = slabel + `", "type":"` + istype.String()
											slabel = slabel + `", "doi":"` + doi.String()
											slabel = slabel + `", "label":"` + sname
											// log.Printf("group(%v): %v\n", istype.String(), slabel)
											srows = srows + rowstempl
											sjson = strings.Replace(sjson, "%group%", slabel, -1)
											// substitute every column placeholder with the entry's value
											for key, _ := range rowsColList {
												skey := strings.Replace(key, `"`, ``, -1)
												gvalue := gjs.Get(entry.String(), skey)
												// log.Printf("filerow(%v) - type: %v, key: %v, value: %v\n", id, gvalue.Type.String(), skey, gvalue)
												switch gvalue.Type {
												case gjs.JSON:
													// arrays are rendered as an HTML list; objects inside
													// become small key/value tables
													if gvalue.IsArray() {
														// log.Printf("filerow(%v) - type: %v, key: %v, value: %v\n", id, gvalue.Type.String(), skey, gvalue.String())
														svalue = `<ul style=\"padding-inline-start:20px;\">`
														var iarr int = 0
														for _, aval := range gvalue.Array() {
															if aval.IsObject() {
																var iobj int = 0
																var sobject string = ""
																var sobjtmp string = `<li><table>%object%</table></li>`
																aval.ForEach(func(okey, oval gjs.Result) bool {
																	sobject += `<tr class=\"datagrid-row\" style=\"height:18px;\">`
																	sobject += `<td style=\"border:none;\">` + okey.String() + `</td>`
																	sobject += `<td style=\"border:none;\">:&nbsp;</td>`
																	sobject += `<td style=\"border:none;\">` + tol.DoFilterByStr(oval.String()) + `</td>`
																	sobject += `</tr>`
																	iobj = iobj + 1
																	return true
																})
																if iobj > 0 {
																	svalue += strings.ReplaceAll(sobjtmp, "%object%", sobject)
																	iarr = iarr + 1
																} else {
																	svalue = "---"
																}
															} else {
																svalue += `<li><table>` + tol.DoFilterByStr(aval.String()) + `</table></li>`
																iarr = iarr + 1
															}
														}
														if iarr > 0 {
															svalue = svalue + "</ul>"
														}
														srows = strings.ReplaceAll(srows, "%"+skey+"%", svalue)
														continue
													}
													// NOTE(review): a JSON object (non-array) value falls
													// through here with svalue unchanged — confirm intended.
												case gjs.String:
													svalue = gvalue.String()
												case gjs.Number:
													svalue = gvalue.String()
												case gjs.True:
													svalue = gvalue.String()
												case gjs.False:
													svalue = gvalue.String()
												default:
													svalue = "---"
												}
												// log.Printf("filerow(%v) - type: %v, key: %v, value: %v\n", id, gvalue.Type.String(), key, svalue)
												if "%"+skey+"%" == "%url%" {
													// wrap URLs in a clickable anchor
													var url string = strings.ReplaceAll(`<a href="%url%" target="_blank" rel="noopener">%url%</a>`, "%url%", svalue)
													//log.Println("url:", url)
													srows = strings.ReplaceAll(srows, "%"+skey+"%", tol.DoFilterByStr(url))
												} else {
													srows = strings.ReplaceAll(srows, "%"+skey+"%", tol.DoFilterByStr(svalue))
												}
											}
											icount = icount + 1
											srows = strings.Replace(srows, "%group%", slabel, -1)
										}
										jcount = jcount + 1
										//fmt.Printf("GetSearchResult(search 2) - items: %v %v\n", jcount, ecount)
									}
								}
							}
						}
					}
				}
			}
		}
	}
	// jcount = 1
	if jcount > 0 {
		def.LogMessage("", fmt.Sprintf("GetSearchResult(%v) > rows: %v", "success", jcount), def.DEF_loginfo)
	} else {
		def.GetErrorByString(fmt.Sprintf("No result for query: %v\n", tol.JsonEscape(string(_fvp.FP_qery))), "alert", "GetSearchResult(0)")
	}
	// final template substitutions
	sjson = strings.Replace(sjson, "%rows%", srows, -1)
	// sjson = strings.Replace(sjson, "%entries%", "", -1)
	sjson = strings.Replace(sjson, "%entries%", sentries, -1)
	sjson = strings.Replace(sjson, "%count%", fmt.Sprintf("%v", jcount), -1)
	sjson = strings.Replace(sjson, "%datasets%", fmt.Sprintf("%v", dcount), -1)
	sjson = strings.Replace(sjson, "%files%", fmt.Sprintf("%v", fcount), -1)
	//
	// correction of the values for fields that were not found..
	var between string = tol.GetstringBetween(sjson, ":\"%", "%\",")
	for len(between) > 0 {
		sjson = strings.Replace(sjson, "%"+between+"%", "---", -1)
		between = tol.GetstringBetween(sjson, ":\"%", "%\",")
	}
	// fmt.Printf("GetSearchResult(search 3) - items fertsch: j:%v e:%v i:%v, len:%v\n", jcount, ecount, icount, len(sjson))
	// fmt.Printf("GetSearchResult(search 4) - items fertsch: j:%v e:%v i:%v, len:%v val:%v\n", jcount, ecount, icount, len(sjson), sjson)
	//
	return sjson, nil
}
// file_getcolumns_map flattens the JSON object _json into the column map
// _cols using dotted key paths (_key carries the accumulated prefix), via the
// SQL helper html."GetJson" — the same scheme as search_getcolumns_map.
// DB connection/query errors are silently ignored (best effort); _id is only
// used for (commented-out) logging.
func file_getcolumns_map(_fparam tol.TFVParams, _id, _json gjs.Result, _key string, _cols tol.ColMap) tol.ColMap {
	var columns string = ""
	//var rows string = ""
	conn, err := tol.CheckConnection(_fparam)
	if err == nil {
		// BUGFIX: escape single quotes like search_getcolumns_map does —
		// otherwise any apostrophe in the JSON breaks the string-built SQL.
		csql := strings.ReplaceAll(_json.String(), "'", "&#39;")
		csql = "SELECT * FROM html.\"GetJson\"('" + csql + "'::JSON);"
		filestr, err := conn.Query(csql)
		if err == nil {
			defer filestr.Close()
			for filestr.Next() {
				if err := filestr.Scan(&columns); err == nil {
					key := gjs.Get(columns, "key")
					value := gjs.ParseBytes([]byte(gjs.Get(columns, "value").String()))
					if value.IsObject() {
						// nested object: recurse with the extended dotted prefix
						_cols = file_getcolumns_map(_fparam, _id, value, _key+key.String()+".", _cols)
					} else {
						// scalar: register the dotted path as a column
						_cols = tol.AddColumnToMapAsString(_cols, _key+key.String(), key.String())
						// log.Printf("file(%v) - key: %v, value: %v\n", _id, _key+key.String(), value)
					}
				}
			}
		}
	}
	//
	return _cols
}
// dataverses einlesen (momentan 20210416)

// GetPropGridFromDatasetFiles loads the dataset identified by _fvp.FP_alias
// (a persistent id), takes the array stored under the key _include (e.g.
// "files") of its latest version, and renders one property-grid row per file
// as a JSON string built from column/row templates with %...% placeholders.
// Unresolved placeholders are replaced by "---" at the end. The returned
// error is always nil; fetch errors are only logged.
func GetPropGridFromDatasetFiles(_dvp tol.TDVParams, _fvp tol.TFVParams,
	_include string) (string, error) {
	// &#34; is an HTML-escaped double quote; %cols%/%count% are filled below
	var sjson string = "{&#34;total&#34;:%count%,\n%cols%\n&#34;rows&#34;:["
	var icount int64 = 0
	//
	// single-element id list keeps the loop shape of the sibling grid builders
	ids_ds := []string{}
	var dfColList tol.ColMap
	ids_ds = append(ids_ds, _fvp.FP_alias)
	for i := 0; i < len(ids_ds); i++ {
		resset, err := dst.GetDatasetByPersistentId(_dvp, ids_ds[i], "")
		// log.Printf("GetPropGridFromDatasetFiles(%v): %v\n", ids_ds[i], resset)
		if err != nil {
			def.LogError("GetDatasetByPersistentId(propgrid.files)", err)
		} else {
			// log.Println("GetGridFromDataset():", ids_ds[i], resset)
			status := gjs.Get(resset, "status")
			if status.String() == "OK" {
				datads := gjs.Get(resset, "data")
				if datads.IsObject() {
					datalv := gjs.Get(datads.String(), "latestVersion")
					if !datalv.IsObject() { // if not found, use "data" instead
						datalv = datads
					}
					if datalv.IsObject() {
						// idds := gjs.Get(datalv.String(), "datasetId")
						// log.Println("GetPropGridFromDatasetFiles(data):", ids_ds[i], idds, datalv)
						fileds := gjs.Get(datalv.String(), _include)
						// log.Printf("GetPropGridFromDatasetFiles(%v): %v\n", ids_ds[i], fileds)
						if fileds.IsArray() {
							// NOTE(review): csql is built but never used below —
							// presumably a leftover; confirm before removing.
							var csql string = fmt.Sprintf("%v\n", fileds.String())
							csql = strings.ReplaceAll(csql, "\" \"", "\"},{\"")
							// log.Println("GetPropGridFromDatasetFiles(", _include, "):", idds, csql)
							// first pass: collect the union of columns over all files
							for _, file := range fileds.Array() {
								var id gjs.Result
								id = gjs.Get(file.String(), "dataFile.id")
								// read rows (for all columns) when it is an object..
								if file.IsObject() {
									dfColList = file_getcolumns_map(_fvp, id, file, "", dfColList)
								}
							}
							// index 0 = columns fragment, index 1 = row template
							var colstempl string = ""
							var rowstempl string = ""
							var rowsarray map[int64]string
							rowsarray, icount = file_gettranslated_columns(dfColList, "column", "", _dvp, _fvp)
							// log.Println("GetPropGridFromDatasetFiles("+_include+"):", icount, rowsarray)
							for i, row := range rowsarray {
								switch i {
								case 0:
									colstempl = row
								case 1:
									rowstempl = row
								}
							}
							// log.Println("GetPropGridFromDatasetFiles(rowstempl):", rowstempl)
							// log.Printf("filerow(%v) - cols-template: \n%v\n", icount, colstempl)
							if icount > 0 {
								sjson = strings.Replace(sjson, "%cols%", colstempl, -1)
								icount = 0
								// second pass: determine and fill in all values
								for _, file := range fileds.Array() {
									// log.Printf("filerow() - file: %v\n", file.String())
									var id gjs.Result
									// read rows (for all columns) when it is an object..
									var svalue string
									if file.IsObject() {
										// // determine id as array key in map[]
										id = gjs.Get(file.String(), "dataFile.id")
										if id.Type != gjs.Null {
											var slabel string = ""
											if icount > 0 {
												sjson = sjson + ",\n"
											}
											// build the group label: name + (id) + type + content-type
											name := gjs.Get(file.String(), "label")
											if name.Type != gjs.Null {
												slabel = name.String()
												id = gjs.Get(file.String(), "dataFile.id")
												if id.Type != gjs.Null {
													slabel = slabel + `&(id:` + id.String() + `)`
												}
												slabel = slabel + `&file;`
												fct := gjs.Get(file.String(), "dataFile.contentType")
												if fct.Type != gjs.Null {
													slabel = slabel + `&` + fct.String()
												}
												// log.Printf("filerow(%v): %v\n", id, slabel)
											}
											sjson = sjson + rowstempl
											sjson = strings.Replace(sjson, "%group%", slabel, -1)
											// substitute every column placeholder with the file's value
											for key, _ := range dfColList {
												skey := strings.Replace(key, `"`, ``, -1)
												gvalue := gjs.Get(file.String(), skey)
												// log.Printf("filerow(%v) - type: %v, key: %v, value: %v\n", id, gvalue.Type.String(), skey, gvalue)
												switch gvalue.Type {
												case gjs.String:
													svalue = gvalue.String()
												case gjs.Number:
													svalue = gvalue.String()
												case gjs.True:
													svalue = gvalue.String()
												case gjs.False:
													svalue = gvalue.String()
												default:
													svalue = "---"
												}
												// log.Printf("filerow(%v) - type: %v, key: %v, value: %v\n", id, gvalue.Type.String(), key, svalue)
												sjson = strings.ReplaceAll(sjson, "%"+skey+"%", tol.DoFilterByStr(svalue))
												// NOTE(review): icount counts substitutions, not rows —
												// it only feeds %count% and the ",\n" separator check.
												icount = icount + 1
											}
										}
									}
								}
							}
						}
					}
				}
			}
		}
	}
	// fallbacks for templates that were never filled
	sjson = strings.Replace(sjson, "%cols%", `"cols":[[]],`, -1)
	sjson = strings.Replace(sjson, "%count%", fmt.Sprintf("%v", icount), -1)
	sjson = sjson + "\n],\n&#34;success&#34;:true\n}"
	//
	// correction of the values for fields that were not found..
	var between string = tol.GetstringBetween(sjson, ":\"%", "%\",")
	for len(between) > 0 {
		sjson = strings.Replace(sjson, "%"+between+"%", "---", -1)
		between = tol.GetstringBetween(sjson, ":\"%", "%\",")
	}
	//
	// log.Printf("filerows(%v) - json: \n%v\n", icount, sjson)
	//
	return sjson, nil
}
// ImportMetaFields replaces "%<fieldname>%" placeholders in _html with an
// EasyUI-style data list (`,data:[{'id':..,'value':..,'text':..},...]`) built
// from the controlled vocabulary values of each metadata field named under
// the "meta" key in _fvp.FP_parm. Fields that cannot be fetched resolve to an
// empty `,data:[]`. The returned error is always nil.
func ImportMetaFields(_dvp tol.TDVParams, _fvp tol.TFVParams, _html string) (string, error) {
	for _, arrayp := range _fvp.FP_parm {
		for keyp, valuep := range arrayp {
			var smeta = `,data:[`
			if keyp == "meta" {
				metadv, err := dst.GetDatasetMetafield(_dvp, valuep)
				if err == nil {
					status := gjs.Get(metadv, "status")
					if status.String() == "OK" {
						smeta = smeta + ``
						data := gjs.Get(metadv, "data")
						if data.IsObject() {
							cvv := gjs.Get(data.String(), "controlledVocabularyValues")
							if cvv.IsArray() {
								// one {...} entry per vocabulary value
								var icount int64 = 0
								for _, arrayc := range cvv.Array() {
									if icount > 0 {
										smeta = smeta + ","
									}
									smeta = smeta + `{`
									var ccount int64 = 0
									arrayc.ForEach(func(keyc, valuec gjs.Result) bool {
										if ccount > 0 {
											smeta = smeta + ","
										}
										switch keyc.String() {
										case "id":
											smeta = smeta + `'id':'` + valuec.String() + `'`
										case "strValue":
											// strValue fills both 'value' and 'text' and
											// terminates this entry (return false stops ForEach)
											smeta = smeta + `'value':'` + valuec.String() + `'`
											smeta = smeta + `,'text':'` + valuec.String() + `'`
											icount = icount + 1
											return false
										}
										ccount = ccount + 1
										return true
									})
									smeta = smeta + `}`
								}
							}
						}
					}
				}
				smeta = smeta + `]`
			}
			// NOTE(review): for keys other than "meta" the placeholder would be
			// replaced with the unterminated prefix `,data:[` — presumably such
			// placeholders never occur; confirm against the HTML templates.
			_html = strings.Replace(_html, `%`+valuep+`%`, smeta, -1)
		}
	}
	//
	return _html, nil
}
// init logs that the goDataverse package has been loaded; kept for parity
// with the other toolbox packages (no ordering requirements depend on it).
func init() {
	log.Println("Initialize package - Dataverse..")
}